def putHandler(self, request_iterator, context):
    """Distribute a stream of put requests across the cluster.

    Every third chunk — and any chunk that arrives while no local node
    has capacity — is forwarded to the external cluster; the remaining
    chunks are spread round-robin over the active node list.  A node
    that rejects a push is marked full and removed from the rotation.

    Returns:
        server_pb2.Response with code 1 on success, or code 2 when
        every local node fills up mid-stream.
    """
    serverlist = self.node.get_active_node_ids_for_push()
    st_idx = 0   # round-robin cursor into serverlist
    counter = 0  # total chunks seen; every 3rd goes external
    for req in request_iterator:
        # Offload every 3rd chunk, or any chunk with no local capacity.
        if counter % 3 == 0 or not serverlist:
            self.pushDataToExternalCluster(req)
            counter += 1
            continue
        # NOTE: the original re-checked `if not serverlist` here, but
        # that branch was unreachable — the guard above already handles
        # an empty list — so it has been removed.
        node_id = serverlist[st_idx]
        while True:
            if self.pushDataToNode(req, node_id):
                # Push succeeded: advance the cursor, wrapping at the end.
                st_idx += 1
                if st_idx > len(serverlist) - 1:
                    st_idx = 0
                break
            # Push failed: retire the full node and retry this chunk on
            # the next node in the rotation.
            self.node.markNodeAsFull(node_id)
            serverlist.pop(st_idx)
            if not serverlist:
                return server_pb2.Response(code=2)
            if st_idx > len(serverlist) - 1:
                st_idx = 0
            node_id = serverlist[st_idx]
        counter += 1
    return server_pb2.Response(code=1)
def PutToLocalCluster(self, request_iterator, context):
    """Store a stream of put requests in the local Mongo cluster.

    Before each insert the Mongo "dbstats" dataSize is compared against
    the `space` budget; once exceeded, the stream is rejected.

    Returns:
        server_pb2.Response with code 1 when the whole stream was
        stored, or code 2 when the cluster is out of space.
    """
    for req in request_iterator:
        # Abort as soon as the local DB exceeds its space budget.
        if (mongoTestNew.get_mongo_connection().mesowest.command("dbstats")
                ["dataSize"] > space):
            return server_pb2.Response(code=2)
        # FIX: pass the decoded payload string, matching the other
        # PutToLocalCluster implementations in this file — the original
        # handed the raw DatFragment message object to put_data.
        mongoTestNew.put_data(
            req.putRequest.datFragment.data.decode('utf-8'))
    return server_pb2.Response(code=1)
def putHandler(self, request_iterator, context):
    """Round-robin each incoming put request over the active nodes.

    Nodes that reject a push are marked full and dropped from the
    rotation; the chunk is retried on the next node.

    Returns:
        server_pb2.Response with code 1 on success, or code 2 when
        every node has filled up.
    """
    serverlist = self.node.get_active_node_ids_for_push()
    print("serverlist ", serverlist)
    print("Inside put handler")
    st_idx = 0  # round-robin cursor into serverlist
    for req in request_iterator:
        if not serverlist:
            return server_pb2.Response(code=2)
        node_id = serverlist[st_idx]
        while True:
            if self.pushDataToNode(req, node_id):
                # Advance the cursor, wrapping at the end of the list.
                st_idx += 1
                if st_idx > len(serverlist) - 1:
                    st_idx = 0
                break
            print("Marking node as full ", node_id)
            self.node.markNodeAsFull(node_id)
            serverlist.pop(st_idx)
            if not serverlist:
                return server_pb2.Response(code=2)
            # FIX: wrap the cursor after the pop. Popping the last
            # element otherwise leaves st_idx one past the end and the
            # next serverlist[st_idx] raises IndexError (the other
            # putHandler variants already carry this check).
            if st_idx > len(serverlist) - 1:
                st_idx = 0
            node_id = serverlist[st_idx]
    return server_pb2.Response(code=1)
def PutToLocalCluster(self, request_iterator, context):
    """Store a stream of put requests in the local Mongo cluster,
    rejecting the stream once the space budget is exceeded.

    Returns:
        server_pb2.Response with code 1 when the whole stream was
        stored, or code 2 when the cluster is out of space.
    """
    for req in request_iterator:
        # Abort as soon as the local DB exceeds its space budget.
        if (mongoTestNew.get_mongo_connection().mesowest.command("dbstats")
                ["dataSize"] > space):
            # FIX: corrected misspelled log message ("returening").
            print("Inside PutToLocalCluster returning node full")
            return server_pb2.Response(code=2)
        mongoTestNew.put_data(
            req.putRequest.datFragment.data.decode('utf-8'))
    return server_pb2.Response(code=1)
def provideJson(chunk):
    """Convert a CSV-formatted DatFragment chunk into a Response whose
    payload is a comma-joined sequence of JSON records.

    Each input line is split on commas and mapped positionally onto the
    fixed station-observation field names below.
    """
    field_names = ('STN', 'TIMESTAMP', 'MNET', 'SLAT', 'SLON', 'SELV',
                   'TMPF', 'SKNT', 'DRCT', 'GUST', 'PMSL', 'ALTI',
                   'DWPF', 'RELH', 'WTHR', 'P24I')
    payload = chunk.datFragment.data.decode('utf-8').strip('\n')
    records = []
    for line in payload.split('\n'):
        values = line.replace('"', "'").split(',')
        # Index explicitly so short rows still fail loudly, as before.
        record = {name: values[i] for i, name in enumerate(field_names)}
        records.append(json.dumps(record))
    return server_pb2.Response(
        code=1,
        metaData=server_pb2.MetaData(
            uuid="",
            numOfFragment=int(chunk.metaData.numOfFragment)),
        datFragment=server_pb2.DatFragment(
            timestamp_utc="",
            data=','.join(records).encode(encoding='utf_8')))
def GetFromLocalCluster(self, request, context):
    """Stream query results for the requested UTC time window.

    Yields Response fragments of at most `limit` records each until the
    whole matching range has been sent.
    """
    print("Inside GetFromLocalCluster")
    print((request.getRequest.queryParams))
    start_ts = getEpochTime(request.getRequest.queryParams.from_utc)
    end_ts = getEpochTime(request.getRequest.queryParams.to_utc)
    total = mongoTestNew.get_count_of_data(start_ts, end_ts)
    print("Data count is", total)
    #TODO Move to config
    limit = 2000
    yield_count = 1
    # Page through the result set; a final (possibly empty) page is
    # still emitted, mirroring the original inclusive bound.
    for offset in range(0, total + 1, limit):
        batch = mongoTestNew.get_data(start_ts, end_ts, offset, limit)
        fragment = server_pb2.Response(
            code=1,
            msg="froms-1",
            metaData=server_pb2.MetaData(uuid="",
                                         numOfFragment=int(total)),
            datFragment=server_pb2.DatFragment(
                timestamp_utc="",
                data=str(batch).encode(encoding='utf_8')))
        print("yield count", yield_count)
        yield_count += 1
        yield fragment
def PutToLocalCluster(self, request_iterator, context):
    """Persist every streamed put request into the local Mongo store,
    echoing each decoded payload to stdout."""
    print("server inside PutToLocalCluster")
    for message in request_iterator:
        payload = message.putRequest.datFragment.data.decode('utf-8')
        print(payload)
        mongoTestNew.put_data(payload)
    return server_pb2.Response(code=1)
def putHandler(self, request_iterator, context):
    """Distribute a stream of put requests across the cluster.

    Every third chunk — and any chunk that arrives while no local node
    has capacity — is forwarded to the external cluster; the remaining
    chunks are spread round-robin over the active node list.  A node
    that rejects a push is marked full and removed from the rotation.

    Returns:
        server_pb2.Response with code 1 on success, or code 2 when
        every local node fills up mid-stream.
    """
    serverlist = self.node.get_active_node_ids_for_push()
    print("serverlist ", serverlist)
    print("Inside put handler")
    st_idx = 0   # round-robin cursor into serverlist
    counter = 0  # total chunks seen; every 3rd goes external
    for req in request_iterator:
        # Offload every 3rd chunk, or any chunk with no local capacity.
        if counter % 3 == 0 or not serverlist:
            self.pushDataToExternalCluster(req)
            counter += 1
            continue
        # NOTE: commented-out temp debug code and an unreachable
        # `if not serverlist` check (the guard above already handles an
        # empty list) have been removed here.
        print("st_idx before node", st_idx)
        node_id = serverlist[st_idx]
        while True:
            if self.pushDataToNode(req, node_id):
                st_idx += 1
                # FIX: corrected misspelled debug messages below
                # ("increm,enting", "chanhing").
                print("incrementing st_idx", st_idx)
                if st_idx > len(serverlist) - 1:
                    print("changing st_idx to 0", st_idx)
                    st_idx = 0
                break
            # Push failed: retire the full node and retry this chunk on
            # the next node in the rotation.
            print("Marking node as full ", node_id)
            self.node.markNodeAsFull(node_id)
            serverlist.pop(st_idx)
            if not serverlist:
                return server_pb2.Response(code=2)
            if st_idx > len(serverlist) - 1:
                print("changing st_idx to 0", st_idx)
                st_idx = 0
            node_id = serverlist[st_idx]
        counter += 1
    return server_pb2.Response(code=1)
def GetFromLocalCluster(self, request, context):
    """Stream query results for the requested UTC window and filter
    parameters, in fragments of at most `limit` records each."""
    params = request.getRequest.queryParams
    start_ts = getEpochTime(params.from_utc)
    end_ts = getEpochTime(params.to_utc)
    total = mongoTestNew.get_count_of_data(
        start_ts, end_ts, params.params_json)
    limit = 2000
    yield_count = 1
    # Page through the result set; a final (possibly empty) page is
    # still emitted, mirroring the original inclusive bound.
    for offset in range(0, total + 1, limit):
        batch = mongoTestNew.get_data(
            start_ts, end_ts, offset, limit, params.params_json)
        yield_count += 1
        yield server_pb2.Response(
            code=1,
            metaData=server_pb2.MetaData(uuid="",
                                         numOfFragment=int(total)),
            datFragment=server_pb2.DatFragment(
                timestamp_utc="",
                data=str(batch).encode(encoding='utf_8')))
def ping(self, req, context):
    """Liveness probe — always acknowledge with success (code 1)."""
    ack = server_pb2.Response(code=1)
    return ack
def ping(self, req, context):
    """Liveness probe — log the hit and acknowledge with code 1."""
    print("Inside server ping")
    ack = server_pb2.Response(code=1)
    return ack