import json

import server_pb2  # generated protobuf module; the fragments below also assume
                   # the project helpers getEpochTime, mongoTestNew and
                   # chunktest are importable.


def provideJson(chunk):
    """Convert a CSV data fragment into a Response carrying comma-joined
    JSON objects, one per CSV row."""
    jsonChunk = []
    for dline in chunk.datFragment.data.decode('utf-8').strip('\n').split('\n'):
        # Naive split: assumes no embedded commas inside quoted fields.
        dlineValues = dline.replace('"', "'").split(',')
        dlineJsonValues = {
            'STN': dlineValues[0],
            'TIMESTAMP': dlineValues[1],
            'MNET': dlineValues[2],
            'SLAT': dlineValues[3],
            'SLON': dlineValues[4],
            'SELV': dlineValues[5],
            'TMPF': dlineValues[6],
            'SKNT': dlineValues[7],
            'DRCT': dlineValues[8],
            'GUST': dlineValues[9],
            'PMSL': dlineValues[10],
            'ALTI': dlineValues[11],
            'DWPF': dlineValues[12],
            'RELH': dlineValues[13],
            'WTHR': dlineValues[14],
            'P24I': dlineValues[15],
        }
        jsonChunk.append(json.dumps(dlineJsonValues))
    responseChunk = server_pb2.Response(
        code=1,
        metaData=server_pb2.MetaData(
            uuid="",
            numOfFragment=int(chunk.metaData.numOfFragment)),
        datFragment=server_pb2.DatFragment(
            timestamp_utc="",
            data=','.join(jsonChunk).encode('utf-8')))
    return responseChunk
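
# Hedged usage sketch (not from the original source): provideJson expects a
# message whose datFragment.data holds UTF-8 CSV rows with exactly the 16
# columns named above. The station values below are illustrative only.
def _provideJson_smoke_test():
    csv_row = b"KSJC,1328114400000,1,37.36,-121.93,24,55.0,5.0,310,0,1013.2,29.92,46.0,72.0,,0.0"
    chunk = server_pb2.Response(
        metaData=server_pb2.MetaData(uuid="", numOfFragment=1),
        datFragment=server_pb2.DatFragment(timestamp_utc="", data=csv_row))
    out = provideJson(chunk)
    # The payload is comma-joined JSON objects rather than a JSON array, so
    # wrap it in brackets before parsing it back.
    parsed = json.loads('[' + out.datFragment.data.decode('utf-8') + ']')
    assert parsed[0]['STN'] == 'KSJC'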
def GetFromLocalCluster(self, request, context):
    """Stream query results from the local MongoDB back to the caller in
    fixed-size pages."""
    print("Inside GetFromLocalCluster")
    print(request.getRequest.queryParams)
    fromTimestamp = getEpochTime(request.getRequest.queryParams.from_utc)
    toTimestamp = getEpochTime(request.getRequest.queryParams.to_utc)
    # fromTimestamp, toTimestamp = 1328114400000, 1328155200000  # debug values
    data_count = mongoTestNew.get_count_of_data(fromTimestamp, toTimestamp)
    print("Data count is", data_count)
    # TODO: move the paging parameters to config
    offset = 0
    limit = 2000
    yield_count = 1
    # '<' (not '<=') so an exact multiple of `limit` does not yield an
    # empty trailing page.
    while offset < data_count:
        query_data = mongoTestNew.get_data(fromTimestamp, toTimestamp,
                                           offset, limit)
        response = server_pb2.Response(
            code=1,
            msg="froms-1",
            metaData=server_pb2.MetaData(uuid="",
                                         numOfFragment=int(data_count)),
            datFragment=server_pb2.DatFragment(
                timestamp_utc="",
                data=str(query_data).encode('utf-8')))
        print("yield count", yield_count)
        yield_count += 1
        yield response
        offset += limit
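
# Hedged client-side sketch (not from the original source): GetFromLocalCluster
# is a server-streaming handler, so the caller iterates over the responses.
# CommunicationServiceStub and getHandler are illustrative placeholders for
# whatever the project's .proto actually generates; the GetRequest/QueryParams
# field names follow the accesses made in the handler above.
import grpc
import server_pb2_grpc  # assumed name of the generated service module

def fetch_range(host, from_utc, to_utc):
    with grpc.insecure_channel(host) as channel:
        stub = server_pb2_grpc.CommunicationServiceStub(channel)
        request = server_pb2.Request(
            getRequest=server_pb2.GetRequest(
                queryParams=server_pb2.QueryParams(
                    from_utc=from_utc, to_utc=to_utc)))
        # One decoded payload string per streamed page.
        return [response.datFragment.data.decode('utf-8')
                for response in stub.getHandler(request)]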
def create_streaming_request_for_local_put(self, data):
    """Wrap a single payload in a one-element request stream for a local put."""
    req = server_pb2.Request(
        fromSender=self.host,
        putRequest=server_pb2.PutRequest(
            metaData=server_pb2.MetaData(uuid=''),
            datFragment=server_pb2.DatFragment(
                data=str(data).encode('utf-8'))))
    yield req
def create_streaming_request(self, putData):
    req = server_pb2.Request(
        fromSender='some put sender',
        toReceiver='some put receiver',
        putRequest=server_pb2.PutRequest(
            metaData=server_pb2.MetaData(uuid='14829'),
            datFragment=server_pb2.DatFragment(
                data=str(putData).encode('utf-8'))))
    yield req
def create_streaming_request(self, file):
    """Yield one put request per chunk produced by chunktest.process."""
    for x in chunktest.process(None, request=False, name=file):
        req = server_pb2.Request(
            fromSender=self.host,
            putRequest=server_pb2.PutRequest(
                metaData=server_pb2.MetaData(uuid=''),
                datFragment=server_pb2.DatFragment(
                    data="".join(x).encode('utf-8'))))
        yield req
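
# Hedged usage sketch: each create_streaming_request* generator above is meant
# to be passed whole to a client-streaming RPC, so gRPC pulls one Request per
# yield. CommunicationServiceStub and putHandler are placeholder names; use
# whatever service and rpc names the project's .proto actually declares (the
# grpc / server_pb2_grpc imports are the same as in the fetch sketch above).
def stream_put(host, request_iterator):
    with grpc.insecure_channel(host) as channel:
        stub = server_pb2_grpc.CommunicationServiceStub(channel)
        return stub.putHandler(request_iterator)  # single Response back

# e.g. stream_put('localhost:50051', client.create_streaming_request(file))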
def GetFromLocalCluster(self, request, context):
    """Stream filtered query results from the local MongoDB in pages,
    passing the caller's params_json filter through to the queries."""
    fromTimestamp = getEpochTime(request.getRequest.queryParams.from_utc)
    toTimestamp = getEpochTime(request.getRequest.queryParams.to_utc)
    data_count = mongoTestNew.get_count_of_data(
        fromTimestamp, toTimestamp,
        request.getRequest.queryParams.params_json)
    offset = 0
    limit = 2000
    yield_count = 1
    # '<' (not '<=') avoids an empty trailing page when data_count is an
    # exact multiple of `limit`.
    while offset < data_count:
        query_data = mongoTestNew.get_data(
            fromTimestamp, toTimestamp, offset, limit,
            request.getRequest.queryParams.params_json)
        response = server_pb2.Response(
            code=1,
            metaData=server_pb2.MetaData(uuid="",
                                         numOfFragment=int(data_count)),
            datFragment=server_pb2.DatFragment(
                timestamp_utc="",
                data=str(query_data).encode('utf-8')))
        yield_count += 1
        yield response
        offset += limit
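
# Illustrative paging sketch (not from the original source): with limit = 2000,
# a query matching data_count = 4500 documents streams three fragments covering
# offsets 0-1999, 2000-3999 and 4000-4499. This helper reproduces just the
# offset arithmetic shared by both GetFromLocalCluster variants above.
def page_offsets(data_count, limit=2000):
    offset = 0
    while offset < data_count:
        yield offset, min(limit, data_count - offset)
        offset += limit

# list(page_offsets(4500)) == [(0, 2000), (2000, 2000), (4000, 500)]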