def listen(self):
    """Receive Muse UDP datagrams forever and batch them into Cassandra.

    Reads JSON payloads from self.client, converts each value on a
    watched path (self.paths) into a (row_key, column) pair via
    convert_muse_data_to_cassandra_column, and accumulates pairs in
    self.batches[path].  When a batch reaches BATCH_MAX_SIZE entries it
    is written through self.muse_cassandra_repo.add_batch and reset.

    Never returns; runs until the process is stopped.
    """
    while True:
        # NOTE(review): assumes a whole datagram fits in 1024 bytes -- confirm.
        data, addr = self.client.recvfrom(1024)
        value = json.loads(data)
        path = value[0]
        if path not in self.paths:
            continue  # not a stream we persist
        row_key, column = convert_muse_data_to_cassandra_column(path, value)
        # Add first, then flush on >=.  The original flushed *instead of*
        # adding when the batch was already full, silently dropping the
        # current column, and stalled forever if the size ever exceeded
        # BATCH_MAX_SIZE (it compared with == only).
        self.batches[path][row_key] = column
        if len(self.batches[path]) >= BATCH_MAX_SIZE:
            self.muse_cassandra_repo.add_batch(self.batches[path])
            self.batches[path] = {}
            # time stats: row_key assumed to be "<id>_<ms-timestamp>" -- confirm.
            timestamp = row_key.split('_')[1]
            print("column batch stored in cassandra for %s -- %s ms" % (path, timestamp))
def handle_value(self, string_value):
    """Parse one JSON-encoded Muse value and batch it for Cassandra.

    string_value -- JSON text whose decoded form is [path, ...payload],
    i.e. the first element names the Muse data path.  Values whose path
    is not in self.paths are ignored.  Accepted values are converted to
    a (row_key, column) pair and added to self.batches[path]; when the
    batch reaches BATCH_MAX_SIZE it is written through
    self.muse_cassandra_repo.add_batch and reset.
    """
    value = json.loads(string_value)
    path = value[0]
    if path not in self.paths:
        return  # not a stream we persist
    row_key, column = convert_muse_data_to_cassandra_column(path, value)
    # Add first, then flush on >=.  The original flushed *instead of*
    # adding when the batch was already full, silently dropping the
    # current column, and stalled forever if the size ever exceeded
    # BATCH_MAX_SIZE (it compared with == only).
    self.batches[path][row_key] = column
    # time stats: row_key assumed to be "<id>_<ms-timestamp>" -- confirm.
    timestamp = row_key.split('_')[1]
    print("column added to batch for %s -- %s ms" % (path, timestamp))
    if len(self.batches[path]) >= BATCH_MAX_SIZE:
        self.muse_cassandra_repo.add_batch(self.batches[path])
        self.batches[path] = {}
        print("column batch stored in cassandra for %s -- %s ms" % (path, timestamp))
def handle_value(self, string_value):
    """Parse one JSON-encoded Muse value and batch it for Cassandra.

    string_value -- JSON text whose decoded form is [path, ...payload],
    i.e. the first element names the Muse data path.  Values whose path
    is not in self.paths are ignored.  Accepted values are converted to
    a (row_key, column) pair and added to self.batches[path]; when the
    batch reaches BATCH_MAX_SIZE it is written through
    self.muse_cassandra_repo.add_batch and reset.
    """
    value = json.loads(string_value)
    path = value[0]
    if path not in self.paths:
        return  # not a stream we persist
    row_key, column = convert_muse_data_to_cassandra_column(path, value)
    # Add first, then flush on >=.  The original flushed *instead of*
    # adding when the batch was already full, silently dropping the
    # current column, and stalled forever if the size ever exceeded
    # BATCH_MAX_SIZE (it compared with == only).
    self.batches[path][row_key] = column
    # time stats: row_key assumed to be "<id>_<ms-timestamp>" -- confirm.
    timestamp = row_key.split('_')[1]
    print("column added to batch for %s -- %s ms" % (path, timestamp))
    if len(self.batches[path]) >= BATCH_MAX_SIZE:
        self.muse_cassandra_repo.add_batch(self.batches[path])
        self.batches[path] = {}
        print("column batch stored in cassandra for %s -- %s ms" % (path, timestamp))
def listen(self):
    """Receive Muse UDP datagrams forever and batch them into Cassandra.

    Reads JSON payloads from self.client, converts each value on a
    watched path (self.paths) into a (row_key, column) pair via
    convert_muse_data_to_cassandra_column, and accumulates pairs in
    self.batches[path].  When a batch reaches BATCH_MAX_SIZE entries it
    is written through self.muse_cassandra_repo.add_batch and reset.

    Never returns; runs until the process is stopped.
    """
    while True:
        # NOTE(review): assumes a whole datagram fits in 1024 bytes -- confirm.
        data, addr = self.client.recvfrom(1024)
        value = json.loads(data)
        path = value[0]
        if path not in self.paths:
            continue  # not a stream we persist
        row_key, column = convert_muse_data_to_cassandra_column(path, value)
        # Add first, then flush on >=.  The original flushed *instead of*
        # adding when the batch was already full, silently dropping the
        # current column, and stalled forever if the size ever exceeded
        # BATCH_MAX_SIZE (it compared with == only).
        self.batches[path][row_key] = column
        if len(self.batches[path]) >= BATCH_MAX_SIZE:
            self.muse_cassandra_repo.add_batch(self.batches[path])
            self.batches[path] = {}
            # time stats: row_key assumed to be "<id>_<ms-timestamp>" -- confirm.
            timestamp = row_key.split('_')[1]
            print("column batch stored in cassandra for %s -- %s ms" % (path, timestamp))