def unbake(message=None):
    """Decode a JSON wire message into an (action, payload) pair.

    Payload fields that carry a TimeUUID serialized as a string
    ('leader_id', 'identity', 'dead_uuid') are converted back into
    time_uuid.TimeUUID instances when present and non-None.  The
    'ledger' mapping, when present, has its string keys converted
    likewise (a falsy ledger is normalized to an empty dict, matching
    the original behavior).

    :param message: JSON-encoded message string.
    :returns: tuple of (message type, payload dict).
    """
    json_msg = json.loads(message)
    payload = json_msg['payload']

    # The three scalar TimeUUID fields share identical handling; loop
    # instead of repeating the if-ladder three times.
    for key in ('leader_id', 'identity', 'dead_uuid'):
        if key in payload and payload[key] is not None:
            payload[key] = time_uuid.TimeUUID(payload[key])

    if 'ledger' in payload:
        ledger = {}
        ledger_response = payload['ledger']
        if ledger_response:
            # .items() (not Python-2-only .iteritems()) works on both
            # Python 2 and 3; values pass through untouched.
            for uuid_string, values in ledger_response.items():
                ledger[time_uuid.TimeUUID(uuid_string)] = values
        payload['ledger'] = ledger

    return json_msg['type'], payload
def get_sensors():
    """Return readings for one sensor over the last N days as a JSON string.

    Reads 'sensor' and 'days' from the request query string, derives the
    lowest TimeUUID for the start of the window, and queries Cassandra
    for newer rows, most recent first.
    """
    sensor_id = str(request.args['sensor'])
    lookback_days = int(request.args['days'])

    # Lower bound of the time window as a deterministic (lowest) TimeUUID.
    window_start = datetime.datetime.utcnow() - datetime.timedelta(days=lookback_days)
    start_uuid = time_uuid.TimeUUID.with_utc(window_start, randomize=False, lowest_val=True)

    query = """SELECT * FROM sensor_data WHERE sensor_id=%(sensor)s and time>%(time)s ORDER BY time DESC"""
    rows = session.execute(query, {'time': start_uuid, 'sensor': sensor_id})

    result_rows = []
    for row in rows:
        # Round-trip the row's time column through TimeUUID to render it
        # as a datetime string.
        row_uuid = time_uuid.TimeUUID(str(row.time))
        result_rows.append({
            'sensor': str(row.sensor_id),
            'reading': str(row.reading),
            'time': str(row_uuid.get_datetime()),
        })

    return json.dumps({'rows': result_rows})
def get_timestamp_from_uuid(uuid):
    """Parse a TimeUUID string and return its embedded datetime.

    Returns None when the string is not a valid TimeUUID (ValueError
    from the constructor); only construction is guarded, exactly as in
    the original try/except/else form.
    """
    try:
        parsed = time_uuid.TimeUUID(uuid)
    except ValueError:
        return None
    return parsed.get_datetime()
def put_sensors():
    """Insert one sensor reading into Cassandra.

    The reading arrives JSON-encoded in the 'value' form field and must
    carry 'sensor', 'time' (a TimeUUID string), and 'reading' keys.
    """
    reading = json.loads(request.form['value'])

    query = """INSERT INTO sensor_data (sensor_id, time, reading) VALUES (%(sensor_id)s, %(time)s, %(reading)s)"""
    session.execute(query, {
        'sensor_id': str(reading['sensor']),
        'time': time_uuid.TimeUUID(reading['time']),
        'reading': float(reading['reading']),
    })
    return ""
def recovery(self):
    """Rebuild in-memory chunk/file tables by scanning every chunk server.

    Each stored chunk file is named '<timeuuid>--<urlsafe-b64 path>'.
    For every chunk found, this records which server holds it
    (chunk_table), which file it belongs to plus a running size total
    (file_table), and recreates a local placeholder file so the path
    exists under self.root.
    """
    logging.debug('RECOVERY')
    for chunk_server in self.chunk_servers:
        self.connect_chunk_server(chunk_server)
        dir_dict = self.chunk_server.readdir()
        file_list = dir_dict['files']
        size_list = dir_dict['size']
        chunk_ids_list = []
        for file_name, file_size in zip(file_list, size_list):
            uuid_string, encoded_path = file_name.split("--")
            chunk_uuid = time_uuid.TimeUUID(uuid_string)
            path = base64.urlsafe_b64decode(encoded_path)
            # NOTE(review): chunk_id keeps the *encoded* path component
            # rather than the decoded one (original behavior) -- confirm
            # this is intended.
            chunk_id = (chunk_uuid, encoded_path)
            chunk_ids_list.append(chunk_id)

            # Record which server holds this chunk.
            if chunk_uuid not in self.chunk_table:
                self.chunk_table[chunk_uuid] = []
            self.chunk_table[chunk_uuid].append(chunk_server)

            # First sighting of this file path: seed its chunk list and
            # running size counter (stored under the path + 'size' key).
            if path not in self.file_table:
                self.file_table[path] = []
                self.file_table[path + 'size'] = 0
            if chunk_id not in self.file_table[path]:
                self.file_table[path].append(chunk_id)
                self.file_table[path + 'size'] = (
                    self.file_table[path + 'size'] + int(file_size))

            # Recreate a local placeholder so the path exists under root.
            local_dir = os.path.dirname(self.root + path)
            if not os.path.exists(local_dir):
                # BUG FIX: os.mkdirs() does not exist and raised
                # AttributeError; os.makedirs() is the recursive
                # directory-creation call intended here.
                os.makedirs(local_dir)
            if not os.path.exists(self.root + path):
                fd = os.open(self.root + path, os.O_CREAT | os.O_RDWR)
                os.write(fd, "updating")
                os.close(fd)
        self.chunk_server_table[chunk_server] = chunk_ids_list
def get_uuid_time(the_uuid_string):
    """Return the datetime embedded in the given TimeUUID string."""
    parsed = time_uuid.TimeUUID(the_uuid_string)
    return parsed.get_datetime()
def get_unix_timestamp_from_timeuuid(timeuuid):
    """Return the TimeUUID's timestamp scaled by 100,000,000 as an int.

    NOTE(review): 1e8 is neither seconds (x1) nor the UUID 100-ns tick
    (x1e7); the scale looks unusual for a "unix timestamp" -- confirm
    the multiplier against the callers.
    """
    raw_timestamp = time_uuid.TimeUUID(timeuuid).get_timestamp()
    return int(raw_timestamp * 100000000)