def offload_sensor_data(sensor_data):
    """Persist a batch of collected sensor readings to the database.

    Args:
        sensor_data: iterable of dicts, each shaped like
            {'node_id': <source id>, 'data': [<reading>, ...]} where each
            reading dict has 'time', 'sensor', and 'reading' keys.
            (Schema inferred from the key accesses below — confirm against
            the caller.)

    Returns:
        True when all readings were saved, False if the database
        connection could not be established.
    """
    logger.info("Saving data to database...")
    count = 0

    # Connect to the database
    ddb = DryadDatabase()
    if not ddb.connect(CUSTOM_DATABASE_NAME):
        # BUG FIX: the previous message ("Failed to update device info")
        # did not describe this failure, which is a connection failure.
        logger.error("Failed to connect to database")
        return False

    for node_data in sensor_data:
        source_id = node_data['node_id']
        data = node_data['data']

        # BUG FIX: the loop variable was previously named 'sensor_data',
        # shadowing the function parameter.
        for reading in data:
            ts = reading['time']
            # Serialized with the same %-format as before so downstream
            # consumers of the stored string see identical content.
            read_data = '{ "type" : "%s", "value" : %f }' % (
                reading["sensor"], reading["reading"])
            ddb.add_data(read_data, source=source_id, timestamp=ts)

            # BUG FIX: increment before logging; the old order logged
            # "0 records" on the very first record and under-reported
            # the count on every progress line.
            count += 1
            if (count % 5) == 0:
                logger.info("Data saved:{0} records".format(count))

    # Disconnect from the database
    ddb.disconnect()
    return True
def offload_data(self):
    """Aggregate session readings into per-source data blocks and write
    the blocks back to the database.

    Each reading's ``content`` is a "key : value" string. Readings are
    grouped per ``source_id`` into dicts; a dict is considered complete
    when it holds ``n_params`` entries and is then moved to the offload
    set. Any partially-filled dicts are flushed at the end. Completed
    blocks are stored via ``db.add_data`` and the session data is then
    cleared.

    Returns:
        None.
    """
    db = DryadDatabase()
    session_data = db.get_session_data()
    self.logger.debug(session_data)

    curr_session = 0
    n_params = 13          # ideal number of parameters per data block
    data_blocks = {}       # source_id -> list of partially-filled dicts
    offloaded_data = {}    # source_id -> list of completed dicts
    last_timestamp = None  # timestamp of the most recent reading seen

    for reading in session_data:
        # Save the current session id and timestamp.
        # BUG FIX: the timestamp used when storing blocks previously
        # relied on the leaked loop variable after the loop ended, which
        # raised NameError when session_data was empty.
        curr_session = reading.session_id
        last_timestamp = reading.timestamp

        # Extract the data type and value from the 'content' string.
        # Split once instead of twice; same "first two segments" semantics.
        parts = reading.content.split(":")
        data_key = parts[0].strip()
        data_val = parts[1].strip()
        data_source_id = reading.source_id

        # Boolean indicator whether data key exists in every open block
        key_exists = True

        # Check if source id exists in current data blocks
        if data_source_id in data_blocks:
            source_id_readings = data_blocks[data_source_id]
            for i in range(len(source_id_readings)):
                if data_key in source_id_readings[i]:
                    # Key already present in this block; try the next one
                    continue

                # Found a block still missing this key — fill it in
                key_exists = False
                source_id_readings[i][data_key] = data_val

                # Move the block to offloaded_data once it is complete
                if len(source_id_readings[i]) == n_params:
                    offloaded_data.setdefault(data_source_id, []).append(
                        source_id_readings[i])
                    # Remove datum that has been offloaded
                    del source_id_readings[i]

                # Go to the next reading
                break

            if key_exists:
                # Every open block already had this key — start a new one
                source_id_readings.append({data_key: data_val})
        else:
            # Initialize data block if source id not existing
            data_blocks[data_source_id] = [{data_key: data_val}]

    # Add remaining (partial) data blocks to offload.
    # BUG FIX: the emptiness test previously used `len(...) is not 0`,
    # an identity comparison with an int literal.
    for key, block in data_blocks.items():
        for reading_set in block:
            if reading_set:
                offloaded_data.setdefault(key, []).append(reading_set)

    # Add offloaded data to database
    blk_count = 0
    for source, block in offloaded_data.items():
        for reading_set in block:
            # Save the block to the database
            db.add_data(
                blk_id=blk_count,
                session_id=curr_session,
                source_id=source,
                content=str(reading_set),
                timestamp=last_timestamp
            )
            blk_count += 1

    db.clear_session_data()
    db.close_session()
    return