def handle_req_download(self, link, content):
    """Handle a data download request.

    Parses comma-separated "key=value" arguments from *content*
    (limit, offset, start_id, end_id), fetches the matching data
    records from the database, and sends them back over *link* as a
    JSON "RDATA" response.
    """
    limit = None
    offset = None
    start_id = 0
    end_id = 100000000000000  # effectively "no upper bound"

    # Parse our argument list.
    # (Uses partition so a '=' inside the value cannot break parsing.)
    for arg in content.split(','):
        key, _, value = arg.partition('=')
        key = key.lower()
        if key == "limit":
            limit = int(value)
        elif key == "start_id":
            start_id = int(value)
        elif key == "end_id":
            end_id = int(value)
        elif key == "offset":
            offset = int(value)

    db = DryadDatabase()
    matched_data = db.get_data(limit=limit, offset=offset,
                               start_id=start_id, end_id=end_id)
    db.close_session()

    data = []
    for reading in matched_data:
        # NOTE(review): single-quoted content is naively converted to
        # JSON by swapping quote characters; this breaks if any stored
        # value itself contains a quote — confirm upstream format.
        data_block = {
            'rec_id': reading.id,
            'timestamp': reading.end_time,
            'sampling_site': reading.site_name,
            'data': json.loads(reading.content.replace("'", '"')),
            'origin': {
                'name': reading.name,
                'lat': reading.lat,
                'lon': reading.lon,
                'addr': "---",
            },
        }

        # Ensure optional fields are always present in the response
        data_block['data'].setdefault('ph', None)
        data_block['data'].setdefault('bl_batt', None)

        data.append(data_block)

    return link.send_response("RDATA:{};\r\n".format(json.dumps(data)))
def handle_req_info_list(self, link, content):
    """Send every system info parameter back over *link* as an RINFO
    response, formatted as a name -> value mapping."""
    db = DryadDatabase()
    system_info = db.get_all_system_info()
    db.close_session()

    # Flatten the records into a simple name -> value dict
    param_list = {p.name: p.value for p in system_info}

    return link.send_response("RINFO:{};\r\n".format(param_list))
def handle_req_state(self, link, content):
    """Report this cache node's current state over *link* as an RSTAT
    response (name, state, coordinates, times, and data record count)."""
    # Retrieve details about the cache node from the database
    db = DryadDatabase()
    node_matches = db.get_nodes(node_class='SELF')
    data = db.get_data()

    if len(node_matches) <= 0:
        db.close_session()
        self.logger.error("Failed to load data for 'SELF'")
        return link.send_response("RSTAT:FAIL\r\n")

    db.close_session()

    # Retrieve system uptime via the shell
    self_uptime = os.popen(SYS_CMD_UPTIME).read().strip()

    node_data = node_matches[0]

    # Compose the state string to return
    state_fmt = ("'name':'{}','state':'{}','batt':{},'version':'{}',"
                 "'lat':{},'lon':{},'sys_time':'{}','uptime':'{}',"
                 "'next_sleep_time':'{}','next_collect_time':'{}',"
                 "'size':{}")
    state = state_fmt.format(node_data.name,
                             self.task_node.get_state_str(),
                             -99.0,
                             self.version,
                             node_data.lat,
                             node_data.lon,
                             ctime(),
                             self_uptime,
                             ctime(self.task_node.get_idle_out_time()),
                             ctime(self.task_node.get_collect_time()),
                             len(data))

    return link.send_response("RSTAT:{" + state + "};\r\n")
def offload_sensor_data(sensor_data):
    """Persist collected sensor readings to the database.

    *sensor_data* is a list of per-node dicts with 'node_id' and 'data'
    keys; each entry under 'data' carries 'time', 'sensor' and 'reading'.
    Returns True on success, False if the database connection fails.
    """
    logger.info("Saving data to database...")
    count = 0

    # Connect to the database
    ddb = DryadDatabase()
    if ddb.connect(CUSTOM_DATABASE_NAME) == False:
        logger.error("Failed to update device info")
        return False

    for node_data in sensor_data:
        source_id = node_data['node_id']
        # BUGFIX: the inner loop variable used to be named 'sensor_data',
        # shadowing the function parameter; renamed to 'sample'.
        for sample in node_data['data']:
            ts = sample['time']
            read_data = '{ "type" : "%s", "value" : %f }' % (
                sample["sensor"], sample["reading"]
            )
            ddb.add_data(read_data, source=source_id, timestamp=ts)

            # Periodic progress log, every 5 records
            if (count % 5) == 0:
                logger.info("Data saved:{0} records".format(count))
            count += 1

    # Disconnect from the database
    ddb.disconnect()
    return True
def cache_reading(self, reading): self.readings.append( reading ) # Store the timestamp parameter ts = reading['ts'] # Store all other values db = DryadDatabase() for key in reading: if key == 'ts': continue result = db.add_session_data( self.parent.get_name(), str("{}: {}".format(key, reading[key])), ts ) if result == False: print("Failed to add data") db.close_session() return
def handle_req_remove_sensor(self, link, content):
    """Remove a sensor node (device record, then node record) named by
    the 'sn_name' argument, replying RDLTE:OK or RDLTE:FAIL over *link*.

    Returns the send result on success, False on failure.
    """
    params = {
        "rpi_name": None,
        "sn_name": None,
    }

    # Parse "key=value" argument pairs.
    # CLEANUP: removed the dead lat/lon float-conversion branch — this
    # handler's params contain no lat/lon keys, so it could never run.
    content = content.strip(";")
    for arg in content.split(','):
        if "=" not in arg:
            continue
        param, val = arg.split("=", 1)
        if param in params:
            # Values arrive quoted; strip the surrounding quotes
            params[param] = val.strip("'").strip('"')

    db = DryadDatabase()

    # Remove the device record first
    result = db.delete_device(params["sn_name"])
    if result == False:
        self.logger.error("Failed to remove device")
        link.send_response("RDLTE:FAIL;\r\n")
        db.close_session()
        return False

    # Then remove the node record itself
    result = db.delete_node(params["sn_name"])
    if result == False:
        self.logger.error("Failed to remove node")
        link.send_response("RDLTE:FAIL;\r\n")
        db.close_session()
        return False

    db.close_session()
    return link.send_response("RDLTE:OK;\r\n")
def setup_worker_threads(self):
    """Begin a fresh database session (terminating any stale one) and
    start the pool of node-processing worker threads."""
    self.worker_threads = []

    # A leftover session from a previous run must be closed first
    db = DryadDatabase()
    if db.get_current_session() != False:
        self.logger.error("A previous session is still active. Closing it...")
        db.terminate_session()
    db.start_session()
    db.close_session()

    # Spin up one worker per queue slot
    for _ in range(self.node_queue_size):
        worker = Thread(target=self.process_node)
        worker.start()
        self.worker_threads.append(worker)

    return
def save_new_sensor_nodes(node_list):
    """Add a node-info record for every (address, name) pair in
    *node_list* that the database does not already know about.

    Returns True on success, False if the database connection fails.
    """
    ddb = DryadDatabase()
    if ddb.connect(CUSTOM_DATABASE_NAME) == False:
        logger.error("Load Sensor Node Info Failed")
        return False

    for address, name in node_list:
        # Skip devices that already have a record
        if ddb.get_node_info(address):
            continue
        # New devices start out with an UNKNOWN type
        ddb.add_node_info(address, name, "UNKNOWN")

    ddb.disconnect()
    return True
def handle_req_update_cache(self, link, content):
    """Update this cache node's own record (site name and coordinates)
    from "key=value" arguments, replying RCUPD:OK or RCUPD:FAIL."""
    params = {"name": None, "lat": None, "lon": None, "site_name": None}

    # Drop the trailing ";" and parse the argument pairs
    for arg in content.strip(';').split(','):
        if "=" not in arg:
            continue
        fields = arg.split("=")
        key, val = fields[0], fields[1]
        if key not in params.keys():
            continue
        if key == "lat" or key == "lon":
            params[key] = float(val)
        else:
            params[key] = val.strip("'").strip('"')

    db = DryadDatabase()
    node_matches = db.get_nodes(node_class='SELF')
    if len(node_matches) <= 0:
        self.logger.error("Failed to load data for 'SELF'")
        db.close_session()
        return link.send_response("RCUPD:FAIL;\r\n")

    # Only the first SELF record is relevant
    node_data = node_matches[0]

    # Write the updated cache node details back to the DB
    result = db.insert_or_update_node(name=node_data.name,
                                      node_class=node_data.node_class,
                                      site_name=params['site_name'],
                                      lat=params['lat'],
                                      lon=params['lon'])
    db.close_session()

    if result == False:
        self.logger.error("Failed to update cache node details")
        link.send_response("RCUPD:FAIL;\r\n")
        return False

    return link.send_response("RCUPD:OK;\r\n")
def load_sensor_nodes():
    """Return every node whose type is still UNKNOWN or already SENSOR,
    or False if the database connection fails."""
    ddb = DryadDatabase()
    if ddb.connect(CUSTOM_DATABASE_NAME) == False:
        logger.error("Load Sensor Node Info Failed")
        return False

    # Both unclassified and confirmed sensor nodes are of interest
    node_list = ddb.get_nodes('C_TYPE = "UNKNOWN" OR C_TYPE = "SENSOR"')
    ddb.disconnect()

    return node_list
def cleanup_worker_threads(self):
    """Stop all worker threads via queue sentinels, wait for each to
    finish, then terminate the active database session."""
    # One None sentinel per worker unblocks every queue consumer
    for _ in range(self.node_queue_size):
        self.node_queue.put(None)

    for worker in self.worker_threads:
        self.logger.debug("Cleaning up thread: {}".format(worker.name))
        worker.join()

    # End the session now that no worker can touch the database
    db = DryadDatabase()
    db.terminate_session()
    db.close_session()
    return
def update_scanned_devices(self, scanned_devices):
    """Register newly scanned BLE devices in the database.

    For each device in *scanned_devices* that is not yet known, create a
    node record and a node-device record; already-known devices are
    skipped with an info log.
    """
    db = DryadDatabase()
    for device in scanned_devices:
        # Check if this device already exists in the database.
        # BUGFIX: the old check compared against type(list) (which is
        # `type`, never a query result's type), treated an empty result
        # list as an existing record, and read attributes off the list
        # itself rather than its first element.
        result = db.get_devices(address=device.addr.upper())
        if result:
            existing = result[0] if isinstance(result, list) else result
            self.logger.debug(str(result))
            self.logger.info("Node already exists: [{}] {}/{}".format(
                existing.device_type, existing.node_id, existing.address))
            continue

        # Get the name of the device first
        node_id = device.getValueText(ADTYPE_LOCAL_NAME)
        if node_id == None:
            self.logger.error("Could not obtain device name: {}".format(
                device.addr))
            continue
        node_id = node_id.strip('\x00')

        # Add a node record in the database
        # NOTE(review): site/lat/lon are placeholder defaults
        result = db.insert_or_update_node(name=node_id,
                                          node_class="UNKNOWN",
                                          site_name="????",
                                          lat=14.37,
                                          lon=120.58)
        if result == False:
            self.logger.error("Unable to add node record")
            continue

        # Add a node device record in the database
        result = db.insert_or_update_device(address=device.addr.upper(),
                                            node_id=node_id,
                                            device_type="UNKNOWN")
        if result == False:
            self.logger.error("Unable to add node device record")
            continue

    db.close_session()
    return
def handle_req_update_sensor(self, link, content):
    """Create or update a sensor node record from "key=value" arguments
    (name, site_name, state, lat, lon), replying RSUPD:OK or RSUPD:FAIL.

    Returns the send result on success, False on failure.
    """
    params = {
        "name": None,
        "site_name": None,
        "state": None,
        "lat": None,
        "lon": None,
    }

    # remove trailing ";"
    if ";" in content:
        content = content[:-1]

    # Parse "key=value" argument pairs
    # (split with maxsplit=1 so '=' inside a value cannot drop data)
    for arg in content.split(','):
        if "=" not in arg:
            continue
        param, val = arg.split("=", 1)
        if param in params:
            if param == "lat" or param == "lon":
                params[param] = float(val)
            else:
                params[param] = val.strip("'").strip('"')

    # CLEANUP: removed the unused DataTransformation() instance
    db = DryadDatabase()

    result = db.insert_or_update_node(name=params['name'],
                                      node_class=CLASS_SENSOR,
                                      site_name=params['site_name'],
                                      lat=params['lat'],
                                      lon=params['lon'])
    if result == False:
        self.logger.error("Failed to add node")
        link.send_response("RSUPD:FAIL;\r\n")
        db.close_session()
        return False

    db.close_session()
    return link.send_response("RSUPD:OK;\r\n")
def reload_network_info(self):
    """Rebuild self.node_list from the node and device records in the
    database, joining devices to nodes by name.

    Returns True on success; returns None early if either query fails.
    """
    db = DryadDatabase()

    # Get all nodes and devices
    node_records = db.get_nodes()
    device_records = db.get_devices()

    if (node_records == False) or (device_records == False):
        db.close_session()
        return

    # Reload the running node list
    self.node_list = []
    for device in device_records:
        node_name = device.node_id
        node_addr = device.address
        node_type = device.device_type  # Enum (per .name access below)

        # Find the matching node record to obtain its class Enum
        node_class = None
        for node in node_records:
            if node.name == device.node_id:
                node_class = node.node_class

        # BUGFIX: the previous default was the plain string "UNKNOWN",
        # whose missing .name attribute crashed whenever a device had no
        # matching node record.
        self.node_list.append({
            "id": node_name,
            "addr": node_addr,
            "type": node_type.name,
            "class": node_class.name if node_class is not None else "UNKNOWN"
        })

    self.logger.debug(str(self.node_list))
    db.close_session()
    return True
def init():
    """Initialize logging and set up the database schema.

    Returns True when everything succeeded, False otherwise.
    """
    # Initialize the logger first so later failures can be reported
    if init_logger() == False:
        logger.error("Initialization Failed")
        return False

    # Connect to and set up the database
    ddb = DryadDatabase()
    if ddb.connect(CUSTOM_DATABASE_NAME) == False:
        logger.error("Initialization Failed")
        return False
    if ddb.setup() == False:
        logger.error("Initialization Failed")
        return False
    ddb.disconnect()

    logger.info("Initialization Finished")
    return True
def cache_reading(self, reading):
    """Persist the power ('pf_batt') value of *reading* onto this node's
    device record, then delegate to the base-class caching behavior."""
    node_address = self.parent.get_address()

    db = DryadDatabase()
    matched_devices = db.get_devices(address=node_address)

    # ROBUSTNESS: guard against a failed lookup (False) or an empty match
    # list before indexing the first element, which previously crashed.
    if not matched_devices:
        db.close_session()
        self.logger.error(
            "No device record found for address: {}".format(node_address))
        ReadThread.cache_reading(self, reading)
        return

    node = matched_devices[0]

    print("Reading: {}".format(reading))
    result = db.insert_or_update_device(node.address,
                                        node.node_id,
                                        node.device_type,
                                        reading['pf_batt'])
    db.close_session()

    if result == False:
        self.logger.error("Failed to save power reading")

    # Continue with the normal in-memory / session-data caching
    ReadThread.cache_reading(self, reading)
    return
def init_network_records(self):
    """Create this cache node's own node and device records if absent.

    Returns True when fresh records were created; False when a SELF
    record already exists or when creation failed.
    """
    db = DryadDatabase()

    # Nothing to do if a SELF record is already present
    records = db.get_nodes(node_class="SELF")
    if (records != False) and (len(records) > 0):
        db.close_session()
        return False

    # Create the node record, named after the host
    self_name = os.popen(SYS_CMD_BNAME).read().split(' ')[0].strip()
    result = db.insert_or_update_node(name=self_name,
                                      node_class="SELF",
                                      site_name="????",
                                      lat=14.37,
                                      lon=120.58)
    if result == False:
        self.logger.error("Unable to create own node record")
        db.close_session()
        return False

    # Create the matching device record using the host address
    self_address = os.popen(SYS_CMD_ADDR).read().strip()
    result = db.insert_or_update_device(address=self_address,
                                        node_id=self_name,
                                        device_type="RPI")
    if result == False:
        self.logger.error("Unable to create own device record")
        db.close_session()
        return False

    db.close_session()
    return True
def set_param(name, val):
    """Store a single system parameter in the database and return the
    operation's result."""
    db = DryadDatabase()
    outcome = db.insert_or_update_system_param(name, val)
    db.close_session()
    return outcome
def gather_sensor_data(address, name, ntype, nclass):
    """Connect to one BLE sensor node and collect live readings.

    Classifies the device first when its type is still UNKNOWN (and
    records the classification), then starts the appropriate BLE driver
    (Bluno or Parrot Flower Power) and samples it for up to ~12 seconds
    or MAX_SAMPLE_COUNT events, whichever comes first.

    Returns a dict {"node_id": name, "data": <device readings>} on
    success, or None when the device is skipped or initialization fails.
    """
    # Skip unnamed devices
    if name == "":
        return

    # If the device type is UNKNOWN, then we have to know what kind
    # of device we're dealing with and remember it for the future
    if ntype == "UNKNOWN":
        nclass = "UNKNOWN"
        device = None
        ddb = DryadDatabase()
        if ddb.connect(CUSTOM_DATABASE_NAME) == False:
            logger.error("Failed to update device info")
            return
        # Probe the device over BLE to determine its concrete type
        device_type = ble.check_device_type(address, name)
        if device_type == "BLUNO_BEETLE" or device_type == "PARROT_FP":
            nclass = device_type
            # Persist the classification so future runs can skip probing
            if ddb.update_node(address, node_type="SENSOR", node_class=device_type) == False:
                logger.error("Failed to update device info")
        ddb.disconnect()

    # Setup the device object based on the node class
    if nclass == "BLUNO_BEETLE":
        # Initialize our Bluno device
        device = bluno_ble.Bluno(address)
    elif nclass == "PARROT_FP":
        # Initialize our Parrot Flower Power device
        device = parrot_ble.Parrot(address)
    else:
        logger.error("Cannot initialize unknown device class")
        return

    # Obtain the event handle used to signal arriving samples
    handle_event = device.get_event_hdl()

    # Start the device
    if device.start() == False:
        logger.error("Sensor Node Initialize failed")
        device.stop()
        return

    # Read the device name
    logger.info("Reading device name...")
    logger.info("Name:{0}".format(device.get_name()))

    # Visual feedback on Parrot devices while sampling, if enabled
    if nclass == "PARROT_FP":
        if USE_LED:
            device.trigger_led(True)
            logger.info("Triggering Parrot flower LED")

    logger.info("Reading data...")
    counter = MAX_SAMPLE_COUNT
    try:
        # Hard time limit of 12 seconds on the sampling loop
        stop_time = time.time() + 12.0
        while (counter > 0):
            # Wait up to 2s for the next sample event
            handle_event.wait(2)
            counter -= 1
            if time.time() > (stop_time):
                logger.info("Time limit reached.")
                break
            handle_event.clear()
    except KeyboardInterrupt:
        logger.exception("Keyboard Interrupt detected")

    # Turn the LED back off once sampling is done
    if nclass == "PARROT_FP":
        if USE_LED:
            device.trigger_led(False)

    logger.info("Finishing up live measurements...")
    device.stop()
    logger.info("Done.")

    return {"node_id" : name, "data" : device.get_data()}
def handle_req_download(self, link, content):
    """Stream stored sensor data to the client in batches.

    Parses the request arguments (limit=, start_id=, end_id=,
    unsent_only= — only 'limit' is currently honored), then repeatedly
    pulls batches of records, sends them as JSON, and marks each sent
    record as uploaded. Returns True on success, False on any failure.
    """
    limit = None
    start_id = None
    end_id = None
    unsent_only = False

    # Parse our argument list
    for arg in content.split(','):
        arg_lower = arg.lower()
        if arg_lower.startswith("limit="):
            limit = int(arg.split('=')[1])
        elif arg_lower.startswith("start_id="):
            # TODO Not yet used!
            start_id = int(arg.split('=')[1])
        elif arg_lower.startswith("end_id="):
            # TODO Not yet used!
            end_id = int(arg.split('=')[1])
        elif arg_lower.startswith("unsent_only="):
            # TODO Not yet used!
            unsent_only = (arg.split('=')[1] == "True")

    db = DryadDatabase()
    if db.connect(self.dbname) == False:
        return False

    if limit == None:
        limit = 20

    records = db.get_data(limit=limit)
    # BUGFIX: loop on truthiness instead of `!= None` so an empty first
    # batch does not enter the loop and send an empty content payload.
    while records:
        proc_ids = []
        resp_data = []

        # Compose our response content
        for rec in records:
            proc_ids.append(rec[0])
            resp_data.append({
                "timestamp": rec[2],
                "source": rec[1],
                "data": json.loads(rec[3]),
            })

        # Send our response
        try:
            resp = '"content" : ' + json.dumps(resp_data)
            if link.send_response(resp) == False:
                self.logger.error("Failed to send response")
                db.disconnect()
                return False
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # SystemExit / KeyboardInterrupt.
            self.logger.error(
                "Failed to send response due to an exception")
            db.disconnect()
            return False

        # Once data is sent successfully, we can mark off the records
        # whose IDs we took note of earlier in our database
        for rec_id in proc_ids:
            db.set_data_uploaded(rec_id)

        # Get a new batch of records.
        # BUGFIX: guard against a None/empty result before len() crashed.
        records = db.get_data(20)
        if not records:
            self.logger.info("No more data to send")
            break

    db.disconnect()
    return True
def handle_req_list_sensors(self, link, content):
    """Send the list of known sensor nodes over *link* as an RNLST
    response, including each node's Bluno/Parrot device addresses."""
    db = DryadDatabase()
    node_matches = db.get_nodes(node_class='SENSOR')

    sensors = "{'sensors':["
    if len(node_matches) <= 0:
        # No sensors at all: reply with an empty list
        sensors += "]}"
        db.close_session()
        return link.send_response("RNLST:" + sensors + ";\r\n")

    snode_list = []
    for node in node_matches:
        pf_addr = "????"
        bl_addr = "????"
        pf_batt = -99.0
        bl_batt = -99.0

        # Get the matching devices sharing the node's name.
        # CLEANUP: the former None check and empty-list check were two
        # branches logging the identical message; folded into one guard.
        device_matches = db.get_devices(name=node.name)
        if not device_matches:
            self.logger.warn(
                "Node does not have any associated devices: {}".format(
                    node.name))
            continue

        # For each matching device, extract the parrot fp address and the
        # bluno address and then store them in separate variables
        for device in device_matches:
            device_type = str(device.device_type.name)
            if device_type == 'BLUNO':
                bl_addr = device.address
            elif device_type == "PARROT":
                pf_addr = device.address
                pf_batt = device.power

        # NOTE(review): bl_batt is initialized above but a literal -99.0
        # is sent instead — confirm whether the Bluno battery readout is
        # still TODO.
        snode = ("'name':'{}', 'state':'{}',"
                 "'site_name':'{}','lat':'{}', 'lon':'{}',"
                 "'pf_addr':'{}', 'bl_addr':'{}', 'pf_batt':'{}',"
                 "'bl_batt':'{}', 'pf_comms':'{}', 'bl_comms':'{}'")
        snode = snode.format(node.name,
                             self.task_node.get_state_str(),
                             node.site_name,
                             node.lat,
                             node.lon,
                             pf_addr,
                             bl_addr,
                             pf_batt,
                             -99.0,
                             ctime(0.0),
                             ctime(0.0))
        snode_list.append("{" + snode + "}")

    # Build the final response string
    sensors += ",".join(snode_list)
    sensors += "]}"

    db.close_session()
    return link.send_response("RNLST:" + sensors + ";\r\n")
def handle_req_setup_sensor(self, link, content):
    """Register a sensor node plus its Bluno and Parrot device records
    from "key=value" arguments, replying RQRSN:OK or RQRSN:FAIL.

    Returns the send result on success, False on failure.
    """
    params = {
        "name": None,
        "site_name": None,
        "pf_addr": None,
        "bl_addr": None,
        "state": None,
        "lat": None,
        "lon": None,
        "updated": None,
    }

    # remove trailing ";"
    if ";" in content:
        content = content[:-1]

    # Parse "key=value" argument pairs
    for arg in content.split(','):
        if "=" not in arg:
            continue
        param, val = arg.split("=", 1)
        if param in params:
            if param == "lat" or param == "lon":
                params[param] = float(val)
            else:
                params[param] = val.strip("'").strip('"')

    db = DryadDatabase()
    dt = DataTransformation()

    # Normalize both device MAC addresses
    bl_addr = dt.conv_mac(params["bl_addr"].upper())
    pf_addr = dt.conv_mac(params["pf_addr"].upper())

    result = db.insert_or_update_node(name=params['name'],
                                      node_class=CLASS_SENSOR,
                                      site_name=params['site_name'],
                                      lat=params['lat'],
                                      lon=params['lon'])
    if result == False:
        self.logger.error("Failed to add node")
        link.send_response("RQRSN:FAIL;\r\n")
        db.close_session()
        return False

    result = db.insert_or_update_device(address=bl_addr,
                                        node_id=params['name'],
                                        device_type=TYPE_BLUNO)
    if result == False:
        self.logger.error("Failed to add node device")
        link.send_response("RQRSN:FAIL;\r\n")
        db.close_session()
        return False

    result = db.insert_or_update_device(address=pf_addr,
                                        node_id=params['name'],
                                        device_type=TYPE_PARROT)
    if result == False:
        self.logger.error("Failed to add node device")
        link.send_response("RQRSN:FAIL;\r\n")
        db.close_session()
        return False

    # BUGFIX: the session was previously left open on the success path
    db.close_session()
    return link.send_response("RQRSN:OK;\r\n")
def classify_node(self, node):
    """Discover a node's BLE category and persist it.

    Mutates node['type'] and node['class'] in place, then updates both
    the node and node-device records. Returns True on success.
    """
    db = DryadDatabase()

    self.logger.info("Discovering node classification...")
    try:
        node['type'], node['class'] = ble_utils.discover_node_category(
            node['addr'], node['id'])
    except Exception as e:
        self.logger.error(
            "Failed to discover node classification: {}".format(e))
        db.close_session()
        return False

    # Persist the discovered node class
    result = db.insert_or_update_node(name=node['id'],
                                      node_class=node['class'])
    if result == False:
        self.logger.error("Unable to update node record")
        db.close_session()
        return False

    # Persist the discovered device type as well
    result = db.insert_or_update_device(address=node['addr'],
                                        node_id=node['id'],
                                        device_type=node['type'])
    if result == False:
        self.logger.error("Unable to update node device record")
        db.close_session()
        return False

    db.close_session()
    return True
def get_param(name):
    """Fetch a single system parameter value from the database."""
    db = DryadDatabase()
    value = db.get_system_param(name)
    db.close_session()
    return value
def offload_data(self):
    """Aggregate per-key session readings into data blocks and persist
    them.

    Each session-data row holds a single "key: value" reading. Readings
    are grouped per source into blocks of up to n_params distinct keys;
    completed blocks move into offloaded_data immediately, leftovers are
    flushed at the end. Every block is then written back to the database
    and the session data is cleared.
    """
    db = DryadDatabase()
    session_data = db.get_session_data()
    self.logger.debug(session_data)

    blk_count = 0
    curr_session = 0
    n_params = 13  # ideal number of parameters per data block
    data_blocks = {}
    offloaded_data = {}

    for reading in session_data:
        # Save the current session id
        curr_session = reading.session_id

        # Extract the data type and value from the 'content' string.
        # NOTE(review): assumes the value never contains ':' — confirm.
        data_key = reading.content.split(":")[0].strip()
        data_val = reading.content.split(":")[1].strip()
        data_source_id = reading.source_id

        # Boolean indicator whether data key exists in a data block
        key_exists = True

        # Check if source id exists in current data blocks
        if data_source_id in data_blocks.keys():
            source_id_readings = data_blocks[data_source_id]
            for i in range(len(source_id_readings)):
                if data_key in source_id_readings[i].keys():
                    # Go to the next source id reading
                    continue

                # The data key is not yet in this block: add it here
                key_exists = False
                data_blocks[data_source_id][i][data_key] = data_val

                # Move the block to offloaded_data once complete
                if len(data_blocks[data_source_id][i]) == n_params:
                    if data_source_id not in offloaded_data.keys():
                        offloaded_data[data_source_id] = \
                            [data_blocks[data_source_id][i]]
                    else:
                        offloaded_data[data_source_id].append(
                            data_blocks[data_source_id][i])
                    # Remove datum that has been offloaded
                    del data_blocks[data_source_id][i]

                # Go to the next reading
                break

            # Every existing block already had this key: open a new block
            if key_exists is True:
                data_blocks[data_source_id].append({data_key: data_val})

        # Initialize data block if source id not existing
        else:
            data_blocks[data_source_id] = [{data_key: data_val}]

    # Add remaining (incomplete) data blocks to offload.
    # BUGFIX: was `len(reading_set) is not 0` — an identity comparison
    # against an int literal; replaced with a proper value comparison.
    for key, block in data_blocks.items():
        for reading_set in block:
            if len(reading_set) != 0:
                if key not in offloaded_data.keys():
                    offloaded_data[key] = [reading_set]
                else:
                    offloaded_data[key].append(reading_set)

    blk_count = 0
    # Add offloaded data to database.
    # NOTE(review): `reading.timestamp` is the timestamp of the LAST
    # session reading, applied to every block — confirm this is intended.
    for source, block in offloaded_data.items():
        for reading_set in block:
            # Save the block to the database
            db.add_data(blk_id=blk_count,
                        session_id=curr_session,
                        source_id=source,
                        content=str(reading_set),
                        timestamp=reading.timestamp)
            blk_count += 1

    db.clear_session_data()
    db.close_session()
    return