def handle_req_state(self, link, content):
    # Retrieve details about the cache node from the database
    db = DryadDatabase()
    node_matches = db.get_nodes(node_class='SELF')
    data = db.get_data()
    if len(node_matches) <= 0:
        db.close_session()
        self.logger.error("Failed to load data for 'SELF'")
        return link.send_response("RSTAT:FAIL\r\n")

    db.close_session()

    # Retrieve uptime
    self_uptime = os.popen(SYS_CMD_UPTIME).read().strip()

    node_data = node_matches[0]

    # Format the string to return
    state = "'name':'{}','state':'{}','batt':{},'version':'{}',"
    state += "'lat':{},'lon':{},'sys_time':'{}','uptime':'{}',"
    state += "'next_sleep_time':'{}','next_collect_time':'{}',"
    state += "'size':{}"
    state = state.format(node_data.name,
                         self.task_node.get_state_str(),
                         -99.0,  # battery level placeholder
                         self.version,
                         node_data.lat,
                         node_data.lon,
                         ctime(),
                         self_uptime,
                         ctime(self.task_node.get_idle_out_time()),
                         ctime(self.task_node.get_collect_time()),
                         len(data))

    return link.send_response("RSTAT:{" + state + "};\r\n")
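# The RSTAT payload built above uses single-quoted keys and values, so it is
# a Python dict literal rather than strict JSON and json.loads() would reject
# it. A minimal client-side sketch of decoding such a frame follows;
# parse_rstat() and the sample frame are hypothetical, not part of this
# codebase, and assume the "RSTAT:{...};\r\n" framing used by
# handle_req_state().
def parse_rstat(response):
    import ast

    body = response.strip()
    if body.startswith("RSTAT:FAIL"):
        return None
    # Drop the "RSTAT:" prefix and the trailing ";" frame terminator,
    # leaving only the "{...}" dict literal
    payload = body[len("RSTAT:"):].rstrip(";")
    return ast.literal_eval(payload)

# Example:
#   frame = "RSTAT:{'name':'cache01','state':'IDLE','batt':-99.0,'size':3};\r\n"
#   parse_rstat(frame)
#   ->  {'name': 'cache01', 'state': 'IDLE', 'batt': -99.0, 'size': 3}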
def handle_req_download(self, link, content):
    limit = None
    offset = None
    start_id = 0
    end_id = 100000000000000

    # Parse our argument list
    download_args = content.split(',')
    if len(download_args) > 0:
        for arg in download_args:
            if arg.lower().startswith("limit="):
                limit = int(arg.split('=')[1])
            elif arg.lower().startswith("start_id="):
                start_id = int(arg.split('=')[1])
            elif arg.lower().startswith("end_id="):
                end_id = int(arg.split('=')[1])
            elif arg.lower().startswith("offset="):
                offset = int(arg.split('=')[1])

    db = DryadDatabase()
    matched_data = db.get_data(limit=limit, offset=offset,
                               start_id=start_id, end_id=end_id)
    db.close_session()

    data = []
    data_block = {}
    for reading in matched_data:
        # TODO Format it here
        data_block['rec_id'] = reading.id
        data_block['timestamp'] = reading.end_time
        data_block['sampling_site'] = reading.site_name  # TODO
        data_block['data'] = json.loads(reading.content.replace("'", '"'))
        data_block['origin'] = {
            'name': reading.name,
            'lat': reading.lat,
            'lon': reading.lon,
            'addr': "---"
        }

        if 'ph' not in data_block['data']:
            data_block['data']['ph'] = None

        if 'bl_batt' not in data_block['data']:
            data_block['data']['bl_batt'] = None

        data.append(data_block)
        data_block = {}

    return link.send_response("RDATA:{};\r\n".format(json.dumps(data)))
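# Both download handlers parse "key=value" arguments with the same
# split-and-convert pattern repeated per key. One possible way to centralize
# that parsing is sketched below; parse_download_args() is a hypothetical
# helper, not part of this codebase.
def parse_download_args(content):
    # Turn e.g. "limit=20,offset=5" into {'limit': 20, 'offset': 5},
    # silently skipping unknown keys and malformed values
    known_int_keys = {"limit", "offset", "start_id", "end_id"}
    args = {}
    for part in content.split(','):
        key, sep, value = part.partition('=')
        key = key.strip().lower()
        if sep and key in known_int_keys:
            try:
                args[key] = int(value)
            except ValueError:
                continue  # not a valid integer, ignore this argument
    return args

# Example:
#   parse_download_args("limit=50,start_id=100")
#   ->  {'limit': 50, 'start_id': 100}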
def handle_req_download(self, link, content):
    limit = None
    start_id = None
    end_id = None
    unsent_only = False

    # Parse our argument list
    download_args = content.split(',')
    if len(download_args) > 0:
        for arg in download_args:
            # TODO This part needs to be cleaned up
            if arg.lower().startswith("limit="):
                limit = int(arg.split('=')[1])
            elif arg.lower().startswith("start_id="):
                # TODO Not yet used!
                start_id = int(arg.split('=')[1])
            elif arg.lower().startswith("end_id="):
                # TODO Not yet used!
                end_id = int(arg.split('=')[1])
            elif arg.lower().startswith("unsent_only="):
                # TODO Not yet used!
                val = arg.split('=')[1]
                unsent_only = (val == "True")

    db = DryadDatabase()
    if not db.connect(self.dbname):
        return False

    if limit is None:
        limit = 20

    records = db.get_data(limit=limit)
    while records is not None:
        proc_ids = []
        resp_data = []

        # Compose our response content
        for rec in records:
            proc_ids.append(rec[0])
            resp_data.append({
                "timestamp": rec[2],
                "source": rec[1],
                "data": json.loads(rec[3])
            })

        # Send our response
        try:
            resp = '"content" : '
            resp += json.dumps(resp_data)
            if not link.send_response(resp):
                self.logger.error("Failed to send response")
                db.disconnect()
                return False
        except Exception:
            self.logger.error(
                "Failed to send response due to an exception")
            db.disconnect()
            return False

        # Once data is sent successfully, we can mark off the records whose
        # IDs we took note of earlier in our database
        for rec_id in proc_ids:
            db.set_data_uploaded(rec_id)

        # Get a new batch of unsent records
        records = db.get_data(limit=20)
        if len(records) < 1:
            self.logger.info("No more data to send")
            break

    db.disconnect()
    return True
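# The loop above implements a send-then-mark batching pattern: records are
# flagged as uploaded only after the link accepts the batch, so an
# interrupted transfer leaves them pending for the next attempt. The
# self-contained sketch below isolates that pattern with an in-memory
# stand-in for DryadDatabase; FakeStore and _demo_batch_upload() are
# illustrative only, not part of this codebase.
def _demo_batch_upload():
    import json

    class FakeStore(object):
        def __init__(self, rows):
            # rows are (rec_id, source, timestamp, json_payload) tuples,
            # mirroring the rec[0]..rec[3] indexing used above
            self.rows = list(rows)
            self.uploaded = set()

        def get_data(self, limit=20):
            pending = [r for r in self.rows if r[0] not in self.uploaded]
            return pending[:limit]

        def set_data_uploaded(self, rec_id):
            self.uploaded.add(rec_id)

    store = FakeStore([(1, 'node-a', 1700000000, '{"ph": 6.5}'),
                       (2, 'node-b', 1700000060, '{"ph": 7.1}')])

    batch = store.get_data(limit=1)
    while batch:
        payload = json.dumps([{"source": r[1],
                               "timestamp": r[2],
                               "data": json.loads(r[3])} for r in batch])
        # ... a real implementation would transmit payload over the link
        # here, and abort without marking anything on failure ...
        for rec in batch:
            store.set_data_uploaded(rec[0])  # mark only after a good send
        batch = store.get_data(limit=1)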