def send_out_queue_data(self, data):
    """Route data arriving from the websocket to whatever can handle it.

    ``data`` must be a dict (everything coming back from the websocket is
    assumed to be one); anything else is logged and dropped.  Depending on
    which key is present the payload is forwarded to the mavlink module,
    written to the config file, answered directly over the websocket, or
    forwarded to the dfsync module.
    """
    print("callback routed to send_out_queue_data for queue-up:" + str(data))
    # Bail out early if the data does not take the expected dict form.
    if not isinstance(data, dict):
        print("websocket data is not of type dict: {0}".format(data))
        return
    if "mavlink_data" in data:
        # Pass the data off to the "mavlink" module to handle, as we don't
        # know how to do that here.
        if "mavpackettype" not in data["mavlink_data"]:
            # mavlink_data requires mavpackettype
            return
        msg_type = data["mavlink_data"]["mavpackettype"]
        if msg_type == 'HEARTBEAT':
            # The following is an example of 'sending' a mavlink msg from
            # a module with no direct mavlink connection.
            msg = self.mavlink.heartbeat_encode(
                mavutil.mavlink.MAV_TYPE_ONBOARD_CONTROLLER,
                mavutil.mavlink.MAV_AUTOPILOT_INVALID,
                base_mode=mavutil.mavlink.MAV_MODE_FLAG_TEST_ENABLED,
                custom_mode=0,
                system_status=4)
            self.out_queue.put_nowait(
                json_wrap_with_target(msg, target='mavlink'))
    elif "config" in data:
        # A block of config-file type data: just write it to disk now.
        write_config(data["config"])
    elif "json_data" in data:
        # Something calling itself json_data is handled here and we pretend
        # the reply came from somewhere else.
        folder = os.path.join(os.path.expanduser('~'), '.ssh')
        cred_name = 'id_apsync'  # TODO: load this from config?
        # Only expose the public key.
        cred_path = os.path.join(folder, cred_name + '.pub')
        if not os.path.isfile(cred_path):
            # Make the key pair if we don't have it.
            make_ssh_key(folder, cred_name)
        cred = file_get_contents(cred_path)
        # b64encode needs bytes on Python 3; the old code concatenated its
        # result into a hand-built JSON string, which also broke on py3.
        # Build the reply as a dict instead of string-assembled JSON.
        if isinstance(cred, str):
            cred = cred.encode('utf-8')
        encoded = base64.b64encode(cred).decode('ascii')
        msg = {"json_data": {"result": encoded,
                             "replyto": "getIdentityResponse"}}
        print(msg)
        # Send it back out the websocket immediately; no need to wrap it, as
        # it's not being routed beyond tornado and the browser.
        websocket_send_message(msg)
    elif "dfsync_register" in data:
        # It's dfsync related: forward it to the dfsync module for processing.
        self.out_queue.put_nowait(
            json_wrap_with_target(data, target='dfsync'))
def process_in_queue_data(self, data):
    """Handle data routed to this module from the in-queue.

    Currently only reacts to ``dfsync_register`` payloads: stores the
    supplied settings in ``self.config`` and attempts to register this
    vehicle with the cloudsync server, reporting the outcome back to the
    webserver module via the out-queue.
    """
    print('{0} module got the following data: {1}'.format(self.name, data))
    if 'dfsync_register' in data:
        # Persist the supplied registration settings.
        for key, value in data['dfsync_register'].items():
            self.config[key] = value
        self.update_config()
        self.get_ssh_creds()
        # Attempt registration with the server (only if reachable).
        if self.have_path_to_cloud:
            self.cloudsync_session = create_session(
                self.cloudsync_url_base, self.client)
            if self.cloudsync_session:
                payload = {
                    'email': self.config['cloudsync_email'],
                    'public_key': base64.b64encode(self.ssh_cred),
                    '_xsrf': self.client.cookies['_xsrf']
                }
                ret = register(self.cloudsync_url_register, self.client,
                               payload)
                if ret:
                    # Registration was OK.
                    self.config['cloudsync_account_registered'] = True
                    self.update_config()
                    reply = {
                        'message': ret['msg'],
                        'current_time': time.time(),
                        'replyto': 'dfsyncSyncRegister'
                    }
                    self.out_queue.put_nowait(
                        json_wrap_with_target({'json_data': reply},
                                              target='webserver'))
                    self.log('cloudsync registration attempt successful',
                             'INFO')
                    return
            # Either no session could be established or register() failed.
            self.config['cloudsync_account_registered'] = False
            self.update_config()
            # TODO: report some useful details on how to fix it...
            reply = {
                'message': 'Registration with cloudsync server failed',
                'current_time': time.time(),
                'replyto': 'dfsyncSyncRegister'
            }
            self.out_queue.put_nowait(
                json_wrap_with_target({'json_data': reply},
                                      target='webserver'))
            self.log('cloudsync registration attempt failed', 'INFO')
    # TODO: look at mavlink and set self.is_not_armed
    # TODO: look at network and set have_path_to_cloud
    # TODO: look at webserver and set syncing_enabled
def process_mavlink_connection_in(self):
    """Poll the mavlink control connection; forward any ATTITUDE message
    to the webserver module via the out-queue."""
    port = self.connection.control_connection.port
    # Block for up to 0.1s if there is nothing on the connection;
    # otherwise we just dive right in.
    readable, _, _ = select.select([port], [], [], 0.1)
    for _ready in readable:
        msg = self.connection.control_connection.recv_msg()
        if msg and msg.get_type() == "ATTITUDE":
            self.out_queue.put_nowait(
                json_wrap_with_target(msg.to_dict(), target='webserver'))
def log(self, message, level='INFO'):
    """Queue a log record for the logging module.

    Recognised levels: CRITICAL, ERROR, WARNING, INFO, DEBUG, NOTSET.
    """
    record = {'msg': message, 'level': level}
    self.out_queue.put_nowait(json_wrap_with_target(record, target='logging'))
def forward_data(self, data, target):
    """JSON-encode *data* as UTF-8 bytes and queue it for *target*."""
    encoded = json.dumps(data).encode('utf8')
    self.out_queue.put_nowait(json_wrap_with_target(encoded, target=target))
def send_out_queue_data(self, data):
    """Queue *data* for routing to the mavlink module."""
    wrapped = json_wrap_with_target(data, target='mavlink')
    self.out_queue.put_nowait(wrapped)
def main(self):
    """One iteration of the dfsync worker.

    Verifies the cloudsync account when needed, refreshes the view of the
    datalog directory, then rsyncs the oldest "quiet" datalog to the
    cloudsync server while streaming progress updates to the webserver
    module.  Intended to be called repeatedly from the module's run loop.
    """
    if self.have_path_to_cloud and self.config['cloudsync_syncing_enabled']:
        self.cloudsync_session = create_session(self.cloudsync_url_base,
                                                self.client)
        if (self.cloudsync_session
                and self.config['cloudsync_account_registered']
                and not self.cloudsync_account_verified):
            payload = {
                'public_key_fingerprint':
                    base64.b64encode(self.ssh_cred_fingerprint),
                '_xsrf': self.client.cookies['_xsrf']
            }
            verify_response = verify(self.cloudsync_url_verify, self.client,
                                     payload)
            if verify_response:
                if verify_response['verify']:
                    self.config['cloudsync_vehicle_id'] = verify_response[
                        'vehicle_id']
                    self.config['cloudsync_user_id'] = verify_response[
                        'user_id']
                    self.cloudsync_account_verified = True
                    self.update_config()
                    j = {
                        'message': verify_response['msg'],
                        'current_time': time.time(),
                        'replyto': 'dfsyncSyncRegister'
                    }
                    self.out_queue.put_nowait(
                        json_wrap_with_target({'json_data': j},
                                              target='webserver'))
                    self.log('Cloudsync account verified', 'INFO')
                else:
                    self.cloudsync_account_verified = False
                    self.update_config()
                    # Nag the user at most once per verify_message_interval.
                    if time.time() >= (self.last_verify_message +
                                       self.verify_message_interval):
                        j = {
                            'message': verify_response['msg'],
                            'current_time': time.time(),
                            'replyto': 'dfsyncSyncRegister'
                        }
                        self.out_queue.put_nowait(
                            json_wrap_with_target({'json_data': j},
                                                  target='webserver'))
                        self.log(
                            'Cloudsync credentials need to be verified! '
                            'Please verify them by clicking on the link '
                            'sent to your email address', 'INFO')
                        # FIX: record *when* the nag was sent.  The old code
                        # added the interval here as well, which combined
                        # with the >= check above doubled the nag period.
                        self.last_verify_message = time.time()
    # Refresh our view of the datalog dir, tracking how long each file has
    # been unchanged ('age').
    now = time.time()
    stat_file_info = self.stat_files_in_dir(self.datalog_dir)
    for key in stat_file_info:
        unchanged = (
            key in self.datalogs
            and stat_file_info[key]['size'] == self.datalogs[key]['size']
            and stat_file_info[key]['modify'] == self.datalogs[key]['modify'])
        if unchanged:
            # Unchanged since the last scan: age it from first sighting.
            stat_file_info[key]['age'] = now - self.datalogs[key]['time']
            stat_file_info[key]['time'] = self.datalogs[key]['time']
        else:
            # New -- or recently modified -- file: its age restarts from its
            # own stat time.  (FIX: the old code could store a known-but-
            # modified file without recomputing 'age'.)
            stat_file_info[key]['age'] = now - stat_file_info[key]['time']
        self.datalogs[key] = stat_file_info[key]
    # Collect files that have been quiet long enough, keyed to their last
    # modified time, then sort oldest-modified first.
    self.files_to_sync = {}
    for key, info in self.datalogs.items():
        if info['age'] > self.old_time:
            self.files_to_sync[key] = info['modify']
    self.files_to_sync = sorted(self.files_to_sync.items(),
                                key=lambda item: item[1])
    if not self.files_to_sync or not self.okay_to_sync():
        time.sleep(2)
        return
    payload = {
        'public_key_fingerprint':
            base64.b64encode(self.ssh_cred_fingerprint),
        '_xsrf': self.client.cookies['_xsrf']
    }
    upload_response = upload_request(self.cloudsync_url_upload, self.client,
                                     payload)
    self.log(upload_response, 'DEBUG')
    if not upload_response:
        time.sleep(3)
        return
    # Sync the oldest file first.  files_to_sync is sorted by mtime
    # ascending, so the oldest entry is index 0 (FIX: the old code took
    # [-1], which synced the *newest* file, contradicting its own comment).
    file_to_send = self.files_to_sync[0][0]
    send_path = os.path.join(self.datalog_dir, file_to_send)
    archive_folder = upload_response['archive_folder']
    rsynccmd = 'rsync -ahHzv --progress -e "ssh -o IdentitiesOnly=yes -o StrictHostKeyChecking=no -F /dev/null -i {0} -p {1}" "{2}" {3}@{4}:{5}'.format(
        self.config['cloudsync_ssh_identity_file'],
        self.config['cloudsync_port'], send_path,
        self.config['cloudsync_user'], self.config['cloudsync_address'],
        self.cloudsync_remote_dir)
    self.datalogs.pop(file_to_send)
    status_update = {
        'percent_sent': '0',
        'current_time': time.time(),
        'file': file_to_send,
        'status': 'starting',
        'replyto': 'dfsyncSyncUpdate'
    }
    self.out_queue.put_nowait(
        json_wrap_with_target({'json_data': status_update},
                              target='webserver'))
    rsyncproc = subprocess.Popen(
        rsynccmd,
        shell=True,
        stdin=None,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    self.rsync_pid = rsyncproc.pid
    while self.okay_to_sync():
        # universal_newlines=True means stdout is already text; the old code
        # called .decode('utf-8') on it, which fails on Python 3 strings.
        next_line = rsyncproc.stdout.readline()
        # TODO: log all of stdout to disk
        if self.rsync_time.search(next_line):
            # We found a line containing a status update:
            # amount-sent, percent, rate, time-remaining.
            fields = next_line.strip().split()[:4]
            fields[1] = fields[1].strip('%')
            fields.extend([str(time.time()), file_to_send, 'progress',
                           'dfsyncSyncUpdate'])
            status_update = dict(
                zip([
                    'data_sent', 'percent_sent', 'sending_rate',
                    'time_remaining', 'current_time', 'file', 'status',
                    'replyto'
                ], fields))
            # Send this to the webserver...
            self.out_queue.put_nowait(
                json_wrap_with_target({'json_data': status_update},
                                      target='webserver'))
            self.log({'dfsyncSyncUpdate': status_update}, 'DEBUG')
        if not next_line:
            break
    if self.okay_to_sync():
        # Wait until the process has really terminated, then check its
        # exit code.
        exitcode = rsyncproc.wait()
        if exitcode == 0:
            # Archive the log on the companion computer.
            target_path = os.path.join(self.datalog_archive_dir,
                                       archive_folder)
            mkdir_p(target_path)
            archive_file_path = os.path.join(target_path, file_to_send)
            shutil.move(send_path, archive_file_path)
            msg = ('{0} - Datalog rsync complete. '
                   'Original datalog archived at {1}\n').format(
                       file_to_send, archive_file_path)
            status_update = {
                'percent_sent': '100',
                'current_time': time.time(),
                'file': file_to_send,
                'message': msg,
                'status': 'complete',
                'replyto': 'dfsyncSyncUpdate'
            }
            self.out_queue.put_nowait(
                json_wrap_with_target({"json_data": status_update},
                                      target='webserver'))
            self.log(msg, 'INFO')
        else:
            # stderr is text as well (see universal_newlines note above).
            err_trace = ''.join(rsyncproc.stderr.readlines())
            msg = '{0} - An error during datalog rsync. Exit code: {1}. Error trace: \n{2}\n'.format(
                file_to_send, exitcode, err_trace)
            status_update = {
                'error': err_trace,
                'current_time': time.time(),
                'file': file_to_send,
                'status': 'error',
                'message': msg,
                'replyto': 'dfsyncSyncUpdate'
            }
            self.out_queue.put_nowait(
                json_wrap_with_target({"json_data": status_update},
                                      target='webserver'))
            self.log(msg, 'WARNING')
    else:
        self.request_rsync_exit()