def save_uploaded_log(con, cur, ulog_file, formdict):
    """
    Save a log that's already persisted on the filesystem into the database
    and move it into a folder we control.

    :param con: DB connection
    :param cur: DB cursor
    :param ulog_file: file-like object containing the ULog (must support
        .move() and .get_filename())
    :param formdict: dict of options passed from the upload page
    :return: ID (str) of the newly saved ULog file
    """
    # generate a collision-free log ID and persistence filename
    while True:
        log_id = str(uuid.uuid4())
        new_file_name = get_log_filename(log_id)
        if not os.path.exists(new_file_name):
            break

    print('Moving uploaded file to', new_file_name)
    ulog_file.move(new_file_name)

    # Load the ulog file, but only if not uploaded via CI
    # (CI uploads are stored without immediate parsing).
    ulog = None
    if formdict['source'] != 'CI':
        # new_file_name is already the persisted location of this log
        ulog = load_ulog_file(new_file_name)

    # generate a deletion token: secure random string (url-safe)
    token = str(binascii.hexlify(os.urandom(16)), 'ascii')

    # put additional data into a DB
    cur.execute(
        'insert into Logs (Id, Title, Description, '
        'OriginalFilename, Date, AllowForAnalysis, Obfuscated, '
        'Source, Email, WindSpeed, Rating, Feedback, Type, '
        'videoUrl, ErrorLabels, Public, Token) values '
        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [
            log_id, formdict['title'], formdict['description'],
            ulog_file.get_filename(),
            datetime.datetime.now(), 1, 0,
            formdict['source'], formdict['email'], formdict['wind_speed'],
            formdict['rating'], formdict['feedback'],
            formdict['upload_type'], formdict['video_url'],
            formdict['error_labels'], formdict['is_public'], token
        ])

    if ulog is not None:
        # keep the vehicle table in sync with this log's vehicle
        update_vehicle_db_entry(cur, ulog, log_id,
                                formdict['vehicle_name'])
    con.commit()

    # derive & store the generated (per-log statistics) DB entry
    generate_db_data_from_log_file(log_id, con)
    con.commit()

    return log_id
def delete_log_entry(log_id, token):
    """
    Delete a log entry (DB & file); the deletion token is validated first.

    :param log_id: ID of the log to delete
    :param token: secret deletion token that must match the stored one
    :return: True on success, False if the log does not exist or the
        token is invalid
    """
    import hmac  # local import: only needed for the token comparison

    con = sqlite3.connect(get_db_filename(),
                          detect_types=sqlite3.PARSE_DECLTYPES)
    cur = con.cursor()
    try:
        cur.execute('select Token from Logs where Id = ?', (log_id,))
        db_tuple = cur.fetchone()
        if db_tuple is None:
            return False
        # validate token; constant-time compare avoids a timing side-channel
        if not hmac.compare_digest(str(token), str(db_tuple[0])):
            return False

        # remove the cached kml file if it exists
        kml_path = get_kml_filepath()
        kml_file_name = os.path.join(kml_path,
                                     log_id.replace('/', '.') + '.kml')
        if os.path.exists(kml_file_name):
            os.unlink(kml_file_name)

        log_file_name = get_log_filename(log_id)
        print('deleting log entry {} and file {}'.format(log_id,
                                                         log_file_name))
        os.unlink(log_file_name)
        cur.execute("DELETE FROM LogsGenerated WHERE Id = ?", (log_id,))
        cur.execute("DELETE FROM Logs WHERE Id = ?", (log_id,))
        con.commit()
    finally:
        # ensure the DB resources are released on every exit path
        cur.close()
        con.close()

    # need to clear the cache as well, so the deleted log cannot be served
    clear_ulog_cache()

    return True
def from_log_file(cls, log_id):
    """
    Initialize an instance from a log file.

    :param log_id: ID of the log to load
    :return: new instance populated from the parsed ULog
    """
    obj = cls()

    ulog_file_name = get_log_filename(log_id)
    ulog = load_ulog_file(ulog_file_name)
    px4_ulog = PX4ULog(ulog)

    # extract information (timestamps are in microseconds)
    obj.duration_s = int((ulog.last_timestamp - ulog.start_timestamp) / 1e6)
    obj.mav_type = px4_ulog.get_mav_type()
    obj.estimator = px4_ulog.get_estimator()
    obj.sys_autostart_id = ulog.initial_parameters.get('SYS_AUTOSTART', 0)
    # NOTE(review): cgi.escape is deprecated since Python 3.2 and removed in
    # 3.8 — consider html.escape(..., quote=False) when upgrading
    obj.sys_hw = cgi.escape(ulog.msg_info_dict.get('ver_hw', ''))
    obj.ver_sw = cgi.escape(ulog.msg_info_dict.get('ver_sw', ''))
    version_info = ulog.get_version_info()
    if version_info is not None:
        obj.ver_sw_release = 'v{}.{}.{} {}'.format(*version_info)
    obj.num_logged_errors = 0
    obj.num_logged_warnings = 0
    if 'sys_uuid' in ulog.msg_info_dict:
        obj.vehicle_uuid = cgi.escape(ulog.msg_info_dict['sys_uuid'])

    # count errors & warnings; log levels are stored as ASCII digit
    # characters, hence the comparison against ord('3') / ord('4')
    for m in ulog.logged_messages:
        if m.log_level <= ord('3'):
            obj.num_logged_errors += 1
        if m.log_level == ord('4'):
            obj.num_logged_warnings += 1

    try:
        cur_dataset = ulog.get_dataset('commander_state')
        flight_mode_changes = cur_dataset.list_value_changes('main_state')
        obj.flight_modes = set([x[1] for x in flight_mode_changes])

        # get the durations
        # make sure the first entry matches the start of the logging
        if len(flight_mode_changes) > 0:
            flight_mode_changes[0] = (ulog.start_timestamp,
                                      flight_mode_changes[0][1])
        # sentinel entry so the last mode's duration extends to the log end
        flight_mode_changes.append((ulog.last_timestamp, -1))
        for i in range(len(flight_mode_changes) - 1):
            flight_mode = flight_mode_changes[i][1]
            flight_mode_duration = int(
                (flight_mode_changes[i + 1][0] -
                 flight_mode_changes[i][0]) / 1e6)
            obj.flight_mode_durations.append(
                (flight_mode, flight_mode_duration))
    except (KeyError, IndexError) as error:
        # topic or field missing from the log: no flight mode info
        obj.flight_modes = set()

    return obj
def from_log_file(cls, log_id):
    """
    Build a new instance from the ULog file identified by log_id.

    :param log_id: ID of the log to load
    :return: populated instance
    """
    obj = cls()

    ulog = load_ulog_file(get_log_filename(log_id))
    px4_ulog = PX4ULog(ulog)

    # basic metadata (timestamps are microseconds)
    obj.duration_s = int((ulog.last_timestamp - ulog.start_timestamp) / 1e6)
    obj.mav_type = px4_ulog.get_mav_type()
    obj.estimator = px4_ulog.get_estimator()
    obj.sys_autostart_id = ulog.initial_parameters.get('SYS_AUTOSTART', 0)
    obj.sys_hw = cgi.escape(ulog.msg_info_dict.get('ver_hw', ''))
    obj.ver_sw = cgi.escape(ulog.msg_info_dict.get('ver_sw', ''))

    version_info = ulog.get_version_info()
    if version_info is not None:
        obj.ver_sw_release = 'v{}.{}.{} {}'.format(*version_info)

    # count errors & warnings; log levels are stored as ASCII digit
    # characters, hence the comparison against ord('3') / ord('4')
    error_level = ord('3')
    warning_level = ord('4')
    obj.num_logged_errors = sum(
        1 for msg in ulog.logged_messages if msg.log_level <= error_level)
    obj.num_logged_warnings = sum(
        1 for msg in ulog.logged_messages if msg.log_level == warning_level)

    # flight modes from the commander state topic (may be missing)
    try:
        mode_changes = ulog.get_dataset(
            'commander_state').list_value_changes('main_state')
        obj.flight_modes = {change[1] for change in mode_changes}
    except (KeyError, IndexError):
        obj.flight_modes = set()

    return obj
def post(self):
    """
    POST request callback: handle a multipart log file upload.

    Parses the form fields, validates the file is a ULog, moves it into
    the log folder, stores metadata in the DB, sends notification emails
    and redirects to the plotting page.
    """
    if self.multipart_streamer:
        try:
            self.multipart_streamer.data_complete()
            form_data = self.multipart_streamer.get_values([
                'description', 'email', 'allowForAnalysis', 'obfuscated',
                'source', 'type', 'feedback', 'windSpeed', 'rating',
                'videoUrl', 'public', 'vehicleName'
            ])
            # NOTE(review): cgi.escape is deprecated (removed in Python
            # 3.8); consider html.escape when upgrading
            description = cgi.escape(
                form_data['description'].decode("utf-8"))
            email = form_data['email'].decode("utf-8")
            upload_type = 'personal'
            if 'type' in form_data:
                upload_type = form_data['type'].decode("utf-8")
            source = 'webui'
            title = ''  # may be used in future...
            if 'source' in form_data:
                source = form_data['source'].decode("utf-8")
            obfuscated = 0
            if 'obfuscated' in form_data:
                if form_data['obfuscated'].decode("utf-8") == 'true':
                    obfuscated = 1
            allow_for_analysis = 0
            if 'allowForAnalysis' in form_data:
                if form_data['allowForAnalysis'].decode("utf-8") == 'true':
                    allow_for_analysis = 1
            feedback = ''
            if 'feedback' in form_data:
                feedback = cgi.escape(
                    form_data['feedback'].decode("utf-8"))
            wind_speed = -1
            rating = ''
            stored_email = ''
            video_url = ''
            is_public = 0
            vehicle_name = ''

            # flight report uploads carry extra metadata fields
            if upload_type == 'flightreport':
                try:
                    wind_speed = int(
                        cgi.escape(form_data['windSpeed'].decode("utf-8")))
                except ValueError:
                    wind_speed = -1
                rating = cgi.escape(form_data['rating'].decode("utf-8"))
                if rating == 'notset':
                    rating = ''
                stored_email = email
                # get video url & check if valid
                video_url = cgi.escape(
                    form_data['videoUrl'].decode("utf-8"), quote=True)
                if not validate_url(video_url):
                    video_url = ''
                if 'vehicleName' in form_data:
                    vehicle_name = cgi.escape(
                        form_data['vehicleName'].decode("utf-8"))

                # always allow for statistical analysis
                allow_for_analysis = 1
                if 'public' in form_data:
                    if form_data['public'].decode("utf-8") == 'true':
                        is_public = 1

            file_obj = self.multipart_streamer.get_parts_by_name(
                'filearg')[0]
            upload_file_name = file_obj.get_filename()

            # generate a collision-free log ID and persistence filename
            while True:
                log_id = str(uuid.uuid4())
                new_file_name = get_log_filename(log_id)
                if not os.path.exists(new_file_name):
                    break

            # read file header & check if really an ULog file
            header_len = len(ULog.HEADER_BYTES)
            if (file_obj.get_payload_partial(header_len) !=
                    ULog.HEADER_BYTES):
                if upload_file_name[-7:].lower() == '.px4log':
                    raise CustomHTTPError(
                        400,
                        'Invalid File. This seems to be a px4log file. '
                        'Upload it to <a href="http://logs.uaventure.com" '
                        'target="_blank">logs.uaventure.com</a>.')
                raise CustomHTTPError(400, 'Invalid File')

            print('Moving uploaded file to', new_file_name)
            file_obj.move(new_file_name)

            if obfuscated == 1:
                # TODO: randomize gps data, ...
                pass

            # generate a token: secure random string (url-safe)
            token = str(binascii.hexlify(os.urandom(16)), 'ascii')

            # Load the ulog file but only if not uploaded via CI.
            # Then we open the DB connection.
            ulog = None
            if source != 'CI':
                ulog_file_name = get_log_filename(log_id)
                ulog = load_ulog_file(ulog_file_name)

            # put additional data into a DB
            con = sqlite3.connect(get_db_filename())
            cur = con.cursor()
            cur.execute(
                'insert into Logs (Id, Title, Description, '
                'OriginalFilename, Date, AllowForAnalysis, Obfuscated, '
                'Source, Email, WindSpeed, Rating, Feedback, Type, '
                'videoUrl, Public, Token) values '
                '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [
                    log_id, title, description, upload_file_name,
                    datetime.datetime.now(), allow_for_analysis,
                    obfuscated, source, stored_email, wind_speed, rating,
                    feedback, upload_type, video_url, is_public, token
                ])
            if ulog is not None:
                update_vehicle_db_entry(cur, ulog, log_id, vehicle_name)
            con.commit()

            url = '/plot_app?log=' + log_id
            full_plot_url = 'http://' + get_domain_name() + url
            delete_url = 'http://'+get_domain_name()+ \
                '/edit_entry?action=delete&log='+log_id+'&token='+token

            if upload_type == 'flightreport' and is_public:
                send_flightreport_email(
                    email_notifications_config['public_flightreport'],
                    full_plot_url, description, feedback,
                    DBData.rating_str_static(rating),
                    DBData.wind_speed_str_static(wind_speed), delete_url,
                    stored_email)

            # also generate the additional DB entry
            # (we may have the log already loaded in 'ulog', however the
            # lru cache will make it very quick to load it again)
            generate_db_data_from_log_file(log_id, con)
            con.commit()
            cur.close()
            con.close()

            # TODO: now that we have loaded the ulog already, add more
            # information to the notification email (airframe, ...)

            # send notification emails
            send_notification_email(email, full_plot_url, description,
                                    delete_url)

            # do not redirect for QGC
            if source != 'QGroundControl':
                self.redirect(url)

        except CustomHTTPError:
            raise
        except:
            # NOTE(review): broad except intentionally maps any failure to
            # an HTTP 500 after logging it
            print('Error when handling POST data', sys.exc_info()[0],
                  sys.exc_info()[1])
            raise CustomHTTPError(500)
        finally:
            # always free the streamed multipart temp data
            self.multipart_streamer.release_parts()
def get(self):
    """
    GET request callback: download a log, its parameters, or a KML track.

    Query args: 'log' (required log ID), 'type' — '1': all parameters,
    '2': KML file, '3': non-default parameters, otherwise the raw log file.
    """
    log_id = self.get_argument('log')
    if not validate_log_id(log_id):
        raise tornado.web.HTTPError(400, 'Invalid Parameter')
    log_file_name = get_log_filename(log_id)
    download_type = self.get_argument('type', default='0')

    if not os.path.exists(log_file_name):
        raise tornado.web.HTTPError(404, 'Log not found')

    if download_type == '1':  # download the parameters
        ulog = load_ulog_file(log_file_name)
        param_keys = sorted(ulog.initial_parameters.keys())

        self.set_header("Content-Type", "text/plain")
        self.set_header('Content-Disposition',
                        'inline; filename=params.txt')

        delimiter = ', '
        for param_key in param_keys:
            self.write(param_key)
            self.write(delimiter)
            self.write(str(ulog.initial_parameters[param_key]))
            self.write('\n')

    elif download_type == '2':  # download the kml file
        kml_path = get_kml_filepath()
        kml_file_name = os.path.join(kml_path,
                                     log_id.replace('/', '.') + '.kml')

        # check if chached file exists
        if not os.path.exists(kml_file_name):
            print('need to create kml file', kml_file_name)

            def kml_colors(flight_mode):
                """ flight mode colors for KML file """
                if not flight_mode in flight_modes_table:
                    flight_mode = 0

                color_str = flight_modes_table[flight_mode][1][
                    1:]  # color in form 'ff00aa'

                # increase brightness to match colors with template
                rgb = [
                    int(color_str[2 * x:2 * x + 2], 16) for x in range(3)
                ]
                for i in range(3):
                    rgb[i] += 40
                    if rgb[i] > 255:
                        rgb[i] = 255

                color_str = "".join(map(lambda x: format(x, '02x'), rgb))

                return 'ff' + color_str[4:6] + color_str[2:4] + color_str[
                    0:2]  # KML uses aabbggrr

            style = {'line_width': 2}
            # create in random temporary file, then move it (to avoid races)
            try:
                temp_file_name = kml_file_name + '.' + str(uuid.uuid4())
                convert_ulog2kml(log_file_name, temp_file_name,
                                 'vehicle_global_position', kml_colors,
                                 style=style)
                shutil.move(temp_file_name, kml_file_name)
            except:
                # conversion failed (most likely no position data in log)
                print('Error creating KML file', sys.exc_info()[0],
                      sys.exc_info()[1])
                raise CustomHTTPError(400, 'No Position Data in log')

        # send the whole KML file in 4 KiB chunks
        self.set_header("Content-Type",
                        "application/vnd.google-earth.kml+xml")
        self.set_header('Content-Disposition',
                        'attachment; filename=track.kml')
        with open(kml_file_name, 'rb') as kml_file:
            while True:
                data = kml_file.read(4096)
                if not data:
                    break
                self.write(data)
        self.finish()

    elif download_type == '3':  # download the non-default parameters
        ulog = load_ulog_file(log_file_name)
        param_keys = sorted(ulog.initial_parameters.keys())

        self.set_header("Content-Type", "text/plain")
        self.set_header('Content-Disposition',
                        'inline; filename=params.txt')

        default_params = get_default_parameters()

        delimiter = ', '
        for param_key in param_keys:
            try:
                param_value = str(ulog.initial_parameters[param_key])
                is_default = False

                if param_key in default_params:
                    default_param = default_params[param_key]
                    if default_param['type'] == 'FLOAT':
                        # floats need a tolerance-based comparison
                        is_default = abs(
                            float(default_param['default']) -
                            float(param_value)) < 0.00001
                    else:
                        is_default = int(
                            default_param['default']) == int(param_value)

                if not is_default:
                    self.write(param_key)
                    self.write(delimiter)
                    self.write(param_value)
                    self.write('\n')
            except:
                # skip a parameter that cannot be converted/compared
                pass

    else:  # download the log file
        self.set_header('Content-Type', 'application/octet-stream')
        self.set_header("Content-Description", "File Transfer")
        self.set_header(
            'Content-Disposition', 'attachment; filename={}'.format(
                os.path.basename(log_file_name)))
        with open(log_file_name, 'rb') as log_file:
            while True:
                data = log_file.read(4096)
                if not data:
                    break
                self.write(data)
        self.finish()
def from_log_file(cls, log_id):
    """
    Initialize an instance from a log file.

    :param log_id: ID of the log to load
    :return: new instance populated from the parsed ULog
    """
    obj = cls()
    ulog_file_name = get_log_filename(log_id)
    ulog = load_ulog_file(ulog_file_name)
    px4_ulog = PX4ULog(ulog)

    # extract information (timestamps are in microseconds)
    obj.duration_s = int((ulog.last_timestamp - ulog.start_timestamp) / 1e6)
    obj.mav_type = px4_ulog.get_mav_type()
    obj.estimator = px4_ulog.get_estimator()
    obj.sys_autostart_id = ulog.initial_parameters.get('SYS_AUTOSTART', 0)
    obj.sys_hw = escape(ulog.msg_info_dict.get('ver_hw', ''))
    obj.ver_sw = escape(ulog.msg_info_dict.get('ver_sw', ''))
    version_info = ulog.get_version_info()
    if version_info is not None:
        obj.ver_sw_release = 'v{}.{}.{} {}'.format(*version_info)
    obj.num_logged_errors = 0
    obj.num_logged_warnings = 0
    if 'sys_uuid' in ulog.msg_info_dict:
        obj.vehicle_uuid = escape(ulog.msg_info_dict['sys_uuid'])

    # count errors & warnings; log levels are stored as ASCII digit
    # characters, hence the comparison against ord('3') / ord('4')
    for m in ulog.logged_messages:
        if m.log_level <= ord('3'):
            obj.num_logged_errors += 1
        if m.log_level == ord('4'):
            obj.num_logged_warnings += 1

    try:
        cur_dataset = ulog.get_dataset('vehicle_status')
        flight_mode_changes = cur_dataset.list_value_changes('nav_state')
        obj.flight_modes = {int(x[1]) for x in flight_mode_changes}

        # get the durations
        # make sure the first entry matches the start of the logging
        if len(flight_mode_changes) > 0:
            flight_mode_changes[0] = (ulog.start_timestamp,
                                      flight_mode_changes[0][1])
        # sentinel entry so the last mode's duration extends to the log end
        flight_mode_changes.append((ulog.last_timestamp, -1))
        for i in range(len(flight_mode_changes) - 1):
            flight_mode = flight_mode_changes[i][1]
            flight_mode_duration = int(
                (flight_mode_changes[i + 1][0] -
                 flight_mode_changes[i][0]) / 1e6)
            obj.flight_mode_durations.append(
                (flight_mode, flight_mode_duration))
    except (KeyError, IndexError) as error:
        # topic or field missing from the log: no flight mode info
        obj.flight_modes = set()

    # logging start time & date
    try:
        # get the first non-zero timestamp
        gps_data = ulog.get_dataset('vehicle_gps_position')
        indices = np.nonzero(gps_data.data['time_utc_usec'])
        if len(indices[0]) > 0:
            obj.start_time_utc = int(
                gps_data.data['time_utc_usec'][indices[0][0]] / 1000000)
    except:
        # Ignore. Eg. if topic not found
        pass

    return obj
def get(self, *args, **kwargs):
    """
    GET request callback: render the 3D (Cesium) flight view for a log.

    Extracts GPS position, attitude and flight modes from the log and
    renders them into the 3D viewer template as JS-literal strings.
    """

    # load the log file
    log_id = self.get_argument('log')
    if not validate_log_id(log_id):
        raise tornado.web.HTTPError(400, 'Invalid Parameter')
    log_file_name = get_log_filename(log_id)
    ulog = load_ulog_file(log_file_name)

    # extract the necessary information from the log
    try:
        # required topics: none of these are optional
        gps_pos = ulog.get_dataset('vehicle_gps_position').data
        vehicle_global_position = ulog.get_dataset(
            'vehicle_global_position').data
        attitude = ulog.get_dataset('vehicle_attitude').data
    except (KeyError, IndexError, ValueError) as error:
        raise CustomHTTPError(
            400, 'The log does not contain all required topics<br />'
            '(vehicle_gps_position, vehicle_global_position, '
            'vehicle_attitude)') from error

    # manual control setpoint is optional
    manual_control_setpoint = None
    try:
        manual_control_setpoint = ulog.get_dataset(
            'manual_control_setpoint').data
    except (KeyError, IndexError, ValueError) as error:
        pass

    # Get the takeoff location. We use the first position with a valid fix,
    # and assume that the vehicle is not in the air already at that point
    takeoff_index = 0
    # fix_type > 2 means a (at least) 3D GPS fix
    gps_indices = np.nonzero(gps_pos['fix_type'] > 2)
    if len(gps_indices[0]) > 0:
        takeoff_index = gps_indices[0][0]
    # GPS alt is in mm, lat/lon in 1e-7 degrees
    takeoff_altitude = '{:.3f}' \
        .format(gps_pos['alt'][takeoff_index] * 1.e-3)
    takeoff_latitude = '{:.10f}'.format(gps_pos['lat'][takeoff_index] *
                                        1.e-7)
    takeoff_longitude = '{:.10f}'.format(gps_pos['lon'][takeoff_index] *
                                         1.e-7)

    # calculate UTC time offset (assume there's no drift over the entire log)
    utc_offset = int(gps_pos['time_utc_usec'][takeoff_index]) - \
        int(gps_pos['timestamp'][takeoff_index])

    # flight modes, as a JS-literal list of [iso-time, mode-name] pairs
    flight_mode_changes = get_flight_mode_changes(ulog)
    flight_modes_str = '[ '
    for t, mode in flight_mode_changes:
        t += utc_offset
        utctimestamp = datetime.datetime.utcfromtimestamp(
            t / 1.e6).replace(tzinfo=datetime.timezone.utc)
        if mode in flight_modes_table:
            mode_name, color = flight_modes_table[mode]
        else:
            mode_name = ''
            color = '#ffffff'
        flight_modes_str += '["{:}", "{:}"], ' \
            .format(utctimestamp.isoformat(), mode_name)
    flight_modes_str += ' ]'

    # manual control setpoints (stick input)
    manual_control_setpoints_str = '[ '
    if manual_control_setpoint:
        for i in range(len(manual_control_setpoint['timestamp'])):
            manual_x = manual_control_setpoint['x'][i]
            manual_y = manual_control_setpoint['y'][i]
            manual_z = manual_control_setpoint['z'][i]
            manual_r = manual_control_setpoint['r'][i]
            t = manual_control_setpoint['timestamp'][i] + utc_offset
            utctimestamp = datetime.datetime.utcfromtimestamp(
                t / 1.e6).replace(tzinfo=datetime.timezone.utc)
            manual_control_setpoints_str += '["{:}", {:.3f}, {:.3f}, {:.3f}, {:.3f}], ' \
                .format(utctimestamp.isoformat(), manual_x, manual_y, manual_z, manual_r)
    manual_control_setpoints_str += ' ]'

    # position
    # Note: alt_ellipsoid from gps_pos would be the better match for
    # altitude, but it's not always available. And since we add an offset
    # (to match the takeoff location with the ground altitude) it does not
    # matter as much.
    position_data = '[ '
    # TODO: use vehicle_global_position? If so, then:
    # - altitude requires an offset (to match the GPS data)
    # - it's worse for some logs where the estimation is bad -> acro flights
    # (-> add both: user-selectable between GPS & estimated trajectory?)
    for i in range(len(gps_pos['timestamp'])):
        lon = gps_pos['lon'][i] * 1.e-7
        lat = gps_pos['lat'][i] * 1.e-7
        alt = gps_pos['alt'][i] * 1.e-3
        t = gps_pos['timestamp'][i] + utc_offset
        utctimestamp = datetime.datetime.utcfromtimestamp(
            t / 1.e6).replace(tzinfo=datetime.timezone.utc)
        if i == 0:
            # NOTE(review): start_timestamp is only bound when there is at
            # least one GPS sample; an empty topic would raise further down
            start_timestamp = utctimestamp
        end_timestamp = utctimestamp
        position_data += '["{:}", {:.10f}, {:.10f}, {:.3f}], ' \
            .format(utctimestamp.isoformat(), lon, lat, alt)
    position_data += ' ]'

    start_timestamp_str = '"{:}"'.format(start_timestamp.isoformat())
    # utc_offset equals the UTC time at boot (timestamp 0)
    boot_timestamp = datetime.datetime.utcfromtimestamp(
        utc_offset / 1.e6).replace(tzinfo=datetime.timezone.utc)
    boot_timestamp_str = '"{:}"'.format(boot_timestamp.isoformat())
    end_timestamp_str = '"{:}"'.format(end_timestamp.isoformat())

    # orientation as quaternion
    attitude_data = '[ '
    for i in range(len(attitude['timestamp'])):
        att_qw = attitude['q[0]'][i]
        att_qx = attitude['q[1]'][i]
        att_qy = attitude['q[2]'][i]
        att_qz = attitude['q[3]'][i]
        t = attitude['timestamp'][i] + utc_offset
        utctimestamp = datetime.datetime.utcfromtimestamp(
            t / 1.e6).replace(tzinfo=datetime.timezone.utc)
        # Cesium uses (x, y, z, w)
        attitude_data += '["{:}", {:.6f}, {:.6f}, {:.6f}, {:.6f}], ' \
            .format(utctimestamp.isoformat(), att_qx, att_qy, att_qz, att_qw)
    attitude_data += ' ]'

    # handle different vehicle types
    # the model_scale_factor should scale the different models to make them
    # equal in size (in proportion)
    mav_type = ulog.initial_parameters.get('MAV_TYPE', None)
    if mav_type == 1:  # fixed wing
        model_scale_factor = 0.06
        model_uri = 'plot_app/static/cesium/SampleData/models/CesiumAir/Cesium_Air.glb'
    elif mav_type == 2:  # quad
        model_scale_factor = 1
        model_uri = 'plot_app/static/cesium/models/iris/iris.glb'
    elif mav_type == 22:  # delta-quad
        # TODO: use the delta-quad model
        model_scale_factor = 0.06
        model_uri = 'plot_app/static/cesium/SampleData/models/CesiumAir/Cesium_Air.glb'
    else:  # TODO: handle more types
        model_scale_factor = 1
        model_uri = 'plot_app/static/cesium/models/iris/iris.glb'

    template = get_jinja_env().get_template(THREED_TEMPLATE)
    self.write(
        template.render(
            flight_modes=flight_modes_str,
            manual_control_setpoints=manual_control_setpoints_str,
            takeoff_altitude=takeoff_altitude,
            takeoff_longitude=takeoff_longitude,
            takeoff_latitude=takeoff_latitude,
            position_data=position_data,
            start_timestamp=start_timestamp_str,
            boot_timestamp=boot_timestamp_str,
            end_timestamp=end_timestamp_str,
            attitude_data=attitude_data,
            model_scale_factor=model_scale_factor,
            model_uri=model_uri,
            log_id=log_id,
            bing_api_key=get_bing_maps_api_key(),
            cesium_api_key=get_cesium_api_key()))
def get(self, *args, **kwargs):
    """
    GET request callback: download a log, its parameters, or a KML track.

    Query args: 'log' (required log ID), 'type' — '1': all parameters,
    '2': KML file, '3': non-default parameters, otherwise the raw log file.
    """
    log_id = self.get_argument('log')
    if not validate_log_id(log_id):
        raise tornado.web.HTTPError(400, 'Invalid Parameter')
    log_file_name = get_log_filename(log_id)
    download_type = self.get_argument('type', default='0')

    if not os.path.exists(log_file_name):
        raise tornado.web.HTTPError(404, 'Log not found')

    def get_original_filename(default_value, new_file_suffix):
        """ get the uploaded file name & exchange the file extension """
        try:
            con = sqlite3.connect(get_db_filename(),
                                  detect_types=sqlite3.PARSE_DECLTYPES)
            try:
                cur = con.cursor()
                cur.execute('select OriginalFilename '
                            'from Logs where Id = ?', [log_id])
                db_tuple = cur.fetchone()
                if db_tuple is not None:
                    original_file_name = escape(db_tuple[0])
                    if original_file_name[-4:].lower() == '.ulg':
                        original_file_name = original_file_name[:-4]
                    return original_file_name + new_file_suffix
            finally:
                # fix: previously the connection leaked when a row was
                # found, because close() came after the return statement
                cur.close()
                con.close()
        except:
            print("DB access failed:", sys.exc_info()[0],
                  sys.exc_info()[1])
        return default_value

    if download_type == '1':  # download the parameters
        ulog = load_ulog_file(log_file_name)
        param_keys = sorted(ulog.initial_parameters.keys())

        self.set_header("Content-Type", "text/plain")
        self.set_header('Content-Disposition',
                        'inline; filename=params.txt')

        delimiter = ', '
        for param_key in param_keys:
            self.write(param_key)
            self.write(delimiter)
            self.write(str(ulog.initial_parameters[param_key]))
            self.write('\n')

    elif download_type == '2':  # download the kml file
        kml_path = get_kml_filepath()
        kml_file_name = os.path.join(kml_path,
                                     log_id.replace('/', '.')+'.kml')

        # check if chached file exists
        if not os.path.exists(kml_file_name):
            print('need to create kml file', kml_file_name)

            def kml_colors(flight_mode):
                """ flight mode colors for KML file """
                if not flight_mode in flight_modes_table:
                    flight_mode = 0

                color_str = flight_modes_table[flight_mode][1][1:]  # color in form 'ff00aa'

                # increase brightness to match colors with template
                rgb = [int(color_str[2*x:2*x+2], 16) for x in range(3)]
                for i in range(3):
                    rgb[i] += 40
                    if rgb[i] > 255:
                        rgb[i] = 255

                color_str = "".join(map(lambda x: format(x, '02x'), rgb))

                return 'ff'+color_str[4:6]+color_str[2:4]+color_str[0:2]  # KML uses aabbggrr

            style = {'line_width': 2}
            # create in random temporary file, then move it (to avoid races)
            try:
                temp_file_name = kml_file_name+'.'+str(uuid.uuid4())
                convert_ulog2kml(
                    log_file_name, temp_file_name,
                    'vehicle_global_position', kml_colors, style=style,
                    camera_trigger_topic_name='camera_capture')
                shutil.move(temp_file_name, kml_file_name)
            except Exception as e:
                print('Error creating KML file', sys.exc_info()[0],
                      sys.exc_info()[1])
                raise CustomHTTPError(400, 'No Position Data in log') from e

        kml_dl_file_name = get_original_filename('track.kml', '.kml')

        # send the whole KML file in 4 KiB chunks
        self.set_header("Content-Type",
                        "application/vnd.google-earth.kml+xml")
        self.set_header('Content-Disposition',
                        'attachment; filename='+kml_dl_file_name)
        with open(kml_file_name, 'rb') as kml_file:
            while True:
                data = kml_file.read(4096)
                if not data:
                    break
                self.write(data)
        self.finish()

    elif download_type == '3':  # download the non-default parameters
        ulog = load_ulog_file(log_file_name)
        param_keys = sorted(ulog.initial_parameters.keys())

        self.set_header("Content-Type", "text/plain")
        self.set_header('Content-Disposition',
                        'inline; filename=params.txt')

        default_params = get_default_parameters()

        delimiter = ', '
        for param_key in param_keys:
            try:
                param_value = str(ulog.initial_parameters[param_key])
                is_default = False

                if param_key in default_params:
                    default_param = default_params[param_key]
                    if default_param['type'] == 'FLOAT':
                        # floats need a tolerance-based comparison
                        is_default = abs(float(default_param['default']) -
                                         float(param_value)) < 0.00001
                    else:
                        is_default = int(default_param['default']) == \
                            int(param_value)

                if not is_default:
                    self.write(param_key)
                    self.write(delimiter)
                    self.write(param_value)
                    self.write('\n')
            except:
                # skip a parameter that cannot be converted/compared
                pass

    else:  # download the log file
        self.set_header('Content-Type', 'application/octet-stream')
        self.set_header("Content-Description", "File Transfer")
        self.set_header('Content-Disposition',
                        'attachment; filename={}'.format(
                            os.path.basename(log_file_name)))
        with open(log_file_name, 'rb') as log_file:
            while True:
                data = log_file.read(4096)
                if not data:
                    break
                self.write(data)
        self.finish()
def post(self, *args, **kwargs):
    """
    POST request callback: handle a multipart log file upload.

    Parses the form fields, validates the file is a ULog, moves it into
    the log folder, stores metadata in the DB, sends notification emails
    and redirects to the plotting page.
    """
    if self.multipart_streamer:
        try:
            self.multipart_streamer.data_complete()
            form_data = self.multipart_streamer.get_values(
                ['description', 'email',
                 'allowForAnalysis', 'obfuscated', 'source', 'type',
                 'feedback', 'windSpeed', 'rating', 'videoUrl', 'public',
                 'vehicleName'])
            description = escape(form_data['description'].decode("utf-8"))
            email = form_data['email'].decode("utf-8")
            upload_type = 'personal'
            if 'type' in form_data:
                upload_type = form_data['type'].decode("utf-8")
            source = 'webui'
            title = ''  # may be used in future...
            if 'source' in form_data:
                source = form_data['source'].decode("utf-8")
            obfuscated = 0
            if 'obfuscated' in form_data:
                if form_data['obfuscated'].decode("utf-8") == 'true':
                    obfuscated = 1
            allow_for_analysis = 0
            if 'allowForAnalysis' in form_data:
                if form_data['allowForAnalysis'].decode("utf-8") == 'true':
                    allow_for_analysis = 1
            feedback = ''
            if 'feedback' in form_data:
                feedback = escape(form_data['feedback'].decode("utf-8"))
            wind_speed = -1
            rating = ''
            stored_email = ''
            video_url = ''
            is_public = 0
            vehicle_name = ''
            error_labels = ''

            # flight report uploads carry extra metadata fields
            if upload_type == 'flightreport':
                try:
                    wind_speed = int(escape(
                        form_data['windSpeed'].decode("utf-8")))
                except ValueError:
                    wind_speed = -1
                rating = escape(form_data['rating'].decode("utf-8"))
                if rating == 'notset':
                    rating = ''
                stored_email = email
                # get video url & check if valid
                video_url = escape(form_data['videoUrl'].decode("utf-8"),
                                   quote=True)
                if not validate_url(video_url):
                    video_url = ''
                if 'vehicleName' in form_data:
                    vehicle_name = escape(
                        form_data['vehicleName'].decode("utf-8"))

                # always allow for statistical analysis
                allow_for_analysis = 1
                if 'public' in form_data:
                    if form_data['public'].decode("utf-8") == 'true':
                        is_public = 1

            file_obj = self.multipart_streamer.get_parts_by_name(
                'filearg')[0]
            upload_file_name = file_obj.get_filename()

            # generate a collision-free log ID and persistence filename
            while True:
                log_id = str(uuid.uuid4())
                new_file_name = get_log_filename(log_id)
                if not os.path.exists(new_file_name):
                    break

            # read file header & check if really an ULog file
            header_len = len(ULog.HEADER_BYTES)
            if (file_obj.get_payload_partial(header_len) !=
                    ULog.HEADER_BYTES):
                if upload_file_name[-7:].lower() == '.px4log':
                    raise CustomHTTPError(
                        400,
                        'Invalid File. This seems to be a px4log file. '
                        'Upload it to <a href="http://logs.uaventure.com" '
                        'target="_blank">logs.uaventure.com</a>.')
                raise CustomHTTPError(400, 'Invalid File')

            print('Moving uploaded file to', new_file_name)
            file_obj.move(new_file_name)

            if obfuscated == 1:
                # TODO: randomize gps data, ...
                pass

            # generate a token: secure random string (url-safe)
            token = str(binascii.hexlify(os.urandom(16)), 'ascii')

            # Load the ulog file but only if not uploaded via CI.
            # Then we open the DB connection.
            ulog = None
            if source != 'CI':
                ulog_file_name = get_log_filename(log_id)
                ulog = load_ulog_file(ulog_file_name)

            # put additional data into a DB
            con = sqlite3.connect(get_db_filename())
            cur = con.cursor()
            cur.execute(
                'insert into Logs (Id, Title, Description, '
                'OriginalFilename, Date, AllowForAnalysis, Obfuscated, '
                'Source, Email, WindSpeed, Rating, Feedback, Type, '
                'videoUrl, ErrorLabels, Public, Token) values '
                '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                [log_id, title, description, upload_file_name,
                 datetime.datetime.now(), allow_for_analysis, obfuscated,
                 source, stored_email, wind_speed, rating, feedback,
                 upload_type, video_url, error_labels, is_public, token])

            if ulog is not None:
                # keep the vehicle table in sync; use the stored name
                vehicle_data = update_vehicle_db_entry(cur, ulog, log_id,
                                                       vehicle_name)
                vehicle_name = vehicle_data.name

            con.commit()

            url = '/plot_app?log='+log_id
            full_plot_url = get_http_protocol()+'://'+get_domain_name()+url
            print(full_plot_url)

            delete_url = get_http_protocol()+'://'+get_domain_name()+ \
                '/edit_entry?action=delete&log='+log_id+'&token='+token

            # information for the notification email
            info = {}
            info['description'] = description
            info['feedback'] = feedback
            info['upload_filename'] = upload_file_name
            info['type'] = ''
            info['airframe'] = ''
            info['hardware'] = ''
            info['uuid'] = ''
            info['software'] = ''
            info['rating'] = rating
            if len(vehicle_name) > 0:
                info['vehicle_name'] = vehicle_name

            if ulog is not None:
                px4_ulog = PX4ULog(ulog)
                info['type'] = px4_ulog.get_mav_type()
                airframe_name_tuple = get_airframe_name(ulog)
                if airframe_name_tuple is not None:
                    airframe_name, airframe_id = airframe_name_tuple
                    if len(airframe_name) == 0:
                        info['airframe'] = airframe_id
                    else:
                        info['airframe'] = airframe_name
                sys_hardware = ''
                if 'ver_hw' in ulog.msg_info_dict:
                    sys_hardware = escape(ulog.msg_info_dict['ver_hw'])
                    info['hardware'] = sys_hardware
                # do not report a uuid for SITL (simulation) logs
                if 'sys_uuid' in ulog.msg_info_dict and \
                        sys_hardware != 'SITL':
                    info['uuid'] = escape(ulog.msg_info_dict['sys_uuid'])
                branch_info = ''
                if 'ver_sw_branch' in ulog.msg_info_dict:
                    branch_info = ' (branch: '+ \
                        ulog.msg_info_dict['ver_sw_branch']+')'
                if 'ver_sw' in ulog.msg_info_dict:
                    ver_sw = escape(ulog.msg_info_dict['ver_sw'])
                    info['software'] = ver_sw + branch_info

            if upload_type == 'flightreport' and is_public:
                destinations = set(
                    email_notifications_config['public_flightreport'])
                # bad ratings additionally go to a dedicated recipient list
                if rating in ['unsatisfactory', 'crash_sw_hw',
                              'crash_pilot']:
                    destinations = destinations | \
                        set(email_notifications_config[
                            'public_flightreport_bad'])
                send_flightreport_email(
                    list(destinations),
                    full_plot_url,
                    DBData.rating_str_static(rating),
                    DBData.wind_speed_str_static(wind_speed),
                    delete_url,
                    stored_email,
                    info)

            # also generate the additional DB entry
            # (we may have the log already loaded in 'ulog', however the
            # lru cache will make it very quick to load it again)
            generate_db_data_from_log_file(log_id, con)
            # also generate the preview image (asynchronously, on the
            # IOLoop, so the response is not delayed)
            IOLoop.instance().add_callback(generate_overview_img_from_id,
                                           log_id)

            con.commit()
            cur.close()
            con.close()

            # send notification emails
            send_notification_email(email, full_plot_url, delete_url, info)

            # do not redirect for QGC
            if source != 'QGroundControl':
                self.redirect(url)

        except CustomHTTPError:
            raise
        except ULogException:
            raise CustomHTTPError(
                400,
                'Failed to parse the file. It is most likely corrupt.')
        except:
            # NOTE(review): broad except intentionally maps any failure to
            # an HTTP 500 after logging it
            print('Error when handling POST data', sys.exc_info()[0],
                  sys.exc_info()[1])
            raise CustomHTTPError(500)
        finally:
            # always free the streamed multipart temp data
            self.multipart_streamer.release_parts()
def post(self, *args, **kwargs):
    """ POST request callback.

    Accepts a multipart upload containing either a single ULog file or a
    zip archive of ULog files. Valid logs are persisted to disk, a row is
    inserted into the Logs table, and the client is redirected to the plot
    page (single ULog) or to /browse (zip upload).
    """
    if self.multipart_streamer:
        # initialized before the try block so the finally clause can close
        # them safely even when an exception is raised before the DB
        # connection is opened (previously this raised UnboundLocalError
        # and masked the original exception)
        con = None
        cur = None
        try:
            self.multipart_streamer.data_complete()
            form_data = self.multipart_streamer.get_values([
                'description', 'email', 'allowForAnalysis', 'obfuscated',
                'source', 'type', 'feedback', 'windSpeed', 'rating',
                'videoUrl', 'public', 'vehicleName'
            ])

            # decode & sanitize the form fields (values arrive as bytes)
            description = escape(form_data['description'].decode("utf-8"))
            email = form_data.get(
                'email', bytes("(no email provided)", 'utf-8')).decode("utf-8")
            upload_type = form_data.get(
                'type', bytes("personal", 'utf-8')).decode("utf-8")
            source = form_data.get(
                'source', bytes("webui", 'utf-8')).decode("utf-8")
            title = ''  # may be used in future...
            obfuscated = {'true': 1, 'false': 0}.get(
                form_data.get('obfuscated', b'false').decode('utf-8'), 0)
            allow_for_analysis = {'true': 1, 'false': 0}.get(
                form_data.get('allowForAnalysis', b'false').decode('utf-8'), 0)
            feedback = escape(form_data.get('feedback', b'').decode("utf-8"))
            # we don't bother parsing any of the "flight report" metadata
            # (wind speed, rating, video url, ...), it's not very useful to us
            wind_speed = -1
            rating = ''
            video_url = ''
            is_public = 1
            vehicle_name = escape(
                form_data.get('vehicleName', bytes("", 'utf-8')).decode("utf-8"))
            error_labels = ''

            # TODO: make the format of formdict a little more compatible with form_data above
            formdict = {
                'description': description,
                'email': email,
                'upload_type': upload_type,
                'source': source,
                'title': title,
                'obfuscated': obfuscated,
                'allow_for_analysis': allow_for_analysis,
                'feedback': feedback,
                'wind_speed': wind_speed,
                'rating': rating,
                'video_url': video_url,
                'is_public': is_public,
                'vehicle_name': vehicle_name,
                'error_labels': error_labels,
            }

            # open the database connection
            con = sqlite3.connect(get_db_filename())
            cur = con.cursor()

            file_obj = self.multipart_streamer.get_parts_by_name('filearg')[0]
            upload_file_name = file_obj.get_filename()

            # read file header and ensure validity
            peek_ulog_header = file_obj.get_payload_partial(
                len(ULog.HEADER_BYTES))
            peek_zip_header = file_obj.get_payload_partial(4)
            # regular, empty and spanned zip archive magic numbers
            zip_headers = [
                b'\x50\x4b\x03\x04', b'\x50\x4b\x05\x06', b'\x50\x4b\x07\x08'
            ]
            # we check that it is either a well formed zip or ULog

            # is file a ULog? then continue as we were :)
            if peek_ulog_header == ULog.HEADER_BYTES:
                log_id = save_uploaded_log(con, cur, file_obj, formdict)

                # generate URL info and redirect
                url = '/plot_app?log=' + log_id
                full_plot_url = get_http_protocol() + '://' + get_domain_name() + url
                print(full_plot_url)
                # do not redirect for QGC
                if source != 'QGroundControl':
                    self.redirect(url)

            # is the file a zip? read the magic numbers and unzip it
            elif peek_zip_header in zip_headers:
                # 'zip_file' rather than 'zip' to avoid shadowing the builtin
                with zipfile.ZipFile(file_obj.f_out) as zip_file:
                    for log_filename in zip_file.namelist():
                        # make sure we're dealing with a ulog file
                        # TODO: do actual validation here, don't just check filename
                        _, ext = os.path.splitext(log_filename)
                        if ext not in ['.ulg', '.ulog']:
                            print(
                                f'Skipping extracting non-ULog file {file_obj.f_out.name}//{log_filename}'
                            )
                            continue

                        # TODO: switch to save_uploaded_log
                        # generate a log ID and persistence filename
                        while True:
                            log_id = str(uuid.uuid4())
                            new_file_name = get_log_filename(log_id)
                            if not os.path.exists(new_file_name):
                                break

                        # extract and rename the ulog file to something we control
                        print(
                            f'Extracting uploaded log {file_obj.f_out.name}//{log_filename} file to',
                            new_file_name)
                        zip_file.extract(log_filename,
                                         path=os.path.dirname(new_file_name))
                        os.rename(
                            os.path.join(os.path.dirname(new_file_name),
                                         log_filename), new_file_name)

                        # Load the ulog file but only if not uploaded via CI.
                        ulog = None
                        if source != 'CI':
                            ulog_file_name = get_log_filename(log_id)
                            ulog = load_ulog_file(ulog_file_name)

                        # generate a token: secure random string (url-safe)
                        token = str(binascii.hexlify(os.urandom(16)), 'ascii')

                        # put additional data into a DB
                        cur.execute(
                            'insert into Logs (Id, Title, Description, '
                            'OriginalFilename, Date, AllowForAnalysis, Obfuscated, '
                            'Source, Email, WindSpeed, Rating, Feedback, Type, '
                            'videoUrl, ErrorLabels, Public, Token) values '
                            '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                            [
                                log_id, title, description, upload_file_name,
                                datetime.datetime.now(), allow_for_analysis,
                                obfuscated, source, email, wind_speed, rating,
                                feedback, upload_type, video_url, error_labels,
                                is_public, token
                            ])
                        if ulog is not None:
                            vehicle_data = update_vehicle_db_entry(
                                cur, ulog, log_id, vehicle_name)
                            vehicle_name = vehicle_data.name

                        con.commit()
                        generate_db_data_from_log_file(log_id, con)
                        con.commit()

                        # generate URL info (no per-file redirect: there may
                        # be several logs in the archive)
                        url = '/plot_app?log=' + log_id
                        full_plot_url = get_http_protocol() + '://' + get_domain_name() + url
                        print(full_plot_url)

                self.redirect('/browse')

            # is file neither a zip nor a ULog? error out :)
            else:
                if upload_file_name[-7:].lower() == '.px4log':
                    raise CustomHTTPError(
                        400, 'Invalid File. This seems to be a px4log file. '
                        'Upload it to <a href="http://logs.uaventure.com" '
                        'target="_blank">logs.uaventure.com</a>.')
                raise CustomHTTPError(400, 'Invalid File')

            # NOTE: the flight-report email-notification code that used to
            # live here was removed; see the repository history to restore it.

        except CustomHTTPError:
            raise
        except ULogException as e:
            raise CustomHTTPError(
                400,
                'Failed to parse the file. It is most likely corrupt.') from e
        except Exception as e:
            print('Fatal error when handling POST data', sys.exc_info()[0],
                  sys.exc_info()[1])
            traceback.print_exc()
            raise CustomHTTPError(500) from e
        finally:
            # close our DB connections (may never have been opened if an
            # early step failed)
            if cur is not None:
                cur.close()
            if con is not None:
                con.close()
            # free the uploaded files
            self.multipart_streamer.release_parts()
def get(self, *args, **kwargs):
    """ GET request callback: render the 3D (Cesium) view for one log.

    Reads the 'log' query argument, loads the ULog file, serializes the
    relevant datasets into JSON-like array strings and renders the 3D
    viewer template.
    """
    # load the log file
    log_id = self.get_argument('log')
    if not validate_log_id(log_id):
        raise tornado.web.HTTPError(400, 'Invalid Parameter')
    log_file_name = get_log_filename(log_id)
    ulog = load_ulog_file(log_file_name)

    # extract the necessary information from the log
    try:
        # required topics: none of these are optional
        # (vehicle_global_position is fetched only to assert its presence;
        # its data is not used below)
        gps_pos = ulog.get_dataset('vehicle_gps_position').data
        vehicle_global_position = ulog.get_dataset('vehicle_global_position').data
        attitude = ulog.get_dataset('vehicle_attitude').data
    except (KeyError, IndexError, ValueError) as error:
        # chain the original error for debuggability
        raise CustomHTTPError(
            400,
            'The log does not contain all required topics<br />'
            '(vehicle_gps_position, vehicle_global_position, '
            'vehicle_attitude)') from error

    def _optional_dataset(topic, description):
        """ Return the data dict for an optional topic, or None if absent.

        These are optional data streams, mostly used for charting/streaming
        2D plots on the 3D viewer; each is probed separately so the missing
        stream can be reported.
        """
        try:
            return ulog.get_dataset(topic).data
        except (KeyError, IndexError, ValueError):
            print(description + " not found")
            return None

    manual_control_setpoint = _optional_dataset(
        'manual_control_setpoint', 'Manual control setpoint')
    vehicle_local_position = _optional_dataset(
        'vehicle_local_position', 'Vehicle local position')
    vehicle_local_position_setpoint = _optional_dataset(
        'vehicle_local_position_setpoint', 'Vehicle local position setpoint')
    vehicle_attitude_setpoint = _optional_dataset(
        'vehicle_attitude_setpoint', 'Vehicle attitude setpoint')
    vehicle_rates_setpoint = _optional_dataset(
        'vehicle_rates_setpoint', 'Vehicle rates setpoint')
    actuator_outputs = _optional_dataset('actuator_outputs', 'Actuator output')
    sensor_combined = _optional_dataset('sensor_combined', 'Sensor combined')
    actuator_controls_0 = _optional_dataset(
        'actuator_controls_0', 'Actuator Controls 0')

    # Get the takeoff location. We use the first position with a valid fix,
    # and assume that the vehicle is not in the air already at that point
    takeoff_index = 0
    gps_indices = np.nonzero(gps_pos['fix_type'] > 2)
    if len(gps_indices[0]) > 0:
        takeoff_index = gps_indices[0][0]
    takeoff_altitude = '{:.3f}'.format(gps_pos['alt'][takeoff_index] * 1.e-3)
    takeoff_latitude = '{:.10f}'.format(gps_pos['lat'][takeoff_index] * 1.e-7)
    takeoff_longitude = '{:.10f}'.format(gps_pos['lon'][takeoff_index] * 1.e-7)

    # calculate UTC time offset (assume there's no drift over the entire log)
    utc_offset = int(gps_pos['time_utc_usec'][takeoff_index]) - \
        int(gps_pos['timestamp'][takeoff_index])

    def _utc_time(t_usec):
        """ Convert a UTC-offset log timestamp [us] to an aware datetime. """
        return datetime.datetime.utcfromtimestamp(t_usec / 1.e6).replace(
            tzinfo=datetime.timezone.utc)

    def _serialize_timeseries(dataset, keys, value_fmt='{:.6f}'):
        """ Serialize selected fields of a timestamped dataset into a
        JSON-like array string '[ ["<iso time>", v0, v1, ...], ... ]'.
        Returns an empty array string when the dataset is None. """
        result = '[ '
        if dataset is not None:
            # one row per sample: ISO timestamp followed by the field values
            row_fmt = '["{:}"' + (', ' + value_fmt) * len(keys) + '], '
            for i in range(len(dataset['timestamp'])):
                utctimestamp = _utc_time(dataset['timestamp'][i] + utc_offset)
                result += row_fmt.format(utctimestamp.isoformat(),
                                         *[dataset[k][i] for k in keys])
        return result + ' ]'

    # flight modes
    flight_mode_changes = get_flight_mode_changes(ulog)
    flight_modes_str = '[ '
    for t, mode in flight_mode_changes:
        t += utc_offset
        utctimestamp = _utc_time(t)
        if mode in flight_modes_table:
            mode_name, color = flight_modes_table[mode]
        else:
            mode_name = ''
            color = '#ffffff'
        flight_modes_str += '["{:}", "{:}"], ' \
            .format(utctimestamp.isoformat(), mode_name)
    flight_modes_str += ' ]'

    # manual control setpoints (stick input)
    # (uses an explicit None check, consistent with the other optional streams)
    manual_control_setpoints_str = _serialize_timeseries(
        manual_control_setpoint, ['x', 'y', 'z', 'r'], value_fmt='{:.3f}')

    # position
    # Note: alt_ellipsoid from gps_pos would be the better match for
    # altitude, but it's not always available. And since we add an offset
    # (to match the takeoff location with the ground altitude) it does not
    # matter as much.
    # This loop also tracks the first/last sample times, so it stays inline.
    position_data = '[ '
    # TODO: use vehicle_global_position? If so, then:
    # - altitude requires an offset (to match the GPS data)
    # - it's worse for some logs where the estimation is bad -> acro flights
    #   (-> add both: user-selectable between GPS & estimated trajectory?)
    for i in range(len(gps_pos['timestamp'])):
        lon = gps_pos['lon'][i] * 1.e-7
        lat = gps_pos['lat'][i] * 1.e-7
        alt = gps_pos['alt'][i] * 1.e-3
        utctimestamp = _utc_time(gps_pos['timestamp'][i] + utc_offset)
        if i == 0:
            start_timestamp = utctimestamp
        end_timestamp = utctimestamp
        position_data += '["{:}", {:.10f}, {:.10f}, {:.3f}], ' \
            .format(utctimestamp.isoformat(), lon, lat, alt)
    position_data += ' ]'

    start_timestamp_str = '"{:}"'.format(start_timestamp.isoformat())
    boot_timestamp = _utc_time(utc_offset)
    boot_timestamp_str = '"{:}"'.format(boot_timestamp.isoformat())
    end_timestamp_str = '"{:}"'.format(end_timestamp.isoformat())

    # orientation as quaternion; Cesium uses (x, y, z, w)
    attitude_data = _serialize_timeseries(
        attitude,
        ['q[1]', 'q[2]', 'q[3]', 'q[0]', 'rollspeed', 'pitchspeed', 'yawspeed'])

    # Optional data stream serialization starts here; missing streams
    # serialize to an empty array.

    # Rates setpoint data
    vehicle_rates_setpoint_data = _serialize_timeseries(
        vehicle_rates_setpoint, ['roll', 'pitch', 'yaw'])

    # Sensor combined data. Includes things like raw gyro, raw acceleration.
    sensor_combined_data = _serialize_timeseries(
        sensor_combined,
        ['gyro_rad[0]', 'gyro_rad[1]', 'gyro_rad[2]',
         'accelerometer_m_s2[0]', 'accelerometer_m_s2[1]',
         'accelerometer_m_s2[2]'])

    # Attitude setpoint
    vehicle_attitude_setpoint_data = _serialize_timeseries(
        vehicle_attitude_setpoint, ['roll_body', 'pitch_body', 'yaw_body'])

    # Local Position
    vehicle_local_position_data = _serialize_timeseries(
        vehicle_local_position, ['x', 'y', 'z', 'vx', 'vy', 'vz'])

    # Local Position Setpoint
    vehicle_local_position_setpoint_data = _serialize_timeseries(
        vehicle_local_position_setpoint, ['x', 'y', 'z', 'vx', 'vy', 'vz'])

    # Actuator Outputs. This can handle airframes with up to 8 actuation
    # outputs (i.e. motors): the serialized output count is clamped to 8.
    # (Fixes the previous version, which computed the clamp but then
    # iterated over the unclamped output count.)
    if actuator_outputs is not None:
        num_actuator_outputs = min(8, int(np.amax(actuator_outputs['noutputs'])))
        output_keys = ['output[{}]'.format(x)
                       for x in range(num_actuator_outputs)]
    else:
        output_keys = []
    actuator_outputs_data = _serialize_timeseries(actuator_outputs, output_keys)

    # Actuator controls
    actuator_controls_0_data = _serialize_timeseries(
        actuator_controls_0,
        ['control[0]', 'control[1]', 'control[2]', 'control[3]'])

    # handle different vehicle types
    # the model_scale_factor should scale the different models to make them
    # equal in size (in proportion)
    mav_type = ulog.initial_parameters.get('MAV_TYPE', None)
    if mav_type == 1:  # fixed wing
        model_scale_factor = 0.06
        model_uri = 'plot_app/static/cesium/SampleData/models/CesiumAir/Cesium_Air.glb'
    elif mav_type == 2:  # quad
        model_scale_factor = 1
        model_uri = 'plot_app/static/cesium/models/iris/iris.glb'
    elif mav_type == 22:  # delta-quad
        # TODO: use the delta-quad model
        model_scale_factor = 0.06
        model_uri = 'plot_app/static/cesium/SampleData/models/CesiumAir/Cesium_Air.glb'
    else:
        # TODO: handle more types
        model_scale_factor = 1
        model_uri = 'plot_app/static/cesium/models/iris/iris.glb'

    template = get_jinja_env().get_template(THREED_TEMPLATE)
    self.write(template.render(
        flight_modes=flight_modes_str,
        manual_control_setpoints=manual_control_setpoints_str,
        takeoff_altitude=takeoff_altitude,
        takeoff_longitude=takeoff_longitude,
        takeoff_latitude=takeoff_latitude,
        position_data=position_data,
        start_timestamp=start_timestamp_str,
        boot_timestamp=boot_timestamp_str,
        end_timestamp=end_timestamp_str,
        attitude_data=attitude_data,
        vehicle_attitude_setpoint_data=vehicle_attitude_setpoint_data,
        vehicle_local_position_data=vehicle_local_position_data,
        vehicle_local_position_setpoint_data=vehicle_local_position_setpoint_data,
        actuator_outputs_data=actuator_outputs_data,
        vehicle_rates_setpoint_data=vehicle_rates_setpoint_data,
        sensor_combined_data=sensor_combined_data,
        actuator_controls_0_data=actuator_controls_0_data,
        model_scale_factor=model_scale_factor,
        model_uri=model_uri,
        log_id=log_id,
        bing_api_key=get_bing_maps_api_key(),
        cesium_api_key=get_cesium_api_key()))