def testing_args():
    """Provide the ULog fixtures used by the estimator-format tests.

    :return: dict mapping estimator-format keys to loaded ULog objects
    """
    logs_dir = os.path.join(os.path.dirname(__file__), 'flight_logs')
    # One entry per estimator output format version.
    log_files = {
        'est_format_version_1': 'short_f450_log.ulg',
        'est_format_version_2': 'estimator_innovations.ulg',
    }
    return {key: ULog(os.path.join(logs_dir, fname))
            for key, fname in log_files.items()}
def testing_args():
    """Provide the ULog fixtures used for testing.

    :return: dict of test arguments (loaded ULog objects)
    """
    flight_logs_path = os.path.join(os.path.dirname(__file__), 'flight_logs')

    def load(name):
        # Open a flight log that sits next to this test module.
        return ULog(os.path.join(flight_logs_path, name))

    return {
        'log_est_format_v1': load('short_f450_log.ulg'),
        'log_est_format_v2': load('SITL_VTOL_standard_d7b958.ulg'),
    }
def getAllData(logfile):
    """Extract velocity, acceleration, attitude and range data from a ULog.

    :param logfile: path of the ULog file to read
    :return: (t_aligned, v_body_aligned, accel_aligned), with the time vector
             shifted so the first aligned sample is at t = 0
    """
    log = ULog(logfile)

    def stack(topic, names):
        # Stack several fields of one topic into a single np.matrix.
        return np.matrix([getData(log, topic, n) for n in names])

    v_local = stack('vehicle_local_position', ['vx', 'vy', 'vz'])
    t_v_local = ms2s(getData(log, 'vehicle_local_position', 'timestamp'))

    accel = stack('sensor_combined',
                  ['accelerometer_m_s2[0]', 'accelerometer_m_s2[1]',
                   'accelerometer_m_s2[2]'])
    t_accel = ms2s(getData(log, 'sensor_combined', 'timestamp'))

    q = stack('vehicle_attitude', ['q[0]', 'q[1]', 'q[2]', 'q[3]'])
    t_q = ms2s(getData(log, 'vehicle_attitude', 'timestamp'))

    dist_bottom = getData(log, 'vehicle_local_position', 'dist_bottom')
    t_dist_bottom = ms2s(getData(log, 'vehicle_local_position', 'timestamp'))

    (t_aligned, v_body_aligned, accel_aligned) = alignData(
        t_v_local, v_local, t_accel, accel, t_q, q, t_dist_bottom, dist_bottom)

    # Re-base time so plots/analyses start at zero.
    t_aligned -= t_aligned[0]
    return (t_aligned, v_body_aligned, accel_aligned)
def ulog2docx(ulg_file_name, doc_file_name=""):
    """Build the altitude-estimate plot configuration from a ULog and plot it.

    :param ulg_file_name: the ULog filename to open and read
    :param doc_file_name: output document name (currently unused)
    """
    ulog = ULog(ulg_file_name)

    # BUG FIX: removed a dead "position plot" config (East/North axes) that
    # was built here and immediately overwritten without ever being plotted.

    ### altitude estimate
    config = {}
    config["layout"] = [2, 1]
    config["suptitle"] = "Alt Estimate"
    config["axeses"] = []
    # axes 1: fused / baro / GPS altitude over time
    axes = {}
    axes["title"] = ""
    axes["xlabel"] = "Time"
    axes["ylabel"] = "Alt/m"
    axes["lines"] = [{
        "xdata": "vehicle_global_position.timestamp",
        "ydata": [rescale, "vehicle_global_position.alt", 0.001, 0],
        "specs": "-r",
        "label": "Fused Alt"
    }, {
        "xdata": "vehicle_air_data.timestamp",
        "ydata": ["vehicle_air_data", "baro_alt_meter"],
        "specs": "-k",
        "label": "Baro Alt"
    }, {
        "xdata": ["vehicle_gps_position", "timestamp"],
        "process": lambda x: x * 0.001,
        # NOTE(review): this entry uses "spec" while the entries above use
        # "specs" — confirm which key plot2d() actually reads.
        "ydata": [lambda: "vehicle_gps_position", "alt"],
        "spec": "-b",
        "label": "GPS Alt"
    }]
    config["axeses"].append(axes)
    # axes 2: commanded thrust over time
    axes = {}
    axes["title"] = ""
    axes["xlabel"] = "Time/sec"
    axes["ylabel"] = "Thrust"
    axes["lines"] = [{
        "xdata": ["actuator_controls_0", "timestamp"],
        "ydata": ["actuator_controls_0", "control[3]"],
        "spec": "-r",
        "process": None,
        "label": "Thrust[0,100]"
    }]
    config["axeses"].append(axes)

    # plot
    plot2d(ulog, config)
def process_logdata_ekf(filename: str, check_level_dict_filename: str,
                        check_table_filename: str, plot: bool = True,
                        sensor_safety_margins: bool = True):
    """Run the EKF analysis on one log file and write the results to CSV.

    :param filename: path of the .ulg file to analyse
    :param check_level_dict_filename: csv file with check_id,threshold rows
    :param check_table_filename: csv file describing the results table
    :param plot: when True, also save a PDF report next to the log
    :param sensor_safety_margins: when True, ignore the first 5 s of air time
    :return: dict of test results (also written to <filename>.mdat.csv)
    :raises PreconditionError: when the log or the threshold file cannot be read
    """
    ## load the log and extract the necessary data for the analyses
    try:
        ulog = ULog(filename)
    except Exception as e:
        # BUG FIX: was a bare `except:`; now no longer swallows
        # KeyboardInterrupt/SystemExit and chains the original cause.
        raise PreconditionError('could not open {:s}'.format(filename)) from e

    try:
        # get the dictionary of fail and warning test thresholds from a csv file
        with open(check_level_dict_filename, 'r') as file:
            reader = csv.DictReader(file)
            check_levels = {
                row['check_id']: float(row['threshold'])
                for row in reader
            }
        print('Using test criteria loaded from {:s}'.format(
            check_level_dict_filename))
    except Exception as e:
        # BUG FIX: was a bare `except:`; the cause is now chained.
        raise PreconditionError(
            'could not find {:s}'.format(check_level_dict_filename)) from e

    in_air_margin = 5.0 if sensor_safety_margins else 0.0

    # perform the ekf analysis
    master_status, check_status, metrics, airtime_info = analyse_ekf(
        ulog,
        check_levels,
        red_thresh=1.0,
        amb_thresh=0.5,
        min_flight_duration_seconds=5.0,
        in_air_margin_seconds=in_air_margin)

    test_results = create_results_table(check_table_filename, master_status,
                                        check_status, metrics, airtime_info)

    # write metadata to a .csv file
    with open('{:s}.mdat.csv'.format(filename), "w") as file:
        file.write("name,value,description\n")
        # write each test-result entry on its own comma-separated row,
        # in alphabetical order of the keys
        for key in sorted(test_results):
            file.write(key + "," + str(test_results[key][0]) + "," +
                       test_results[key][1] + "\n")
    print('Test results written to {:s}.mdat.csv'.format(filename))

    if plot:
        create_pdf_report(ulog, '{:s}.pdf'.format(filename))
        print('Plots saved to {:s}.pdf'.format(filename))

    return test_results
def test_in_air_detector_on_flight_log(testing_args):
    """Exercise the in-air detector basics on the basic test flight log.

    :param testing_args: fixture dict with log paths and file names
    :return: None
    """
    log_path = os.path.join(
        testing_args['test_flight_logs_path'],
        testing_args['simple_test_flight_log'])
    original_take_offs(ULog(log_path))
def test_position_analyzer(testing_args):
    """Check that PositionAnalyzer yields no valid positions on the golden log.

    :param testing_args: fixture dict with golden log paths and names
    :return: None
    """
    golden_log = os.path.join(
        testing_args['golden_flight_logs_path'],
        testing_args['golden_flight_logs'][0])
    analyzer = PositionAnalyzer(ULog(golden_log))
    analyzer.set_min_ground_distance(0.2)
    assert analyzer.get_valid_position('sensor_combined') == [], \
        'returned valid positions were not empty'
def load_ulog_file(file_name):
    """ load an ULog file
    :param file_name: path of the .ulg file
    :return: ULog object
    :raises FileNotFoundError: when the file does not exist
    :raises ULogException: for any other parsing error
    """
    # The reason to put this method into helper is that the main module gets
    # (re)loaded on each page request. Thus the caching would not work there.

    # load only the messages we really need
    msg_filter = ['battery_status', 'estimator_status', 'sensor_combined',
                  'cpuload', 'vehicle_gps_position', 'vehicle_local_position',
                  'vehicle_global_position', 'vehicle_attitude',
                  'vehicle_rates_setpoint', 'vehicle_attitude_groundtruth',
                  'vehicle_local_position_groundtruth', 'vehicle_status',
                  'airspeed', 'rate_ctrl_status', 'vehicle_air_data',
                  'vehicle_magnetometer', 'system_power']
    # has been removed, because useless:
    # position_setpoint_triplet
    # 'actuator_controls_1', 'actuator_controls_0', 'actuator_outputs'
    # distance_sensor
    # 'vehicle_local_position_setpoint', 'vehicle_angular_velocity', 'vehicle_attitude_setpoint'
    # 'tecs_status'
    # 'rc_channels', 'input_rc',
    # 'manual_control_setpoint', 'vehicle_visual_odometry'
    try:
        ulog = ULog(file_name, msg_filter, disable_str_exceptions=False)
    except FileNotFoundError:
        print("Error: file %s not found" % file_name)
        raise
    # catch all other exceptions and turn them into an ULogException
    except Exception as error:
        traceback.print_exception(*sys.exc_info())
        # BUG FIX: chain the original error so the root cause stays visible
        # in the ULogException traceback.
        raise ULogException() from error

    # filter messages with timestamp = 0 (these are invalid).
    # The better way is not to publish such messages in the first place, and fix
    # the code instead (it goes against the monotonicity requirement of ulog).
    # So we display the values such that the problem becomes visible.
    # for d in ulog.data_list:
    #     t = d.data['timestamp']
    #     non_zero_indices = t != 0
    #     if not np.all(non_zero_indices):
    #         d.data = np.compress(non_zero_indices, d.data, axis=0)

    return ulog
def test_basics_in_air_detector(testing_args):
    """Run the basic in-air detector checks against a dummy log file.

    :param testing_args: fixture dict with log paths and file names
    :return: None
    """
    dummy_log = os.path.join(testing_args['test_flight_logs_path'],
                             testing_args['dummy_log_file'])
    ulog = ULog(dummy_log)
    # Each check validates one in-air scenario on the same dummy log.
    for check in (always_on_ground, always_in_air, start_in_air,
                  take_off_at_second_time_stamp, multiple_take_offs):
        check(ulog)
def getInputOutputData(logfile, axis, instance=0):
    """Extract actuator input and angular-rate output data for one axis.

    :param logfile: path of the ULog file to read
    :param axis: axis index (0=roll, 1=pitch, 2=yaw per field layout)
    :param instance: actuator_controls topic instance
    :return: (t_aligned, u_aligned, y_aligned), time starting at 0
    """
    log = ULog(logfile)
    ctrl_topic = 'actuator_controls_{}'.format(instance)

    y_data = get_data(log, 'vehicle_angular_velocity',
                      'xyz[{}]'.format(axis))
    t_y_data = ms2s(get_data(log, 'vehicle_angular_velocity', 'timestamp'))

    u_data = get_data(log, ctrl_topic, 'control[{}]'.format(axis))
    t_u_data = ms2s(get_data(log, ctrl_topic, 'timestamp'))

    (t_aligned, u_aligned, y_aligned) = extract_identification_data(
        log, t_u_data, u_data, t_y_data, y_data, axis)

    # Re-base time so identification starts at zero.
    t_aligned -= t_aligned[0]
    return (t_aligned, u_aligned, y_aligned)
def convert_ulog2csv(ulog_file_name):
    """Print the field names of every dataset in a ULog file.

    NOTE(review): despite its name, the CSV-writing code is commented out and
    this currently only prints the accumulated key list. The old docstring
    documented parameters (`messages`, `output`, `delimiter`) that do not
    exist on the signature; they were removed.

    :param ulog_file_name: The ULog filename to open and read
    :return: None
    """
    msg_filter = None
    disable_str_exceptions = False
    ulog = ULog(ulog_file_name, msg_filter, disable_str_exceptions)
    data = ulog.data_list

    # BUG FIX: `keylist` was referenced without ever being defined in this
    # function (NameError at runtime unless a module global happened to exist).
    keylist = []
    for d in data:
        # use same field order as in the log, except for the timestamp
        data_keys = [f.field_name for f in d.field_data]
        data_keys.remove('timestamp')
        # data_keys.insert(0, 'timestamp')  # we want timestamp at first position

        # we don't use np.savetxt, because we have multiple arrays with
        # potentially different data types. However the following is quite
        # slow...
        # write the header
        # print(delimiter.join(data_keys) + '\n')
        # write the data
        # last_elem = len(data_keys)-1
        # for i in range(len(d.data['timestamp'])):
        #     for k in range(len(data_keys)):
        #         csvfile.write(str(d.data[data_keys[k]][i]))
        #         if k != last_elem:
        #             csvfile.write(delimiter)
        #     csvfile.write('\n')

        print("Key =", data_keys)
        keylist.extend(data_keys)
        # print("Data =", d.data)
        print("Keylist =", keylist)
        # BUG FIX: guard the sample access so short key lists don't raise
        # IndexError.
        if len(keylist) > 10:
            print("One item", keylist[10])
def getAllData(logfile):
    """Load and time-align the topics needed for baro-bias analysis.

    :param logfile: path of the ULog file to read
    :return: (t_aligned, v_body_aligned, baro_aligned, v_local_z_aligned,
              gnss_h_aligned, baro_bias_aligned), with t_aligned starting at 0
    """
    log = ULog(logfile)

    def stack(topic, names):
        # Stack several fields of one topic into a single np.matrix.
        return np.matrix([getData(log, topic, n) for n in names])

    v_local = stack('vehicle_local_position', ['vx', 'vy', 'vz'])
    t_local = ms2s(getData(log, 'vehicle_local_position', 'timestamp'))
    dist_bottom = getData(log, 'vehicle_local_position', 'dist_bottom')

    baro = getData(log, 'vehicle_air_data', 'baro_alt_meter')
    t_baro = ms2s(getData(log, 'vehicle_air_data', 'timestamp'))

    baro_bias = getData(log, 'estimator_baro_bias', 'bias')
    t_baro_bias = ms2s(getData(log, 'estimator_baro_bias', 'timestamp'))

    q = stack('vehicle_attitude', ['q[0]', 'q[1]', 'q[2]', 'q[3]'])
    t_q = ms2s(getData(log, 'vehicle_attitude', 'timestamp'))

    # GPS altitude is logged in mm; convert to metres.
    gnss_h = getData(log, 'vehicle_gps_position', 'alt') * 1e-3
    t_gnss = ms2s(getData(log, 'vehicle_gps_position', 'timestamp'))

    (t_aligned, v_body_aligned, baro_aligned, v_local_z_aligned,
     gnss_h_aligned, baro_bias_aligned) = alignData(
         t_local, v_local, dist_bottom, t_q, q, baro, t_baro, t_gnss, gnss_h,
         t_baro_bias, baro_bias)

    # Re-base time so plots/analyses start at zero.
    t_aligned -= t_aligned[0]
    return (t_aligned, v_body_aligned, baro_aligned, v_local_z_aligned,
            gnss_h_aligned, baro_bias_aligned)
def process_logdata_ekf(filename: str, plot: bool = False) -> List[dict]:
    """Run the EKF analysis on a log file and dump the results to JSON.

    :param filename: path of the .ulg file to analyse
    :param plot: when True, also render a PDF report next to the log
    :return: list of per-check result dictionaries
    :raises PreconditionError: when the log cannot be opened
    """
    try:
        ulog = ULog(filename)
    except Exception as e:
        raise PreconditionError('could not open {:s}'.format(filename)) from e

    test_results = analyse_logdata_ekf(ulog)

    json_name = '{:s}.json'.format(os.path.splitext(filename)[0])
    with open(json_name, 'w') as file:
        json.dump(test_results, file, indent=2)

    if plot:
        create_pdf_report(ulog, '{:s}.pdf'.format(filename))
        print('Plots saved to {:s}.pdf'.format(filename))

    return test_results
# Collect all pendulum-run logs and build a combined info DataFrame.
list_file = glob.glob('sample_log/runpendulum/*.ulg')
tot_file = len(list_file)
print(" file found (" + str(tot_file) + ") :")
for name in list_file:
    print(name)
#list_file = ['sample_log/log_0_2019-9-14-21-54-24.ulg','sample_log/log_14_2019-9-24-23-31-14.ulg']

dfg = pd.DataFrame()
i = 0
# NOTE(review): the `else:` below only parses as a for/else here, which runs
# the "Loading" branch unconditionally after the loop. This strongly looks
# like a truncated `if <cache missing>: ... else: ...` whose condition was
# lost — confirm against the original script before relying on this flow.
for file_log in list_file:
    i += 1
    print("\nReading Ulog file [" + str(i) + "/" + str(tot_file) + "] : " + file_log)
    ulog = ULog(file_log, None, False)
    dfi = info_to_df(ulog, False, file_log)
    #print(dfi)
    print("append dfi to dfg")
    dfg = pd.concat([dfg, dfi])
    # Persist the accumulated frame (name_df_input is defined elsewhere).
    print("Writing : df_ulog.pkl ")
    dfg.to_pickle(name_df_input)
else:
    # Reload the pickled frame from disk.
    print("Loading : df_ulog.pkl ")
    dfg = pd.read_pickle(name_df_input)
print(dfg)
def run(log_name, showplots): log = ULog(log_name) # Select msgs and copy into arrays thrust = -get_data(log, 'vehicle_local_position_setpoint', 'thrust[2]') az = -get_data(log, 'vehicle_local_position', 'az') dist_bottom = get_data(log, 'vehicle_local_position', 'dist_bottom') t = ms2s_list(get_data(log, 'vehicle_local_position_setpoint', 'timestamp')) t_ekf = ms2s_list(get_data(log, 'vehicle_local_position', 'timestamp')) # Downsample ekf estimate to setpoint sample rate accel = array(interp(t, t_ekf, az)) # Estimator initial conditions hover_thrust_0 = 0.5 hover_thrust_noise_0 = 0.1 P0 = hover_thrust_noise_0**2 hover_thrust_process_noise = sqrt(0.25e-6) # hover thrust change / s Q = hover_thrust_process_noise**2 Qk = Q accel_noise_0 = sqrt(5.0) R = accel_noise_0**2 # Rk = R hover_ekf = HoverThrEstimator(hover_thrust_0) hover_ekf.setStateVar(P0) hover_ekf.setProcessVar(Qk) hover_ekf.setMeasVar(R) # Initialize arrays n = len(t) accel_true = zeros(n) hover_thrust = zeros(n) hover_thrust_std = zeros(n) hover_thrust_true = zeros(n) accel_noise_std = zeros(n) innov = zeros(n) innov_std = zeros(n) innov_test_ratio = zeros(n) innov_test_ratio_signed_lpf = zeros(n) residual_lpf = zeros(n) for k in range(1, n - 100): # Save data hover_thrust[k] = hover_ekf._hover_thr hover_thrust_std[k] = sqrt(hover_ekf._P) dt = t[k] - t[k - 1] if dist_bottom[k] > 1.0: # Update the EKF hover_ekf.predict(dt) (innov[k], innov_var, innov_test_ratio[k]) = hover_ekf.fuseAccZ(accel[k], thrust[k]) innov_std[k] = sqrt(innov_var) accel_noise_std[k] = sqrt(hover_ekf._R) innov_test_ratio_signed_lpf[ k] = hover_ekf._innov_test_ratio_signed_lpf residual_lpf[k] = hover_ekf._residual_lpf if showplots: head_tail = os.path.split(log_name) plotData(t, thrust, accel, accel_noise_std, hover_thrust, hover_thrust_std, innov_test_ratio, innov_test_ratio_signed_lpf, innov, innov_std, residual_lpf, head_tail[1])
def get_flight_report(ulog_file):
    """Print a short arming/battery report for one ULog file.

    :param ulog_file: path of the .ulg file to summarise
    :return: True when the vehicle was armed in the log and the report was
             printed; False otherwise (never armed, empty or invalid file)
    """
    print(ulog_file)
    try:
        ulog = ULog(
            ulog_file,
            ['vehicle_status', 'battery_status', 'vehicle_gps_position'])
        vehicle_status = ulog.get_dataset('vehicle_status')
        battery_status = ulog.get_dataset('battery_status')
        utc_avail = False

        # Processing arming data.
        armed_data = vehicle_status.data[
            'arming_state']  # 1 is standby, 2 is armed.
        armed_tstamp = vehicle_status.data['timestamp']  # in us.
        was_armed = np.any(armed_data == 2)
        if not was_armed:
            # Skip logs where the vehicle never armed.
            del ulog
            return False
        try:
            gps_position = ulog.get_dataset('vehicle_gps_position')
            start_time = time.localtime(
                gps_position.data['time_utc_usec'][0] / 1E6)  # in us since epoch
            # boot_time = time.localtime(gps_position.data['timestamp'][0]/1E6)  # in us since FC boot
            utc_avail = True
        except:
            # GPS topic missing or empty — report without UTC time below.
            utc_avail = False

        batt_v_start = battery_status.data['voltage_filtered_v'][0]
        batt_v_end = battery_status.data['voltage_filtered_v'][-1]
        # batt_v_tsamp = battery_status.data['timestamp']  # in us.

        arm_disarm_idx = np.where(
            armed_data[:-1] != armed_data[1:]
        )  # Find where arming state changes by comparing to neighbor.
        arm_disarm_events = armed_data[arm_disarm_idx]
        arm_disarm_events_tstamp = armed_tstamp[arm_disarm_idx]
        arm_disarm_durations = (
            arm_disarm_events_tstamp[1::2] - arm_disarm_events_tstamp[0::2]
        ) / 1E6  # Assume every second event is arming. Can't be armed on boot!
        arm_disarm_total_time = arm_disarm_durations.sum()
        arm_min, arm_sec = divmod(arm_disarm_total_time, 60)
        arm_min = int(arm_min)  # because it's an integer.

        if utc_avail:
            print('System powered on {0}'.format(time.asctime(start_time)))
        else:
            print('No GPS. No UTC time')
        print('Log file name: {0}'.format(ulog_file))
        print('Arm duration: {0} min {1} sec'.format(str(arm_min),
                                                     str(arm_sec)))
        print('Start voltage: {0}V, End voltage: {1}V'.format(
            str(batt_v_start), str(batt_v_end)))
        print('\n\n')
        del ulog
        return True
    except IndexError:
        # Empty dataset (no samples for a requested topic).
        # print('Empty or invalid file')
        return False
    except Exception as e:
        # Deliberate best-effort: any other parse error just skips this file.
        # print(str(e))
        return False
def ulog2mat(ulg_file_name, mat_file_name=""):
    """
    Converts ULog file to a MATLAB mat file.

    :param ulg_file_name: The ULog filename to open and read
    :param mat_file_name: The MAT filename to be saved; may also be a
                          directory (no ".mat" suffix), or empty to save
                          next to the ULog file
    """
    ####################################################
    # check file path
    ####################################################
    if not os.path.exists(ulg_file_name):
        return
    if mat_file_name == "":
        # No output given: save <log>.mat next to the .ulg file.
        mat_split_path, ulg_split_name = os.path.split(ulg_file_name)
        mat_file_path = mat_split_path
        mat_file_name = ulg_file_name[:-4] + ".mat"
    elif not mat_file_name.endswith(".mat"):
        # Output is a directory: save <log>.mat inside it.
        ulg_split_path, ulg_split_name = os.path.split(ulg_file_name)
        # BUG FIX: was `mat_file_path = ulg_file_name`, which built the
        # output path *inside* the .ulg file instead of the requested
        # directory.
        mat_file_path = mat_file_name
        mat_file_name = os.path.join(mat_file_path,
                                     ulg_split_name[:-4] + ".mat")
    else:
        mat_split_path, ulg_split_name = os.path.split(ulg_file_name)
        mat_file_path = mat_split_path
    # if mat_file_path not exist then make it (guard against "" when the
    # log path has no directory component)
    if mat_file_path and not os.path.isdir(mat_file_path):
        os.mkdir(mat_file_path)

    ####################################################
    # open ulog file
    ####################################################
    ulog = ULog(ulg_file_name, None)
    mat_file_dict = {}

    ####################################################
    # get ulog topic
    ####################################################
    for topic in ulog.data_list:
        topic_name = topic.name + "_" + str(topic.multi_id)
        topic_data = {}
        for field in topic.field_data:
            # MATLAB struct field names cannot contain brackets.
            key = (field.field_name).replace("[", "_").replace("]", "")
            topic_data[key] = topic.data[field.field_name]
        mat_file_dict[topic_name] = topic_data

    ####################################################
    # get ulog parameter
    ####################################################
    for name, init_value in ulog.initial_parameters.items():
        mat_file_dict[name] = init_value
        chg_list = [
            [chg_time, chg_value]
            for chg_time, chg_name, chg_value in ulog.changed_parameters
            if chg_name == name
        ]
        if len(chg_list) > 0:
            # BUG FIX: was `mat_file_dict[key + "_CHG"]`, reusing the stale
            # field-name loop variable from the topic loop above, so
            # parameter-change histories were stored under the wrong key.
            mat_file_dict[name + "_CHG"] = np.array(chg_list)

    ####################################################
    # get ulog information
    ####################################################
    # Format start time and duration as h:mm:ss strings.
    m1, s1 = divmod(int(ulog.start_timestamp / 1e6), 60)
    h1, m1 = divmod(m1, 60)
    m2, s2 = divmod(int((ulog.last_timestamp - ulog.start_timestamp) / 1e6),
                    60)
    h2, m2 = divmod(m2, 60)
    info_data = {}
    # time
    info_data["start_time"] = "{:d}:{:02d}:{:02d}".format(h1, m1, s1)
    info_data["duration_time"] = "{:d}:{:02d}:{:02d}".format(h2, m2, s2)
    # dropout
    dropout_durations = [dropout.duration for dropout in ulog.dropouts]
    if len(dropout_durations) == 0:
        info_data["dropout"] = "No Dropouts"
    else:
        info_data[
            "dropout"] = "Dropouts: count: {:}, total duration: {:.1f} s, max: {:} ms, mean: {:} ms".format(
                len(dropout_durations),
                sum(dropout_durations) / 1000., max(dropout_durations),
                int(sum(dropout_durations) / len(dropout_durations)))
    # fw version
    info_data["ver_fw"] = "{}".format(ulog.get_version_info_str())
    # msg_info_dict (skip perf counters)
    for key, value in ulog.msg_info_dict.items():
        if not key.startswith('perf_'):
            info_data[key] = "{}".format(value)
    # msg_info_multiple_dict
    for key, value in ulog.msg_info_multiple_dict.items():
        info_data[key] = value
    #
    mat_file_dict["information"] = info_data

    ####################################################
    # get ulog message
    ####################################################
    msg_list = []
    msg_type = np.dtype({
        "names": ["time", "level", "message"],
        "formats": ["S32", "S16", "S128"]
    })
    for msg in ulog.logged_messages:
        m1, s1 = divmod(int(msg.timestamp / 1e6), 60)
        h1, m1 = divmod(m1, 60)
        msg_data = ("{:d}:{:02d}:{:02d}".format(h1, m1, s1),
                    msg.log_level_str(), msg.message)
        msg_list.append(msg_data)
    mat_file_dict["message"] = np.array(msg_list, dtype=msg_type)

    ####################################################
    # save to mat file
    ####################################################
    scipyio.savemat(mat_file_name, mat_file_dict)
def run(log_name, showplots):
    """Replay the hover-thrust EKF over a log, with speed-based
    measurement-noise inflation, and optionally plot the results.

    :param log_name: path of the .ulg file to replay
    :param showplots: when True, plot the estimate against the log data
    """
    log = ULog(log_name)

    # Select msgs and copy into arrays
    thrust = -get_data(log, 'vehicle_local_position_setpoint', 'thrust[2]')
    az = -get_data(log, 'vehicle_local_position', 'az')
    vx = get_data(log, 'vehicle_local_position', 'vx')
    vy = get_data(log, 'vehicle_local_position', 'vy')
    vz = -get_data(log, 'vehicle_local_position', 'vz')
    dist_bottom = get_data(log, 'vehicle_local_position', 'dist_bottom')
    t = ms2s_list(get_data(log, 'vehicle_local_position_setpoint', 'timestamp'))
    t_ekf = ms2s_list(get_data(log, 'vehicle_local_position', 'timestamp'))

    # Downsample ekf estimate to setpoint sample rate
    accel = array(interp(t, t_ekf, az))
    vel_x = array(interp(t, t_ekf, vx))
    vel_y = array(interp(t, t_ekf, vy))
    vel_z = array(interp(t, t_ekf, vz))

    # Estimator initial conditions
    hover_thrust_0 = 0.5
    hover_thrust_noise_0 = 0.1
    P0 = hover_thrust_noise_0**2
    hover_thrust_process_noise = 0.0036  # hover thrust change / s
    Q = hover_thrust_process_noise**2
    accel_noise_0 = sqrt(5.0)
    R = accel_noise_0**2
    # Rk = R

    # Speed sensitivity reduction
    vz_thr = 2.0
    vxy_thr = 10.0

    hover_ekf = HoverThrEstimator(hover_thrust_0)
    hover_ekf.setStateVar(P0)
    hover_ekf.setProcessVar(Q)
    hover_ekf.setMeasVar(R)

    # Initialize arrays
    n = len(t)
    accel_true = zeros(n)
    hover_thrust = zeros(n)
    hover_thrust_std = zeros(n)
    hover_thrust_true = zeros(n)
    accel_noise_std = zeros(n)
    innov = zeros(n)
    innov_std = zeros(n)
    innov_test_ratio = zeros(n)
    innov_test_ratio_signed_lpf = zeros(n)
    residual_lpf = zeros(n)

    for k in range(1, n):
        # Inflate the measurement variance when flying faster than the
        # thresholds above — NOTE(review): presumably because the simple
        # hover-thrust model degrades at speed; confirm.
        meas_noise_coeff_z = max((abs(vel_z[k]) - vz_thr) + 1.0, 1.0)
        meas_noise_coeff_xy = max(
            (sqrt(vel_x[k]**2 + vel_y[k]**2) - vxy_thr) + 1.0, 1.0)
        hover_ekf.setMeasVarCoeff(
            max(meas_noise_coeff_xy**2, meas_noise_coeff_z**2))

        # Save data
        hover_thrust[k] = hover_ekf._hover_thr
        hover_thrust_std[k] = sqrt(hover_ekf._P)
        dt = t[k] - t[k - 1]

        # Only fuse while clearly airborne (range to ground above 1 m).
        if dist_bottom[k] > 1.0:
            # Update the EKF
            hover_ekf.predict(dt)
            (innov[k], innov_var, innov_test_ratio[k]) = \
                hover_ekf.fuseAccZ(accel[k], thrust[k])
            innov_std[k] = sqrt(innov_var)
            accel_noise_std[k] = sqrt(hover_ekf._R)
            innov_test_ratio_signed_lpf[
                k] = hover_ekf._innov_test_ratio_signed_lpf
            residual_lpf[k] = hover_ekf._residual_lpf

    if showplots:
        head_tail = os.path.split(log_name)
        plotData(t, thrust, accel, accel_noise_std, hover_thrust,
                 hover_thrust_std, innov_test_ratio,
                 innov_test_ratio_signed_lpf, innov, innov_std, residual_lpf,
                 head_tail[1])
import sys

from mpl_toolkits.mplot3d import Axes3D
from pyulog import ULog
from pyulog.px4 import PX4ULog
from pylab import *
import numpy as np
import textwrap as tw

# arguments
arguments = len(sys.argv) - 1
if arguments < 1:
    # BUG FIX: was a Python-2 `print` statement, which is a SyntaxError on
    # Python 3 (the interpreter the rest of this code targets).
    print("Give logfile name as argument")
else:
    log_name = sys.argv[1]

    # Load the log data (produced by pyulog)
    log = ULog(log_name)
    pxlog = PX4ULog(log)


def get_data(topic_name, variable_name, index):
    """Best-effort field accessor: returns [] when the topic/field is missing."""
    try:
        dataset = log.get_dataset(topic_name, index)
        return dataset.data[variable_name]
    except Exception:
        # BUG FIX: narrowed from a bare `except:` so KeyboardInterrupt is
        # not swallowed.
        return []


def ms2s_list(time_ms_list):
    """Convert microsecond timestamps to seconds (empty input passes through)."""
    if len(time_ms_list) > 0:
        return 1e-6 * time_ms_list
    else:
        # NOTE(review): the original else-branch was truncated in the source;
        # returning the (empty) input mirrors get_data()'s empty-result
        # convention — confirm against the complete script.
        return time_ms_list
def main() -> None:
    """
    main entry point: read a ULog file, merge every detected sensor stream
    into one time-sorted table and export it as a compact CSV.
    :return:
    """
    args = get_arguments()
    table = pd.DataFrame()
    try:
        ulog = ULog(args.input_file)
    except Exception:
        # BUG FIX: narrowed from a bare `except:`.
        print("Could not find ulog file")
        exit(-1)

    # Each extraction is deliberately best-effort: a missing topic only
    # skips that sensor instead of aborting the whole export.
    # BUG FIX: the range-finder result used to be bound to a variable named
    # `range`, shadowing the builtin; the table-driven loop removes nine
    # copy-pasted try/except blocks and the shadowing at once.
    extractors = [
        (util.getImuData, "IMU"),
        (util.getMagnetometerData, "Mag"),
        (util.getBarometerData, "Baro"),
        (util.getGpsData, "GPS"),
        (util.getAirspeedData, "Airspeed"),
        (util.getOpticalFlowData, "Flow"),
        (util.getRangeFinderData, "Range"),
        (util.getVioData, "VIO"),
        (util.getVehicleLandingStatus, "Landing"),
    ]
    for getter, label in extractors:
        try:
            sensor_data = getter(ulog)
            print("{} data detected".format(label))
            table = pd.concat([table, sensor_data],
                              ignore_index=True, sort=False)
        except Exception:
            print("{} data not detected".format(label))

    table = table.sort_values('timestamp', axis=0, ascending=True)
    table['timestamp'] = table['timestamp'] - table['timestamp'].iloc[0]
    # remove the first 0.5 seconds of data to be robust against faulty initialized data
    table = table[table.timestamp > 500000]
    table.timestamp = table.timestamp - 500000
    try:
        table.to_csv(args.output_file, index=None, header=None)
        # post processing remove empty cells from csv
        result = []
        with open(args.output_file, "r") as in_file:
            reader = csv.reader(in_file)
            result = [[item for item in row if item != ''] for row in reader]
        # BUG FIX: open with newline='' as the csv module requires, so no
        # spurious blank lines are inserted on Windows.
        with open(args.output_file, "w", newline='') as out_file:
            csv_writer = csv.writer(out_file)
            csv_writer.writerows(result)
    except Exception:
        print("Could not write to specified output file")