class TestUsbl(unittest.TestCase):
    """Basic sanity checks for the Usbl measurement class."""

    def setUp(self):
        """Create a fresh Usbl instance for every test."""
        self.usbl = Usbl()

    def test_Usbl(self):
        """A cleared Usbl measurement must not report itself as valid."""
        measurement = self.usbl
        measurement.clear()
        self.assertFalse(measurement.valid())
def parse_NOC_nmea(mission, vehicle, category, ftype, outpath):
    """Parse NOC "autosub" NMEA USBL log files into auv_nav measurements.

    Each log line is expected to be tab-separated with the NMEA sentence in
    the second field and a ``dd/mm/yyyy``-style date at the start of the line.
    Only sentences whose ``ref_station_id`` matches the mission's USBL beacon
    label are kept.

    Parameters
    ----------
    mission : Mission
        Mission configuration (USBL file path, timezone, beacon label, ...).
    vehicle : Vehicle
        Vehicle configuration (unused here; kept for parser signature parity).
    category : Category
        Only Category.USBL is handled by this parser.
    ftype : str
        Output format passed to ``usbl.export``.
    outpath : Path
        Processed dive folder; raw data is resolved relative to its parent.

    Returns
    -------
    list or None
        Exported USBL measurement entries, or None for other categories.
    """
    # parser meta data
    sensor_string = "autosub"
    output_format = ftype
    if category == Category.USBL:
        filepath = mission.usbl.filepath
        timezone = mission.usbl.timezone
        beacon_id = mission.usbl.label
        timeoffset = mission.usbl.timeoffset
        timezone_offset = read_timezone(timezone)
        latitude_reference = mission.origin.latitude
        longitude_reference = mission.origin.longitude
        usbl = Usbl(
            mission.usbl.std_factor,
            mission.usbl.std_offset,
            latitude_reference,
            longitude_reference,
        )
        usbl.sensor_string = sensor_string
        path = get_raw_folder(outpath / ".." / filepath)
        file_list = get_file_list(path)
        data_list = []
        for file in file_list:
            with file.open("r", errors="ignore") as nmea_file:
                # Iterate the file lazily instead of materializing readlines().
                for line in nmea_file:
                    parts = line.split("\t")
                    if len(parts) < 2:
                        continue
                    msg = pynmea2.parse(parts[1])
                    # Skip fixes from other beacons.
                    if int(msg.ref_station_id) != beacon_id:
                        continue
                    # Date is the first whitespace-separated token of the
                    # line (dd/mm/yyyy); time-of-day is the second comma
                    # field of the NMEA sentence (HHMMSS.sss).
                    date_str = line.split(" ")[0]
                    hour_str = str(parts[1]).split(",")[1]
                    yyyy = int(date_str[6:10])
                    mm = int(date_str[3:5])
                    dd = int(date_str[0:2])
                    hour = int(hour_str[0:2])
                    mins = int(hour_str[2:4])
                    secs = int(hour_str[4:6])
                    # Fractional seconds after the decimal point, as ms.
                    msec = int(hour_str[7:10])
                    epoch_time = date_time_to_epoch(
                        yyyy, mm, dd, hour, mins, secs, timezone_offset
                    )
                    epoch_timestamp = epoch_time + msec / 1000 + timeoffset
                    msg.timestamp = epoch_timestamp
                    usbl.from_nmea(msg)
                    data = usbl.export(output_format)
                    data_list.append(data)
        return data_list
def interpolate_usbl(query_timestamp, data_1, data_2):
    """Linearly interpolate a USBL fix between two bracketing measurements.

    Parameters
    ----------
    query_timestamp : float
        Epoch timestamp at which to interpolate.
    data_1, data_2 : Usbl
        Measurements bracketing the query timestamp.

    Returns
    -------
    Usbl
        New measurement with position, stds and depth interpolated between
        ``data_1`` and ``data_2``.
    """
    temp_data = Usbl()
    temp_data.epoch_timestamp = query_timestamp
    # Interpolate every positional field the same way; loop instead of five
    # copy-pasted interpolate() calls.
    for attr in ("northings", "eastings", "northings_std", "eastings_std", "depth"):
        setattr(
            temp_data,
            attr,
            interpolate(
                query_timestamp,
                data_1.epoch_timestamp,
                data_2.epoch_timestamp,
                getattr(data_1, attr),
                getattr(data_2, attr),
            ),
        )
    return temp_data
def oplab_to_acfr(args):
    """Convert an oplab (auv_nav) processed dive into ACFR output files.

    Writes ``vehicle_pose_est.data``, ``stereo_pose_est.data`` and a
    combined RAW export to the requested output folder.

    Parameters
    ----------
    args : argparse.Namespace
        Requires ``dive_folder``, ``output_folder`` and ``force`` attributes.
    """
    if not args.dive_folder:
        Console.error("No dive folder provided. Exiting...")
        Console.quit("Missing command line arguments")
    if not args.output_folder:
        Console.error("No output folder provided. Exiting...")
        Console.quit("Missing command line arguments")
    output_folder = get_processed_folder(args.output_folder)
    vf = output_folder / "vehicle_pose_est.data"
    sf = output_folder / "stereo_pose_est.data"
    if (vf.exists() or sf.exists()) and not args.force:
        Console.error(
            "Output folder already exists. Use -F to overwrite. Exiting...")
        Console.quit("Default behaviour: not to overwrite results")

    Console.info("Loading mission.yaml")
    path_processed = get_processed_folder(args.dive_folder)
    mission = Mission(path_processed / "mission.yaml")
    vehicle = Vehicle(path_processed / "vehicle.yaml")
    origin_lat = mission.origin.latitude
    origin_lon = mission.origin.longitude

    json_list = list(path_processed.glob("json_*"))
    if len(json_list) == 0:
        Console.quit(
            "No navigation solution could be found. Please run ",
            "auv_nav parse and process first",
        )
    # Path.glob already yields paths prefixed with path_processed; joining
    # again (path_processed / json_list[0]) would duplicate the prefix for
    # relative dive folders, so use the match directly.
    navigation_path = json_list[0]
    # Try if ekf exists:
    full_navigation_path = navigation_path / "csv" / "ekf"

    vpw = AcfrVehiclePoseWriter(vf, origin_lat, origin_lon)
    dr_list = _read_measurements_csv(
        full_navigation_path / "auv_ekf_centre.csv",
        SyncedOrientationBodyVelocity,
    )
    vpw.write(dr_list)
    Console.info("Vehicle pose written to", vf)

    if not mission.image.cameras:
        Console.warn("No cameras found in the mission file.")
        return
    if len(mission.image.cameras) == 1:
        Console.error("Only one camera found in the mission file. Exiting...")
        Console.quit("ACFR stereo pose est requires two cameras.")
    camera0_name = mission.image.cameras[0].name
    camera1_name = mission.image.cameras[1].name
    cam1_list = _read_measurements_csv(
        full_navigation_path / ("auv_ekf_" + camera0_name + ".csv"), Camera
    )
    cam2_list = _read_measurements_csv(
        full_navigation_path / ("auv_ekf_" + camera1_name + ".csv"), Camera
    )
    spw = AcfrStereoPoseWriter(sf, origin_lat, origin_lon)
    spw.write(cam1_list, cam2_list)
    Console.info("Stereo pose written to", sf)

    Console.info("Generating combined.RAW")
    nav_standard_file = path_processed / "nav" / "nav_standard.json"
    nav_standard_file = get_processed_folder(nav_standard_file)
    Console.info("Loading json file {}".format(nav_standard_file))
    with nav_standard_file.open("r") as nav_standard:
        parsed_json_data = json.load(nav_standard)

    # Export the full time span of the parsed data. The original code carried
    # empty start/finish datetime strings whose non-empty branches were
    # unreachable; they are dropped here. NOTE(review): index 1 (not 0) is
    # used for the start entry, matching the original behaviour — presumably
    # entry 0 is a header/origin record; confirm before changing.
    epoch_start_time = epoch_from_json(parsed_json_data[1])
    epoch_finish_time = epoch_from_json(parsed_json_data[-1])

    # Placeholder sensor noise models passed through to from_json().
    sensors_std = {
        "usbl": {"model": "sensor"},
        "dvl": {"model": "sensor"},
        "depth": {"model": "sensor"},
        "orientation": {"model": "sensor"},
    }

    exporter = AcfrCombinedRawWriter(mission, vehicle, output_folder)
    # Read in data from the json file; i is the index of the data packet.
    for i, entry in enumerate(parsed_json_data):
        Console.progress(i, len(parsed_json_data))
        if epoch_start_time <= entry["epoch_timestamp"] <= epoch_finish_time:
            _export_combined_raw_entry(entry, sensors_std, exporter)


def _read_measurements_csv(csv_file, measurement_class):
    """Read a navigation CSV and convert every row into a measurement object."""
    dataframe = pd.read_csv(csv_file)
    measurements = []
    for _, row in dataframe.iterrows():
        m = measurement_class()
        m.from_df(row)
        measurements.append(m)
    return measurements


def _export_combined_raw_entry(entry, sensors_std, exporter):
    """Convert one nav_standard.json packet into measurements and export them."""
    if "velocity" in entry["category"]:
        if "body" in entry["frame"]:
            # To check for corrupted data points which have inertial frame
            # data values: require a DVL timestamp aligned with the main
            # clock (within a second).
            if "epoch_timestamp_dvl" in entry:
                if abs(entry["epoch_timestamp"] - entry["epoch_timestamp_dvl"]) < 1.0:
                    velocity_body = BodyVelocity()
                    velocity_body.from_json(entry, sensors_std["dvl"])
                    exporter.add(velocity_body)
        if "inertial" in entry["frame"]:
            velocity_inertial = InertialVelocity()
            velocity_inertial.from_json(entry)
            exporter.add(velocity_inertial)
    if "orientation" in entry["category"]:
        orientation = Orientation()
        orientation.from_json(entry, sensors_std["orientation"])
        exporter.add(orientation)
    if "depth" in entry["category"]:
        depth = Depth()
        depth.from_json(entry, sensors_std["depth"])
        exporter.add(depth)
    if "altitude" in entry["category"]:
        altitude = Altitude()
        altitude.from_json(entry)
        exporter.add(altitude)
    if "usbl" in entry["category"]:
        usbl = Usbl()
        usbl.from_json(entry, sensors_std["usbl"])
        exporter.add(usbl)
    if "image" in entry["category"]:
        camera1 = Camera()
        # LC
        camera1.from_json(entry, "camera1")
        exporter.add(camera1)
        camera2 = Camera()
        camera2.from_json(entry, "camera2")
        exporter.add(camera2)
def setUp(self):
    """Create a fresh Usbl instance before each test runs."""
    self.usbl = Usbl()
def parse_eiva_navipac(mission, vehicle, category, output_format, outpath):
    """Parse EIVA NaviPac ``*_G.NPD`` log files into auv_nav measurements.

    Supports ORIENTATION, DEPTH and USBL categories; ALTITUDE and VELOCITY
    are not available in this format and abort with Console.quit.

    Parameters
    ----------
    mission : Mission
        Mission configuration (file paths, std factors, USBL beacon label).
    vehicle : Vehicle
        Vehicle configuration (INS yaw used as heading offset).
    category : Category
        Which measurement category to extract.
    output_format : str
        Format passed to each measurement's ``export`` method.
    outpath : Path
        Processed dive folder; raw data is resolved relative to its parent.

    Returns
    -------
    list
        Exported measurement entries for the requested category.
    """
    # Resolve the category-specific input file path.
    filepath = None
    category_str = None
    if category == Category.ORIENTATION:
        category_str = "orientation"
        filepath = mission.orientation.filepath
    elif category == Category.DEPTH:
        category_str = "depth"
        filepath = mission.depth.filepath
    elif category == Category.USBL:
        category_str = "usbl"
        filepath = mission.usbl.filepath
    log_file_path = get_raw_folder(outpath / ".." / filepath)
    # NOTE(review): these mission/vehicle fields are read unconditionally,
    # so the mission file must define all of these sensor sections even when
    # only one category is parsed — confirm this is intended.
    depth_std_factor = mission.depth.std_factor
    orientation_std_offset = mission.orientation.std_offset
    heading_offset = vehicle.ins.yaw
    usbl_id = mission.usbl.label
    latitude_reference = float(mission.origin.latitude)
    longitude_reference = float(mission.origin.longitude)
    orientation = Orientation(heading_offset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    usbl = Usbl(
        mission.usbl.std_factor,
        mission.usbl.std_offset,
        latitude_reference,
        longitude_reference,
    )
    data_list = []
    num_entries = 0
    num_valid_entries = 0
    # For each NaviPac log file in the raw folder
    for log_file in log_file_path.glob("*_G.NPD"):
        # Open the log file, tolerating bad byte sequences
        with log_file.open("r", encoding="utf-8", errors="ignore") as filein:
            if category == Category.ORIENTATION:
                Console.info("... parsing orientation")
                # R132 4 records carrying a $PRDID sentence hold attitude data
                for line in filein.readlines():
                    if line.startswith("R132 4") and "$PRDID" in line:
                        orientation.from_eiva_navipac(line)
                        num_entries += 1
                        # Skip records the parser could not populate
                        if orientation.roll is None:
                            continue
                        if orientation.valid():
                            num_valid_entries += 1
                            data = orientation.export(output_format)
                            data_list.append(data)
            elif category == Category.DEPTH:
                Console.info("... parsing depth")
                for line in filein.readlines():
                    # NOTE(review): fixed-width compare of the first 14 chars
                    # against "D <id> 19 1" assumes str(usbl_id) has a fixed
                    # digit count; a different id width would never match —
                    # confirm against real NaviPac depth records.
                    if line[0:14] == "D " + str(usbl_id) + " 19 1":
                        depth.from_eiva_navipac(line)
                        num_entries += 1
                        if depth.valid():
                            num_valid_entries += 1
                            data = depth.export(output_format)
                            data_list.append(data)
            elif category == Category.USBL:
                Console.info("... parsing USBL")
                for line in filein.readlines():
                    # "P D" records; chars 6-10 carry the beacon id
                    if line.startswith("P D") and int(line[6:10]) == usbl_id:
                        num_entries += 1
                        usbl.from_eiva_navipac(line)
                        if usbl.valid():
                            num_valid_entries += 1
                            data = usbl.export(output_format)
                            data_list.append(data)
            elif category == Category.ALTITUDE:
                Console.quit("EIVA Navipac parser has no ALTITUDE available")
            elif category == Category.VELOCITY:
                Console.quit("EIVA Navipac parser has no VELOCITY available")
            else:
                Console.quit("EIVA Navipac parser has no category", category,
                             "available")
    Console.info(
        "Processed",
        num_entries,
        "entries , of which",
        num_valid_entries,
        "are valid for category",
        category_str,
    )
    return data_list
def parse_rosbag(mission, vehicle, category, output_format, outpath):
    """Parse ROS bagfiles into auv_nav measurement entries.

    Parameters
    ----------
    mission : Mission
        Mission object
    vehicle : Vehicle
        Vehicle object
    category : str
        Measurement category
    output_format : str
        Output format
    outpath : str
        Output path

    Returns
    -------
    list
        Measurement data list
    """
    if not ROSBAG_IS_AVAILABLE:
        Console.error("rosbag is not available")
        Console.error("install it with:")
        Console.error(
            "pip install --extra-index-url",
            "https://rospypi.github.io/simple/ rosbag",
        )
        Console.quit(
            "rosbag is not available and required to parse ROS bagfiles.")
    # Get your data from a file using mission paths, for example
    # NOTE(review): all sensor std/offset fields are read unconditionally, so
    # the mission file must define every sensor section even when only one
    # category is parsed — confirm this is intended.
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    altitude_std_factor = mission.altitude.std_factor
    usbl_std_factor = mission.usbl.std_factor
    usbl_std_offset = mission.usbl.std_offset
    # NED origin
    origin_latitude = mission.origin.latitude
    origin_longitude = mission.origin.longitude
    ins_heading_offset = vehicle.ins.yaw
    dvl_heading_offset = vehicle.dvl.yaw
    body_velocity = BodyVelocity(
        velocity_std_factor,
        velocity_std_offset,
        dvl_heading_offset,
    )
    orientation = Orientation(ins_heading_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)
    usbl = Usbl(
        usbl_std_factor,
        usbl_std_offset,
        latitude_reference=origin_latitude,
        longitude_reference=origin_longitude,
    )
    camera = Camera()
    # NOTE(review): camera metadata comes from the first camera only, and is
    # read unconditionally — mission.image must always be present.
    camera.sensor_string = mission.image.cameras[0].name
    body_velocity.sensor_string = "rosbag"
    orientation.sensor_string = "rosbag"
    depth.sensor_string = "rosbag"
    altitude.sensor_string = "rosbag"
    usbl.sensor_string = "rosbag"
    # Adjust timezone offsets
    # NOTE(review): the * 60 scaling suggests read_timezone() returns minutes
    # and tz_offset_s is seconds; mission timeoffset is added unscaled, so it
    # is presumably already in seconds — confirm units against read_timezone.
    body_velocity.tz_offset_s = (
        read_timezone(mission.velocity.timezone) * 60
        + mission.velocity.timeoffset)
    orientation.tz_offset_s = (
        read_timezone(mission.orientation.timezone) * 60
        + mission.orientation.timeoffset)
    depth.tz_offset_s = (read_timezone(mission.depth.timezone) * 60 +
                         mission.depth.timeoffset)
    altitude.tz_offset_s = (read_timezone(mission.altitude.timezone) * 60 +
                            mission.altitude.timeoffset)
    usbl.tz_offset_s = (read_timezone(mission.usbl.timezone) * 60 +
                        mission.usbl.timeoffset)
    camera.tz_offset_s = (read_timezone(mission.image.timezone) * 60 +
                          mission.image.timeoffset)
    data_list = []
    bagfile = None
    wanted_topic = None
    data_object = None
    filepath = None
    # Select input folder, bagfile pattern, topic and target measurement
    # object according to the requested category.
    if category == Category.ORIENTATION:
        Console.info("... parsing orientation")
        filepath = get_raw_folder(mission.orientation.filepath)
        bagfile = mission.orientation.filename
        wanted_topic = mission.orientation.topic
        data_object = orientation
    elif category == Category.VELOCITY:
        Console.info("... parsing velocity")
        filepath = get_raw_folder(mission.velocity.filepath)
        bagfile = mission.velocity.filename
        wanted_topic = mission.velocity.topic
        data_object = body_velocity
    elif category == Category.DEPTH:
        Console.info("... parsing depth")
        filepath = get_raw_folder(mission.depth.filepath)
        bagfile = mission.depth.filename
        wanted_topic = mission.depth.topic
        data_object = depth
    elif category == Category.ALTITUDE:
        Console.info("... parsing altitude")
        filepath = get_raw_folder(mission.altitude.filepath)
        bagfile = mission.altitude.filename
        wanted_topic = mission.altitude.topic
        data_object = altitude
    elif category == Category.USBL:
        Console.info("... parsing position")
        filepath = get_raw_folder(mission.usbl.filepath)
        bagfile = mission.usbl.filename
        wanted_topic = mission.usbl.topic
        data_object = usbl
    elif category == Category.IMAGES:
        Console.info("... parsing images")
        filepath = get_raw_folder(mission.image.cameras[0].path)
        # Images may be spread over several bags; match them all.
        bagfile = "*.bag"
        wanted_topic = mission.image.cameras[0].topic
        data_object = camera
    else:
        Console.quit("Unknown category for ROS parser", category)
    bagfile_list = list(filepath.glob(bagfile))
    outpath = Path(outpath).parent
    data_list = rosbag_topic_worker(bagfile_list, wanted_topic, data_object,
                                    data_list, output_format, outpath)
    # NOTE(review): string concatenation with `category` assumes it is (or
    # coerces like) a str — confirm Category values are str-compatible.
    Console.info("... parsed " + str(len(data_list)) + " " + category +
                 " entries")
    return data_list