class TestDepth(unittest.TestCase):
    def setUp(self):
        self.depth = Depth()

    def test_Depth(self):
        self.depth.clear()
        self.assertFalse(self.depth.valid())
class PhinsParser:
    def __init__(self, mission, vehicle, category, ftype, outpath):
        # parser meta data
        self.class_string = "measurement"
        self.sensor_string = "phins"
        self.category = category

        self.outpath = outpath
        self.filename = mission.velocity.filename
        self.filepath = mission.velocity.filepath
        self.output_format = ftype
        self.headingoffset = vehicle.dvl.yaw

        # phins std models
        depth_std_factor = mission.depth.std_factor
        velocity_std_factor = mission.velocity.std_factor
        velocity_std_offset = mission.velocity.std_offset
        altitude_std_factor = mission.altitude.std_factor

        # read in date from filename
        yyyy = int(self.filename[0:4])
        mm = int(self.filename[4:6])
        dd = int(self.filename[6:8])
        date = yyyy, mm, dd

        self.timestamp = PhinsTimestamp(date, mission.velocity.timezone,
                                        mission.velocity.timeoffset)
        self.body_velocity = BodyVelocity(
            velocity_std_factor,
            velocity_std_offset,
            self.headingoffset,
            self.timestamp,
        )
        self.inertial_velocity = InertialVelocity()
        self.orientation = Orientation(self.headingoffset)
        self.depth = Depth(depth_std_factor, self.timestamp)
        self.altitude = Altitude(altitude_std_factor)

    def set_timestamp(self, epoch_timestamp):
        self.body_velocity.epoch_timestamp = epoch_timestamp
        self.inertial_velocity.epoch_timestamp = epoch_timestamp
        self.orientation.epoch_timestamp = epoch_timestamp
        self.depth.epoch_timestamp = epoch_timestamp
        self.altitude.epoch_timestamp = epoch_timestamp

    def line_is_valid(self, line, line_split):
        start_or_heading = (line[0] == PhinsHeaders.START
                            or line[0] == PhinsHeaders.HEADING)
        if len(line_split) == 2 and start_or_heading:
            # Verify the checksum, as many broken packets occur in phins data
            check_sum = str(line_split[1])

            # extract from $ to * as per phins manual
            string_to_check = ",".join(line)
            string_to_check = string_to_check[1:]
            string_sum = 0

            for i in range(len(string_to_check)):
                string_sum ^= ord(string_to_check[i])

            if str(hex(string_sum)[2:].zfill(2).upper()) == check_sum.upper():
                return True

            else:
                Console.warn("Broken packet: " + str(line))
                Console.warn("Check sum calculated " +
                             str(hex(string_sum).zfill(2).upper()))
                Console.warn("Does not match that provided " +
                             str(check_sum.upper()))
                Console.warn("Ignore and move on")
        return False

    def parse(self):
        # parse phins data
        Console.info("... parsing phins standard data")

        data_list = []
        path = get_raw_folder(self.outpath / ".." / self.filepath /
                              self.filename)
        with path.open("r", encoding="utf-8", errors="ignore") as filein:
            for complete_line in filein.readlines():
                line_and_md5 = complete_line.strip().split("*")
                line = line_and_md5[0].strip().split(",")
                if not self.line_is_valid(line, line_and_md5):
                    continue
                header = line[1]
                data = self.process_line(header, line)
                if data is not None:
                    data_list.append(data)
            return data_list

    def build_rdi_acfr(self):
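        # Concatenates one ACFR-style "RDI:" record from the latest DVL,
        # orientation and altitude readings, then clears them; with
        # illustrative (not real) values the resulting line looks like:
        # RDI: 1509826800.0 alt:12.3 r1:0 r2:0 r3:0 r4:0 h:45.0 p:1.0 r:0.5
        #     vx:0.1 vy:0.0 vz:0.0 nx:0 ny:0 nz:0 COG:0 SOG:0 bt_status:0
        #     h_true:0 p_gimbal:0 sv: 1500.0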
        data = ("RDI: " + str(float(self.body_velocity.epoch_timestamp_dvl)) +
                " alt:" + str(float(self.altitude.altitude)) +
                " r1:0 r2:0 r3:0 r4:0" + " h:" +
                str(float(self.orientation.yaw)) + " p:" +
                str(float(self.orientation.pitch)) + " r:" +
                str(float(self.orientation.roll)) + " vx:" +
                str(float(self.body_velocity.x_velocity)) + " vy:" +
                str(float(self.body_velocity.y_velocity)) + " vz:" +
                str(float(self.body_velocity.z_velocity)) +
                " nx:0 ny:0 nz:0 COG:0 SOG:0 bt_status:0 h_true:0 p_gimbal:0" +
                " sv: " + str(float(self.altitude.sound_velocity)) + "\n")
        self.body_velocity.clear()
        self.orientation.clear()
        self.altitude.clear()
        return data

    def process_line(self, header, line):
        data = None
        if header == PhinsHeaders.TIME:
            epoch_timestamp = self.timestamp.epoch_timestamp_from_phins(line)
            self.set_timestamp(epoch_timestamp)

        if self.category == Category.VELOCITY:
            if header == PhinsHeaders.DVL:
                self.body_velocity.from_phins(line)
                if self.output_format == "oplab":
                    data = self.body_velocity.export(self.output_format)

            if header == PhinsHeaders.VEL or header == PhinsHeaders.VEL_STD:
                self.inertial_velocity.from_phins(line)
                if self.output_format == "oplab":
                    data = self.inertial_velocity.export(self.output_format)

            if self.output_format == "acfr":
                self.orientation.from_phins(line)
                if header == PhinsHeaders.ALTITUDE:
                    self.altitude.from_phins(
                        line, self.body_velocity.epoch_timestamp_dvl)
                if (self.body_velocity.valid() and self.altitude.valid()
                        and self.orientation.valid()):
                    data = self.build_rdi_acfr()

        if self.category == Category.ORIENTATION:
            self.orientation.from_phins(line)
            data = self.orientation.export(self.output_format)

        if self.category == Category.DEPTH and header == PhinsHeaders.DEPTH:
            self.depth.from_phins(line)
            data = self.depth.export(self.output_format)

        if self.category == Category.ALTITUDE:
            if header == PhinsHeaders.ALTITUDE:
                self.altitude.from_phins(
                    line, self.body_velocity.epoch_timestamp_dvl)
                data = self.altitude.export(self.output_format)
            if header == PhinsHeaders.DVL:
                self.body_velocity.from_phins(line)
                data = self.altitude.export(self.output_format)
        return data
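
# The line_is_valid() check above verifies the NMEA-style checksum: the XOR of
# every character between '$' and '*', compared against the two hex digits
# that follow '*'. A minimal standalone sketch of that check on a raw sentence
# string; the helper name and example sentence are illustrative, not part of
# auv_nav.
def nmea_checksum_ok(sentence):
    if not sentence.startswith("$") or "*" not in sentence:
        return False
    payload, _, checksum = sentence[1:].partition("*")
    calculated = 0
    for char in payload:
        calculated ^= ord(char)
    return format(calculated, "02X") == checksum.strip().upper()

# nmea_checksum_ok("$HEHDT,123.4,T*2B") returns True (0x2B is the XOR of
# "HEHDT,123.4,T"); a corrupted payload or checksum returns False.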
def parse_autosub(mission, vehicle, category, ftype, outpath):
    # parser meta data
    sensor_string = "autosub"
    category = category
    output_format = ftype
    filename = mission.velocity.filename
    filepath = mission.velocity.filepath

    # autosub std models
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    orientation_std_offset = mission.orientation.std_offset
    altitude_std_factor = mission.altitude.std_factor
    headingoffset = vehicle.dvl.yaw

    body_velocity = BodyVelocity(velocity_std_factor, velocity_std_offset,
                                 headingoffset)
    orientation = Orientation(headingoffset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)

    body_velocity.sensor_string = sensor_string
    orientation.sensor_string = sensor_string
    depth.sensor_string = sensor_string
    altitude.sensor_string = sensor_string

    path = get_raw_folder(outpath / ".." / filepath / filename)

    autosub_log = loadmat(str(path))
    autosub_adcp = autosub_log["missionData"]["ADCPbin00"]
    autosub_ins = autosub_log["missionData"]["INSAttitude"]
    autosub_dep_ctl = autosub_log["missionData"]["DepCtlNode"]
    autosub_adcp_log1 = autosub_log["missionData"]["ADCPLog_1"]

    data_list = []
    if category == Category.VELOCITY:
        Console.info("... parsing autosub velocity")
        previous_timestamp = 0
        for i in range(len(autosub_adcp["eTime"])):
            body_velocity.from_autosub(autosub_adcp, i)
            data = body_velocity.export(output_format)
            if body_velocity.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = body_velocity.epoch_timestamp
    if category == Category.ORIENTATION:
        Console.info("... parsing autosub orientation")
        previous_timestamp = 0
        for i in range(len(autosub_ins["eTime"])):
            orientation.from_autosub(autosub_ins, i)
            data = orientation.export(output_format)
            if orientation.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = orientation.epoch_timestamp
    if category == Category.DEPTH:
        Console.info("... parsing autosub depth")
        previous_timestamp = 0
        for i in range(len(autosub_dep_ctl["eTime"])):
            depth.from_autosub(autosub_dep_ctl, i)
            data = depth.export(output_format)
            if depth.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = depth.epoch_timestamp
    if category == Category.ALTITUDE:
        Console.info("... parsing autosub altitude")
        previous_timestamp = 0
        for i in range(len(autosub_adcp_log1["eTime"])):
            altitude.from_autosub(autosub_adcp_log1, i)
            data = altitude.export(output_format)
            if altitude.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = altitude.epoch_timestamp
    return data_list
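
# Each category loop above follows the same pattern: export a record, append
# it when its timestamp has advanced, and otherwise overwrite the previous
# entry so repeated or non-increasing timestamps never create duplicates.
# A minimal sketch of that idea on plain (timestamp, value) pairs;
# hypothetical data, not the autosub .mat structures.
def keep_latest_per_timestamp(samples):
    data_list = []
    previous_timestamp = 0
    for timestamp, value in samples:
        record = {"epoch_timestamp": timestamp, "value": value}
        if not data_list or timestamp > previous_timestamp:
            data_list.append(record)
        else:
            # same or older stamp: replace the previous record, as the
            # parsers above do with data_list[-1] = data
            data_list[-1] = record
        previous_timestamp = timestamp
    return data_list

# keep_latest_per_timestamp([(1.0, "a"), (1.0, "b"), (2.0, "c")])
# -> two records: the "b" reading (replacing "a") and the "c" reading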
def oplab_to_acfr(args):
    if not args.dive_folder:
        Console.error("No dive folder provided. Exiting...")
        Console.quit("Missing comandline arguments")
    if not args.output_folder:
        Console.error("No output folder provided. Exiting...")
        Console.quit("Missing comandline arguments")

    output_folder = get_processed_folder(args.output_folder)
    vf = output_folder / "vehicle_pose_est.data"
    sf = output_folder / "stereo_pose_est.data"
    if (vf.exists() or sf.exists()) and not args.force:
        Console.error(
            "Output folder already exists. Use -F to overwrite. Exiting...")
        Console.quit("Default behaviour: not to overwrite results")

    Console.info("Loading mission.yaml")
    path_processed = get_processed_folder(args.dive_folder)
    mission_file = path_processed / "mission.yaml"
    mission = Mission(mission_file)

    vehicle_file = path_processed / "vehicle.yaml"
    vehicle = Vehicle(vehicle_file)

    origin_lat = mission.origin.latitude
    origin_lon = mission.origin.longitude

    json_list = list(path_processed.glob("json_*"))
    if len(json_list) == 0:
        Console.quit(
            "No navigation solution could be found. Please run ",
            "auv_nav parse and process first",
        )
    navigation_path = path_processed / json_list[0]
    # Try the EKF solution if it exists:
    full_navigation_path = navigation_path / "csv" / "ekf"

    vpw = AcfrVehiclePoseWriter(vf, origin_lat, origin_lon)
    vehicle_navigation_file = full_navigation_path / "auv_ekf_centre.csv"
    dataframe = pd.read_csv(vehicle_navigation_file)
    dr_list = []
    for _, row in dataframe.iterrows():
        sb = SyncedOrientationBodyVelocity()
        sb.from_df(row)
        dr_list.append(sb)
    vpw.write(dr_list)
    Console.info("Vehicle pose written to", vf)

    if not mission.image.cameras:
        Console.warn("No cameras found in the mission file.")
        return
    if len(mission.image.cameras) == 1:
        Console.error("Only one camera found in the mission file. Exiting...")
        Console.quit("ACFR stereo pose est requires two cameras.")

    camera0_name = mission.image.cameras[0].name
    camera1_name = mission.image.cameras[1].name

    camera0_nav_file = full_navigation_path / ("auv_ekf_" + camera0_name +
                                               ".csv")
    dataframe = pd.read_csv(camera0_nav_file)
    cam1_list = []
    for _, row in dataframe.iterrows():
        sb = Camera()
        sb.from_df(row)
        cam1_list.append(sb)

    camera1_nav_file = full_navigation_path / ("auv_ekf_" + camera1_name +
                                               ".csv")

    dataframe = pd.read_csv(camera1_nav_file)
    cam2_list = []
    for _, row in dataframe.iterrows():
        sb = Camera()
        sb.from_df(row)
        cam2_list.append(sb)

    spw = AcfrStereoPoseWriter(sf, origin_lat, origin_lon)

    spw.write(cam1_list, cam2_list)
    Console.info("Stereo pose written to", sf)

    Console.info("Generating combined.RAW")
    nav_standard_file = path_processed / "nav" / "nav_standard.json"
    nav_standard_file = get_processed_folder(nav_standard_file)
    Console.info("Loading json file {}".format(nav_standard_file))

    with nav_standard_file.open("r") as nav_standard:
        parsed_json_data = json.load(nav_standard)

    start_datetime = ""
    finish_datetime = ""

    # setup start and finish date time
    if start_datetime == "":
        epoch_start_time = epoch_from_json(parsed_json_data[1])
        start_datetime = epoch_to_datetime(epoch_start_time)
    else:
        epoch_start_time = string_to_epoch(start_datetime)
    if finish_datetime == "":
        epoch_finish_time = epoch_from_json(parsed_json_data[-1])
        finish_datetime = epoch_to_datetime(epoch_finish_time)
    else:
        epoch_finish_time = string_to_epoch(finish_datetime)

    sensors_std = {
        "usbl": {
            "model": "sensor"
        },
        "dvl": {
            "model": "sensor"
        },
        "depth": {
            "model": "sensor"
        },
        "orientation": {
            "model": "sensor"
        },
    }

    exporter = AcfrCombinedRawWriter(mission, vehicle, output_folder)

    # read in data from the json file
    # i is the index of the data packet
    for i in range(len(parsed_json_data)):
        Console.progress(i, len(parsed_json_data))
        epoch_timestamp = parsed_json_data[i]["epoch_timestamp"]
        if epoch_timestamp >= epoch_start_time and epoch_timestamp <= epoch_finish_time:
            if "velocity" in parsed_json_data[i]["category"]:
                if "body" in parsed_json_data[i]["frame"]:
                    # check for corrupted data points which have inertial
                    # frame data values
                    if "epoch_timestamp_dvl" in parsed_json_data[i]:
                        # confirm time stamps of dvl are aligned with main
                        # clock (within a second)
                        if (abs(parsed_json_data[i]["epoch_timestamp"] -
                                parsed_json_data[i]["epoch_timestamp_dvl"])
                            ) < 1.0:
                            velocity_body = BodyVelocity()
                            velocity_body.from_json(parsed_json_data[i],
                                                    sensors_std["dvl"])
                            exporter.add(velocity_body)
                if "inertial" in parsed_json_data[i]["frame"]:
                    velocity_inertial = InertialVelocity()
                    velocity_inertial.from_json(parsed_json_data[i])
                    exporter.add(velocity_inertial)

            if "orientation" in parsed_json_data[i]["category"]:
                orientation = Orientation()
                orientation.from_json(parsed_json_data[i],
                                      sensors_std["orientation"])
                exporter.add(orientation)

            if "depth" in parsed_json_data[i]["category"]:
                depth = Depth()
                depth.from_json(parsed_json_data[i], sensors_std["depth"])
                exporter.add(depth)

            if "altitude" in parsed_json_data[i]["category"]:
                altitude = Altitude()
                altitude.from_json(parsed_json_data[i])
                exporter.add(altitude)

            if "usbl" in parsed_json_data[i]["category"]:
                usbl = Usbl()
                usbl.from_json(parsed_json_data[i], sensors_std["usbl"])
                exporter.add(usbl)

            if "image" in parsed_json_data[i]["category"]:
                camera1 = Camera()
                # LC
                camera1.from_json(parsed_json_data[i], "camera1")
                exporter.add(camera1)
                camera2 = Camera()
                camera2.from_json(parsed_json_data[i], "camera2")
                exporter.add(camera2)
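
# oplab_to_acfr() expects an argparse-style namespace carrying dive_folder,
# output_folder and force, the three attributes checked at the top of the
# function. A hedged usage sketch; the paths are placeholders and the real
# auv_nav command-line wiring is not shown here.
from argparse import Namespace

args = Namespace(
    dive_folder="processed/2019/dive_01",         # placeholder dive folder
    output_folder="processed/2019/dive_01/acfr",  # placeholder output folder
    force=True,  # overwrite existing vehicle/stereo pose files
)
oplab_to_acfr(args)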
def parse_alr(mission, vehicle, category, ftype, outpath):
    # parser meta data
    sensor_string = "alr"
    category = category
    output_format = ftype
    filename = mission.velocity.filename
    filepath = mission.velocity.filepath

    # ALR std models
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    orientation_std_offset = mission.orientation.std_offset
    altitude_std_factor = mission.altitude.std_factor
    headingoffset = vehicle.dvl.yaw

    body_velocity = BodyVelocity(
        velocity_std_factor, velocity_std_offset, headingoffset
    )
    orientation = Orientation(headingoffset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)

    body_velocity.sensor_string = sensor_string
    orientation.sensor_string = sensor_string
    depth.sensor_string = sensor_string
    altitude.sensor_string = sensor_string

    path = get_raw_folder(outpath / ".." / filepath / filename)

    alr_log = loadmat(str(path))
    mission_data = alr_log["MissionData"]

    data_list = []
    if category == Category.VELOCITY:
        Console.info("Parsing alr velocity...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            if mission_data["DVL_down_BT_is_good"][i] == 1:
                vx = mission_data["DVL_down_BT_x"][i]
                vy = mission_data["DVL_down_BT_y"][i]
                vz = mission_data["DVL_down_BT_z"][i]
                # vx, vy, vz should not be NaN on lines with bottom lock,
                # but check to be on the safe side:
                if not isnan(vx) and not isnan(vy) and not isnan(vz):
                    t = mission_data["epoch_timestamp"][i]
                    body_velocity.from_alr(t, vx, vy, vz)
                    data = body_velocity.export(output_format)
                    if body_velocity.epoch_timestamp > previous_timestamp:
                        data_list.append(data)
                    else:
                        data_list[-1] = data
                    previous_timestamp = body_velocity.epoch_timestamp
        Console.info("...done parsing alr velocity")
    if category == Category.ORIENTATION:
        Console.info("Parsing alr orientation...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            roll = mission_data["AUV_roll"][i]
            pitch = mission_data["AUV_pitch"][i]
            yaw = mission_data["AUV_heading"][i]
            if not isnan(roll) and not isnan(pitch) and not isnan(yaw):
                t = mission_data["epoch_timestamp"][i]
                orientation.from_alr(t, roll, pitch, yaw)
                data = orientation.export(output_format)
                if orientation.epoch_timestamp > previous_timestamp:
                    data_list.append(data)
                else:
                    data_list[-1] = data
                previous_timestamp = orientation.epoch_timestamp
        Console.info("...done parsing alr orientation")
    if category == Category.DEPTH:
        Console.info("Parsing alr depth...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            d = mission_data["AUV_depth"][i]
            if not isnan(d):
                t = mission_data["epoch_timestamp"][i]
                depth.from_alr(t, d)
                data = depth.export(output_format)
                if depth.epoch_timestamp > previous_timestamp:
                    data_list.append(data)
                else:
                    data_list[-1] = data
                previous_timestamp = depth.epoch_timestamp
        Console.info("...done parsing alr depth")
    if category == Category.ALTITUDE:
        Console.info("Parsing alr altitude...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            if mission_data["DVL_down_BT_is_good"][i] == 1:
                a = mission_data["AUV_altitude"][i]
                # The altitude should not be NaN on lines with bottom lock,
                # but check to be on the safe side:
                if not isnan(a):
                    t = mission_data["epoch_timestamp"][i]
                    altitude.from_alr(t, a)
                    data = altitude.export(output_format)
                    if altitude.epoch_timestamp > previous_timestamp:
                        data_list.append(data)
                    else:
                        data_list[-1] = data
                    previous_timestamp = altitude.epoch_timestamp
        Console.info("...done parsing alr altitude")
    return data_list
def parse_eiva_navipac(mission, vehicle, category, output_format, outpath):
    # Get your data from a file using mission paths, for example
    filepath = None
    category_str = None
    if category == Category.ORIENTATION:
        category_str = "orientation"
        filepath = mission.orientation.filepath
    elif category == Category.DEPTH:
        category_str = "depth"
        filepath = mission.depth.filepath
    elif category == Category.USBL:
        category_str = "usbl"
        filepath = mission.usbl.filepath

    log_file_path = get_raw_folder(outpath / ".." / filepath)

    depth_std_factor = mission.depth.std_factor
    orientation_std_offset = mission.orientation.std_offset
    heading_offset = vehicle.ins.yaw
    usbl_id = mission.usbl.label

    latitude_reference = float(mission.origin.latitude)
    longitude_reference = float(mission.origin.longitude)

    orientation = Orientation(heading_offset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    usbl = Usbl(
        mission.usbl.std_factor,
        mission.usbl.std_offset,
        latitude_reference,
        longitude_reference,
    )

    data_list = []

    num_entries = 0
    num_valid_entries = 0

    # For each log file
    for log_file in log_file_path.glob("*_G.NPD"):
        # Open the log file
        with log_file.open("r", encoding="utf-8", errors="ignore") as filein:

            if category == Category.ORIENTATION:
                Console.info("... parsing orientation")
                # For each line in the file
                for line in filein.readlines():
                    if line.startswith("R132  4") and "$PRDID" in line:
                        orientation.from_eiva_navipac(line)
                        num_entries += 1
                        if orientation.roll is None:
                            continue
                        if orientation.valid():
                            num_valid_entries += 1
                            data = orientation.export(output_format)
                            data_list.append(data)
            elif category == Category.DEPTH:
                Console.info("... parsing depth")
                for line in filein.readlines():
                    if line[0:14] == "D     " + str(usbl_id) + "  19  1":
                        depth.from_eiva_navipac(line)
                        num_entries += 1
                        if depth.valid():
                            num_valid_entries += 1
                            data = depth.export(output_format)
                            data_list.append(data)
            elif category == Category.USBL:
                Console.info("... parsing USBL")
                for line in filein.readlines():
                    if line.startswith("P  D") and int(line[6:10]) == usbl_id:
                        num_entries += 1
                        usbl.from_eiva_navipac(line)
                        if usbl.valid():
                            num_valid_entries += 1
                            data = usbl.export(output_format)
                            data_list.append(data)
            elif category == Category.ALTITUDE:
                Console.quit("EIVA Navipac parser has no ALTITUDE available")
            elif category == Category.VELOCITY:
                Console.quit("EIVA Navipac parser has no VELOCITY available")
            else:
                Console.quit("EIVA Navipac parser has no category", category,
                             "available")
    Console.info(
        "Processed",
        num_entries,
        "entries , of which",
        num_valid_entries,
        "are valid for category",
        category_str,
    )

    return data_list
def parse_rosbag(mission, vehicle, category, output_format, outpath):
    """Parse rosbag files

    Parameters
    ----------
    mission : Mission
        Mission object
    vehicle : Vehicle
        Vehicle object
    category : str
        Measurement category
    output_format : str
        Output format
    outpath : str
        Output path

    Returns
    -------
    list
        Measurement data list
    """
    if not ROSBAG_IS_AVAILABLE:
        Console.error("rosbag is not available")
        Console.error("install it with:")
        Console.error(
            "pip install --extra-index-url",
            "https://rospypi.github.io/simple/ rosbag",
        )
        Console.quit(
            "rosbag is not available and required to parse ROS bagfiles.")

    # Get your data from a file using mission paths, for example
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    altitude_std_factor = mission.altitude.std_factor
    usbl_std_factor = mission.usbl.std_factor
    usbl_std_offset = mission.usbl.std_offset

    # NED origin
    origin_latitude = mission.origin.latitude
    origin_longitude = mission.origin.longitude

    ins_heading_offset = vehicle.ins.yaw
    dvl_heading_offset = vehicle.dvl.yaw

    body_velocity = BodyVelocity(
        velocity_std_factor,
        velocity_std_offset,
        dvl_heading_offset,
    )
    orientation = Orientation(ins_heading_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)
    usbl = Usbl(
        usbl_std_factor,
        usbl_std_offset,
        latitude_reference=origin_latitude,
        longitude_reference=origin_longitude,
    )
    camera = Camera()
    camera.sensor_string = mission.image.cameras[0].name

    body_velocity.sensor_string = "rosbag"
    orientation.sensor_string = "rosbag"
    depth.sensor_string = "rosbag"
    altitude.sensor_string = "rosbag"
    usbl.sensor_string = "rosbag"

    # Adjust timezone offsets
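    # Presumably read_timezone() returns the offset in minutes, so the * 60
    # below converts it to seconds before adding the per-sensor timeoffset
    # (an assumption based on the tz_offset_s naming, not stated here).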
    body_velocity.tz_offset_s = (
        read_timezone(mission.velocity.timezone) * 60 +
        mission.velocity.timeoffset)
    orientation.tz_offset_s = (
        read_timezone(mission.orientation.timezone) * 60 +
        mission.orientation.timeoffset)
    depth.tz_offset_s = (read_timezone(mission.depth.timezone) * 60 +
                         mission.depth.timeoffset)
    altitude.tz_offset_s = (read_timezone(mission.altitude.timezone) * 60 +
                            mission.altitude.timeoffset)
    usbl.tz_offset_s = (read_timezone(mission.usbl.timezone) * 60 +
                        mission.usbl.timeoffset)
    camera.tz_offset_s = (read_timezone(mission.image.timezone) * 60 +
                          mission.image.timeoffset)

    data_list = []

    bagfile = None
    wanted_topic = None
    data_object = None
    filepath = None

    if category == Category.ORIENTATION:
        Console.info("... parsing orientation")
        filepath = get_raw_folder(mission.orientation.filepath)
        bagfile = mission.orientation.filename
        wanted_topic = mission.orientation.topic
        data_object = orientation
    elif category == Category.VELOCITY:
        Console.info("... parsing velocity")
        filepath = get_raw_folder(mission.velocity.filepath)
        bagfile = mission.velocity.filename
        wanted_topic = mission.velocity.topic
        data_object = body_velocity
    elif category == Category.DEPTH:
        Console.info("... parsing depth")
        filepath = get_raw_folder(mission.depth.filepath)
        bagfile = mission.depth.filename
        wanted_topic = mission.depth.topic
        data_object = depth
    elif category == Category.ALTITUDE:
        Console.info("... parsing altitude")
        filepath = get_raw_folder(mission.altitude.filepath)
        bagfile = mission.altitude.filename
        wanted_topic = mission.altitude.topic
        data_object = altitude
    elif category == Category.USBL:
        Console.info("... parsing position")
        filepath = get_raw_folder(mission.usbl.filepath)
        bagfile = mission.usbl.filename
        wanted_topic = mission.usbl.topic
        data_object = usbl
    elif category == Category.IMAGES:
        Console.info("... parsing images")
        filepath = get_raw_folder(mission.image.cameras[0].path)
        bagfile = "*.bag"
        wanted_topic = mission.image.cameras[0].topic
        data_object = camera
    else:
        Console.quit("Unknown category for ROS parser", category)

    bagfile_list = list(filepath.glob(bagfile))
    outpath = Path(outpath).parent
    data_list = rosbag_topic_worker(bagfile_list, wanted_topic, data_object,
                                    data_list, output_format, outpath)
    Console.info("... parsed " + str(len(data_list)) + " " + category +
                 " entries")
    return data_list
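
# As the docstring notes, parse_rosbag() is driven entirely by the mission and
# vehicle configuration. A hedged usage sketch; the yaml file names and output
# path are placeholders, and Mission/Vehicle construction follows the same
# pattern as in oplab_to_acfr() above.
mission = Mission(Path("mission.yaml"))    # placeholder mission description
vehicle = Vehicle(Path("vehicle.yaml"))    # placeholder vehicle description
orientation_data = parse_rosbag(
    mission, vehicle, Category.ORIENTATION, "oplab", "processed/dive_01"
)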