class PhinsParser:
    """Parse iXblue PHINS standard ASCII logs into oplab or ACFR records."""

    def __init__(self, mission, vehicle, category, ftype, outpath):
        # parser metadata
        self.class_string = "measurement"
        self.sensor_string = "phins"
        self.category = category

        self.outpath = outpath
        self.filename = mission.velocity.filename
        self.filepath = mission.velocity.filepath
        self.output_format = ftype
        self.headingoffset = vehicle.dvl.yaw

        # phins std models
        depth_std_factor = mission.depth.std_factor
        velocity_std_factor = mission.velocity.std_factor
        velocity_std_offset = mission.velocity.std_offset
        altitude_std_factor = mission.altitude.std_factor

        # read the date from the filename (expects a YYYYMMDD prefix)
        yyyy = int(self.filename[0:4])
        mm = int(self.filename[4:6])
        dd = int(self.filename[6:8])
        date = yyyy, mm, dd

        self.timestamp = PhinsTimestamp(date, mission.velocity.timezone,
                                        mission.velocity.timeoffset)
        self.body_velocity = BodyVelocity(
            velocity_std_factor,
            velocity_std_offset,
            self.headingoffset,
            self.timestamp,
        )
        self.inertial_velocity = InertialVelocity()
        self.orientation = Orientation(self.headingoffset)
        self.depth = Depth(depth_std_factor, self.timestamp)
        self.altitude = Altitude(altitude_std_factor)

    def set_timestamp(self, epoch_timestamp):
        self.body_velocity.epoch_timestamp = epoch_timestamp
        self.inertial_velocity.epoch_timestamp = epoch_timestamp
        self.orientation.epoch_timestamp = epoch_timestamp
        self.depth.epoch_timestamp = epoch_timestamp
        self.altitude.epoch_timestamp = epoch_timestamp

    def line_is_valid(self, line, line_split):
        start_or_heading = (line[0] == PhinsHeaders.START
                            or line[0] == PhinsHeaders.HEADING)
        if len(line_split) == 2 and start_or_heading:
            # Verify the checksum, as many broken packets appear in phins data
            check_sum = str(line_split[1])

            # extract the characters between $ and *, as per the phins manual
            string_to_check = ",".join(line)[1:]
            string_sum = 0

            for character in string_to_check:
                string_sum ^= ord(character)

            if hex(string_sum)[2:].zfill(2).upper() == check_sum.upper():
                return True
            else:
                Console.warn("Broken packet: " + str(line))
                Console.warn("Checksum calculated " +
                             hex(string_sum)[2:].zfill(2).upper())
                Console.warn("does not match the one provided " +
                             str(check_sum.upper()))
                Console.warn("Ignore and move on")
        return False

    def parse(self):
        # parse phins data
        Console.info("... parsing phins standard data")

        data_list = []
        path = get_raw_folder(self.outpath / ".." / self.filepath /
                              self.filename)
        with path.open("r", encoding="utf-8", errors="ignore") as filein:
            for complete_line in filein.readlines():
                line_and_checksum = complete_line.strip().split("*")
                line = line_and_checksum[0].strip().split(",")
                if not self.line_is_valid(line, line_and_checksum):
                    continue
                header = line[1]
                data = self.process_line(header, line)
                if data is not None:
                    data_list.append(data)
        return data_list

    def build_rdi_acfr(self):
        # Assemble an ACFR-format "RDI:" record from the buffered DVL,
        # orientation and altitude measurements, then clear them.
        data = ("RDI: " + str(float(self.body_velocity.epoch_timestamp_dvl)) +
                " alt:" + str(float(self.altitude.altitude)) +
                " r1:0 r2:0 r3:0 r4:0" + " h:" +
                str(float(self.orientation.yaw)) + " p:" +
                str(float(self.orientation.pitch)) + " r:" +
                str(float(self.orientation.roll)) + " vx:" +
                str(float(self.body_velocity.x_velocity)) + " vy:" +
                str(float(self.body_velocity.y_velocity)) + " vz:" +
                str(float(self.body_velocity.z_velocity)) +
                " nx:0 ny:0 nz:0 COG:0 SOG:0 bt_status:0 h_true:0 p_gimbal:0" +
                " sv: " + str(float(self.altitude.sound_velocity)) + "\n")
        self.body_velocity.clear()
        self.orientation.clear()
        self.altitude.clear()
        return data

    def process_line(self, header, line):
        data = None
        if header == PhinsHeaders.TIME:
            epoch_timestamp = self.timestamp.epoch_timestamp_from_phins(line)
            self.set_timestamp(epoch_timestamp)

        if self.category == Category.VELOCITY:
            if header == PhinsHeaders.DVL:
                self.body_velocity.from_phins(line)
                if self.output_format == "oplab":
                    data = self.body_velocity.export(self.output_format)

            if header == PhinsHeaders.VEL or header == PhinsHeaders.VEL_STD:
                self.inertial_velocity.from_phins(line)
                if self.output_format == "oplab":
                    data = self.inertial_velocity.export(self.output_format)

            if self.output_format == "acfr":
                self.orientation.from_phins(line)
                if header == PhinsHeaders.ALTITUDE:
                    self.altitude.from_phins(
                        line, self.body_velocity.epoch_timestamp_dvl)
                if (self.body_velocity.valid() and self.altitude.valid()
                        and self.orientation.valid()):
                    data = self.build_rdi_acfr()

        if self.category == Category.ORIENTATION:
            self.orientation.from_phins(line)
            data = self.orientation.export(self.output_format)

        if self.category == Category.DEPTH and header == PhinsHeaders.DEPTH:
            self.depth.from_phins(line)
            data = self.depth.export(self.output_format)

        if self.category == Category.ALTITUDE:
            if header == PhinsHeaders.ALTITUDE:
                self.altitude.from_phins(
                    line, self.body_velocity.epoch_timestamp_dvl)
                data = self.altitude.export(self.output_format)
            if header == PhinsHeaders.DVL:
                self.body_velocity.from_phins(line)
                data = self.altitude.export(self.output_format)
        return data
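

# Minimal standalone sketch (not part of the original class) of the NMEA-style
# XOR checksum that PhinsParser.line_is_valid recomputes: every character
# between the leading "$" and the trailing "*" is XOR-ed together and compared,
# as two upper-case hex digits, with the value written after the "*".
def nmea_checksum(sentence):
    """Return the two-digit hex checksum of an NMEA-style sentence."""
    body = sentence.lstrip("$").split("*")[0]
    checksum = 0
    for character in body:
        checksum ^= ord(character)
    return "{:02X}".format(checksum)


# Example: nmea_checksum("$HEHDT,123.4,T*2B") returns "2B", so such a line
# would pass the validity check.
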
def parse_alr(mission, vehicle, category, ftype, outpath):
    """Parse ALR navigation data from a MATLAB .mat mission log."""
    # parser metadata
    sensor_string = "alr"
    output_format = ftype
    filename = mission.velocity.filename
    filepath = mission.velocity.filepath

    # ALR std models
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    orientation_std_offset = mission.orientation.std_offset
    altitude_std_factor = mission.altitude.std_factor
    headingoffset = vehicle.dvl.yaw

    body_velocity = BodyVelocity(
        velocity_std_factor, velocity_std_offset, headingoffset
    )
    orientation = Orientation(headingoffset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)

    body_velocity.sensor_string = sensor_string
    orientation.sensor_string = sensor_string
    depth.sensor_string = sensor_string
    altitude.sensor_string = sensor_string

    path = get_raw_folder(outpath / ".." / filepath / filename)

    alr_log = loadmat(str(path))
    mission_data = alr_log["MissionData"]

    data_list = []
    if category == Category.VELOCITY:
        Console.info("Parsing alr velocity...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            if mission_data["DVL_down_BT_is_good"][i] == 1:
                vx = mission_data["DVL_down_BT_x"][i]
                vy = mission_data["DVL_down_BT_y"][i]
                vz = mission_data["DVL_down_BT_z"][i]
                # vx, vy, vz should not be NaN on lines with bottom lock,
                # but check to be on the safe side:
                if not isnan(vx) and not isnan(vy) and not isnan(vz):
                    t = mission_data["epoch_timestamp"][i]
                    body_velocity.from_alr(t, vx, vy, vz)
                    data = body_velocity.export(output_format)
                    if body_velocity.epoch_timestamp > previous_timestamp:
                        data_list.append(data)
                    else:
                        data_list[-1] = data
                    previous_timestamp = body_velocity.epoch_timestamp
        Console.info("...done parsing alr velocity")
    if category == Category.ORIENTATION:
        Console.info("Parsing alr orientation...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            roll = mission_data["AUV_roll"][i]
            pitch = mission_data["AUV_pitch"][i]
            yaw = mission_data["AUV_heading"][i]
            if not isnan(roll) and not isnan(pitch) and not isnan(yaw):
                t = mission_data["epoch_timestamp"][i]
                orientation.from_alr(t, roll, pitch, yaw)
                data = orientation.export(output_format)
                if orientation.epoch_timestamp > previous_timestamp:
                    data_list.append(data)
                else:
                    data_list[-1] = data
                previous_timestamp = orientation.epoch_timestamp
        Console.info("...done parsing alr orientation")
    if category == Category.DEPTH:
        Console.info("Parsing alr depth...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            d = mission_data["AUV_depth"][i]
            if not isnan(d):
                t = mission_data["epoch_timestamp"][i]
                depth.from_alr(t, d)
                data = depth.export(output_format)
                if depth.epoch_timestamp > previous_timestamp:
                    data_list.append(data)
                else:
                    data_list[-1] = data
                previous_timestamp = depth.epoch_timestamp
        Console.info("...done parsing alr depth")
    if category == Category.ALTITUDE:
        Console.info("Parsing alr altitude...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            if mission_data["DVL_down_BT_is_good"][i] == 1:
                a = mission_data["AUV_altitude"][i]
                # The altitude should not be NaN on lines with bottom lock,
                # but check to be on the safe side:
                if not isnan(a):
                    t = mission_data["epoch_timestamp"][i]
                    altitude.from_alr(t, a)
                    data = altitude.export(output_format)
                    if altitude.epoch_timestamp > previous_timestamp:
                        data_list.append(data)
                    else:
                        data_list[-1] = data
                    previous_timestamp = altitude.epoch_timestamp
        Console.info("...done parsing alr altitude")
    return data_list
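

# Sketch (not from the original source) of the append-or-overwrite pattern the
# loops above use: a sample whose timestamp does not advance past the previous
# one replaces the last record instead of being appended, so the output keeps
# one record per strictly increasing timestamp.
def append_or_replace(data_list, data, timestamp, previous_timestamp):
    """Append data, or overwrite the last record if time did not advance."""
    if timestamp > previous_timestamp:
        data_list.append(data)
    else:
        # duplicated or out-of-order sample: keep only the latest version
        data_list[-1] = data
    return timestamp  # becomes previous_timestamp for the next iteration


# e.g. previous_timestamp = append_or_replace(
#          data_list, data, body_velocity.epoch_timestamp, previous_timestamp)
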
def parse_autosub(mission, vehicle, category, ftype, outpath):
    """Parse Autosub navigation data from a MATLAB .mat mission log."""
    # parser metadata
    sensor_string = "autosub"
    output_format = ftype
    filename = mission.velocity.filename
    filepath = mission.velocity.filepath

    # autosub std models
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    orientation_std_offset = mission.orientation.std_offset
    altitude_std_factor = mission.altitude.std_factor
    headingoffset = vehicle.dvl.yaw

    body_velocity = BodyVelocity(velocity_std_factor, velocity_std_offset,
                                 headingoffset)
    orientation = Orientation(headingoffset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)

    body_velocity.sensor_string = sensor_string
    orientation.sensor_string = sensor_string
    depth.sensor_string = sensor_string
    altitude.sensor_string = sensor_string

    path = get_raw_folder(outpath / ".." / filepath / filename)

    autosub_log = loadmat(str(path))
    autosub_adcp = autosub_log["missionData"]["ADCPbin00"]
    autosub_ins = autosub_log["missionData"]["INSAttitude"]
    autosub_dep_ctl = autosub_log["missionData"]["DepCtlNode"]
    autosub_adcp_log1 = autosub_log["missionData"]["ADCPLog_1"]

    data_list = []
    if category == Category.VELOCITY:
        Console.info("... parsing autosub velocity")
        previous_timestamp = 0
        for i in range(len(autosub_adcp["eTime"])):
            body_velocity.from_autosub(autosub_adcp, i)
            data = body_velocity.export(output_format)
            if body_velocity.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = body_velocity.epoch_timestamp
    if category == Category.ORIENTATION:
        Console.info("... parsing autosub orientation")
        previous_timestamp = 0
        for i in range(len(autosub_ins["eTime"])):
            orientation.from_autosub(autosub_ins, i)
            data = orientation.export(output_format)
            if orientation.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = orientation.epoch_timestamp
    if category == Category.DEPTH:
        Console.info("... parsing autosub depth")
        previous_timestamp = 0
        for i in range(len(autosub_dep_ctl["eTime"])):
            depth.from_autosub(autosub_dep_ctl, i)
            data = depth.export(output_format)
            if depth.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = depth.epoch_timestamp
    if category == Category.ALTITUDE:
        Console.info("... parsing autosub altitude")
        previous_timestamp = 0
        for i in range(len(autosub_adcp_log1["eTime"])):
            altitude.from_autosub(autosub_adcp_log1, i)
            data = altitude.export(output_format)
            if altitude.epoch_timestamp > previous_timestamp:
                data_list.append(data)
            else:
                data_list[-1] = data
            previous_timestamp = altitude.epoch_timestamp
    return data_list
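

# For reference (an addition, not defined in the original module): the mapping
# between requested category and the missionData sub-structure parsed above.
AUTOSUB_SOURCES = {
    Category.VELOCITY: "ADCPbin00",       # BodyVelocity.from_autosub
    Category.ORIENTATION: "INSAttitude",  # Orientation.from_autosub
    Category.DEPTH: "DepCtlNode",         # Depth.from_autosub
    Category.ALTITUDE: "ADCPLog_1",       # Altitude.from_autosub
}
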
def parse_eiva_navipac(mission, vehicle, category, output_format, outpath):
    """Parse EIVA NaviPac *_G.NPD logs for orientation, depth or USBL data."""
    # Resolve the input file path for the requested category
    filepath = None
    category_str = None
    if category == Category.ORIENTATION:
        category_str = "orientation"
        filepath = mission.orientation.filepath
    elif category == Category.DEPTH:
        category_str = "depth"
        filepath = mission.depth.filepath
    elif category == Category.USBL:
        category_str = "usbl"
        filepath = mission.usbl.filepath

    log_file_path = get_raw_folder(outpath / ".." / filepath)

    depth_std_factor = mission.depth.std_factor
    orientation_std_offset = mission.orientation.std_offset
    heading_offset = vehicle.ins.yaw
    usbl_id = mission.usbl.label

    latitude_reference = float(mission.origin.latitude)
    longitude_reference = float(mission.origin.longitude)

    orientation = Orientation(heading_offset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    usbl = Usbl(
        mission.usbl.std_factor,
        mission.usbl.std_offset,
        latitude_reference,
        longitude_reference,
    )

    data_list = []

    num_entries = 0
    num_valid_entries = 0

    # For each log file
    for log_file in log_file_path.glob("*_G.NPD"):
        # Open the log file
        with log_file.open("r", encoding="utf-8", errors="ignore") as filein:

            if category == Category.ORIENTATION:
                Console.info("... parsing orientation")
                # For each line in the file
                for line in filein.readlines():
                    if line.startswith("R132  4") and "$PRDID" in line:
                        orientation.from_eiva_navipac(line)
                        num_entries += 1
                        if orientation.roll is None:
                            continue
                        if orientation.valid():
                            num_valid_entries += 1
                            data = orientation.export(output_format)
                            data_list.append(data)
            elif category == Category.DEPTH:
                Console.info("... parsing depth")
                for line in filein.readlines():
                    if line[0:14] == "D     " + str(usbl_id) + "  19  1":
                        depth.from_eiva_navipac(line)
                        num_entries += 1
                        if depth.valid():
                            num_valid_entries += 1
                            data = depth.export(output_format)
                            data_list.append(data)
            elif category == Category.USBL:
                Console.info("... parsing USBL")
                for line in filein.readlines():
                    if line.startswith("P  D") and int(line[6:10]) == usbl_id:
                        num_entries += 1
                        usbl.from_eiva_navipac(line)
                        if usbl.valid():
                            num_valid_entries += 1
                            data = usbl.export(output_format)
                            data_list.append(data)
            elif category == Category.ALTITUDE:
                Console.quit("EIVA Navipac parser has no ALTITUDE available")
            elif category == Category.VELOCITY:
                Console.quit("EIVA Navipac parser has no VELOCITY available")
            else:
                Console.quit("EIVA Navipac parser has no category", category,
                             "available")
    Console.info(
        "Processed",
        num_entries,
        "entries , of which",
        num_valid_entries,
        "are valid for category",
        category_str,
    )

    return data_list
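

# Minimal driver sketch (an assumption, not part of the original module): the
# NaviPac parser handles only ORIENTATION, DEPTH and USBL and quits on the
# other categories, so a wrapper can simply loop over those three.
def parse_eiva_navipac_all(mission, vehicle, ftype, outpath):
    """Run parse_eiva_navipac for every category it supports."""
    records = {}
    for category in (Category.ORIENTATION, Category.DEPTH, Category.USBL):
        records[category] = parse_eiva_navipac(
            mission, vehicle, category, ftype, outpath
        )
    return records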