Example #1
0
    def set_path(self, path):
        """Configure folder paths, mission data and camera metadata.

        Resolves the raw/processed/config folder layout for *path*, loads
        the mission file, resets the user image lists and caches the
        camera's topic, type and image geometry on the instance.
        """
        # The path is expected to be non-empty, so we no longer check for it
        self.path_raw = get_raw_folder(path)
        self.path_processed = get_processed_folder(path)
        self.path_config = get_config_folder(path)

        self.mission = Mission(self.path_raw / "mission.yaml")

        # These are filled in later during parse/process
        self.user_specified_image_list = None
        self.user_specified_image_list_parse = None
        self.user_specified_image_list_process = None

        self.camera_name = self.camera.name
        # Copy the topic from the first matching mission camera, if any
        matching_camera = next(
            (
                cam
                for cam in self.mission.image.cameras
                if cam.name == self.camera_name and cam.topic is not None
            ),
            None,
        )
        if matching_camera is not None:
            self.camera.topic = matching_camera.topic
        self._type = self.camera.type
        # Cache the configured image geometry: (height, width, channels)
        props = self.camera.image_properties
        self.image_height = props[0]
        self.image_width = props[1]
        self.image_channels = props[2]
def parse_NOC_nmea(mission, vehicle, category, ftype, outpath):
    """Parse NOC NMEA-style USBL log files into exported measurements.

    :param mission: mission configuration (provides usbl and origin settings)
    :param vehicle: vehicle configuration (unused here; kept for parser API)
    :param category: requested data category; only Category.USBL is handled
    :param ftype: output format passed through to Usbl.export()
    :param outpath: processed-folder path used to locate the raw data
    :return: list of exported USBL records for Category.USBL; None for any
        other category (pre-existing contract preserved)
    """
    # parser meta data
    sensor_string = "autosub"
    output_format = ftype

    if category == Category.USBL:
        filepath = mission.usbl.filepath
        timezone = mission.usbl.timezone
        beacon_id = mission.usbl.label
        timeoffset = mission.usbl.timeoffset
        timezone_offset = read_timezone(timezone)
        latitude_reference = mission.origin.latitude
        longitude_reference = mission.origin.longitude

        usbl = Usbl(
            mission.usbl.std_factor,
            mission.usbl.std_offset,
            latitude_reference,
            longitude_reference,
        )
        usbl.sensor_string = sensor_string

        path = get_raw_folder(outpath / ".." / filepath)

        file_list = get_file_list(path)

        data_list = []
        for file in file_list:
            with file.open("r", errors="ignore") as nmea_file:
                for line in nmea_file.readlines():
                    # Lines are "<date> ...\t<NMEA sentence>"; skip malformed
                    parts = line.split("\t")
                    if len(parts) < 2:
                        continue
                    msg = pynmea2.parse(parts[1])

                    # Only keep fixes for the configured beacon
                    if int(msg.ref_station_id) != beacon_id:
                        continue

                    # Date comes from the line prefix (dd/mm/yyyy); time of
                    # day from the NMEA sentence's first field (HHMMSS.sss)
                    date_str = line.split(" ")[0]
                    hour_str = str(parts[1]).split(",")[1]

                    yyyy = int(date_str[6:10])
                    mm = int(date_str[3:5])
                    dd = int(date_str[0:2])
                    hour = int(hour_str[0:2])
                    mins = int(hour_str[2:4])
                    secs = int(hour_str[4:6])
                    msec = int(hour_str[7:10])  # skips the '.' at index 6
                    epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins,
                                                    secs, timezone_offset)
                    epoch_timestamp = epoch_time + msec / 1000 + timeoffset
                    msg.timestamp = epoch_timestamp
                    usbl.from_nmea(msg)
                    data = usbl.export(output_format)
                    data_list.append(data)
        return data_list
def parse_NOC_polpred(mission, vehicle, category, ftype, outpath):
    """Parse NOC POLPRED tide-model output files into exported measurements.

    :param mission: mission configuration (provides tide settings)
    :param vehicle: vehicle configuration (unused here; kept for parser API)
    :param category: requested data category; only Category.TIDE is handled
    :param ftype: output format passed through to Tide.export()
    :param outpath: processed-folder path used to locate the raw data
    :return: list of exported tide records for Category.TIDE; None for any
        other category (pre-existing contract preserved)
    """
    # parser meta data
    sensor_string = "autosub"
    output_format = ftype

    if category == Category.TIDE:
        filepath = mission.tide.filepath
        timezone = mission.tide.timezone
        timeoffset = mission.tide.timeoffset
        timezone_offset = read_timezone(timezone)

        tide = Tide(mission.tide.std_offset)
        tide.sensor_string = sensor_string

        path = get_raw_folder(outpath / ".." / filepath)

        file_list = get_file_list(path)

        data_list = []

        Console.info("... parsing NOC tide data")
        # Data format sample
        #  Date      Time      Level     Speed    Direc'n
        #                         m        m/s       deg
        # 6/ 9/2019  00:00       0.74      0.14       51
        # 6/ 9/2019  01:00       0.58      0.15       56

        for file in file_list:
            with file.open("r", errors="ignore") as tide_file:
                # skip the first six lines of the file (header block)
                for line in tide_file.readlines()[6:]:
                    # Fixed-column format: d/ m/yyyy  HH:MM  level ...
                    dd = int(line[0:2])
                    mm = int(line[3:5])
                    yyyy = int(line[6:10])
                    hour = int(line[12:14])
                    mins = int(line[15:17])
                    secs = 0  # current models only provide resolution in minutes
                    msec = 0
                    epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins,
                                                    secs, timezone_offset)
                    epoch_timestamp = epoch_time + msec / 1000 + timeoffset
                    tide.epoch_timestamp = epoch_timestamp
                    tide.height = float(line[22:28])
                    tide.height_std = tide.height * tide.height_std_factor

                    data = tide.export(output_format)
                    data_list.append(data)
        return data_list
    def parse(self):
        """Parse the PHINS standard-format data file.

        Reads the file line by line, strips the trailing checksum, validates
        each line and delegates field extraction to process_line().

        :return: list of parsed data records (invalid lines are skipped)
        """
        Console.info("... parsing phins standard data")

        parsed_records = []
        path = get_raw_folder(self.outpath / ".." / self.filepath /
                              self.filename)
        with path.open("r", encoding="utf-8", errors="ignore") as filein:
            for raw_line in filein.readlines():
                # Split off the "*checksum" suffix, then split into fields
                line_and_md5 = raw_line.strip().split("*")
                fields = line_and_md5[0].strip().split(",")
                if not self.line_is_valid(fields, line_and_md5):
                    continue
                # Second field identifies the message type
                record = self.process_line(fields[1], fields)
                if record is not None:
                    parsed_records.append(record)
            return parsed_records
    def __init__(self, filepath, force_overwite=False, overwrite_all=False):
        """Load the calibration configuration for a dive folder.

        If no calibration.yaml exists yet, a packaged default is copied into
        the config tree and the program quits so the user can edit it.

        :param filepath: path to the dive folder; resolved into the raw tree
        :param force_overwite: overwrite existing outputs (parameter name
            keeps its original spelling for caller compatibility)
        :param overwrite_all: overwrite everything; implies force_overwite
        """
        filepath = Path(filepath).resolve()
        self.filepath = get_raw_folder(filepath)
        self.fo = force_overwite
        self.foa = overwrite_all

        # Overwriting everything implies overwriting individual outputs too
        if self.foa:
            self.fo = True

        self.configuration_path = (get_config_folder(self.filepath.parent) /
                                   "calibration")
        calibration_config_file = self.configuration_path / "calibration.yaml"
        if calibration_config_file.exists():
            Console.info("Loading existing calibration.yaml at {}".format(
                calibration_config_file))
        else:
            # No user configuration yet: generate one from the packaged
            # default and stop so the user can review/edit it.
            root = Path(__file__).parents[1]
            default_file = root / "auv_cal/default_yaml" / "default_calibration.yaml"
            Console.warn("Cannot find {}, generating default from {}".format(
                calibration_config_file, default_file))
            # save localisation yaml to processed directory
            # NOTE(review): stdlib pathlib.Path has no .copy() method —
            # presumably this project uses a patched/extended Path; verify.
            default_file.copy(calibration_config_file)

            Console.warn("Edit the file at: \n\t" +
                         str(calibration_config_file))
            Console.warn(
                "Try to use relative paths to the calibration datasets")
            Console.quit(
                "Modify the file calibration.yaml and run this code again.")

        with calibration_config_file.open("r") as stream:
            self.calibration_config = yaml.safe_load(stream)

        # Create the calibration folder at the same level as the dives
        self.output_path = get_processed_folder(
            self.filepath.parent) / "calibration"
        if not self.output_path.exists():
            self.output_path.mkdir(parents=True)
        if not self.configuration_path.exists():
            self.configuration_path.mkdir(parents=True)
def parse_seaxerocks_images(mission, vehicle, category, ftype, outpath):
    """Parse SeaXerocks3 stereo-camera and laser image timestamps.

    Reads FileTime.csv for the stereo pair (cameras 1/2) and for the laser
    camera (camera 3), converts date/time fields to epoch timestamps and
    builds per-image records in either "acfr" (string) or "oplab" (dict)
    output format.

    :param mission: mission configuration (image timezone/offset and cameras)
    :param vehicle: vehicle configuration (unused here; kept for parser API)
    :param category: data category (unused here; kept for parser API)
    :param ftype: "acfr" or "oplab" output format
    :param outpath: processed-folder path used to locate the raw data
    :return: list of dicts (oplab) or one concatenated string (acfr)
    """
    data_list = []
    # ACFR output is accumulated as one string instead of a list
    if ftype == "acfr":
        data_list = ""

    # parser meta data
    class_string = "measurement"
    frame_string = "body"
    category_stereo = "image"
    category_laser = "laser"
    sensor_string = "seaxerocks_3"

    timezone = mission.image.timezone
    timeoffset = mission.image.timeoffset
    camera1_filepath = mission.image.cameras[0].path
    camera2_filepath = mission.image.cameras[1].path
    camera3_filepath = mission.image.cameras[2].path
    camera1_label = mission.image.cameras[0].name
    camera2_label = mission.image.cameras[1].name
    camera3_label = mission.image.cameras[2].name

    epoch_timestamp_stereo = []
    epoch_timestamp_laser = []
    epoch_timestamp_camera1 = []
    epoch_timestamp_camera2 = []
    epoch_timestamp_camera3 = []
    stereo_index = []
    laser_index = []
    camera1_index = []
    camera2_index = []
    camera3_index = []
    camera1_filename = []
    camera2_filename = []
    camera3_filename = []

    # Build serial strings from the labels, replacing "/" with "_"
    # (note: index 0 is never checked — presumably labels never start
    # with "/"; verify against mission files)
    camera1_serial = list(camera1_label)
    camera2_serial = list(camera2_label)
    camera3_serial = list(camera3_label)

    for i in range(1, len(camera1_label)):
        if camera1_label[i] == "/":
            camera1_serial[i] = "_"

    for i in range(1, len(camera2_label)):
        if camera2_label[i] == "/":
            camera2_serial[i] = "_"

    for i in range(1, len(camera3_label)):
        if camera3_label[i] == "/":
            camera3_serial[i] = "_"

    camera1_serial = "".join(camera1_serial)
    camera2_serial = "".join(camera2_serial)
    camera3_serial = "".join(camera3_serial)

    i = 0
    # read in timezone
    # TODO change ALL timezones to integers
    # NOTE(review): a string timezone other than utc/jst leaves
    # timezone_offset unbound and raises later — confirm intended inputs
    if isinstance(timezone, str):
        if timezone == "utc" or timezone == "UTC":
            timezone_offset = 0
        elif timezone == "jst" or timezone == "JST":
            timezone_offset = 9
    else:
        try:
            timezone_offset = float(timezone)
        except ValueError:
            print(
                "Error: timezone",
                timezone,
                "in mission.cfg not recognised, \
                  please enter value from UTC in hours",
            )
            return

    # convert to seconds from utc
    # timeoffset = -timezone_offset*60*60 + timeoffset

    Console.info("  Parsing " + sensor_string + " images...")

    # Stereo FileTime.csv lives one level above the camera1 image folder
    cam1_path = get_raw_folder(outpath / ".." / camera1_filepath / "..")
    cam1_filetime = cam1_path / "FileTime.csv"

    # Rows: index, date (yyyymmdd), time (HHMMSS), milliseconds
    with cam1_filetime.open("r", encoding="utf-8", errors="ignore") as filein:
        for line in filein.readlines():
            stereo_index_timestamps = line.strip().split(",")

            index_string = stereo_index_timestamps[0]
            date_string = stereo_index_timestamps[1]
            time_string = stereo_index_timestamps[2]
            ms_time_string = stereo_index_timestamps[3]

            # read in date
            if date_string != "date":  # ignore header
                stereo_index.append(index_string)
                if len(date_string) != 8:
                    Console.warn(
                        "Date string ({}) in FileTime.csv file has "
                        "unexpected length. Expected length: 8.".format(
                            date_string))
                yyyy = int(date_string[0:4])
                mm = int(date_string[4:6])
                dd = int(date_string[6:8])

                # read in time
                if len(time_string) != 6:
                    Console.warn(
                        "Time string ({}) in FileTime.csv file has "
                        "unexpected length. Expected length: 6.".format(
                            time_string))
                hour = int(time_string[0:2])
                mins = int(time_string[2:4])
                secs = int(time_string[4:6])
                msec = int(ms_time_string[0:3])

                epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs,
                                                timezone_offset)

                epoch_timestamp_stereo.append(
                    float(epoch_time + msec / 1000 + timeoffset))

    # Both stereo cameras share the same frame indices
    camera1_list = ["{}.raw".format(i) for i in stereo_index]
    camera2_list = ["{}.raw".format(i) for i in stereo_index]

    for i in range(len(camera1_list)):
        camera1_image = camera1_list[i].split(".")
        camera2_image = camera2_list[i].split(".")

        camera1_index.append(camera1_image[0])
        camera2_index.append(camera2_image[0])

    j = 0
    for i in range(len(camera1_list)):
        # find corresponding timestamp even if some images are deleted
        if camera1_index[i] == stereo_index[j]:
            epoch_timestamp_camera1.append(epoch_timestamp_stereo[j])
            epoch_timestamp_camera2.append(epoch_timestamp_stereo[j])
            # NOTE(review): date always derives from the FIRST stereo
            # timestamp, not entry j — confirm this is intentional
            date = epoch_to_day(epoch_timestamp_stereo[0])
            if ftype == "acfr":
                camera1_filename.append("sx3_" + date[2:4] + date[5:7] +
                                        date[8:10] + "_image" +
                                        str(camera1_index[i]) + "_FC.png")
                camera2_filename.append("sx3_" + date[2:4] + date[5:7] +
                                        date[8:10] + "_image" +
                                        str(camera2_index[i]) + "_AC.png")
            j = j + 1
        elif stereo_index[j] > camera1_index[i]:
            j = j + 1
        else:
            j = j - 1

    if ftype == "oplab":
        camera1_filename = [line for line in camera1_list]
        camera2_filename = [line for line in camera2_list]

    for i in range(len(camera1_list)):
        if ftype == "acfr":
            data = ("VIS: " + str(float(epoch_timestamp_camera1[i])) + " [" +
                    str(float(epoch_timestamp_camera1[i])) + "] " +
                    str(camera1_filename[i]) + " exp: 0\n")
            data_list += data
            data = ("VIS: " + str(float(epoch_timestamp_camera2[i])) + " [" +
                    str(float(epoch_timestamp_camera2[i])) + "] " +
                    str(camera2_filename[i]) + " exp: 0\n")
            data_list += data

        if ftype == "oplab":
            data = {
                "epoch_timestamp":
                float(epoch_timestamp_camera1[i]),
                "class":
                class_string,
                "sensor":
                sensor_string,
                "frame":
                frame_string,
                "category":
                category_stereo,
                "camera1": [{
                    "epoch_timestamp":
                    float(epoch_timestamp_camera1[i]),
                    "serial":
                    camera1_serial,
                    "filename":
                    str(camera1_filepath + "/" + camera1_filename[i]),
                }],
                "camera2": [{
                    "epoch_timestamp":
                    float(epoch_timestamp_camera2[i]),
                    "serial":
                    camera2_serial,
                    "filename":
                    str(camera2_filepath + "/" + camera2_filename[i]),
                }],
            }
            data_list.append(data)

    # Laser camera (camera 3) has its own FileTime.csv in its own folder
    cam3_path = get_raw_folder(outpath / ".." / camera3_filepath)
    cam3_filetime = cam3_path / "FileTime.csv"
    with cam3_filetime.open("r", encoding="utf-8", errors="ignore") as filein:
        for line in filein.readlines():
            laser_index_timestamps = line.strip().split(",")

            if len(laser_index_timestamps) < 4:
                Console.warn("The laser FileTime.csv is apparently corrupt...")
                continue
            index_string = laser_index_timestamps[0]
            date_string = laser_index_timestamps[1]
            time_string = laser_index_timestamps[2]
            ms_time_string = laser_index_timestamps[3]

            # read in date
            if date_string != "date":  # ignore header
                laser_index.append(index_string)

                yyyy = int(date_string[0:4])
                mm = int(date_string[4:6])
                dd = int(date_string[6:8])

                # read in time
                hour = int(time_string[0:2])
                mins = int(time_string[2:4])
                secs = int(time_string[4:6])
                msec = int(ms_time_string[0:3])

                epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs,
                                                timezone_offset)

                epoch_timestamp_laser.append(
                    float(epoch_time + msec / 1000 + timeoffset))

    # try use pandas for all parsers, should be faster
    camera3_list = ["{}".format(i) for i in laser_index]

    # The LM165 images are saved either as jpg or as tif, and are split into
    # subfolders either at every 1000 or every 10000 images. Find out which
    # convention is used in current dataset by looking at the files.
    # (s and extension stay unbound when camera3_list is empty, but then
    # the loop below does not run either)
    if len(camera3_list) > 0:
        s, extension = determine_extension_and_images_per_folder(
            cam3_path, camera3_list, camera3_label)

    for i in range(len(camera3_list)):
        # Subfolder name is a 3-digit slice of the index starting at s
        camera3_filename.append("{}/image{}.{}".format(
            camera3_list[i][s:(s + 3)],
            camera3_list[i],
            extension,  # noqa: E203
        ))
        camera3_index.append(camera3_list[i])

    j = 0
    # original comment: find corresponding timestamp even if some images are
    # deleted
    for i in range(len(camera3_filename)):
        if camera3_index[i] == laser_index[j]:
            epoch_timestamp_camera3.append(epoch_timestamp_laser[j])
            j = j + 1
        # Jin: incomplete? it means that laser data is missing for this image
        # file, so no epoch_timestamp data, and do what when this happens?
        elif laser_index[j] > camera3_index[i]:
            j = j + 1
        else:
            # Jin: incomplete and possibly wrong? it means that this laser
            # data is extra, with no accompanying image file, so it should be
            # j+1 till index match?
            j = j - 1

        if ftype == "oplab":
            data = {
                "epoch_timestamp": float(epoch_timestamp_camera3[i]),
                "class": class_string,
                "sensor": sensor_string,
                "frame": frame_string,
                "category": category_laser,
                "serial": camera3_serial,
                "filename": camera3_filepath + "/" + str(camera3_filename[i]),
            }
            data_list.append(data)

    Console.info("  ...done parsing " + sensor_string + " images.")

    return data_list
Example #7
0
def parse_rdi(mission, vehicle, category, ftype, outpath):
    """Parse an RDI DVL ASCII log into velocity/orientation/altitude records.

    The log is a line protocol (":SA", ":TS", ":BI", ":BD", ... prefixes).
    State is accumulated across lines and the previous packet is exported
    each time a new ":TS" (timestamp) line is seen.

    :param mission: mission configuration (std models and file locations)
    :param vehicle: vehicle configuration (provides dvl yaw offset)
    :param category: Category.VELOCITY, ORIENTATION or ALTITUDE
    :param ftype: output format passed through to the sensor export() calls
    :param outpath: processed-folder path used to locate the raw data
    :return: list of exported records for the requested category
    """

    # parser meta data
    sensor_string = "rdi"
    output_format = ftype
    filename = ""
    filepath = ""

    timeoffset_s = 0

    # RDI format only has two digits for year,
    # extract the other two from dive folder
    # name
    prepend_year = outpath.parents[0].name[0:2]

    # autosub std models
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    orientation_std_offset = mission.orientation.std_offset
    altitude_std_factor = mission.altitude.std_factor
    headingoffset = vehicle.dvl.yaw

    bv = BodyVelocity(velocity_std_factor, velocity_std_offset, headingoffset)
    ot = Orientation(headingoffset, orientation_std_offset)
    al = Altitude(altitude_std_factor)

    bv.sensor_string = sensor_string
    ot.sensor_string = sensor_string
    al.sensor_string = sensor_string

    # Pick the file location and time offset for the requested category
    if category == Category.VELOCITY:
        Console.info("... parsing RDI velocity")
        filename = mission.velocity.filename
        filepath = mission.velocity.filepath
        timeoffset_s = mission.velocity.timeoffset_s
    elif category == Category.ORIENTATION:
        Console.info("... parsing RDI orientation")
        filename = mission.orientation.filename
        filepath = mission.orientation.filepath
        timeoffset_s = mission.orientation.timeoffset_s
    elif category == Category.ALTITUDE:
        Console.info("... parsing RDI altitude")
        filename = mission.altitude.filename
        timeoffset_s = mission.altitude.timeoffset_s
        filepath = mission.altitude.filepath

    logfile = get_raw_folder(outpath / ".." / filepath / filename)
    data_list = []
    altitude_valid = False
    with logfile.open("r", errors="ignore") as rdi_file:
        for line in rdi_file.readlines():
            # Lines start with TS, BI, BS, BE, BD, SA
            parts = line.split(",")
            if parts[0] == ":SA" and len(parts) == 4:
                # Attitude sentence; field order assumed pitch, roll, yaw
                # (degrees) — confirm against the RDI ASCII output spec
                ot.roll = math.radians(float(parts[2]))
                ot.pitch = math.radians(float(parts[1]))
                ot.yaw = math.radians(float(parts[3]))
            elif parts[0] == ":TS" and len(parts) == 7:

                # Every time a new TimeStamp is received, send the previous
                # data packet
                if category == Category.VELOCITY:
                    data = bv.export(output_format)
                    if data is not None:
                        data_list.append(data)
                if category == Category.ORIENTATION:
                    data = ot.export(output_format)
                    if data is not None:
                        data_list.append(data)
                if category == Category.ALTITUDE:
                    data = al.export(output_format)
                    if data is not None:
                        data_list.append(data)

                # Timestamp field is YYMMDDhhmmsscc (cc = centiseconds)
                date = parts[1]
                year = int(prepend_year + date[0:2])
                month = int(date[2:4])
                day = int(date[4:6])
                hour = int(date[6:8])
                minute = int(date[8:10])
                second = int(date[10:12])
                # centiseconds converted to seconds
                millisecond = float(date[12:14]) * 1e-2
                date = datetime(
                    int(year),
                    int(month),
                    int(day),
                    int(hour),
                    int(minute),
                    int(second),
                )
                stamp = (float(calendar.timegm(date.timetuple())) +
                         millisecond + timeoffset_s)
                bv.epoch_timestamp = stamp
                bv.epoch_timestamp_dvl = stamp
                ot.epoch_timestamp = stamp
                al.epoch_timestamp = stamp
                al.altitude_timestamp = stamp
                al.sound_velocity = float(parts[5])
                altitude_valid = False
            elif parts[0] == ":BI" and len(parts) == 6:
                # Bottom-track instrument velocities; status "A" means valid
                status = parts[5].strip()
                if status == "A":
                    altitude_valid = True
                    # mm/s converted to m/s, rotated by the yaw offset
                    x = float(parts[1]) * 0.001
                    y = float(parts[2]) * 0.001
                    bv.x_velocity = x * math.cos(headingoffset) - y * math.sin(
                        headingoffset)
                    bv.y_velocity = x * math.sin(headingoffset) + y * math.cos(
                        headingoffset)
                    bv.z_velocity = float(parts[3]) * 0.001
                    bv.x_velocity_std = float(parts[4]) * 0.001
                    bv.y_velocity_std = float(parts[4]) * 0.001
                    bv.z_velocity_std = float(parts[4]) * 0.001
            elif parts[0] == ":BD" and len(parts) == 6 and altitude_valid:
                # Bottom-track depth/altitude, only when :BI was valid
                al.altitude = float(parts[4])
                al.altitude_std = altitude_std_factor * al.altitude
    # print(data_list)
    return data_list
Example #8
0
def parse_ntnu_dvl(mission, vehicle, category, output_format, outpath):
    """Parse NTNU DVL tab-separated log files for velocity or altitude.

    Iterates every *.log file in the mission velocity folder, loads it with
    pandas and exports one record per valid row for the requested category.
    Unsupported categories terminate via Console.quit.

    :param mission: mission configuration (velocity/altitude std models)
    :param vehicle: vehicle configuration (provides dvl yaw offset)
    :param category: Category.VELOCITY or Category.ALTITUDE
    :param output_format: format passed through to the sensor export() calls
    :param outpath: processed-folder path used to locate the raw data
    :return: list of exported records
    """
    # Get your data from a file using mission paths, for example
    filepath = mission.velocity.filepath
    log_file_path = get_raw_folder(outpath / ".." / filepath)

    # Human-readable category name for the summary log line
    category_str = None
    if category == Category.VELOCITY:
        category_str = "velocity"
    elif category == Category.ALTITUDE:
        category_str = "altitude"

    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    heading_offset = vehicle.dvl.yaw
    body_velocity = BodyVelocity(
        velocity_std_factor,
        velocity_std_offset,
        heading_offset,
    )
    altitude = Altitude(altitude_std_factor=mission.altitude.std_factor)

    data_list = []

    num_entries = 0
    num_valid_entries = 0

    # For each log file
    for log_file in log_file_path.glob("*.log"):
        # Open the log file
        # NOTE(review): header_list is not defined in this function —
        # presumably a module-level column-name list; verify.
        df = pd.read_csv(log_file,
                         sep="\t",
                         skiprows=(0, 1),
                         names=header_list)

        if category == Category.VELOCITY:
            Console.info("... parsing velocity")
            # For each row in the file
            for index, row in df.iterrows():
                body_velocity.from_ntnu_dvl(str(log_file.name), row)
                num_entries += 1
                if body_velocity.valid():
                    # DVL provides -32 when no bottom lock
                    data = body_velocity.export(output_format)
                    num_valid_entries += 1
                    data_list.append(data)
        elif category == Category.ALTITUDE:
            Console.info("... parsing altitude")
            # For each row in the file
            for index, row in df.iterrows():
                num_entries += 1
                altitude.from_ntnu_dvl(str(log_file.name), row)
                if altitude.valid():
                    num_valid_entries += 1
                    data = altitude.export(output_format)
                    data_list.append(data)

        elif category == Category.ORIENTATION:
            Console.quit("NTNU DVL parser has no ORIENTATION available")
        elif category == Category.DEPTH:
            Console.quit("NTNU DVL parser has no DEPTH available")
        elif category == Category.USBL:
            Console.quit("NTNU DVL parser has no USBL available")
        else:
            Console.quit("NTNU DVL parser has no category", category,
                         "available")

    Console.info(
        "... parsed",
        num_entries,
        "entries, of which",
        num_valid_entries,
        "are valid for category",
        category_str,
    )

    return data_list
Example #9
0
def parse_gaps(mission, vehicle, category, ftype, outpath):
    """Parse IXBLUE GAPS USBL ``.dat`` files into navigation records.

    Each file is a stream of NMEA-style sentences. A beacon fix
    ($PTSAG sentence carrying the mission's beacon label) is bracketed
    by ship position sentences ($PTSAG with id "0") and ship heading
    sentences ($HEHDT); the ship state is linearly interpolated to the
    beacon fix timestamp before the record is exported.

    Parameters
    ----------
    mission : Mission
        Mission configuration; ``mission.usbl`` and ``mission.origin``
        fields are read.
    vehicle : Vehicle
        Unused here; presumably kept for signature compatibility with
        the other parsers in this module.
    category : Category
        Category to export; USBL and DEPTH records are produced for
        "oplab" output.
    ftype : str
        Output format: "oplab" (list of dicts) or "acfr" (one string).
    outpath : Path
        Processed-folder path; the raw data folder is resolved from it.

    Returns
    -------
    list or str
        Parsed records: a list of dicts for "oplab", a concatenated
        string of "SSBL_FIX:" lines for "acfr".
    """

    Console.info("  Parsing GAPS data...")

    # parser meta data written into every exported record
    class_string = "measurement"
    sensor_string = "gaps"
    frame_string = "inertial"

    timezone = mission.usbl.timezone
    timeoffset = mission.usbl.timeoffset
    filepath = mission.usbl.filepath
    usbl_id = mission.usbl.label
    latitude_reference = mission.origin.latitude
    longitude_reference = mission.origin.longitude

    # NMEA sentence headers matched below (matched by substring, so a
    # "<< $PTSAG" prefix in the log is also accepted)
    header_absolute = "$PTSAG"  # georeferenced position strings
    header_heading = "$HEHDT"  # ship heading strings

    # linear uncertainty model: std = factor * slant_range + offset
    distance_std_factor = mission.usbl.std_factor
    distance_std_offset = mission.usbl.std_offset
    broken_packet_flag = False

    # timezone name/offset -> offset in hours
    timezone_offset = read_timezone(timezone)

    # NOTE(review): timezone_offset is passed to date_time_to_epoch
    # below rather than folded into timeoffset here.

    # determine file paths: all *.dat files in the raw USBL folder
    path = (outpath / ".." / filepath).absolute()
    filepath = get_raw_folder(path)
    all_list = os.listdir(str(filepath))
    gaps_list = [line for line in all_list if ".dat" in line]
    Console.info("  " + str(len(gaps_list)) + " GAPS file(s) found")

    # extract data from files; "acfr" output is accumulated as a string
    data_list = []
    if ftype == "acfr":
        data_list = ""
    for i in range(len(gaps_list)):
        path_gaps = filepath / gaps_list[i]

        with path_gaps.open("r", errors="ignore") as gaps:
            # State machine pairing ship data around a beacon fix:
            #   0-1 : collecting prior ship position/heading
            #   2   : prior ship position and heading known
            #   3   : beacon fix read, waiting for posterior ship data
            #   4   : posterior ship position read
            #   5   : posterior heading read -> interpolate and export
            flag_got_time = 0
            for line in gaps.readlines():
                # strip the NMEA "*XX" checksum, then split into fields
                line_split = line.strip().split("*")
                line_split_no_checksum = line_split[0].strip().split(",")
                broken_packet_flag = False
                # $PTSAG: georeferenced position (ship when field 6 is
                # "0", beacon when it matches the mission's usbl label)
                if header_absolute in line_split_no_checksum[0]:
                    # Beacon fix, only accepted once the prior ship
                    # position and heading are known (state 2)
                    if line_split_no_checksum[6] == str(
                            usbl_id) and flag_got_time == 2:
                        # "F"/"1": assumed fix-validity/quality flags of
                        # the $PTSAG sentence -- TODO confirm semantics
                        if (line_split_no_checksum[11] == "F"
                                and line_split_no_checksum[13] == "1"):
                            # read in date (fields: day, month, year)
                            yyyy = int(line_split_no_checksum[5])
                            mm = int(line_split_no_checksum[4])
                            dd = int(line_split_no_checksum[3])

                            # read in time, formatted HHMMSS.mmm
                            time_string = str(line_split_no_checksum[2])

                            try:
                                hour = int(time_string[0:2])
                                mins = int(time_string[2:4])
                                secs = int(time_string[4:6])
                                msec = int(time_string[7:10])
                            except ValueError:
                                # NOTE(review): on failure the values
                                # from the previous sentence are reused
                                # below; the record is discarded later
                                # via broken_packet_flag, so the stale
                                # values are never exported
                                broken_packet_flag = True
                                pass

                            # GAPS occasionally emits out-of-range time
                            # fields; roll them over but flag the packet
                            if secs >= 60:
                                mins += 1
                                secs = 0
                                broken_packet_flag = True

                            if mins >= 60:
                                hour += 1
                                mins = 0
                                broken_packet_flag = True

                            if hour >= 24:
                                dd += 1
                                hour = 0
                                broken_packet_flag = True

                            epoch_time = date_time_to_epoch(
                                yyyy, mm, dd, hour, mins, secs,
                                timezone_offset)

                            # add milliseconds and the configured offset
                            epoch_timestamp = epoch_time + msec / 1000 + timeoffset

                            # get position: ddmm.mmmm / dddmm.mmmm with
                            # separate hemisphere fields
                            latitude_negative_flag = False
                            longitude_negative_flag = False
                            latitude_string = line_split_no_checksum[7]
                            latitude_degrees = int(latitude_string[0:2])
                            latitude_minutes = float(latitude_string[2:10])
                            if line_split_no_checksum[8] == "S":
                                latitude_negative_flag = True

                            longitude_string = line_split_no_checksum[9]
                            longitude_degrees = int(longitude_string[0:3])
                            longitude_minutes = float(longitude_string[3:11])
                            if line_split_no_checksum[10] == "W":
                                longitude_negative_flag = True

                            depth = float(line_split_no_checksum[12])

                            latitude = latitude_degrees + latitude_minutes / 60.0
                            longitude = longitude_degrees + longitude_minutes / 60.0

                            if latitude_negative_flag:
                                latitude = -latitude
                            if longitude_negative_flag:
                                longitude = -longitude

                            # fix accepted -> wait for posterior data
                            flag_got_time = 3
                        else:
                            # invalid fix: restart the pairing sequence
                            flag_got_time = 0

                    # Ship position sentence (transceiver id "0")
                    if line_split_no_checksum[6] == "0":
                        if flag_got_time < 3:
                            # no beacon fix yet: keep updating the
                            # "prior" ship position

                            # read in date

                            yyyy = int(line_split_no_checksum[5])
                            mm = int(line_split_no_checksum[4])
                            dd = int(line_split_no_checksum[3])

                            # read in time, formatted HHMMSS.mmm
                            time_string = str(line_split_no_checksum[2])
                            hour = int(time_string[0:2])
                            mins = int(time_string[2:4])
                            secs = int(time_string[4:6])

                            try:
                                msec = int(time_string[7:10])
                            except ValueError:
                                broken_packet_flag = True
                                pass

                            # roll over out-of-range time fields (same
                            # handling as the beacon branch above)
                            if secs >= 60:
                                mins += 1
                                secs = 0
                                broken_packet_flag = True

                            if mins >= 60:
                                hour += 1
                                mins = 0
                                broken_packet_flag = True

                            if hour >= 24:
                                dd += 1
                                hour = 0
                                broken_packet_flag = True

                            epoch_time = date_time_to_epoch(
                                yyyy, mm, dd, hour, mins, secs,
                                timezone_offset)
                            epoch_timestamp_ship_prior = (epoch_time +
                                                          msec / 1000 +
                                                          timeoffset)

                            # get position (degrees + decimal minutes)
                            latitude_string = line_split_no_checksum[7]
                            latitude_degrees_ship_prior = int(
                                latitude_string[0:2])
                            latitude_minutes_ship_prior = float(
                                latitude_string[2:10])
                            latitude_prior = (
                                latitude_degrees_ship_prior +
                                latitude_minutes_ship_prior / 60.0)
                            if line_split_no_checksum[8] == "S":
                                latitude_prior = -latitude_prior

                            longitude_string = line_split_no_checksum[9]
                            longitude_degrees_ship_prior = int(
                                longitude_string[0:3])
                            longitude_minutes_ship_prior = float(
                                longitude_string[3:11])
                            longitude_prior = (
                                longitude_degrees_ship_prior +
                                longitude_minutes_ship_prior / 60.0)
                            if line_split_no_checksum[10] == "W":
                                longitude_prior = -longitude_prior

                            # advance state 0/1 -> 1/2 (position known)
                            if flag_got_time < 2:
                                flag_got_time = flag_got_time + 1

                        elif flag_got_time >= 3:
                            # first ship position after the beacon fix:
                            # becomes the "posterior" sample
                            if line_split_no_checksum[6] == "0":
                                # (always true here -- already checked
                                # by the enclosing if)

                                # read in date
                                yyyy = int(line_split_no_checksum[5])
                                mm = int(line_split_no_checksum[4])
                                dd = int(line_split_no_checksum[3])

                                # read in time, formatted HHMMSS.mmm;
                                # NOTE(review): no ValueError guard or
                                # rollover handling in this branch,
                                # unlike the two branches above
                                time_string = str(line_split_no_checksum[2])
                                hour = int(time_string[0:2])
                                mins = int(time_string[2:4])
                                secs = int(time_string[4:6])
                                msec = int(time_string[7:10])

                                # calculate epoch time
                                epoch_time = date_time_to_epoch(
                                    yyyy,
                                    mm,
                                    dd,
                                    hour,
                                    mins,
                                    secs,
                                    timezone_offset,
                                )
                                epoch_timestamp_ship_posterior = (epoch_time +
                                                                  msec / 1000 +
                                                                  timeoffset)

                                # get position (degrees + decimal mins)
                                latitude_string = line_split_no_checksum[7]
                                latitude_degrees_ship_posterior = int(
                                    latitude_string[0:2])
                                latitude_minutes_ship_posterior = float(
                                    latitude_string[2:10])
                                latitude_posterior = (
                                    latitude_degrees_ship_posterior +
                                    latitude_minutes_ship_posterior / 60.0)
                                if line_split_no_checksum[8] == "S":
                                    latitude_posterior = -latitude_posterior

                                longitude_string = line_split_no_checksum[9]
                                longitude_degrees_ship_posterior = int(
                                    longitude_string[0:3])
                                longitude_minutes_ship_posterior = float(
                                    longitude_string[3:11])
                                longitude_posterior = (
                                    longitude_degrees_ship_posterior +
                                    longitude_minutes_ship_posterior / 60.0)
                                if line_split_no_checksum[10] == "W":
                                    longitude_posterior = -longitude_posterior

                                # advance state 3 -> 4
                                flag_got_time = flag_got_time + 1

                # $HEHDT: ship heading; stored as prior or posterior
                # depending on whether a beacon fix has been seen
                if header_heading in line_split_no_checksum[0]:
                    if flag_got_time < 3:
                        heading_ship_prior = float(line_split_no_checksum[1])
                        if flag_got_time < 2:
                            flag_got_time = flag_got_time + 1

                    else:

                        heading_ship_posterior = float(
                            line_split_no_checksum[1])
                        flag_got_time = flag_got_time + 1

                if flag_got_time >= 5:
                    # interpolate the ship's location and heading to the
                    # beacon fix timestamp
                    inter_time = (epoch_timestamp - epoch_timestamp_ship_prior
                                  ) / (epoch_timestamp_ship_posterior -
                                       epoch_timestamp_ship_prior)
                    longitude_ship = (inter_time *
                                      (longitude_posterior - longitude_prior) +
                                      longitude_prior)
                    latitude_ship = (inter_time *
                                     (latitude_posterior - latitude_prior) +
                                     latitude_prior)
                    heading_ship = (
                        inter_time *
                        (heading_ship_posterior - heading_ship_prior) +
                        heading_ship_prior)

                    # wrap the interpolated heading into [0, 360]
                    while heading_ship > 360:
                        heading_ship = heading_ship - 360
                    while heading_ship < 0:
                        heading_ship = heading_ship + 360

                    lateral_distance, bearing = latlon_to_metres(
                        latitude, longitude, latitude_ship, longitude_ship)

                    # slant range ship->beacon, input to the std model
                    distance = math.sqrt(lateral_distance * lateral_distance +
                                         depth * depth)
                    distance_std = distance_std_factor * distance + distance_std_offset

                    # express the uncertainty as latitude/longitude stds
                    latitude_offset, longitude_offset = metres_to_latlon(
                        abs(latitude),
                        abs(longitude),
                        distance_std,
                        distance_std,
                    )

                    latitude_std = abs(abs(latitude) - latitude_offset)
                    longitude_std = abs(abs(longitude) - longitude_offset)

                    # ship position in metres from the mission origin
                    lateral_distance_ship, bearing_ship = latlon_to_metres(
                        latitude_ship,
                        longitude_ship,
                        latitude_reference,
                        longitude_reference,
                    )
                    eastings_ship = (math.sin(bearing_ship * math.pi / 180.0) *
                                     lateral_distance_ship)
                    northings_ship = (
                        math.cos(bearing_ship * math.pi / 180.0) *
                        lateral_distance_ship)

                    # beacon position in metres from the mission origin
                    lateral_distance_target, bearing_target = latlon_to_metres(
                        latitude,
                        longitude,
                        latitude_reference,
                        longitude_reference,
                    )
                    eastings_target = (
                        math.sin(bearing_target * math.pi / 180.0) *
                        lateral_distance_target)
                    northings_target = (
                        math.cos(bearing_target * math.pi / 180.0) *
                        lateral_distance_target)

                    if not broken_packet_flag:

                        if ftype == "oplab" and category == Category.USBL:
                            data = {
                                "epoch_timestamp":
                                float(epoch_timestamp),
                                "class":
                                class_string,
                                "sensor":
                                sensor_string,
                                "frame":
                                frame_string,
                                "category":
                                category,
                                "data_ship": [
                                    {
                                        "latitude": float(latitude_ship),
                                        "longitude": float(longitude_ship),
                                    },
                                    {
                                        "northings": float(northings_ship),
                                        "eastings": float(eastings_ship),
                                    },
                                    {
                                        "heading": float(heading_ship)
                                    },
                                ],
                                "data_target": [
                                    {
                                        "latitude": float(latitude),
                                        "latitude_std": float(latitude_std),
                                    },
                                    {
                                        "longitude": float(longitude),
                                        "longitude_std": float(longitude_std),
                                    },
                                    {
                                        "northings": float(northings_target),
                                        "northings_std": float(distance_std),
                                    },
                                    {
                                        "eastings": float(eastings_target),
                                        "eastings_std": float(distance_std),
                                    },
                                    {
                                        "depth": float(depth),
                                        "depth_std": float(distance_std),
                                    },
                                    {
                                        "distance_to_ship": float(distance)
                                    },
                                ],
                            }
                            data_list.append(data)
                        elif ftype == "oplab" and category == Category.DEPTH:
                            data = {
                                "epoch_timestamp":
                                float(epoch_timestamp),
                                "epoch_timestamp_depth":
                                float(epoch_timestamp),
                                "class":
                                class_string,
                                "sensor":
                                sensor_string,
                                "frame":
                                "inertial",
                                "category":
                                Category.DEPTH,
                                "data": [{
                                    "depth": float(depth),
                                    "depth_std": float(distance_std),
                                }],
                            }
                            data_list.append(data)

                        if ftype == "acfr":
                            data = (
                                "SSBL_FIX: " + str(float(epoch_timestamp)) +
                                " ship_x: " + str(float(northings_ship)) +
                                " ship_y: " + str(float(eastings_ship)) +
                                " target_x: " + str(float(northings_target)) +
                                " target_y: " + str(float(eastings_target)) +
                                " target_z: " + str(float(depth)) +
                                " target_hr: " + str(float(lateral_distance)) +
                                " target_sr: " + str(float(distance)) +
                                " target_bearing: " + str(float(bearing)) +
                                "\n")
                            data_list += data

                    else:
                        # drop records whose time fields were malformed
                        Console.warn("Badly formatted packet (GAPS TIME)")
                        Console.warn(line)

                    # reset the state machine for the next beacon fix
                    flag_got_time = 0

    Console.info("  ...done parsing GAPS data.")

    return data_list
    def laser_imp(self, c0, c1):
        """Run laser plane calibration for the camera pair (c0, c1).

        c1 is the camera observing the laser line; c0 is the other
        camera of the stereo pair.  Quits early if a calibration file
        already exists and overwriting (-F) was not requested, and
        triggers a stereo calibration first if none is found.
        """
        cam_name = c1["name"]
        calib_top = self.output_path / (
            f"laser_calibration_top_{cam_name}.yaml"
        )
        calib_bottom = self.output_path / (
            f"laser_calibration_bottom_{cam_name}.yaml"
        )
        Console.info(f"Looking for a calibration file at {calib_top}")
        if calib_top.exists() and not self.fo:
            Console.quit(
                f"The laser planes from cameras {c1['name']} and {c0['name']}"
                " have already been calibrated. If you want to overwite the "
                "calibration, use the -F flag."
            )
        Console.info(
            "The laser planes are not calibrated, running laser calibration..."
        )

        # A stereo calibration is a prerequisite; create one if missing
        stereo_calib = self.output_path / (
            f"stereo_{c1['name']}_{c0['name']}.yaml"
        )
        if not stereo_calib.exists():
            Console.warn(
                f"Could not find a stereo calibration file {stereo_calib}..."
            )
            self.stereo()

        other_cam_name = c0["name"]
        other_cam_path = get_raw_folder(self.filepath) / str(
            c0["laser_calibration"]["path"]
        )
        other_cam_glob = str(c0["laser_calibration"]["glob_pattern"])
        laser_cam_name = c1["name"]
        laser_cam_path = get_raw_folder(self.filepath) / str(
            c1["laser_calibration"]["path"]
        )
        laser_cam_glob = str(c1["laser_calibration"]["glob_pattern"])

        # Fall back to the processed folder when raw images are absent
        if not other_cam_path.exists():
            other_cam_path = get_processed_folder(other_cam_path)
            if not other_cam_path.exists():
                Console.quit(
                    "Could not find stereo image folder, neither in raw nor "
                    f"in processed folder ({other_cam_path})."
                )
        if not laser_cam_path.exists():
            laser_cam_path = get_processed_folder(laser_cam_path)
            if not laser_cam_path.exists():
                Console.quit(
                    "Could not find stereo image folder, neither in raw nor "
                    f"in processed folder ({laser_cam_path})."
                )
        other_cam_path = other_cam_path.resolve()
        laser_cam_path = laser_cam_path.resolve()
        Console.info(
            f"Reading stereo images of laser line from {other_cam_path}"
            f" and {laser_cam_path}"
        )
        # Default: do not skip any leading images
        self.calibration_config.setdefault("skip_first", 0)
        calibrate_laser(
            laser_cam_name,
            laser_cam_path,
            laser_cam_glob,
            other_cam_name,
            other_cam_path,
            other_cam_glob,
            stereo_calib,
            self.calibration_config["laser_calibration"],
            calib_top,
            calib_bottom,
            self.calibration_config["skip_first"],
            self.fo,
            self.foa,
        )
Example #11
0
    def __init__(self, navigation_file, image_path, reference_lat,
                 reference_lon):
        """Interpolate HyBis navigation onto a folder of images.

        Reads the navigation CSV, fills altitude/depth dropouts, then
        builds ``self.data``: a CSV header followed by one line per
        image with navigation interpolated to the image file's
        modification time.

        Parameters
        ----------
        navigation_file : str or Path
            Navigation CSV, resolved relative to the raw folder. Must
            contain "Date ", "Time", "Roll", "Pitch", "Heading",
            "Pressure", "Altitude", "Hybis Long" and "Hybis Lat"
            columns.
        image_path : str or Path
            Image folder, resolved relative to the raw folder; file
            modification times are used as capture timestamps.
        reference_lat, reference_lon : float
            Origin of the local metric frame; if either is 0, the first
            navigation fix with a non-zero latitude is used instead.
        """
        navigation_file = get_raw_folder(navigation_file)
        image_path = Path(image_path)
        image_path = get_raw_folder(image_path)

        # extract data from files
        # NOTE: the "Date " column name includes a trailing space in
        # the source CSV
        df = pd.read_csv(navigation_file, skipinitialspace=True)
        date = list(df["Date "])
        timestr = list(df["Time"])
        roll = list(df["Roll"])
        pitch = list(df["Pitch"])
        heading = list(df["Heading"])
        depth = list(df["Pressure"])
        altitude = list(df["Altitude"])
        lon = list(df["Hybis Long"])
        lat = list(df["Hybis Lat"])

        Console.info("Found " + str(len(df)) + " navigation records!")

        hybis_vec = []
        for i in range(len(df)):
            # Skip incomplete fixes. Assumes the longitude column is
            # read as a string and valid entries are at least 11
            # characters long -- TODO confirm against the CSV format.
            if len(lon[i]) < 11:
                continue
            p = HyBisPos(
                roll[i],
                pitch[i],
                heading[i],
                depth[i],
                altitude[i],
                lon[i],
                lat[i],
                date[i],
                timestr[i],
            )
            hybis_vec.append(p)

        # Treat zero altitude/depth as sensor dropouts and fill them
        # from the neighbouring samples. NOTE(review): i starts at 0,
        # so a dropout in the first sample interpolates from index -1
        # (the last element) -- verify this is intended.
        for i in range(len(hybis_vec) - 1):
            if hybis_vec[i].altitude == 0:
                hybis_vec[i].altitude = interpolate(
                    hybis_vec[i].epoch_timestamp,
                    hybis_vec[i - 1].epoch_timestamp,
                    hybis_vec[i + 1].epoch_timestamp,
                    hybis_vec[i - 1].altitude,
                    hybis_vec[i + 1].altitude,
                )
            if hybis_vec[i].depth == 0:
                hybis_vec[i].depth = interpolate(
                    hybis_vec[i].epoch_timestamp,
                    hybis_vec[i - 1].epoch_timestamp,
                    hybis_vec[i + 1].epoch_timestamp,
                    hybis_vec[i - 1].depth,
                    hybis_vec[i + 1].depth,
                )

        # Choose the local-frame origin: user-supplied reference, or
        # the first fix with a non-zero latitude
        if reference_lon == 0 or reference_lat == 0:
            i = 0
            while hybis_vec[i].latitude == 0:
                i += 1
            latitude_ref = hybis_vec[i].latitude
            longitude_ref = hybis_vec[i].longitude
        else:
            latitude_ref = reference_lat
            longitude_ref = reference_lon

        # CSV header; subsequent entries are appended one line per image
        self.data = [
            "image_number,",
            "northing [m],",
            "easting [m],",
            "depth [m],",
            "roll [deg],",
            "pitch [deg],",
            "heading [deg],",
            "altitude [m],",
            "timestamp,",
            "latitude [deg],",
            "longitude [deg],",
            "relative_path\n",
        ]

        image_list = sorted(os.listdir(str(image_path)))
        Console.info("Found " + str(len(image_list)) + " images!")
        Console.info("Interpolating...")
        for k, filename in enumerate(image_list):
            # File modification time stands in for the capture time
            modification_time = os.stat(str(image_path) + "/" +
                                        filename).st_mtime
            filename = str(image_path) + "/" + filename

            # Find the navigation sample bracketing the image time
            i = 0
            while (i < len(hybis_vec) - 2
                   and hybis_vec[i].epoch_timestamp < modification_time):
                i += 1

            latitude = interpolate(
                modification_time,
                hybis_vec[i].epoch_timestamp,
                hybis_vec[i + 1].epoch_timestamp,
                hybis_vec[i].latitude,
                hybis_vec[i + 1].latitude,
            )
            longitude = interpolate(
                modification_time,
                hybis_vec[i].epoch_timestamp,
                hybis_vec[i + 1].epoch_timestamp,
                hybis_vec[i].longitude,
                hybis_vec[i + 1].longitude,
            )
            depth = interpolate(
                modification_time,
                hybis_vec[i].epoch_timestamp,
                hybis_vec[i + 1].epoch_timestamp,
                hybis_vec[i].depth,
                hybis_vec[i + 1].depth,
            )
            roll = interpolate(
                modification_time,
                hybis_vec[i].epoch_timestamp,
                hybis_vec[i + 1].epoch_timestamp,
                hybis_vec[i].roll,
                hybis_vec[i + 1].roll,
            )
            pitch = interpolate(
                modification_time,
                hybis_vec[i].epoch_timestamp,
                hybis_vec[i + 1].epoch_timestamp,
                hybis_vec[i].pitch,
                hybis_vec[i + 1].pitch,
            )
            heading = interpolate(
                modification_time,
                hybis_vec[i].epoch_timestamp,
                hybis_vec[i + 1].epoch_timestamp,
                hybis_vec[i].heading,
                hybis_vec[i + 1].heading,
            )
            altitude = interpolate(
                modification_time,
                hybis_vec[i].epoch_timestamp,
                hybis_vec[i + 1].epoch_timestamp,
                hybis_vec[i].altitude,
                hybis_vec[i + 1].altitude,
            )

            # Project to the local metric frame around the reference
            lateral_distance, bearing = latlon_to_metres(
                latitude, longitude, latitude_ref, longitude_ref)
            eastings = math.sin(bearing * math.pi / 180.0) * lateral_distance
            northings = math.cos(bearing * math.pi / 180.0) * lateral_distance

            msg = (str(k) + "," + str(northings) + "," + str(eastings) + "," +
                   str(depth) + "," + str(roll) + "," + str(pitch) + "," +
                   str(heading) + "," + str(altitude) + "," +
                   str(modification_time) + "," + str(latitude) + "," +
                   str(longitude) + "," + str(filename) + "\n")
            self.data.append(msg)
        # NOTE(review): raises IndexError if hybis_vec is empty (e.g.
        # when all longitude entries are shorter than 11 characters)
        self.start_epoch = hybis_vec[0].epoch_timestamp
        self.finish_epoch = hybis_vec[-1].epoch_timestamp
def load_configuration_and_camera_system(path, suffix=None):
    """Generate correct_config and camera system objects from input config
    yaml files

    Parameters
    -----------
    path : Path
        User provided Path of source images
    suffix : str, optional
        Optional suffix for the correct_images yaml filename; when set, the
        file "correct_images_<suffix>.yaml" is used instead of
        "correct_images.yaml".

    Returns
    --------
    tuple
        (CorrectConfig, CameraSystem) built from the resolved yaml files.
    """

    if suffix:
        # Only meaningful when a suffixed correct_images yaml is requested
        Console.warn("Parsing multipaths with suffix:", suffix)

    # resolve paths to raw, processed and config folders
    path_raw_folder = get_raw_folder(path)
    path_config_folder = get_config_folder(path)

    # resolve path to mission.yaml
    path_mission = path_raw_folder / "mission.yaml"

    # mission.yaml is mandatory; quit early if it is missing
    if path_mission.exists():
        Console.info("File mission.yaml found at", path_mission)
    else:
        Console.quit("File mission.yaml file not found at", path_raw_folder)

    # load mission parameters
    mission = Mission(path_mission)

    # camera.yaml may live in /raw or /config; /config takes precedence
    camera_yaml_raw_path = path_raw_folder / "camera.yaml"
    camera_yaml_config_path = path_config_folder / "camera.yaml"

    camera_yaml_path = None
    # Only set when falling back to packaged defaults; stays None when the
    # user supplied their own camera.yaml.
    default_file_path_correct_config = None

    if camera_yaml_config_path.exists():
        if camera_yaml_raw_path.exists():
            Console.info("Found camera.yaml both in /raw and in /config folder")
            Console.info("Using camera.yaml from /config folder")
        else:
            Console.info("Found camera.yaml file in /config folder")
        camera_yaml_path = camera_yaml_config_path
    elif camera_yaml_raw_path.exists():
        Console.info("Found camera.yaml file in /raw folder")
        camera_yaml_path = camera_yaml_raw_path
    else:
        Console.info(
            "Not found camera.yaml file in /raw folder...Using default ",
            "camera.yaml file...",
        )
        # fall back to the default yaml files shipped with the package,
        # selected by the image format declared in mission.yaml
        root = Path(__file__).resolve().parents[1]

        # image format -> (default camera.yaml, default correct_images.yaml)
        default_files = {
            "acfr_standard": (
                "auv_nav/default_yaml/ts1/SSK17-01/camera.yaml",
                "correct_images/default_yaml/acfr/correct_images.yaml",
            ),
            "seaxerocks_3": (
                "auv_nav/default_yaml/ae2000/YK17-23C/camera.yaml",
                "correct_images/default_yaml/sx3/correct_images.yaml",
            ),
            "biocam": (
                "auv_nav/default_yaml/as6/DY109/camera.yaml",
                "correct_images/default_yaml/biocam/correct_images.yaml",
            ),
            "biocam4000_15c": (
                "auv_nav/default_yaml/alr/jc220/camera.yaml",
                "correct_images/default_yaml/biocam4000_15c/correct_images.yaml",
            ),
            "hybis": (
                "auv_nav/default_yaml/hybis/camera.yaml",
                "correct_images/default_yaml/hybis/correct_images.yaml",
            ),
            "ntnu_stereo": (
                "auv_nav/default_yaml/ntnu_stereo/tautra21/camera.yaml",
                "correct_images/default_yaml/ntnu_stereo/correct_images.yaml",
            ),
            "rosbag": (
                "auv_nav/default_yaml/rosbag/grassmap/camera.yaml",
                "correct_images/default_yaml/rosbag/correct_images.yaml",
            ),
        }

        Console.info("Image format:", mission.image.format)

        if mission.image.format not in default_files:
            Console.quit(
                "Image system in camera.yaml does not match with mission.yaml",
                "Provide correct camera.yaml in /raw folder... ",
            )
        camera_file, correct_config_file = default_files[mission.image.format]
        camera_yaml_path = root / camera_file
        default_file_path_correct_config = root / correct_config_file

        if mission.image.format == "rosbag":
            # rosbag datasets need per-dataset topic edits: hand the user a
            # copy of the default camera.yaml in /config
            camera_yaml_path.copy(camera_yaml_config_path)
            Console.info(
                "Copied camera.yaml file to config folder. Please edit it.")
            Console.info("The file is located at", camera_yaml_config_path)

    # instantiate the camera system and setup cameras from mission and
    # config files / auv_nav
    camera_system = CameraSystem(camera_yaml_path, path_raw_folder)
    if camera_system.camera_system != mission.image.format:
        Console.quit(
            "Image system in camera.yaml does not match with mission.yaml",
            "Provide correct camera.yaml in /raw folder",
        )

    # check for correct_config yaml path
    if suffix == "" or suffix is None:
        path_correct_images = path_config_folder / "correct_images.yaml"
    else:
        path_correct_images = path_config_folder / ("correct_images_" +
                                                    suffix + ".yaml")
    if path_correct_images.exists():
        Console.info(
            "Configuration file correct_images.yaml file found at",
            path_correct_images,
        )
    elif default_file_path_correct_config is None:
        # BUGFIX: when the user supplied camera.yaml there is no packaged
        # default to copy from; the original code crashed here with an
        # AttributeError on None instead of reporting the problem.
        Console.quit(
            "Configuration file not found at",
            path_correct_images,
            "and no default correct_images.yaml is available for a",
            "user-supplied camera.yaml. Please provide one in /config.",
        )
    else:
        default_file_path_correct_config.copy(path_correct_images)
        Console.warn(
            "Configuration file not found, copying a default one at",
            path_correct_images,
        )

    # load parameters from correct_config.yaml
    correct_config = CorrectConfig(path_correct_images)
    return correct_config, camera_system
def parse_alr(mission, vehicle, category, ftype, outpath):
    """Parse ALR (Autosub Long Range) Matlab mission logs.

    Loads the .mat file referenced by mission.velocity and exports the
    requested category (VELOCITY, ORIENTATION, DEPTH or ALTITUDE) as a list
    of records in the requested output format.

    Parameters
    -----------
    mission : Mission
        Mission configuration (std models and file locations).
    vehicle : Vehicle
        Vehicle configuration (sensor offsets).
    category : str
        Category constant selecting which sensor stream to extract.
    ftype : str
        Output format passed to each sensor's export().
    outpath : Path
        Processed folder path; the raw log is resolved relative to it.

    Returns
    --------
    list
        Exported data records for the requested category.
    """
    # parser meta data
    sensor_string = "alr"
    output_format = ftype
    # NOTE: the log location is taken from mission.velocity for every
    # category -- all streams live in the same .mat file.
    filename = mission.velocity.filename
    filepath = mission.velocity.filepath

    # ALR std models
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    orientation_std_offset = mission.orientation.std_offset
    altitude_std_factor = mission.altitude.std_factor
    headingoffset = vehicle.dvl.yaw

    body_velocity = BodyVelocity(
        velocity_std_factor, velocity_std_offset, headingoffset
    )
    orientation = Orientation(headingoffset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)

    body_velocity.sensor_string = sensor_string
    orientation.sensor_string = sensor_string
    depth.sensor_string = sensor_string
    altitude.sensor_string = sensor_string

    path = get_raw_folder(outpath / ".." / filepath / filename)

    alr_log = loadmat(str(path))
    mission_data = alr_log["MissionData"]

    data_list = []

    def _store(data, epoch_timestamp, previous_timestamp):
        # Keep timestamps strictly increasing: append new samples, but
        # overwrite the previous entry when time did not advance.
        if epoch_timestamp > previous_timestamp:
            data_list.append(data)
        else:
            data_list[-1] = data
        return epoch_timestamp

    if category == Category.VELOCITY:
        Console.info("Parsing alr velocity...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            if mission_data["DVL_down_BT_is_good"][i] == 1:
                vx = mission_data["DVL_down_BT_x"][i]
                vy = mission_data["DVL_down_BT_y"][i]
                vz = mission_data["DVL_down_BT_z"][i]
                # vx, vy, vz should not be NaN on lines with bottom lock,
                # but check to be on the safe side:
                if not isnan(vx) and not isnan(vy) and not isnan(vz):
                    t = mission_data["epoch_timestamp"][i]
                    body_velocity.from_alr(t, vx, vy, vz)
                    data = body_velocity.export(output_format)
                    previous_timestamp = _store(
                        data, body_velocity.epoch_timestamp,
                        previous_timestamp)
        Console.info("...done parsing alr velocity")
    if category == Category.ORIENTATION:
        Console.info("Parsing alr orientation...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            roll = mission_data["AUV_roll"][i]
            pitch = mission_data["AUV_pitch"][i]
            yaw = mission_data["AUV_heading"][i]
            if not isnan(roll) and not isnan(pitch) and not isnan(yaw):
                t = mission_data["epoch_timestamp"][i]
                orientation.from_alr(t, roll, pitch, yaw)
                data = orientation.export(output_format)
                previous_timestamp = _store(
                    data, orientation.epoch_timestamp, previous_timestamp)
        Console.info("...done parsing alr orientation")
    if category == Category.DEPTH:
        Console.info("Parsing alr depth...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            d = mission_data["AUV_depth"][i]
            if not isnan(d):
                t = mission_data["epoch_timestamp"][i]
                depth.from_alr(t, d)
                data = depth.export(output_format)
                previous_timestamp = _store(
                    data, depth.epoch_timestamp, previous_timestamp)
        Console.info("...done parsing alr depth")
    if category == Category.ALTITUDE:
        Console.info("Parsing alr altitude...")
        previous_timestamp = 0
        for i in range(len(mission_data["epoch_timestamp"])):
            if mission_data["DVL_down_BT_is_good"][i] == 1:
                a = mission_data["AUV_altitude"][i]
                # The altitude should not be NaN on lines with bottom lock,
                # but check to be on the safe side:
                if not isnan(a):
                    t = mission_data["epoch_timestamp"][i]
                    altitude.from_alr(t, a)
                    data = altitude.export(output_format)
                    previous_timestamp = _store(
                        data, altitude.epoch_timestamp, previous_timestamp)
        Console.info("...done parsing alr altitude")
    return data_list
# Example #14
def parse_eiva_navipac(mission, vehicle, category, output_format, outpath):
    """Parse EIVA NaviPac logs (*_G.NPD) for one data category.

    Parameters
    -----------
    mission : Mission
        Mission configuration (file locations, std models, USBL label).
    vehicle : Vehicle
        Vehicle configuration (INS yaw offset).
    category : str
        Category constant; only ORIENTATION, DEPTH and USBL are available.
    output_format : str
        Output format passed to each sensor's export().
    outpath : Path
        Processed folder path; raw logs are resolved relative to it.

    Returns
    --------
    list
        Exported data records for the requested category.
    """
    filepath = None
    category_str = None
    if category == Category.ORIENTATION:
        category_str = "orientation"
        filepath = mission.orientation.filepath
    elif category == Category.DEPTH:
        category_str = "depth"
        filepath = mission.depth.filepath
    elif category == Category.USBL:
        category_str = "usbl"
        filepath = mission.usbl.filepath
    elif category == Category.ALTITUDE:
        Console.quit("EIVA Navipac parser has no ALTITUDE available")
    elif category == Category.VELOCITY:
        Console.quit("EIVA Navipac parser has no VELOCITY available")
    else:
        # BUGFIX: validate the category before touching the filesystem.
        # The original only quit inside the per-file loop, so with an
        # unsupported category and no matching log files it crashed on the
        # None filepath below instead of reporting the real problem.
        Console.quit("EIVA Navipac parser has no category", category,
                     "available")

    log_file_path = get_raw_folder(outpath / ".." / filepath)

    depth_std_factor = mission.depth.std_factor
    orientation_std_offset = mission.orientation.std_offset
    heading_offset = vehicle.ins.yaw
    usbl_id = mission.usbl.label

    latitude_reference = float(mission.origin.latitude)
    longitude_reference = float(mission.origin.longitude)

    orientation = Orientation(heading_offset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    usbl = Usbl(
        mission.usbl.std_factor,
        mission.usbl.std_offset,
        latitude_reference,
        longitude_reference,
    )

    # Fixed-width record prefix identifying depth telegrams of this beacon.
    # Built once and matched with startswith(): identical to the original
    # 14-character slice compare for single-digit labels, and additionally
    # correct for multi-digit labels (the hard-coded slice never matched
    # those).
    depth_prefix = "D     " + str(usbl_id) + "  19  1"

    data_list = []

    num_entries = 0
    num_valid_entries = 0

    Console.info("... parsing", category_str)

    # For each log file
    for log_file in log_file_path.glob("*_G.NPD"):
        # Open the log file ignoring undecodable bytes
        with log_file.open("r", encoding="utf-8", errors="ignore") as filein:
            if category == Category.ORIENTATION:
                for line in filein:
                    # R132 telegrams carrying a $PRDID attitude sentence
                    if line.startswith("R132  4") and "$PRDID" in line:
                        orientation.from_eiva_navipac(line)
                        num_entries += 1
                        if orientation.roll is None:
                            continue
                        if orientation.valid():
                            num_valid_entries += 1
                            data_list.append(
                                orientation.export(output_format))
            elif category == Category.DEPTH:
                for line in filein:
                    if line.startswith(depth_prefix):
                        depth.from_eiva_navipac(line)
                        num_entries += 1
                        if depth.valid():
                            num_valid_entries += 1
                            data_list.append(depth.export(output_format))
            elif category == Category.USBL:
                for line in filein:
                    # Position telegrams for our beacon id
                    if line.startswith("P  D") and int(line[6:10]) == usbl_id:
                        num_entries += 1
                        usbl.from_eiva_navipac(line)
                        if usbl.valid():
                            num_valid_entries += 1
                            data_list.append(usbl.export(output_format))
    Console.info(
        "Processed",
        num_entries,
        "entries , of which",
        num_valid_entries,
        "are valid for category",
        category_str,
    )

    return data_list
# Example #15
def parse_single(filepath, force_overwrite):
    """Parse the raw data of one dataset into nav/nav_standard.json.

    Loads mission.yaml and vehicle.yaml, dispatches every configured sensor
    stream to its format-specific parser (in parallel), applies optional
    BioCam timestamp correction and tidal depth correction, then writes and
    interlaces the combined navigation data.

    Parameters
    -----------
    filepath : str or Path
        Path to the dataset inside the raw folder structure.
    force_overwrite : bool
        When False, skip the dataset if output files already exist.
    """
    # initiate data and processing flags
    filepath = Path(filepath).resolve()
    filepath = get_raw_folder(filepath)

    if not force_overwrite:
        existing_files = check_output_files_exist(
            get_processed_folder(filepath))
        if existing_files:
            msg = ("It looks like this dataset has already been parsed.\n" +
                   "The following file(s) already exist:\n" + existing_files +
                   "If you would like auv_nav to overwrite existing file," +
                   " rerun it with the flag -F.\n" +
                   "Example:   auv_nav parse -F PATH")
            Console.warn(msg)
            Console.warn("Dataset skipped")
            return

    ftype = "oplab"

    # load mission.yaml config file
    mission_file = get_raw_folder(filepath / "mission.yaml")
    vehicle_file = get_raw_folder(filepath / "vehicle.yaml")
    Console.info("Loading mission.yaml at", mission_file)
    mission = Mission(mission_file)

    Console.info("Loading vehicle.yaml at", vehicle_file)
    vehicle = Vehicle(vehicle_file)

    # copy mission.yaml and vehicle.yaml to processed folder for process step
    mission_processed = get_processed_folder(mission_file)
    vehicle_processed = get_processed_folder(vehicle_file)

    # Write mission with metadata (username, date and hostname)
    mission.write(mission_processed)
    vehicle.write(vehicle_processed)

    # check for recognised formats and create nav file
    outpath = get_processed_folder(filepath) / "nav"
    filename = "nav_standard.json"

    # make file path if not exist
    if not outpath.is_dir():
        try:
            outpath.mkdir()
        except Exception as e:
            print("Warning:", e)

    Console.info("Loading raw data...")

    # Leave two cores free for the rest of the system when possible
    if multiprocessing.cpu_count() < 4:
        cpu_to_use = 1
    else:
        cpu_to_use = multiprocessing.cpu_count() - 2

    try:
        pool = multiprocessing.Pool(cpu_to_use)
    except AttributeError as e:
        print(
            "Error: ",
            e,
            "\n===============\nThis error is known to \
                happen when running the code more than once from the same \
                console in Spyder. Please run the code from a new console \
                to prevent this error from happening. You may close the \
                current console.\n==============",
        )
        # BUGFIX: the original fell through here and later crashed with a
        # NameError because "pool" was never assigned; re-raise instead so
        # the failure is reported at its cause.
        raise

    pool_list = []

    # Dispatch tables: sensor format -> parser function, per category.
    # Using tables instead of long if/elif chains also fixes the original
    # copy-paste bug where the velocity, orientation, depth and altitude
    # sections tested mission.usbl.format for the "rosbag" case instead of
    # their own sensor's format.
    parsers = {
        "images": {
            "acfr_standard": parse_acfr_images,
            "unagi": parse_acfr_images,
            "seaxerocks_3": parse_seaxerocks_images,
            "biocam": parse_biocam_images,
            "biocam4000_15c": parse_biocam_images,
            "ntnu_stereo": parse_ntnu_stereo_images,
            "rosbag_extracted_images": parse_rosbag_extracted_images,
            "rosbag": parse_rosbag,
        },
        "usbl": {
            "gaps": parse_gaps,
            "usbl_dump": parse_usbl_dump,
            "NOC_nmea": parse_NOC_nmea,
            "eiva_navipac": parse_eiva_navipac,
            "rosbag": parse_rosbag,
        },
        "velocity": {
            "phins": parse_phins,
            "ae2000": parse_ae2000,
            "alr": parse_alr,
            "autosub": parse_autosub,
            "rdi": parse_rdi,
            "ntnu_dvl": parse_ntnu_dvl,
            "rosbag": parse_rosbag,
        },
        "orientation": {
            "phins": parse_phins,
            "ae2000": parse_ae2000,
            "alr": parse_alr,
            "autosub": parse_autosub,
            "rdi": parse_rdi,
            "eiva_navipac": parse_eiva_navipac,
            "rosbag": parse_rosbag,
        },
        "depth": {
            "phins": parse_phins,
            "ae2000": parse_ae2000,
            "alr": parse_alr,
            "autosub": parse_autosub,
            "gaps": parse_gaps,
            "eiva_navipac": parse_eiva_navipac,
            "rosbag": parse_rosbag,
        },
        "altitude": {
            "phins": parse_phins,
            "ae2000": parse_ae2000,
            "alr": parse_alr,
            "autosub": parse_autosub,
            "rdi": parse_rdi,
            "ntnu_dvl": parse_ntnu_dvl,
            "rosbag": parse_rosbag,
        },
    }

    def _enqueue(sensor, category, label):
        # Schedule the parser matching the sensor's declared format, or
        # quit with a clear message when the format is not supported.
        if sensor.empty():
            return
        parser = parsers[category].get(sensor.format)
        if parser is None:
            Console.quit("Mission", label, "format", sensor.format,
                         "not supported.")
        pool_list.append(
            pool.apply_async(parser,
                             [mission, vehicle, category, ftype, outpath]))

    # read in, parse data and write data
    _enqueue(mission.image, "images", "image")
    _enqueue(mission.usbl, "usbl", "usbl")
    _enqueue(mission.velocity, "velocity", "velocity")
    _enqueue(mission.orientation, "orientation", "orientation")
    _enqueue(mission.depth, "depth", "depth")
    _enqueue(mission.altitude, "altitude", "altitude")

    # Tide data is parsed synchronously; it is needed later for the depth
    # correction pass.
    tide_list = None
    if not mission.tide.empty():
        if mission.tide.format == "NOC_polpred":
            tide_list = parse_NOC_polpred(mission, vehicle, "tide", ftype,
                                          outpath)
        else:
            Console.quit("Mission tide format", mission.tide.format,
                         "not supported.")

    pool.close()
    pool.join()

    Console.info("...done loading raw data.")
    Console.info("Compile data list...")

    # Seed the output with the mission origin record
    data_list = [[{
        "epoch_timestamp": 0.0,
        "class": "origin",
        "category": "origin",
        "data": [{
            "latitude": mission.origin.latitude,
            "longitude": mission.origin.longitude,
            "crs": mission.origin.crs,
            "date": mission.origin.date,
        }],
    }]]

    # Set advance True/False flag so not comparing for every i in pool_list
    correcting_timestamps = mission.image.format == "biocam"

    for i in pool_list:
        results = i.get()
        # Skip empty or sentinel results from parsers
        if len(results) < 1:
            continue
        if results[0] is None:
            Console.warn(
                "Some results are empty. Please check whether this is correct or not"
            )
            continue
        if correcting_timestamps:
            if results[0]["category"] == "image":
                Console.info("Correction of BioCam cpu timestamps...")
                results = correct_timestamps(results)
        # Apply tidal correction to depth-bearing records when tide data
        # is available
        if (results[0]["category"] == Category.DEPTH
                or results[0]["category"] == Category.USBL):
            if not mission.tide.empty():
                # proceed to tidal correction
                Console.info("Tidal correction of depth vector...")
                # Offset depth to acknowledge for tides
                j = 0
                for k in range(len(results)):
                    # advance j until the tide sample brackets this record
                    while (j < len(tide_list)
                           and tide_list[j]["epoch_timestamp"] <
                           results[k]["epoch_timestamp"]):
                        j = j + 1

                    if j >= 1:
                        # interpolate tide height at the record timestamp
                        _result = interpolate(
                            results[k]["epoch_timestamp"],
                            tide_list[j - 1]["epoch_timestamp"],
                            tide_list[j]["epoch_timestamp"],
                            tide_list[j - 1]["data"][0]["height"],
                            tide_list[j]["data"][0]["height"],
                        )
                        if results[0]["category"] == Category.DEPTH:
                            results[k]["data"][0]["depth"] = (
                                results[k]["data"][0]["depth"] - _result)
                        elif results[0]["category"] == Category.USBL:
                            results[k]["data_target"][4]["depth"] = (
                                results[k]["data_target"][4]["depth"] -
                                _result)
        data_list.append(results)

    Console.info("...done compiling data list.")

    Console.info("Writing to output file...")
    data_list_temp = []
    for i in data_list:
        data_list_temp += i

    # create file (overwrite if exists); the with-block closes the file
    nav_file = outpath / filename
    with nav_file.open("w") as fileout:
        json.dump(data_list_temp, fileout, indent=2)
    Console.info("...done writing to output file.")

    del data_list_temp
    del data_list

    # interlace the data based on timestamps
    Console.info("Interlacing data...")
    parse_interlacer(outpath, filename)
    Console.info("...done interlacing data. Output saved to",
                 outpath / filename)
    plot_parse_data(outpath, ftype)
    Console.info("Complete parse data")
def parse_autosub(mission, vehicle, category, ftype, outpath):
    """Parse an Autosub MATLAB mission log into navigation records.

    Loads the ``missionData`` structure from the .mat file referenced by
    ``mission.velocity`` and exports the records matching *category*
    (velocity, orientation, depth or altitude).

    :param mission: mission configuration (velocity/depth/orientation/
        altitude sections are read for filenames and std models)
    :param vehicle: vehicle configuration (dvl.yaw used as heading offset)
    :param category: one of the ``Category`` constants selecting which
        sensor stream to export
    :param ftype: output format passed to each sensor's ``export``
    :param outpath: processed-folder path used to locate the raw data
    :return: list of exported records, ordered by strictly increasing
        epoch timestamp (a non-increasing timestamp overwrites the
        previous record rather than appending a duplicate)
    """
    # parser meta data
    sensor_string = "autosub"
    output_format = ftype
    filename = mission.velocity.filename
    filepath = mission.velocity.filepath

    # autosub std models
    depth_std_factor = mission.depth.std_factor
    velocity_std_factor = mission.velocity.std_factor
    velocity_std_offset = mission.velocity.std_offset
    orientation_std_offset = mission.orientation.std_offset
    altitude_std_factor = mission.altitude.std_factor
    headingoffset = vehicle.dvl.yaw

    body_velocity = BodyVelocity(velocity_std_factor, velocity_std_offset,
                                 headingoffset)
    orientation = Orientation(headingoffset, orientation_std_offset)
    depth = Depth(depth_std_factor)
    altitude = Altitude(altitude_std_factor)

    body_velocity.sensor_string = sensor_string
    orientation.sensor_string = sensor_string
    depth.sensor_string = sensor_string
    altitude.sensor_string = sensor_string

    path = get_raw_folder(outpath / ".." / filepath / filename)

    autosub_log = loadmat(str(path))
    autosub_adcp = autosub_log["missionData"]["ADCPbin00"]
    autosub_ins = autosub_log["missionData"]["INSAttitude"]
    autosub_dep_ctl = autosub_log["missionData"]["DepCtlNode"]
    autosub_adcp_log1 = autosub_log["missionData"]["ADCPLog_1"]

    def _export_records(sensor, records):
        # Export one record per log entry, de-duplicating timestamps:
        # an entry whose timestamp does not advance replaces the
        # previously exported record instead of being appended.
        exported = []
        previous_timestamp = 0
        for i in range(len(records["eTime"])):
            sensor.from_autosub(records, i)
            data = sensor.export(output_format)
            if sensor.epoch_timestamp > previous_timestamp:
                exported.append(data)
            else:
                exported[-1] = data
            previous_timestamp = sensor.epoch_timestamp
        return exported

    data_list = []
    if category == Category.VELOCITY:
        Console.info("... parsing autosub velocity")
        data_list = _export_records(body_velocity, autosub_adcp)
    elif category == Category.ORIENTATION:
        Console.info("... parsing autosub orientation")
        data_list = _export_records(orientation, autosub_ins)
    elif category == Category.DEPTH:
        Console.info("... parsing autosub depth")
        data_list = _export_records(depth, autosub_dep_ctl)
    elif category == Category.ALTITUDE:
        Console.info("... parsing autosub altitude")
        data_list = _export_records(altitude, autosub_adcp_log1)
    return data_list
# Example #17 (score: 0)
def parse_usbl_dump(mission, vehicle, category, ftype, outpath):
    """Parse a JAMSTEC USBL dump file into navigation records.

    Reads the comma-separated USBL dump referenced by ``mission.usbl``
    and, for every line whose beacon label matches ``mission.usbl.label``
    and that carries a position fix, converts the fix into either an
    "oplab" dict or an "acfr" text record.

    :param mission: mission configuration (usbl and origin sections read)
    :param vehicle: vehicle configuration (unused here)
    :param category: category string stored in each oplab record
    :param ftype: output format, "oplab" (list of dicts) or "acfr" (string)
    :param outpath: processed-folder path used to locate the raw data
    :return: list of dicts for "oplab", concatenated string for "acfr"
    """
    # parser meta data
    class_string = "measurement"
    sensor_string = "jamstec_usbl"
    frame_string = "inertial"

    # gaps std models: uncertainty grows linearly with slant range
    distance_std_factor = mission.usbl.std_factor
    distance_std_offset = mission.usbl.std_offset

    timezone = mission.usbl.timezone
    timeoffset = mission.usbl.timeoffset
    filepath = mission.usbl.filepath
    filename = mission.usbl.filename
    label = mission.usbl.label

    filepath = get_raw_folder(outpath / ".." / filepath)

    latitude_reference = mission.origin.latitude
    longitude_reference = mission.origin.longitude

    # read in timezone
    timezone_offset = read_timezone(timezone)

    # convert to seconds from utc
    # timeoffset = -timezone_offset*60*60 + timeoffset

    # extract data from files
    Console.info("... parsing usbl dump")
    data_list = []
    if ftype == "acfr":
        # ACFR output is accumulated as one large string, not a list
        data_list = ""
    usbl_file = filepath / filename
    with usbl_file.open("r", encoding="utf-8", errors="ignore") as filein:

        for line in filein.readlines():
            line_split = line.strip().split(",")

            # field 2 holds the beacon label; skip other beacons
            if line_split[2] == label:

                date = line_split[0].split("-")

                # read in date (YYYY-MM-DD)
                yyyy = int(date[0])
                mm = int(date[1])
                dd = int(date[2])

                timestamp = line_split[1].split(":")

                # read in time (HH:MM:SS, no sub-second resolution)
                hour = int(timestamp[0])
                mins = int(timestamp[1])
                secs = int(timestamp[2])
                msec = 0

                epoch_time = date_time_to_epoch(
                    yyyy, mm, dd, hour, mins, secs, timezone_offset
                )
                # dt_obj = datetime(yyyy,mm,dd,hour,mins,secs)
                # time_tuple = dt_obj.timetuple()
                # epoch_time = time.mktime(time_tuple)
                epoch_timestamp = epoch_time + msec / 1000 + timeoffset

                # an empty field 6 means the line carries no position fix
                if line_split[6] != "":

                    # get position; format is "DD-MM.MMM H" (degrees,
                    # decimal minutes, hemisphere letter)
                    latitude_full = line_split[6].split(" ")
                    latitude_list = latitude_full[0].split("-")
                    latitude_degrees = int(latitude_list[0])
                    latitude_minutes = float(latitude_list[1])

                    if latitude_full[1] != "N":
                        latitude_degrees = latitude_degrees * -1  # southern hemisphere

                    latitude = latitude_degrees + latitude_minutes / 60

                    longitude_full = line_split[7].split(" ")
                    longitude_list = longitude_full[0].split("-")
                    longitude_degrees = int(longitude_list[0])
                    longitude_minutes = float(longitude_list[1])

                    if longitude_full[1] != "E":
                        longitude_degrees = (
                            longitude_degrees * -1
                        )  # western hemisphere

                    longitude = longitude_degrees + longitude_minutes / 60

                    # remaining fields are "name=value" pairs
                    depth_full = line_split[8].split("=")
                    depth = float(depth_full[1])

                    distance_full = line_split[11].split("=")
                    distance = float(distance_full[1])

                    distance_std = distance_std_factor * distance + distance_std_offset

                    lateral_distance_full = line_split[9].split("=")
                    lateral_distance = float(lateral_distance_full[1])

                    bearing_full = line_split[10].split("=")
                    bearing = float(bearing_full[1])

                    # determine uncertainty in terms of latitude and longitude
                    latitude_offset, longitude_offset = metres_to_latlon(
                        latitude, longitude, distance_std, distance_std
                    )

                    latitude_std = latitude - latitude_offset
                    longitude_std = longitude - longitude_offset

                    # calculate in metres from reference
                    # NOTE(review): ship position is not present in this
                    # dump format, so ship fields are emitted as zeros
                    eastings_ship = 0
                    northings_ship = 0
                    latitude_ship = 0
                    longitude_ship = 0
                    heading_ship = 0

                    lateral_distance_target, bearing_target = latlon_to_metres(
                        latitude, longitude, latitude_reference, longitude_reference,
                    )
                    eastings_target = (
                        math.sin(bearing_target * math.pi / 180.0)
                        * lateral_distance_target
                    )
                    northings_target = (
                        math.cos(bearing_target * math.pi / 180.0)
                        * lateral_distance_target
                    )

                    if ftype == "oplab":
                        data = {
                            "epoch_timestamp": float(epoch_timestamp),
                            "class": class_string,
                            "sensor": sensor_string,
                            "frame": frame_string,
                            "category": category,
                            "data_ship": [
                                {
                                    "latitude": float(latitude_ship),
                                    "longitude": float(longitude_ship),
                                },
                                {
                                    "northings": float(northings_ship),
                                    "eastings": float(eastings_ship),
                                },
                                {"heading": float(heading_ship)},
                            ],
                            "data_target": [
                                {
                                    "latitude": float(latitude),
                                    "latitude_std": float(latitude_std),
                                },
                                {
                                    "longitude": float(longitude),
                                    "longitude_std": float(longitude_std),
                                },
                                {
                                    "northings": float(northings_target),
                                    "northings_std": float(distance_std),
                                },
                                {
                                    "eastings": float(eastings_target),
                                    "eastings_std": float(distance_std),
                                },
                                {
                                    "depth": float(depth),
                                    "depth_std": float(distance_std),
                                },
                                {"distance_to_ship": float(distance)},
                            ],
                        }
                        data_list.append(data)

                    if ftype == "acfr":
                        data = (
                            "SSBL_FIX: "
                            + str(float(epoch_timestamp))
                            + " ship_x: "
                            + str(float(northings_ship))
                            + " ship_y: "
                            + str(float(eastings_ship))
                            + " target_x: "
                            + str(float(northings_target))
                            + " target_y: "
                            + str(float(eastings_target))
                            + " target_z: "
                            + str(float(depth))
                            + " target_hr: "
                            + str(float(lateral_distance))
                            + " target_sr: "
                            + str(float(distance))
                            + " target_bearing: "
                            + str(float(bearing))
                            + "\n"
                        )
                        data_list += data

    return data_list
def parse_acfr_images(mission, vehicle, category, ftype, outpath):
    """Parse ACFR standard stereo images and pair them by timestamp.

    Converts the filenames of both cameras to epoch timestamps, matches
    each camera1 image to the closest camera2 image, and skips pairs
    whose timestamp difference exceeds the sync tolerance.

    :param mission: mission configuration (image section is read)
    :param vehicle: vehicle configuration (unused here)
    :param category: unused; records are always tagged "image"
    :param ftype: output format, "oplab" (list of dicts) or "acfr" (string)
    :param outpath: processed-folder path used to locate the raw data
    :return: list of dicts for "oplab", concatenated string for "acfr";
        None if the timezone cannot be interpreted
    """
    # parser meta data
    class_string = "measurement"
    frame_string = "body"
    category = "image"
    sensor_string = "acfr_standard"

    # Maximum camera1/camera2 timestamp difference (seconds) accepted as
    # a stereo pair. NOTE(review): the original code referenced an
    # undefined `tolerance` (NameError); 0.05 s is assumed here — confirm.
    tolerance = 0.05

    timezone = mission.image.timezone
    timeoffset = mission.image.timeoffset
    filepath = mission.image.cameras[0].path
    camera1_label = mission.image.cameras[0].name
    camera2_label = mission.image.cameras[1].name

    # read in timezone (hours offset from UTC)
    if isinstance(timezone, str):
        if timezone == "utc" or timezone == "UTC":
            timezone_offset = 0
        elif timezone == "jst" or timezone == "JST":
            timezone_offset = 9
        else:
            # Fix: an unrecognised timezone string previously left
            # timezone_offset unbound; accept numeric strings, otherwise
            # report the error like the non-string branch does.
            try:
                timezone_offset = float(timezone)
            except ValueError:
                print(
                    "Error: timezone",
                    timezone,
                    "in mission.cfg not recognised, please enter value from UTC",
                    "in hours",
                )
                return
    else:
        try:
            timezone_offset = float(timezone)
        except ValueError:
            print(
                "Error: timezone",
                timezone,
                "in mission.cfg not recognised, please enter value from UTC",
                "in hours",
            )
            return

    Console.info("... parsing " + sensor_string + " images")

    # determine file paths
    filepath = get_raw_folder(outpath / ".." / filepath)
    all_list = os.listdir(str(filepath))

    # image files for each camera, excluding metadata and AppleDouble files
    camera1_filename = [
        line for line in all_list
        if camera1_label in line and ".txt" not in line and "._" not in line
    ]
    camera2_filename = [
        line for line in all_list
        if camera2_label in line and ".txt" not in line and "._" not in line
    ]

    data_list = []
    if ftype == "acfr":
        # ACFR output is accumulated as one large string, not a list
        data_list = ""

    # Fix: these lists were referenced but never initialised (NameError).
    epoch_timestamp_camera1 = []
    epoch_timestamp_camera2 = []

    for i in range(len(camera1_filename)):
        epoch_timestamp = acfr_timestamp_from_filename(camera1_filename[i],
                                                       timezone_offset,
                                                       timeoffset)
        epoch_timestamp_camera1.append(str(epoch_timestamp))

    for i in range(len(camera2_filename)):
        epoch_timestamp = acfr_timestamp_from_filename(camera2_filename[i],
                                                       timezone_offset,
                                                       timeoffset)
        epoch_timestamp_camera2.append(str(epoch_timestamp))

    for i in range(len(camera1_filename)):
        # find the camera2 frame closest in time to this camera1 frame
        values = []
        for j in range(len(camera2_filename)):
            values.append(
                abs(
                    float(epoch_timestamp_camera1[i]) -
                    float(epoch_timestamp_camera2[j])))
        (sync_difference, sync_pair) = min(
            (v, k) for k, v in enumerate(values))
        if sync_difference > tolerance:
            # Skip the pair
            continue
        if ftype == "oplab":
            data = {
                "epoch_timestamp":
                float(epoch_timestamp_camera1[i]),
                "class":
                class_string,
                "sensor":
                sensor_string,
                "frame":
                frame_string,
                "category":
                category,
                "camera1": [{
                    "epoch_timestamp":
                    float(epoch_timestamp_camera1[i]),
                    "filename":
                    str(filepath) + "/" + str(camera1_filename[i]),
                }],
                "camera2": [{
                    "epoch_timestamp":
                    float(epoch_timestamp_camera2[sync_pair]),
                    "filename":
                    str(filepath) + "/" + str(camera2_filename[sync_pair]),
                }],
            }
            data_list.append(data)
        if ftype == "acfr":
            data = ("VIS: " + str(float(epoch_timestamp_camera1[i])) + " [" +
                    str(float(epoch_timestamp_camera1[i])) + "] " +
                    str(camera1_filename[i]) + " exp: 0\n")
            data_list += data
            data = ("VIS: " + str(float(epoch_timestamp_camera2[sync_pair])) +
                    " [" + str(float(epoch_timestamp_camera2[sync_pair])) +
                    "] " + str(camera2_filename[sync_pair]) + " exp: 0\n")
            data_list += data

    return data_list
# Example #19 (score: 0)
def parse_biocam_images(mission, vehicle, category, ftype, outpath):
    """Parse BioCam stereo (and laser) images and pair them by timestamp.

    Globs strobe and laser images for both cameras, converts filenames to
    PC and camera epoch timestamps, pairs camera1/camera2 frames within a
    sync tolerance, and always emits camera2 .jpg frames as "laser"
    records (camera3).

    :param mission: mission configuration (image section is read)
    :param vehicle: vehicle configuration (unused here)
    :param category: unused; records are tagged "image" or "laser"
    :param ftype: output format, "oplab" (list of dicts) or "acfr" (string)
    :param outpath: processed-folder path used to locate the raw data
    :return: list of dicts for "oplab", concatenated string for "acfr";
        None if the timezone cannot be interpreted
    """
    # parser meta data
    class_string = "measurement"
    frame_string = "body"
    category = "image"
    sensor_string = "biocam"

    # Maximum camera1/camera2 PC-timestamp difference (seconds) accepted
    # as a stereo pair. NOTE(review): the original code referenced an
    # undefined `tolerance` (NameError); 0.05 s is assumed here — confirm.
    tolerance = 0.05

    timezone = mission.image.timezone
    timezone_offset = 0
    timeoffset = mission.image.timeoffset
    filepath = mission.image.cameras[0].path
    camera1_label = mission.image.cameras[0].name
    camera2_label = mission.image.cameras[1].name

    # read in timezone (hours offset from UTC)
    if isinstance(timezone, str):
        if timezone == "utc" or timezone == "UTC":
            timezone_offset = 0
        elif timezone == "jst" or timezone == "JST":
            timezone_offset = 9
    else:
        try:
            timezone_offset = float(timezone)
        except ValueError:
            print(
                "Error: timezone",
                timezone,
                "in mission.cfg not recognised, please enter value from UTC",
                "in hours",
            )
            return

    Console.info("... parsing " + sensor_string + " images")

    # determine file paths: strobe images live in flat folders, laser
    # images in nested folders (hence the recursive glob)
    base_path = get_raw_folder(outpath / ".." / filepath)
    filepath1 = base_path / str(camera1_label + "_strobe/*.*")
    filepath1b = base_path / str(camera1_label + "_laser/**/*.*")
    filepath2 = base_path / str(camera2_label + "_strobe/*.*")
    filepath2b = base_path / str(camera2_label + "_laser/**/*.*")

    camera1_list = glob.glob(str(filepath1))
    camera1_list.extend(glob.glob(str(filepath1b), recursive=True))
    camera2_list = glob.glob(str(filepath2))
    camera2_list.extend(glob.glob(str(filepath2b), recursive=True))

    camera1_filename = [
        line for line in camera1_list
        if ".txt" not in line and "._" not in line
    ]
    # camera2 .jpg files are treated separately as laser frames (camera3)
    camera2_filename = [
        line for line in camera2_list
        if ".txt" not in line and ".jpg" not in line
    ]
    camera3_filename = [line for line in camera2_list if ".jpg" in line]

    dive_folder = get_raw_folder(outpath / "..")
    camera1_relfilename = pathlist_relativeto(camera1_filename, dive_folder)
    camera2_relfilename = pathlist_relativeto(camera2_filename, dive_folder)
    camera3_relfilename = pathlist_relativeto(camera3_filename, dive_folder)

    Console.info("Found " + str(
        len(camera2_filename) + len(camera1_filename) +
        len(camera3_filename)) + " BioCam images!")

    data_list = []
    if ftype == "acfr":
        # ACFR output is accumulated as one large string, not a list
        data_list = ""

    # Fix: these lists were referenced but never initialised (NameError).
    stamp_pc1 = []
    stamp_cam1 = []
    stamp_pc2 = []
    stamp_cam2 = []

    for i in range(len(camera1_filename)):
        t1, tc1 = biocam_timestamp_from_filename(
            Path(camera1_filename[i]).name, timezone_offset, timeoffset)
        stamp_pc1.append(str(t1))
        stamp_cam1.append(str(tc1))
    for i in range(len(camera2_filename)):
        t2, tc2 = biocam_timestamp_from_filename(
            Path(camera2_filename[i]).name, timezone_offset, timeoffset)
        stamp_pc2.append(str(t2))
        stamp_cam2.append(str(tc2))
    for i in range(len(camera1_filename)):
        # find the camera2 frame closest in PC time to this camera1 frame
        values = []
        for j in range(len(camera2_filename)):
            values.append(abs(float(stamp_pc1[i]) - float(stamp_pc2[j])))

        (sync_difference, sync_pair) = min(
            (v, k) for k, v in enumerate(values))

        if sync_difference < tolerance:
            if ftype == "oplab":
                data = {
                    "epoch_timestamp":
                    float(stamp_pc1[i]),
                    "class":
                    class_string,
                    "sensor":
                    sensor_string,
                    "frame":
                    frame_string,
                    "category":
                    category,
                    "camera1": [{
                        "epoch_timestamp": float(stamp_pc1[i]),
                        # Duplicate for timestamp prediction purposes
                        "epoch_timestamp_cpu": float(stamp_pc1[i]),
                        "epoch_timestamp_cam": float(stamp_cam1[i]),
                        "filename": str(camera1_relfilename[i]),
                    }],
                    "camera2": [{
                        "epoch_timestamp":
                        float(stamp_pc2[sync_pair]),
                        # Duplicate for timestamp prediction purposes
                        "epoch_timestamp_cpu":
                        float(stamp_pc2[sync_pair]),
                        "epoch_timestamp_cam":
                        float(stamp_cam2[sync_pair]),
                        "filename":
                        str(camera2_relfilename[sync_pair]),
                    }],
                }
                data_list.append(data)
            if ftype == "acfr":
                data = ("VIS: " + str(float(stamp_pc1[i])) + " [" +
                        str(float(stamp_pc1[i])) + "] " +
                        str(camera1_relfilename[i]) + " exp: 0\n")
                data_list += data
                data = ("VIS: " + str(float(stamp_pc2[sync_pair])) + " [" +
                        str(float(stamp_pc2[sync_pair])) + "] " +
                        str(camera2_relfilename[sync_pair]) + " exp: 0\n")
                data_list += data
    # laser frames (camera2 .jpg files) are emitted unpaired
    for i in range(len(camera3_filename)):
        t3, tc3 = biocam_timestamp_from_filename(
            Path(camera3_filename[i]).name, timezone_offset, timeoffset)
        data = {
            "epoch_timestamp":
            float(t3),
            "class":
            class_string,
            "sensor":
            sensor_string,
            "frame":
            frame_string,
            "category":
            "laser",
            "camera3": [{
                "epoch_timestamp": float(t3),
                # Duplicate for timestamp prediction purposes
                "epoch_timestamp_cpu": float(t3),
                "epoch_timestamp_cam": float(tc3),
                "filename": str(camera3_relfilename[i]),
            }],
        }
        data_list.append(data)
    return data_list