def calculate_attenuation_parameters(images, distances, image_height,
                                     image_width, image_channels):
    """Compute attenuation parameters for all images

    Parameters
    ----------
    images : numpy.ndarray
        image memmap reshaped as a vector
    distances : numpy.ndarray
        distance memmap reshaped as a vector
    image_height : int
        height of an image
    image_width : int
        width of an image
    image_channels : int
        number of channels per image

    Returns
    -------
    numpy.ndarray
        attenuation_parameters
    """

    image_attenuation_parameters = np.empty(
        (image_channels, image_height, image_width, 3), dtype=np.float32)

    # Check available RAM and allocate worker processes accordingly
    mem = psutil.virtual_memory()
    available_bytes = mem.available  # in bytes
    required_bytes = image_channels * image_height * image_width * 4 * len(
        images)
    num_jobs = min(int(available_bytes / required_bytes), 12)

    # Keep one alive!
    cpus = psutil.cpu_count() - 1

    if num_jobs > cpus:
        num_jobs = cpus
    elif num_jobs <= 0:
        num_jobs = 1
        Console.warn("You might have not enough available RAM to continue.")

    if num_jobs < cpus - 1:
        Console.info("Assigning", num_jobs, "jobs to your CPU to save RAM")
    else:
        Console.info("Assigning", num_jobs, "jobs to your CPU")

    for i_channel in range(image_channels):
        with tqdm_joblib(
                tqdm(desc="Curve fitting", total=image_height * image_width)):
            results = joblib.Parallel(n_jobs=num_jobs, verbose=0)([
                joblib.delayed(curve_fitting)(distances[:, i_pixel],
                                              images[:, i_pixel, i_channel])
                for i_pixel in range(image_height * image_width)
            ])

            attenuation_parameters = np.array(results)
            attenuation_parameters = attenuation_parameters.reshape(
                [image_height, image_width, 3])
        image_attenuation_parameters[i_channel] = attenuation_parameters

    return image_attenuation_parameters
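
A minimal usage sketch (hypothetical shapes and data; the expected memmap
layout is inferred from the indexing above, and curve_fitting, joblib and
tqdm are assumed to be available in the module):

import numpy as np

n_images, height, width, channels = 50, 8, 12, 3
# images as (n_images, height * width, channels) and distances as
# (n_images, height * width), matching the per-pixel indexing above
images = np.random.rand(n_images, height * width, channels).astype(np.float32)
distances = np.random.rand(n_images, height * width).astype(np.float32)

params = calculate_attenuation_parameters(images, distances, height, width,
                                          channels)
print(params.shape)  # (channels, height, width, 3)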
    def __init__(self, node):
        """__init__ is the constructor function

        Parameters
        -----------
        node : cdict
            dictionary object for an entry in correct_images.yaml file
        """
        valid_distance_metrics = ["uniform", "altitude", "depth_map"]
        self.distance_metric = node["distance_metric"]

        if self.distance_metric == "none":
            self.distance_metric = "uniform"
            Console.warn("Distance metric is set to none. This is deprecated.",
                "Please use uniform instead.")
        if self.distance_metric not in valid_distance_metrics:
            Console.error("Distance metric not valid. Please use one of the following:",
                valid_distance_metrics)
            Console.quit("Invalid distance metric: {}".format(self.distance_metric))

        self.metric_path = node["metric_path"]
        self.altitude_max = node["altitude_filter"]["max_m"]
        self.altitude_min = node["altitude_filter"]["min_m"]
        self.smoothing = node["smoothing"]
        self.window_size = node["window_size"]
        self.outlier_reject = node["curve_fitting_outlier_rejection"]
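
For reference, a hypothetical correct_images.yaml node matching the keys read
above, written as a Python dict (key names come from the attribute accesses;
the values are illustrative only):

node = {
    "distance_metric": "altitude",        # one of uniform/altitude/depth_map
    "metric_path": "json_renav/csv/ekf",  # illustrative path
    "altitude_filter": {"max_m": 12.0, "min_m": 2.0},
    "smoothing": "median",
    "window_size": 3,
    "curve_fitting_outlier_rejection": True,
}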
Example #3
def write_csv(
    csv_filepath: Path,
    data_list: Union[List[BodyVelocity], List[Orientation], List[Depth],
                     List[Altitude], List[Usbl], List[Tide], List[Other],
                     List[Camera], List[SyncedOrientationBodyVelocity], ],
    csv_filename: str,
    csv_flag: Optional[bool] = True,
    mutex: Optional[Lock] = None,
):
    if csv_flag is True and len(data_list) > 0:
        csv_file = Path(csv_filepath)

        if not csv_file.exists():
            if mutex is not None:
                mutex.acquire()
            csv_file.mkdir(parents=True, exist_ok=True)
            if mutex is not None:
                mutex.release()

        Console.info("Writing outputs to {}.csv ...".format(csv_filename))
        file = csv_file / "{}.csv".format(csv_filename)
        covariance_file = csv_file / "{}_cov.csv".format(csv_filename)

        fileout = None
        fileout_cov = None
        if len(data_list) > 0:
            fileout = file.open("w")
            # write headers
            str_to_write = data_list[0].get_csv_header()
            fileout.write(str_to_write)
            if hasattr(data_list[0], "covariance") and hasattr(
                    data_list[0], "get_csv_header_cov"):
                if data_list[0].covariance is not None:
                    fileout_cov = covariance_file.open("w")
                    str_to_write_cov = data_list[0].get_csv_header_cov()
                    fileout_cov.write(str_to_write_cov)

            # Loop for each line in csv
            for i in range(len(data_list)):
                try:
                    str_to_write = data_list[i].to_csv_row()
                    if fileout_cov is not None:
                        str_to_write_cov = data_list[i].to_csv_cov_row()
                        fileout_cov.write(str_to_write_cov)
                    fileout.write(str_to_write)
                except IndexError:
                    Console.error(
                        "There is something wrong with camera filenames and \
                        indexing for the file",
                        csv_filename,
                    )
                    Console.quit("Check write_csv function.")
            fileout.close()
            if fileout_cov is not None:
                fileout_cov.close()
            Console.info("... done writing {}.csv.".format(csv_filename))
        else:
            Console.warn("Empty data list {}".format(str(csv_filename)))
Example #4
    def test_console(self):
        with patch.object(Console, "get_version", return_value="testing"):
            Console.warn("This is a warning")
            Console.error("This is an error message")
            Console.info("This is an informative message")
            Console.banner()

            Console.get_username()
            Console.get_hostname()
            Console.get_date()
            Console.get_version()
            for i in range(1, 10):
                Console.progress(i, 10)
    def line_is_valid(self, line, line_split):
        start_or_heading = (line[0] == PhinsHeaders.START
                            or line[0] == PhinsHeaders.HEADING)
        if len(line_split) == 2 and start_or_heading:
            # Get timestamp
            # Do a check sum as a lot of broken packets are found in phins data
            check_sum = str(line_split[1])

            # extract from $ to * as per phins manual
            string_to_check = ",".join(line)
            string_to_check = string_to_check[1:len(
                string_to_check)]  # noqa E203
            string_sum = 0

            for i in range(len(string_to_check)):
                string_sum ^= ord(string_to_check[i])

            if str(hex(string_sum)[2:].zfill(2).upper()) == check_sum.upper():
                return True

            else:
                Console.warn("Broken packet: " + str(line))
                Console.warn("Check sum calculated " +
                             str(hex(string_sum).zfill(2).upper()))
                Console.warn("Does not match that provided " +
                             str(check_sum.upper()))
                Console.warn("Ignore and move on")
        return False
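
The check above is the NMEA-style XOR checksum: every character between the
leading $ and the * is XOR-ed together and compared against the two hex
digits after the *. A standalone sketch (the sample sentence is made up):

def nmea_checksum_ok(sentence):
    body, _, check_sum = sentence.partition("*")
    string_sum = 0
    for ch in body[1:]:  # skip the leading '$'
        string_sum ^= ord(ch)
    return hex(string_sum)[2:].zfill(2).upper() == check_sum.strip().upper()

print(nmea_checksum_ok("$HEHDT,123.4,T*2B"))  # True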
def determine_extension_and_images_per_folder(folder_path, image_list, label):
    """Determine filename extension and number of images per subfolder

    The number of images per subfolder determines how the subfolders are
    named. The subfolder name is 3 letters long and either starts from the
    first (index = 0) or the second (index = 1) digit of the image number.

    :param folder_path: Path where images are stored
    :type  folder_path: pathlib.Path
    :param image_list:  list of 7-digit zero-padded image numbers
    :type  image_list:  list of str
    :param label:       Camera label
    :type  label:       str

    :returns:
        -index_start_of_folder_name (`int`) - Index where subfolder name starts
        -extension (`str`) - Filename extension of images ("jpg" or "tif")
    """
    Console.info("    Determine filename extension and images per subfolder "
                 "of camera {}...".format(label))

    if build_image_path(folder_path, image_list[-1], 0, "jpg").is_file():
        index_start_of_folder_name = 0
        extension = "jpg"
        Console.info('    ...Filename extension: "{}", 10000 images '
                     "per subfolder.".format(extension))
    elif build_image_path(folder_path, image_list[-1], 1, "jpg").is_file():
        index_start_of_folder_name = 1
        extension = "jpg"
        Console.info('    ...Filename extension: "{}", 1000 images '
                     "per subfolder.".format(extension))
    elif build_image_path(folder_path, image_list[-1], 0, "tif").is_file():
        index_start_of_folder_name = 0
        extension = "tif"
        Console.info('    ...Filename extension: "{}", 10000 images '
                     "per subfolder.".format(extension))
    elif build_image_path(folder_path, image_list[-1], 1, "tif").is_file():
        index_start_of_folder_name = 1
        extension = "tif"
        Console.info('    ...Filename extension: "{}", 1000 images '
                     "per subfolder.".format(extension))
    else:
        index_start_of_folder_name = 0
        extension = "jpg"
        Console.warn("    ...Did not find images from camera {} in {}. "
                     'Default to using extension "{}" and 10000 images per '
                     "subfolder.".format(label, folder_path, extension))

    return index_start_of_folder_name, extension
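
build_image_path is referenced above but not shown in this listing. A
hypothetical sketch, consistent with how camera3 filenames are assembled in
parse_seaxerocks_images further down ("<3-letter subfolder>/image<number>.<ext>"):

def build_image_path(folder_path, image_number, index_start, extension):
    # Hypothetical helper: the subfolder name is 3 characters of the
    # zero-padded image number, starting at index_start (0 or 1).
    subfolder = image_number[index_start:index_start + 3]
    return folder_path / subfolder / "image{}.{}".format(image_number,
                                                         extension)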
Example #7
    def get_camera_idx(self):
        idx = [
            i
            for i, camera_config in enumerate(self.cameraconfigs)
            if camera_config.camera_name == self.camera_name
        ]
        if len(idx) > 0:
            return idx[0]
        else:
            Console.warn(
                "The camera",
                self.camera_name,
                "could not be found in the correct_images.yaml",
            )
            return None
    def epoch_timestamp_from_phins(self, line):
        epoch_timestamp = None
        time_string = str(line[2])
        if len(time_string) == 10:
            hour = int(time_string[0:2])
            mins = int(time_string[2:4])
            try:
                secs = int(time_string[4:6])
                # phins sometimes returns 60s...
                if secs < 60:
                    msec = int(time_string[7:10])
                    epoch_timestamp = self.get(hour, mins, secs, msec)
            except Exception as exc:
                Console.warn("Badly formatted packet (PHINS TIME): " +
                             time_string + " Exception: " + str(exc))
        else:
            Console.warn("Badly formatted packet (PHINS TIME): " + str(line))
        return epoch_timestamp
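
For reference, the 10-character PHINS time field sliced above has the layout
"HHMMSS.mmm" (the separator at index 6 is inferred from the slicing; the
value below is made up):

time_string = "123456.789"
hour, mins = int(time_string[0:2]), int(time_string[2:4])
secs, msec = int(time_string[4:6]), int(time_string[7:10])
print(hour, mins, secs, msec)  # 12 34 56 789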
Example #9
def write_sidescan_csv(csv_filepath, data_list, csv_filename, csv_flag):
    if csv_flag:
        csv_file = Path(csv_filepath)
        if not csv_file.exists():
            csv_file.mkdir(parents=True, exist_ok=True)
        Console.info("Writing SSS outputs to {}.txt ...".format(csv_filename))
        file = csv_file / "{}.txt".format(csv_filename)
        if len(data_list) > 0:
            str_to_write = data_list[0].get_sidescan_header()
            with file.open("w") as fileout:
                fileout.write(str_to_write)
                for i in range(len(data_list)):
                    try:
                        str_to_write = data_list[i].to_sidescan_row()
                        fileout.write(str_to_write)
                    except IndexError:
                        break
            Console.info(
                "... done writing SSS outputs to {}.txt.".format(csv_filename))
        else:
            Console.warn("Empty data list {}".format(str(csv_filename)))
Example #10
    def clamp_rotation(self, rotation):
        # rotation = (rotation % 2*math.pi)
        if abs(rotation) > 3 * math.pi:
            Console.warn(
                "Absolute value of angle (",
                rotation,
                ") > 3*pi. "
                "This is not supposed to happen. This tends to happen when "
                "there are no sensor readings for long periods of time, "
                "either because there aren't any in this span of time, or due "
                "to the Mahalanobis Distance threshold being exceeded "
                "repeatedly. Check the covariance matrices and the sensor "
                "uncertainties used for the EKF (they probably need to be "
                "bigger), or, as a workaround, use a larger Mahalanobis "
                "Distance threshold.",
            )
        while rotation > math.pi:
            rotation -= 2 * math.pi
        while rotation < -math.pi:
            rotation += 2 * math.pi
        return rotation
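
A standalone sketch of the same wrap-to-(-pi, pi] normalisation, for
reference:

import math

def wrap_to_pi(rotation):
    # Same normalisation as clamp_rotation, without the warning logic.
    while rotation > math.pi:
        rotation -= 2 * math.pi
    while rotation < -math.pi:
        rotation += 2 * math.pi
    return rotation

print(wrap_to_pi(3 * math.pi / 2))   # -pi/2
print(wrap_to_pi(-3 * math.pi / 2))  # pi/2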
def rescale_images(
    imagenames_list,
    image_directory,
    interpolate_method,
    target_pixel_size_m,
    dataframe,
    output_directory,
    f_x,
    f_y,
    maintain_pixels,
):
    Console.info("Rescaling images...")

    for idx in trange(len(imagenames_list)):
        image_name = imagenames_list[idx]
        source_image_path = Path(image_directory) / image_name
        output_image_path = Path(output_directory) / image_name
        image_path_list = dataframe["relative_path"]
        trimmed_path_list = [
            path for path in image_path_list if Path(path).stem in image_name
        ]
        trimmed_dataframe = dataframe.loc[dataframe["relative_path"].isin(
            trimmed_path_list)]
        altitude = trimmed_dataframe["altitude [m]"]
        if len(altitude) > 0:
            image = imageio.imread(source_image_path).astype("uint8")
            rescaled_image = rescale(
                image,
                interpolate_method,
                target_pixel_size_m,
                altitude,
                f_x,
                f_y,
                maintain_pixels,
            )
            imageio.imwrite(output_image_path, rescaled_image, format="PNG-FI")
        else:
            Console.warn("Did not get distance values for image: " +
                         image_name)
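
rescale itself is not shown in this listing. The usual pinhole-camera
relation behind this kind of altitude-based rescaling (an assumption, not
confirmed by the source) is that one pixel's ground footprint is
altitude / focal length in pixels:

def scale_factor(altitude_m, f_x_pixels, target_pixel_size_m):
    # Assumed relation: ground footprint of one pixel ~ altitude / f_x.
    pixel_size_m = altitude_m / f_x_pixels
    return pixel_size_m / target_pixel_size_m

print(scale_factor(5.0, 1600.0, 0.005))  # 0.625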
    def mono(self):
        for c in self.calibration_config["cameras"]:
            cam_name = c["name"]
            # Find if the calibration file exists
            calibration_file = self.output_path / str("mono_" + cam_name +
                                                      ".yaml")
            Console.info("Looking for a calibration file at " +
                         str(calibration_file))
            if calibration_file.exists() and not self.fo:
                Console.warn(
                    "The camera " + c["name"] +
                    " has already been calibrated. If you want to overwrite " +
                    "the calibration, use the -F flag.")
            else:
                Console.info(
                    "The camera is not calibrated, running mono calibration..."
                )
                filepaths = build_filepath(
                    get_processed_folder(self.filepath),
                    c["camera_calibration"]["path"],
                )

                if "glob_pattern" not in c["camera_calibration"]:
                    Console.error(
                        "Could not find the key glob_pattern for the camera ",
                        c["name"],
                    )
                    Console.quit("glob_pattern expected in calibration.yaml")

                calibrate_mono(
                    cam_name,
                    filepaths,
                    str(c["camera_calibration"]["glob_pattern"]),
                    self.calibration_config["camera_calibration"],
                    calibration_file,
                    self.fo,
                    self.foa,
                )
def collect_image_files(image_dirs, file_pattern):
    # TODO try all different formats: png, jpeg, tif
    images = []
    if isinstance(image_dirs, list):
        for d in image_dirs:
            if d.is_dir():
                images.extend(list(d.glob(file_pattern)))
            else:
                Console.warn("Directory " "'{}'" " cannot be found".format(d))
    else:
        if image_dirs.is_dir():
            images = list(image_dirs.glob(file_pattern))
        else:
            Console.warn("Directory "
                         "'{}'"
                         " cannot be found".format(image_dirs))
    images.sort()

    resolved_images = [Path(i).resolve() for i in images]
    return resolved_images
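
A minimal usage sketch (directories and glob pattern are illustrative only):

from pathlib import Path

dirs = [Path("raw/dive_1/cam_left"), Path("raw/dive_1/cam_right")]
images = collect_image_files(dirs, "*.tif")
print(len(images), "images found")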
    def __init__(self, filepath, force_overwite=False, overwrite_all=False):
        filepath = Path(filepath).resolve()
        self.filepath = get_raw_folder(filepath)
        self.fo = force_overwite
        self.foa = overwrite_all

        if self.foa:
            self.fo = True

        self.configuration_path = (get_config_folder(self.filepath.parent) /
                                   "calibration")
        calibration_config_file = self.configuration_path / "calibration.yaml"
        if calibration_config_file.exists():
            Console.info("Loading existing calibration.yaml at {}".format(
                calibration_config_file))
        else:
            root = Path(__file__).parents[1]
            default_file = root / "auv_cal/default_yaml" / "default_calibration.yaml"
            Console.warn("Cannot find {}, generating default from {}".format(
                calibration_config_file, default_file))
            # copy the default calibration yaml to the config folder
            default_file.copy(calibration_config_file)

            Console.warn("Edit the file at: \n\t" +
                         str(calibration_config_file))
            Console.warn(
                "Try to use relative paths to the calibration datasets")
            Console.quit(
                "Modify the file calibration.yaml and run this code again.")

        with calibration_config_file.open("r") as stream:
            self.calibration_config = yaml.safe_load(stream)

        # Create the calibration folder at the same level as the dives
        self.output_path = get_processed_folder(
            self.filepath.parent) / "calibration"
        if not self.output_path.exists():
            self.output_path.mkdir(parents=True)
        if not self.configuration_path.exists():
            self.configuration_path.mkdir(parents=True)
def parse_seaxerocks_images(mission, vehicle, category, ftype, outpath):
    data_list = []
    if ftype == "acfr":
        data_list = ""

    # parser meta data
    class_string = "measurement"
    frame_string = "body"
    category_stereo = "image"
    category_laser = "laser"
    sensor_string = "seaxerocks_3"

    timezone = mission.image.timezone
    timeoffset = mission.image.timeoffset
    camera1_filepath = mission.image.cameras[0].path
    camera2_filepath = mission.image.cameras[1].path
    camera3_filepath = mission.image.cameras[2].path
    camera1_label = mission.image.cameras[0].name
    camera2_label = mission.image.cameras[1].name
    camera3_label = mission.image.cameras[2].name

    epoch_timestamp_stereo = []
    epoch_timestamp_laser = []
    epoch_timestamp_camera1 = []
    epoch_timestamp_camera2 = []
    epoch_timestamp_camera3 = []
    stereo_index = []
    laser_index = []
    camera1_index = []
    camera2_index = []
    camera3_index = []
    camera1_filename = []
    camera2_filename = []
    camera3_filename = []

    camera1_serial = list(camera1_label)
    camera2_serial = list(camera2_label)
    camera3_serial = list(camera3_label)

    for i in range(1, len(camera1_label)):
        if camera1_label[i] == "/":
            camera1_serial[i] = "_"

    for i in range(1, len(camera2_label)):
        if camera2_label[i] == "/":
            camera2_serial[i] = "_"

    for i in range(1, len(camera3_label)):
        if camera3_label[i] == "/":
            camera3_serial[i] = "_"

    camera1_serial = "".join(camera1_serial)
    camera2_serial = "".join(camera2_serial)
    camera3_serial = "".join(camera3_serial)

    # read in timezone
    # TODO change ALL timezones to integers
    if isinstance(timezone, str):
        if timezone == "utc" or timezone == "UTC":
            timezone_offset = 0
        elif timezone == "jst" or timezone == "JST":
            timezone_offset = 9
        else:
            print(
                "Error: timezone",
                timezone,
                "in mission.cfg not recognised, "
                "please enter value from UTC in hours",
            )
            return
    else:
        try:
            timezone_offset = float(timezone)
        except ValueError:
            print(
                "Error: timezone",
                timezone,
                "in mission.cfg not recognised, "
                "please enter value from UTC in hours",
            )
            return

    # convert to seconds from utc
    # timeoffset = -timezone_offset*60*60 + timeoffset

    Console.info("  Parsing " + sensor_string + " images...")

    cam1_path = get_raw_folder(outpath / ".." / camera1_filepath / "..")
    cam1_filetime = cam1_path / "FileTime.csv"

    with cam1_filetime.open("r", encoding="utf-8", errors="ignore") as filein:
        for line in filein.readlines():
            stereo_index_timestamps = line.strip().split(",")

            index_string = stereo_index_timestamps[0]
            date_string = stereo_index_timestamps[1]
            time_string = stereo_index_timestamps[2]
            ms_time_string = stereo_index_timestamps[3]

            # read in date
            if date_string != "date":  # ignore header
                stereo_index.append(index_string)
                if len(date_string) != 8:
                    Console.warn(
                        "Date string ({}) in FileTime.csv file has "
                        "unexpected length. Expected length: 8.".format(
                            date_string))
                yyyy = int(date_string[0:4])
                mm = int(date_string[4:6])
                dd = int(date_string[6:8])

                # read in time
                if len(time_string) != 6:
                    Console.warn(
                        "Time string ({}) in FileTime.csv file has "
                        "unexpected length. Expected length: 6.".format(
                            time_string))
                hour = int(time_string[0:2])
                mins = int(time_string[2:4])
                secs = int(time_string[4:6])
                msec = int(ms_time_string[0:3])

                epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs,
                                                timezone_offset)

                epoch_timestamp_stereo.append(
                    float(epoch_time + msec / 1000 + timeoffset))

    camera1_list = ["{}.raw".format(i) for i in stereo_index]
    camera2_list = ["{}.raw".format(i) for i in stereo_index]

    for i in range(len(camera1_list)):
        camera1_image = camera1_list[i].split(".")
        camera2_image = camera2_list[i].split(".")

        camera1_index.append(camera1_image[0])
        camera2_index.append(camera2_image[0])

    j = 0
    for i in range(len(camera1_list)):
        # find corresponding timestamp even if some images are deleted
        if camera1_index[i] == stereo_index[j]:
            epoch_timestamp_camera1.append(epoch_timestamp_stereo[j])
            epoch_timestamp_camera2.append(epoch_timestamp_stereo[j])
            date = epoch_to_day(epoch_timestamp_stereo[0])
            if ftype == "acfr":
                camera1_filename.append("sx3_" + date[2:4] + date[5:7] +
                                        date[8:10] + "_image" +
                                        str(camera1_index[i]) + "_FC.png")
                camera2_filename.append("sx3_" + date[2:4] + date[5:7] +
                                        date[8:10] + "_image" +
                                        str(camera2_index[i]) + "_AC.png")
            j = j + 1
        elif stereo_index[j] > camera1_index[i]:
            j = j + 1
        else:
            j = j - 1

    if ftype == "oplab":
        camera1_filename = [line for line in camera1_list]
        camera2_filename = [line for line in camera2_list]

    for i in range(len(camera1_list)):
        if ftype == "acfr":
            data = ("VIS: " + str(float(epoch_timestamp_camera1[i])) + " [" +
                    str(float(epoch_timestamp_camera1[i])) + "] " +
                    str(camera1_filename[i]) + " exp: 0\n")
            data_list += data
            data = ("VIS: " + str(float(epoch_timestamp_camera2[i])) + " [" +
                    str(float(epoch_timestamp_camera2[i])) + "] " +
                    str(camera2_filename[i]) + " exp: 0\n")
            data_list += data

        if ftype == "oplab":
            data = {
                "epoch_timestamp":
                float(epoch_timestamp_camera1[i]),
                "class":
                class_string,
                "sensor":
                sensor_string,
                "frame":
                frame_string,
                "category":
                category_stereo,
                "camera1": [{
                    "epoch_timestamp":
                    float(epoch_timestamp_camera1[i]),
                    "serial":
                    camera1_serial,
                    "filename":
                    str(camera1_filepath + "/" + camera1_filename[i]),
                }],
                "camera2": [{
                    "epoch_timestamp":
                    float(epoch_timestamp_camera2[i]),
                    "serial":
                    camera2_serial,
                    "filename":
                    str(camera2_filepath + "/" + camera2_filename[i]),
                }],
            }
            data_list.append(data)

    cam3_path = get_raw_folder(outpath / ".." / camera3_filepath)
    cam3_filetime = cam3_path / "FileTime.csv"
    with cam3_filetime.open("r", encoding="utf-8", errors="ignore") as filein:
        for line in filein.readlines():
            laser_index_timestamps = line.strip().split(",")

            if len(laser_index_timestamps) < 4:
                Console.warn("The laser FileTime.csv is apparently corrupt...")
                continue
            index_string = laser_index_timestamps[0]
            date_string = laser_index_timestamps[1]
            time_string = laser_index_timestamps[2]
            ms_time_string = laser_index_timestamps[3]

            # read in date
            if date_string != "date":  # ignore header
                laser_index.append(index_string)

                yyyy = int(date_string[0:4])
                mm = int(date_string[4:6])
                dd = int(date_string[6:8])

                # read in time
                hour = int(time_string[0:2])
                mins = int(time_string[2:4])
                secs = int(time_string[4:6])
                msec = int(ms_time_string[0:3])

                epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs,
                                                timezone_offset)

                epoch_timestamp_laser.append(
                    float(epoch_time + msec / 1000 + timeoffset))

    # TODO: try using pandas for all parsers; it should be faster
    camera3_list = ["{}".format(i) for i in laser_index]

    # The LM165 images are saved either as jpg or as tif, and are split into
    # subfolders either at every 1000 or every 10000 images. Find out which
    # convention is used in current dataset by looking at the files.
    if len(camera3_list) > 0:
        s, extension = determine_extension_and_images_per_folder(
            cam3_path, camera3_list, camera3_label)

    for i in range(len(camera3_list)):
        camera3_filename.append("{}/image{}.{}".format(
            camera3_list[i][s:(s + 3)],
            camera3_list[i],
            extension,
        ))
        camera3_index.append(camera3_list[i])

    j = 0
    # original comment: find corresponding timestamp even if some images are
    # deleted
    for i in range(len(camera3_filename)):
        if camera3_index[i] == laser_index[j]:
            epoch_timestamp_camera3.append(epoch_timestamp_laser[j])
            j = j + 1
        # Jin: incomplete? it means that laser data is missing for this image
        # file, so no epoch_timestamp data, and do what when this happens?
        elif laser_index[j] > camera3_index[i]:
            j = j + 1
        else:
            # Jin: incomplete and possibly wrong? it means that this laser
            # data is extra, with no accompanying image file, so it should be
            # j+1 till index match?
            j = j - 1

        if ftype == "oplab":
            data = {
                "epoch_timestamp": float(epoch_timestamp_camera3[i]),
                "class": class_string,
                "sensor": sensor_string,
                "frame": frame_string,
                "category": category_laser,
                "serial": camera3_serial,
                "filename": camera3_filepath + "/" + str(camera3_filename[i]),
            }
            data_list.append(data)

    Console.info("  ...done parsing " + sensor_string + " images.")

    return data_list
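
For reference, the FileTime.csv rows parsed above have four comma-separated
fields: index, date (yyyymmdd), time (hhmmss) and milliseconds. A made-up
line and the corresponding slicing:

line = "0000001,20180903,123456,789"  # hypothetical FileTime.csv row
index_string, date_string, time_string, ms_time_string = line.strip().split(",")
print(date_string[0:4], date_string[4:6], date_string[6:8])  # 2018 09 03
print(time_string[0:2], time_string[2:4], time_string[4:6])  # 12 34 56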
Example #16
def oplab_to_acfr(args):
    if not args.dive_folder:
        Console.error("No dive folder provided. Exiting...")
        Console.quit("Missing command-line arguments")
    if not args.output_folder:
        Console.error("No output folder provided. Exiting...")
        Console.quit("Missing command-line arguments")

    output_folder = get_processed_folder(args.output_folder)
    vf = output_folder / "vehicle_pose_est.data"
    sf = output_folder / "stereo_pose_est.data"
    if (vf.exists() or sf.exists()) and not args.force:
        Console.error(
            "Output folder already exists. Use -F to overwrite. Exiting...")
        Console.quit("Default behaviour: not to overwrite results")

    Console.info("Loading mission.yaml")
    path_processed = get_processed_folder(args.dive_folder)
    mission_file = path_processed / "mission.yaml"
    mission = Mission(mission_file)

    vehicle_file = path_processed / "vehicle.yaml"
    vehicle = Vehicle(vehicle_file)

    origin_lat = mission.origin.latitude
    origin_lon = mission.origin.longitude

    json_list = list(path_processed.glob("json_*"))
    if len(json_list) == 0:
        Console.quit(
            "No navigation solution could be found. Please run ",
            "auv_nav parse and process first",
        )
    navigation_path = path_processed / json_list[0]
    # Try if ekf exists:
    full_navigation_path = navigation_path / "csv" / "ekf"

    vpw = AcfrVehiclePoseWriter(vf, origin_lat, origin_lon)
    vehicle_navigation_file = full_navigation_path / "auv_ekf_centre.csv"
    dataframe = pd.read_csv(vehicle_navigation_file)
    dr_list = []
    for _, row in dataframe.iterrows():
        sb = SyncedOrientationBodyVelocity()
        sb.from_df(row)
        dr_list.append(sb)
    vpw.write(dr_list)
    Console.info("Vehicle pose written to", vf)

    if not mission.image.cameras:
        Console.warn("No cameras found in the mission file.")
        return
    if len(mission.image.cameras) == 1:
        Console.error("Only one camera found in the mission file. Exiting...")
        Console.quit("ACFR stereo pose est requires two cameras.")

    camera0_name = mission.image.cameras[0].name
    camera1_name = mission.image.cameras[1].name

    camera0_nav_file = full_navigation_path / ("auv_ekf_" + camera0_name +
                                               ".csv")
    dataframe = pd.read_csv(camera0_nav_file)
    cam1_list = []
    for _, row in dataframe.iterrows():
        sb = Camera()
        sb.from_df(row)
        cam1_list.append(sb)

    camera1_nav_file = full_navigation_path / ("auv_ekf_" + camera1_name +
                                               ".csv")

    dataframe = pd.read_csv(camera1_nav_file)
    cam2_list = []
    for _, row in dataframe.iterrows():
        sb = Camera()
        sb.from_df(row)
        cam2_list.append(sb)

    spw = AcfrStereoPoseWriter(sf, origin_lat, origin_lon)

    spw.write(cam1_list, cam2_list)
    Console.info("Stereo pose written to", sf)

    Console.info("Generating combined.RAW")
    nav_standard_file = path_processed / "nav" / "nav_standard.json"
    nav_standard_file = get_processed_folder(nav_standard_file)
    Console.info("Loading json file {}".format(nav_standard_file))

    with nav_standard_file.open("r") as nav_standard:
        parsed_json_data = json.load(nav_standard)

    start_datetime = ""
    finish_datetime = ""

    # setup start and finish date time
    if start_datetime == "":
        epoch_start_time = epoch_from_json(parsed_json_data[1])
        start_datetime = epoch_to_datetime(epoch_start_time)
    else:
        epoch_start_time = string_to_epoch(start_datetime)
    if finish_datetime == "":
        epoch_finish_time = epoch_from_json(parsed_json_data[-1])
        finish_datetime = epoch_to_datetime(epoch_finish_time)
    else:
        epoch_finish_time = string_to_epoch(finish_datetime)

    sensors_std = {
        "usbl": {
            "model": "sensor"
        },
        "dvl": {
            "model": "sensor"
        },
        "depth": {
            "model": "sensor"
        },
        "orientation": {
            "model": "sensor"
        },
    }

    exporter = AcfrCombinedRawWriter(mission, vehicle, output_folder)

    # read in data from json file
    # i here is the number of the data packet
    for i in range(len(parsed_json_data)):
        Console.progress(i, len(parsed_json_data))
        epoch_timestamp = parsed_json_data[i]["epoch_timestamp"]
        if epoch_start_time <= epoch_timestamp <= epoch_finish_time:
            if "velocity" in parsed_json_data[i]["category"]:
                if "body" in parsed_json_data[i]["frame"]:
                    # to check for corrupted data point which have inertial
                    # frame data values
                    if "epoch_timestamp_dvl" in parsed_json_data[i]:
                        # confirm time stamps of dvl are aligned with main
                        # clock (within a second)
                        if (abs(parsed_json_data[i]["epoch_timestamp"] -
                                parsed_json_data[i]["epoch_timestamp_dvl"])
                            ) < 1.0:
                            velocity_body = BodyVelocity()
                            velocity_body.from_json(parsed_json_data[i],
                                                    sensors_std["dvl"])
                            exporter.add(velocity_body)
                if "inertial" in parsed_json_data[i]["frame"]:
                    velocity_inertial = InertialVelocity()
                    velocity_inertial.from_json(parsed_json_data[i])
                    exporter.add(velocity_inertial)

            if "orientation" in parsed_json_data[i]["category"]:
                orientation = Orientation()
                orientation.from_json(parsed_json_data[i],
                                      sensors_std["orientation"])
                exporter.add(orientation)

            if "depth" in parsed_json_data[i]["category"]:
                depth = Depth()
                depth.from_json(parsed_json_data[i], sensors_std["depth"])
                exporter.add(depth)

            if "altitude" in parsed_json_data[i]["category"]:
                altitude = Altitude()
                altitude.from_json(parsed_json_data[i])
                exporter.add(altitude)

            if "usbl" in parsed_json_data[i]["category"]:
                usbl = Usbl()
                usbl.from_json(parsed_json_data[i], sensors_std["usbl"])
                exporter.add(usbl)

            if "image" in parsed_json_data[i]["category"]:
                camera1 = Camera()
                # LC
                camera1.from_json(parsed_json_data[i], "camera1")
                exporter.add(camera1)
                camera2 = Camera()
                camera2.from_json(parsed_json_data[i], "camera2")
                exporter.add(camera2)
Example #17
def parse_gaps(mission, vehicle, category, ftype, outpath):

    Console.info("  Parsing GAPS data...")

    # parser meta data
    class_string = "measurement"
    sensor_string = "gaps"
    frame_string = "inertial"

    timezone = mission.usbl.timezone
    timeoffset = mission.usbl.timeoffset
    filepath = mission.usbl.filepath
    usbl_id = mission.usbl.label
    latitude_reference = mission.origin.latitude
    longitude_reference = mission.origin.longitude

    # define headers used in phins
    header_absolute = "$PTSAG"  # '<< $PTSAG' #georeferenced strings
    header_heading = "$HEHDT"  # '<< $HEHDT'

    # gaps std models
    distance_std_factor = mission.usbl.std_factor
    distance_std_offset = mission.usbl.std_offset
    broken_packet_flag = False

    # read in timezone
    timezone_offset = read_timezone(timezone)

    # convert to seconds from utc
    # timeoffset = -timezone_offset*60*60 + timeoffset

    # determine file paths
    path = (outpath / ".." / filepath).absolute()
    filepath = get_raw_folder(path)
    all_list = os.listdir(str(filepath))
    gaps_list = [line for line in all_list if ".dat" in line]
    Console.info("  " + str(len(gaps_list)) + " GAPS file(s) found")

    # extract data from files
    data_list = []
    if ftype == "acfr":
        data_list = ""
    for i in range(len(gaps_list)):
        path_gaps = filepath / gaps_list[i]

        with path_gaps.open("r", errors="ignore") as gaps:
            # initialise flag
            flag_got_time = 0
            for line in gaps.readlines():
                line_split = line.strip().split("*")
                line_split_no_checksum = line_split[0].strip().split(",")
                broken_packet_flag = False
                # print(line_split_no_checksum)
                # keep on updating ship position to find the prior
                # interpolation value of ship coordinates
                # line_split_no_checksum[0] == header_absolute:
                if header_absolute in line_split_no_checksum[0]:
                    # start with a ship coordinate
                    if line_split_no_checksum[6] == str(
                            usbl_id) and flag_got_time == 2:
                        if (line_split_no_checksum[11] == "F"
                                and line_split_no_checksum[13] == "1"):
                            # read in date
                            yyyy = int(line_split_no_checksum[5])
                            mm = int(line_split_no_checksum[4])
                            dd = int(line_split_no_checksum[3])

                            # read in time
                            time_string = str(line_split_no_checksum[2])

                            try:
                                hour = int(time_string[0:2])
                                mins = int(time_string[2:4])
                                secs = int(time_string[4:6])
                                msec = int(time_string[7:10])
                            except ValueError:
                                broken_packet_flag = True

                            if secs >= 60:
                                mins += 1
                                secs = 0
                                broken_packet_flag = True

                            if mins >= 60:
                                hour += 1
                                mins = 0
                                broken_packet_flag = True

                            if hour >= 24:
                                dd += 1
                                hour = 0
                                broken_packet_flag = True

                            epoch_time = date_time_to_epoch(
                                yyyy, mm, dd, hour, mins, secs,
                                timezone_offset)

                            # dt_obj = datetime(yyyy,mm,dd,hour,mins,secs)
                            # time_tuple = dt_obj.timetuple()
                            # epoch_time = time.mktime(time_tuple)
                            epoch_timestamp = epoch_time + msec / 1000 + timeoffset

                            # get position
                            latitude_negative_flag = False
                            longitude_negative_flag = False
                            latitude_string = line_split_no_checksum[7]
                            latitude_degrees = int(latitude_string[0:2])
                            latitude_minutes = float(latitude_string[2:10])
                            if line_split_no_checksum[8] == "S":
                                latitude_negative_flag = True

                            longitude_string = line_split_no_checksum[9]
                            longitude_degrees = int(longitude_string[0:3])
                            longitude_minutes = float(longitude_string[3:11])
                            if line_split_no_checksum[10] == "W":
                                longitude_negative_flag = True

                            depth = float(line_split_no_checksum[12])

                            latitude = latitude_degrees + latitude_minutes / 60.0
                            longitude = longitude_degrees + longitude_minutes / 60.0

                            if latitude_negative_flag:
                                latitude = -latitude
                            if longitude_negative_flag:
                                longitude = -longitude

                            # flag raised to proceed
                            flag_got_time = 3
                        else:
                            flag_got_time = 0

                    if line_split_no_checksum[6] == "0":
                        if flag_got_time < 3:

                            # read in date

                            yyyy = int(line_split_no_checksum[5])
                            mm = int(line_split_no_checksum[4])
                            dd = int(line_split_no_checksum[3])

                            # print(yyyy,mm,dd)
                            # read in time
                            time_string = str(line_split_no_checksum[2])
                            # print(time_string)
                            hour = int(time_string[0:2])
                            mins = int(time_string[2:4])
                            secs = int(time_string[4:6])

                            try:
                                msec = int(time_string[7:10])
                            except ValueError:
                                broken_packet_flag = True

                            if secs >= 60:
                                mins += 1
                                secs = 0
                                broken_packet_flag = True

                            if mins >= 60:
                                hour += 1
                                mins = 0
                                broken_packet_flag = True

                            if hour >= 24:
                                dd += 1
                                hour = 0
                                broken_packet_flag = True

                            epoch_time = date_time_to_epoch(
                                yyyy, mm, dd, hour, mins, secs,
                                timezone_offset)
                            # dt_obj = datetime(yyyy,mm,dd,hour,mins,secs)
                            # time_tuple = dt_obj.timetuple()
                            # epoch_time = time.mktime(time_tuple)
                            epoch_timestamp_ship_prior = (epoch_time +
                                                          msec / 1000 +
                                                          timeoffset)

                            # get position
                            latitude_string = line_split_no_checksum[7]
                            latitude_degrees_ship_prior = int(
                                latitude_string[0:2])
                            latitude_minutes_ship_prior = float(
                                latitude_string[2:10])
                            latitude_prior = (
                                latitude_degrees_ship_prior +
                                latitude_minutes_ship_prior / 60.0)
                            if line_split_no_checksum[8] == "S":
                                latitude_prior = -latitude_prior

                            longitude_string = line_split_no_checksum[9]
                            longitude_degrees_ship_prior = int(
                                longitude_string[0:3])
                            longitude_minutes_ship_prior = float(
                                longitude_string[3:11])
                            longitude_prior = (
                                longitude_degrees_ship_prior +
                                longitude_minutes_ship_prior / 60.0)
                            if line_split_no_checksum[10] == "W":
                                longitude_prior = -longitude_prior

                            # flag raised to proceed
                            if flag_got_time < 2:
                                flag_got_time = flag_got_time + 1

                        elif flag_got_time >= 3:
                            if line_split_no_checksum[6] == "0":

                                # read in date
                                yyyy = int(line_split_no_checksum[5])
                                mm = int(line_split_no_checksum[4])
                                dd = int(line_split_no_checksum[3])

                                # read in time
                                time_string = str(line_split_no_checksum[2])
                                hour = int(time_string[0:2])
                                mins = int(time_string[2:4])
                                secs = int(time_string[4:6])
                                msec = int(time_string[7:10])

                                # calculate epoch time
                                epoch_time = date_time_to_epoch(
                                    yyyy,
                                    mm,
                                    dd,
                                    hour,
                                    mins,
                                    secs,
                                    timezone_offset,
                                )
                                # dt_obj = datetime(yyyy,mm,dd,hour,mins,secs)
                                # time_tuple = dt_obj.timetuple()
                                # epoch_time = time.mktime(time_tuple)
                                epoch_timestamp_ship_posterior = (epoch_time +
                                                                  msec / 1000 +
                                                                  timeoffset)

                                # get position
                                latitude_string = line_split_no_checksum[7]
                                latitude_degrees_ship_posterior = int(
                                    latitude_string[0:2])
                                latitude_minutes_ship_posterior = float(
                                    latitude_string[2:10])
                                latitude_posterior = (
                                    latitude_degrees_ship_posterior +
                                    latitude_minutes_ship_posterior / 60.0)
                                if line_split_no_checksum[8] == "S":
                                    latitude_posterior = -latitude_posterior

                                longitude_string = line_split_no_checksum[9]
                                longitude_degrees_ship_posterior = int(
                                    longitude_string[0:3])
                                longitude_minutes_ship_posterior = float(
                                    longitude_string[3:11])
                                longitude_posterior = (
                                    longitude_degrees_ship_posterior +
                                    longitude_minutes_ship_posterior / 60.0)
                                if line_split_no_checksum[10] == "W":
                                    longitude_posterior = -longitude_posterior

                                # flag raised to proceed
                                flag_got_time = flag_got_time + 1

                # line_split_no_checksum[0] == header_heading:
                if header_heading in line_split_no_checksum[0]:
                    if flag_got_time < 3:
                        heading_ship_prior = float(line_split_no_checksum[1])
                        if flag_got_time < 2:
                            flag_got_time = flag_got_time + 1

                    else:

                        heading_ship_posterior = float(
                            line_split_no_checksum[1])
                        flag_got_time = flag_got_time + 1

                if flag_got_time >= 5:
                    # interpolate for the ships location and heading
                    inter_time = (epoch_timestamp - epoch_timestamp_ship_prior
                                  ) / (epoch_timestamp_ship_posterior -
                                       epoch_timestamp_ship_prior)
                    longitude_ship = (inter_time *
                                      (longitude_posterior - longitude_prior) +
                                      longitude_prior)
                    latitude_ship = (inter_time *
                                     (latitude_posterior - latitude_prior) +
                                     latitude_prior)
                    heading_ship = (
                        inter_time *
                        (heading_ship_posterior - heading_ship_prior) +
                        heading_ship_prior)

                    while heading_ship > 360:
                        heading_ship = heading_ship - 360
                    while heading_ship < 0:
                        heading_ship = heading_ship + 360

                    lateral_distance, bearing = latlon_to_metres(
                        latitude, longitude, latitude_ship, longitude_ship)

                    # determine range to input to uncertainty model
                    distance = math.sqrt(lateral_distance * lateral_distance +
                                         depth * depth)
                    distance_std = distance_std_factor * distance + distance_std_offset

                    # determine uncertainty in terms of latitude and longitude
                    latitude_offset, longitude_offset = metres_to_latlon(
                        abs(latitude),
                        abs(longitude),
                        distance_std,
                        distance_std,
                    )

                    latitude_std = abs(abs(latitude) - latitude_offset)
                    longitude_std = abs(abs(longitude) - longitude_offset)

                    # calculate in metres from reference
                    lateral_distance_ship, bearing_ship = latlon_to_metres(
                        latitude_ship,
                        longitude_ship,
                        latitude_reference,
                        longitude_reference,
                    )
                    eastings_ship = (math.sin(bearing_ship * math.pi / 180.0) *
                                     lateral_distance_ship)
                    northings_ship = (
                        math.cos(bearing_ship * math.pi / 180.0) *
                        lateral_distance_ship)

                    lateral_distance_target, bearing_target = latlon_to_metres(
                        latitude,
                        longitude,
                        latitude_reference,
                        longitude_reference,
                    )
                    eastings_target = (
                        math.sin(bearing_target * math.pi / 180.0) *
                        lateral_distance_target)
                    northings_target = (
                        math.cos(bearing_target * math.pi / 180.0) *
                        lateral_distance_target)

                    if not broken_packet_flag:

                        if ftype == "oplab" and category == Category.USBL:
                            data = {
                                "epoch_timestamp":
                                float(epoch_timestamp),
                                "class":
                                class_string,
                                "sensor":
                                sensor_string,
                                "frame":
                                frame_string,
                                "category":
                                category,
                                "data_ship": [
                                    {
                                        "latitude": float(latitude_ship),
                                        "longitude": float(longitude_ship),
                                    },
                                    {
                                        "northings": float(northings_ship),
                                        "eastings": float(eastings_ship),
                                    },
                                    {
                                        "heading": float(heading_ship)
                                    },
                                ],
                                "data_target": [
                                    {
                                        "latitude": float(latitude),
                                        "latitude_std": float(latitude_std),
                                    },
                                    {
                                        "longitude": float(longitude),
                                        "longitude_std": float(longitude_std),
                                    },
                                    {
                                        "northings": float(northings_target),
                                        "northings_std": float(distance_std),
                                    },
                                    {
                                        "eastings": float(eastings_target),
                                        "eastings_std": float(distance_std),
                                    },
                                    {
                                        "depth": float(depth),
                                        "depth_std": float(distance_std),
                                    },
                                    {
                                        "distance_to_ship": float(distance)
                                    },
                                ],
                            }
                            data_list.append(data)
                        elif ftype == "oplab" and category == Category.DEPTH:
                            data = {
                                "epoch_timestamp":
                                float(epoch_timestamp),
                                "epoch_timestamp_depth":
                                float(epoch_timestamp),
                                "class":
                                class_string,
                                "sensor":
                                sensor_string,
                                "frame":
                                "inertial",
                                "category":
                                Category.DEPTH,
                                "data": [{
                                    "depth": float(depth),
                                    "depth_std": float(distance_std),
                                }],
                            }
                            data_list.append(data)

                        if ftype == "acfr":
                            data = (
                                "SSBL_FIX: " + str(float(epoch_timestamp)) +
                                " ship_x: " + str(float(northings_ship)) +
                                " ship_y: " + str(float(eastings_ship)) +
                                " target_x: " + str(float(northings_target)) +
                                " target_y: " + str(float(eastings_target)) +
                                " target_z: " + str(float(depth)) +
                                " target_hr: " + str(float(lateral_distance)) +
                                " target_sr: " + str(float(distance)) +
                                " target_bearing: " + str(float(bearing)) +
                                "\n")
                            data_list += data

                    else:
                        Console.warn("Badly formatted packet (GAPS TIME)")
                        Console.warn(line)
                        # print(hour,mins,secs)

                    # reset flag
                    flag_got_time = 0

    Console.info("  ...done parsing GAPS data.")

    return data_list
Example #18
    def __new__(
        cls,  # __new__ receives the class object, not an instance
        usbl_data,
        dvl_imu_data,
        N,
        sensors_std,
        dvl_noise_sigma_factor,
        imu_noise_sigma_factor,
        usbl_noise_sigma_factor,
        measurement_update_flag=True,
    ):
        # self.dvl_noise_sigma_factor = dvl_noise_sigma_factor
        # self.imu_noise_sigma_factor = imu_noise_sigma_factor
        # self.usbl_noise_sigma_factor = usbl_noise_sigma_factor
        """
        def eval(r, p):
            sum = 0.0
            for i in range(len(p)):  # calculate mean error
                dx = (
                    p[i].eastings[-1] - r.eastings[-1] + (world_size / 2.0)
                ) % world_size - (world_size / 2.0)
                dy = (
                    p[i].northings[-1] - r.northings[-1] + (world_size / 2.0)
                ) % world_size - (world_size / 2.0)
                err = math.sqrt(dx * dx + dy * dy)
                sum += err
            return sum / float(len(p))
        """

        # ========== Start Noise models ========== #
        def usbl_noise(usbl_datapoint):  # measurement noise
            # distance = usbl_datapoint.distance_to_ship # lateral_distance,bearing = latlon_to_metres(usbl_datapoint.latitude, usbl_datapoint.longitude, usbl_datapoint.latitude_ship, usbl_datapoint.longitude_ship)  # noqa
            # distance = math.sqrt(lateral_distance**2 + usbl_datapoint.depth**2) # noqa
            # error = usbl_noise_sigma_factor*(usbl_noise_std_offset + usbl_noise_std_factor*distance) # 5 is for the differential GPS, and the distance std factor 0.01 is used as 0.006 is too small and unrealistic # This is moved to parse_gaps and parse_usbl_dump # noqa
            if usbl_datapoint.northings_std != 0:
                error = usbl_datapoint.northings_std * usbl_noise_sigma_factor
            else:
                usbl_noise_std_offset = 5
                usbl_noise_std_factor = 0.01
                distance = math.sqrt(usbl_datapoint.northings**2 +
                                     usbl_datapoint.eastings**2 +
                                     usbl_datapoint.depth**2)
                error = usbl_noise_sigma_factor * (
                    usbl_noise_std_offset + usbl_noise_std_factor * distance)
            return error

        def dvl_noise(dvl_imu_datapoint, mode="estimate"):  # sensor1 noise
            # Vinnay's dvl_noise model: velocity_std = (-0.0125*((velocity)**2)+0.2*(velocity)+0.2125)/100) assuming noise of x_velocity = y_velocity = z_velocity # noqa
            velocity_std_factor = 0.001  # from catalogue rdi whn1200/600. # should read this in from somewhere else, e.g. json # noqa
            velocity_std_offset = 0.002  # 0.02 # 0.2 #from catalogue rdi whn1200/600. # should read this in from somewhere else # noqa
            x_velocity_std = (
                abs(dvl_imu_datapoint.x_velocity) * velocity_std_factor +
                velocity_std_offset
            )  # (-0.0125*((dvl_imu_datapoint.x_velocity)**2)+0.2*(dvl_imu_datapoint.x_velocity)+0.2125)/100 # noqa
            y_velocity_std = (
                abs(dvl_imu_datapoint.y_velocity) * velocity_std_factor +
                velocity_std_offset
            )  # (-0.0125*((dvl_imu_datapoint.y_velocity)**2)+0.2*(dvl_imu_datapoint.y_velocity)+0.2125)/100 # noqa
            z_velocity_std = (
                abs(dvl_imu_datapoint.z_velocity) * velocity_std_factor +
                velocity_std_offset
            )  # (-0.0125*((dvl_imu_datapoint.z_velocity)**2)+0.2*(dvl_imu_datapoint.z_velocity)+0.2125)/100 # noqa
            if mode == "estimate":
                x_velocity_estimate = random.gauss(
                    dvl_imu_datapoint.x_velocity,
                    dvl_noise_sigma_factor * x_velocity_std,
                )
                y_velocity_estimate = random.gauss(
                    dvl_imu_datapoint.y_velocity,
                    dvl_noise_sigma_factor * y_velocity_std,
                )
                z_velocity_estimate = random.gauss(
                    dvl_imu_datapoint.z_velocity,
                    dvl_noise_sigma_factor * z_velocity_std,
                )
                return (
                    x_velocity_estimate,
                    y_velocity_estimate,
                    z_velocity_estimate,
                )
            elif mode == "std":
                return max([x_velocity_std, y_velocity_std, z_velocity_std])

        def imu_noise(
            previous_dvlimu_data_point,
            current_dvlimu_data_point,
            particle_list_data,
        ):  # sensor2 noise
            imu_noise = (
                0.003 * imu_noise_sigma_factor
            )  # each time_step + 0.003. assuming noise of roll = pitch = yaw
            if particle_list_data == 0:  # for initiation
                roll_estimate = random.gauss(current_dvlimu_data_point.roll,
                                             imu_noise)
                pitch_estimate = random.gauss(current_dvlimu_data_point.pitch,
                                              imu_noise)
                yaw_estimate = random.gauss(current_dvlimu_data_point.yaw,
                                            imu_noise)
            else:  # for propagation
                roll_estimate = particle_list_data.roll[-1] + random.gauss(
                    current_dvlimu_data_point.roll -
                    previous_dvlimu_data_point.roll,
                    imu_noise,
                )
                pitch_estimate = particle_list_data.pitch[-1] + random.gauss(
                    current_dvlimu_data_point.pitch -
                    previous_dvlimu_data_point.pitch,
                    imu_noise,
                )
                yaw_estimate = particle_list_data.yaw[-1] + random.gauss(
                    current_dvlimu_data_point.yaw -
                    previous_dvlimu_data_point.yaw,
                    imu_noise,
                )
            if yaw_estimate < 0:
                yaw_estimate += 360
            elif yaw_estimate > 360:
                yaw_estimate -= 360
            return roll_estimate, pitch_estimate, yaw_estimate

        # ========== End Noise models ========== #

        def initialize_particles(N, usbl_datapoint, dvl_imu_datapoint,
                                 init_dvl_imu_datapoint):
            particles = []
            northings_estimate = (usbl_datapoint.northings -
                                  dvl_imu_datapoint.northings +
                                  init_dvl_imu_datapoint.northings)
            eastings_estimate = (usbl_datapoint.eastings -
                                 dvl_imu_datapoint.eastings +
                                 init_dvl_imu_datapoint.eastings)
            for i in range(N):
                temp_particle = Particle()

                roll_estimate, pitch_estimate, yaw_estimate = imu_noise(
                    0, init_dvl_imu_datapoint, 0)
                (
                    x_velocity_estimate,
                    y_velocity_estimate,
                    z_velocity_estimate,
                ) = dvl_noise(init_dvl_imu_datapoint)

                usbl_uncertainty = usbl_noise(usbl_datapoint)
                # usbl_uncertainty = 0

                temp_particle.set(
                    random.gauss(eastings_estimate, usbl_uncertainty),
                    random.gauss(northings_estimate, usbl_uncertainty),
                    init_dvl_imu_datapoint.epoch_timestamp,
                    x_velocity_estimate,
                    y_velocity_estimate,
                    z_velocity_estimate,
                    roll_estimate,
                    pitch_estimate,
                    yaw_estimate,
                    init_dvl_imu_datapoint.altitude,
                    init_dvl_imu_datapoint.depth,
                )
                temp_particle.set_weight(1)
                particles.append(temp_particle)

            # Normalize weights
            weights_list = []
            for i in particles:
                weights_list.append(i.weight)
            normalized_weights = normalize_weights(weights_list)
            for index, particle_ in enumerate(particles):
                particle_.weight = normalized_weights[index]

            return particles

        def normalize_weights(weights_list):
            # Compute the total once instead of once per element
            total = sum(weights_list)
            return [weight / total for weight in weights_list]

        def propagate_particles(particles, previous_data_point,
                                current_data_point):
            for i in particles:
                # Propagation error model
                time_difference = (current_data_point.epoch_timestamp -
                                   previous_data_point.epoch_timestamp)

                roll_estimate, pitch_estimate, yaw_estimate = imu_noise(
                    previous_data_point, current_data_point, i)
                (
                    x_velocity_estimate,
                    y_velocity_estimate,
                    z_velocity_estimate,
                ) = dvl_noise(current_data_point)

                [
                    north_velocity_estimate,
                    east_velocity_estimate,
                    down_velocity_estimate,
                ] = body_to_inertial(
                    roll_estimate,
                    pitch_estimate,
                    yaw_estimate,
                    x_velocity_estimate,
                    y_velocity_estimate,
                    z_velocity_estimate,
                )
                [
                    previous_north_velocity_estimate,
                    previous_east_velocity_estimate,
                    previous_down_velocity_estimate,
                ] = body_to_inertial(
                    i.roll[-1],
                    i.pitch[-1],
                    i.yaw[-1],
                    i.x_velocity[-1],
                    i.y_velocity[-1],
                    i.z_velocity[-1],
                )
                # DR motion model
                northing_estimate = (0.5 * time_difference *
                                     (north_velocity_estimate +
                                      previous_north_velocity_estimate) +
                                     i.northings[-1])
                easting_estimate = (
                    0.5 * time_difference *
                    (east_velocity_estimate + previous_east_velocity_estimate)
                    + i.eastings[-1])
                i.set(
                    easting_estimate,
                    northing_estimate,
                    current_data_point.epoch_timestamp,
                    x_velocity_estimate,
                    y_velocity_estimate,
                    z_velocity_estimate,
                    roll_estimate,
                    pitch_estimate,
                    yaw_estimate,
                    current_data_point.altitude,
                    current_data_point.depth,
                )

        def measurement_update(
            N,
            usbl_measurement,
            particles_list,
            resample_flag=True
        ):  # updates weights of particles and resamples them # USBL uncertainty follow the readings (0.06/100* depth)! assuming noise of northing = easting # noqa

            # Update weights (particle weighting)
            for i in particles_list[-1]:
                weight = i.measurement_prob(usbl_measurement,
                                            usbl_noise(usbl_measurement))
                # weights.append(weight)
                # i.weight.append(weight)
                i.weight = i.weight * weight
            # Normalize weights # this should be in particles...
            weights_list = []
            for i in particles_list[-1]:
                weights_list.append(i.weight)
            normalized_weights = normalize_weights(weights_list)
            for index, particle_ in enumerate(particles_list[-1]):
                particle_.weight = normalized_weights[index]

            # calculate Neff
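            # Neff = 1 / sum(w_i^2): equals N for uniform weights and 1 when a
            # single particle carries all the weight; resample below N/2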
            weights_list = []
            for i in particles_list[-1]:
                weights_list.append(i.weight)
            effectiveParticleSize = 1 / sum([i**2 for i in weights_list])

            if effectiveParticleSize < len(particles_list[-1]) / 2:
                resample_flag = True
            else:
                resample_flag = False

            if resample_flag:
                # resampling wheel
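                # (Thrun's resampling wheel: step around the cumulative-weight
                # "wheel" in random increments of up to 2*max(weights), so each
                # particle is drawn with probability proportional to its weight)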
                temp_particles = []
                index = int(random.random() * N)
                beta = 0.0
                mw = max(weights_list)
                for i in range(N):
                    beta += random.random() * 2.0 * mw
                    while beta > weights_list[index]:
                        beta -= weights_list[index]
                        index = (index + 1) % N
                    temp_particle = Particle()
                    temp_particle.parentID = "{}-{}".format(
                        len(particles_list) - 1, index)
                    particles_list[-1][index].childIDList.append(
                        "{}-{}".format(len(particles_list),
                                       len(temp_particles)))
                    temp_particle.set(
                        particles_list[-1][index].eastings[-1],
                        particles_list[-1][index].northings[-1],
                        particles_list[-1][index].timestamps[-1],
                        particles_list[-1][index].x_velocity[-1],
                        particles_list[-1][index].y_velocity[-1],
                        particles_list[-1][index].z_velocity[-1],
                        particles_list[-1][index].roll[-1],
                        particles_list[-1][index].pitch[-1],
                        particles_list[-1][index].yaw[-1],
                        particles_list[-1][index].altitude[-1],
                        particles_list[-1][index].depth[-1],
                    )
                    temp_particle.set_weight(
                        1 / N)  # particles_list[-1][index].weight)
                    # temp_particle.set_error(usbl_measurement) # maybe can remove this? # noqa
                    temp_particles.append(temp_particle)
                return (True, temp_particles)
            else:
                return (False, particles_list)

        def extract_trajectory(final_particle):
            northings_trajectory = final_particle.northings
            eastings_trajectory = final_particle.eastings
            timestamp_list = final_particle.timestamps
            roll_list = final_particle.roll
            pitch_list = final_particle.pitch
            yaw_list = final_particle.yaw
            altitude_list = final_particle.altitude
            depth_list = final_particle.depth

            parentID = final_particle.parentID
            while parentID != "":
                particle_list = int(parentID.split("-")[0])
                element_list = int(parentID.split("-")[1])

                northings_trajectory = (
                    particles_list[particle_list][element_list].northings[:-1]
                    + northings_trajectory)
                eastings_trajectory = (
                    particles_list[particle_list][element_list].eastings[:-1] +
                    eastings_trajectory)
                timestamp_list = (
                    particles_list[particle_list][element_list].timestamps[:-1]
                    + timestamp_list)
                roll_list = (
                    particles_list[particle_list][element_list].roll[:-1] +
                    roll_list)
                pitch_list = (
                    particles_list[particle_list][element_list].pitch[:-1] +
                    pitch_list)
                yaw_list = (
                    particles_list[particle_list][element_list].yaw[:-1] +
                    yaw_list)
                altitude_list = (
                    particles_list[particle_list][element_list].altitude[:-1] +
                    altitude_list)
                depth_list = (
                    particles_list[particle_list][element_list].depth[:-1] +
                    depth_list)

                parentID = particles_list[particle_list][element_list].parentID
            return (
                northings_trajectory,
                eastings_trajectory,
                timestamp_list,
                roll_list,
                pitch_list,
                yaw_list,
                altitude_list,
                depth_list,
            )

        def mean_trajectory(particles):
            x_list_ = []
            y_list_ = []
            for i in particles:
                x_list_.append(i.weight * i.eastings[-1])
                y_list_.append(i.weight * i.northings[-1])
            x = sum(x_list_)
            y = sum(y_list_)
            return x, y

        x_list = []
        y_list = []

        particles_list = []
        usbl_datapoints = []

        # print('Initializing particles around first point of dead reckoning solution offset by averaged usbl readings') # noqa
        # Interpolate dvl_imu_data to usbl_data to initializing particles at first appropriate usbl timestamp. # noqa
        # if dvl_imu_data[dvl_imu_data_index].epoch_timestamp > usbl_data[usbl_data_index].epoch_timestamp: # noqa
        #     while dvl_imu_data[dvl_imu_data_index].epoch_timestamp > usbl_data[usbl_data_index].epoch_timestamp: # noqa
        #         usbl_data_index += 1
        # interpolate usbl_data to dvl_imu_data to initialize particles
        usbl_data_index = 0
        dvl_imu_data_index = 0
        if (usbl_data[usbl_data_index].epoch_timestamp >
                dvl_imu_data[dvl_imu_data_index].epoch_timestamp):
            while (usbl_data[usbl_data_index].epoch_timestamp >
                   dvl_imu_data[dvl_imu_data_index].epoch_timestamp):
                dvl_imu_data_index += 1
        if (dvl_imu_data[dvl_imu_data_index].epoch_timestamp ==
                usbl_data[usbl_data_index].epoch_timestamp):
            particles = initialize_particles(
                N,
                usbl_data[usbl_data_index],
                dvl_imu_data[dvl_imu_data_index],
                dvl_imu_data[0],
            )  # *For now assume eastings_std = northings_std, usbl_data[usbl_data_index].eastings_std) # noqa
            usbl_data_index += 1
            dvl_imu_data_index += 1
        elif (usbl_data[usbl_data_index].epoch_timestamp <
              dvl_imu_data[dvl_imu_data_index].epoch_timestamp):
            while (usbl_data[usbl_data_index + 1].epoch_timestamp <
                   dvl_imu_data[dvl_imu_data_index].epoch_timestamp):
                if len(usbl_data) - 2 == usbl_data_index:
                    Console.warn(
                        "USBL data does not span to DVL data. Is your data right?"  # noqa
                    )
                    break
                usbl_data_index += 1
            # interpolated_data = interpolate_data(usbl_data[usbl_data_index].epoch_timestamp, dvl_imu_data[dvl_imu_data_index], dvl_imu_data[dvl_imu_data_index+1]) # noqa
            interpolated_data = interpolate_usbl(
                dvl_imu_data[dvl_imu_data_index].epoch_timestamp,
                usbl_data[usbl_data_index],
                usbl_data[usbl_data_index + 1],
            )
            # dvl_imu_data.insert(dvl_imu_data_index+1, interpolated_data)
            usbl_data.insert(usbl_data_index + 1, interpolated_data)
            particles = initialize_particles(
                N,
                usbl_data[usbl_data_index],
                dvl_imu_data[dvl_imu_data_index + 1],
                dvl_imu_data[0],
            )  # *For now assume eastings_std = northings_std, usbl_data[usbl_data_index].eastings_std) # noqa
            usbl_data_index += 1
            dvl_imu_data_index += 1
        else:
            Console.quit(
                "Check dvl_imu_data and usbl_data in particle_filter.py.")
        usbl_datapoints.append(usbl_data[usbl_data_index - 1])
        particles_list.append(particles)
        # Force to start at DR init
        dvl_imu_data_index = 0

        x_, y_ = mean_trajectory(particles)
        x_list.append(x_)
        y_list.append(y_)

        max_uncertainty = 0
        usbl_uncertainty_list = []
        n = 0
        if measurement_update_flag is True:  # perform resampling
            last_usbl_flag = False
            while dvl_imu_data[dvl_imu_data_index] != dvl_imu_data[-1]:
                Console.progress(dvl_imu_data_index, len(dvl_imu_data))
                time_difference = (
                    dvl_imu_data[dvl_imu_data_index + 1].epoch_timestamp -
                    dvl_imu_data[dvl_imu_data_index].epoch_timestamp)
                max_uncertainty += (
                    dvl_noise(dvl_imu_data[dvl_imu_data_index], mode="std") *
                    time_difference)  # * 2

                if (dvl_imu_data[dvl_imu_data_index + 1].epoch_timestamp <
                        usbl_data[usbl_data_index].epoch_timestamp):
                    propagate_particles(
                        particles_list[-1],
                        dvl_imu_data[dvl_imu_data_index],
                        dvl_imu_data[dvl_imu_data_index + 1],
                    )
                    dvl_imu_data_index += 1
                else:
                    if not last_usbl_flag:
                        # interpolate, insert, propagate, resample measurement_update, add new particles to list, check and assign parent id, check parents that have no children and delete it (skip this step for now) ### # noqa
                        interpolated_data = interpolate_dvl(
                            usbl_data[usbl_data_index].epoch_timestamp,
                            dvl_imu_data[dvl_imu_data_index],
                            dvl_imu_data[dvl_imu_data_index + 1],
                        )
                        dvl_imu_data.insert(dvl_imu_data_index + 1,
                                            interpolated_data)
                        propagate_particles(
                            particles_list[-1],
                            dvl_imu_data[dvl_imu_data_index],
                            dvl_imu_data[dvl_imu_data_index + 1],
                        )

                        usbl_uncertainty_list.append(
                            usbl_noise(usbl_data[usbl_data_index]))

                        n += 1
                        resampled, new_particles = measurement_update(
                            N, usbl_data[usbl_data_index], particles_list)

                        if resampled:
                            particles_list.append(new_particles)
                            usbl_datapoints.append(usbl_data[usbl_data_index])
                            # reset usbl_uncertainty_list
                            usbl_uncertainty_list = []
                        else:
                            particles_list = new_particles

                        if usbl_data[usbl_data_index] == usbl_data[-1]:
                            last_usbl_flag = True
                            dvl_imu_data_index += 1
                        else:
                            usbl_data_index += 1
                            dvl_imu_data_index += 1
                    else:
                        propagate_particles(
                            particles_list[-1],
                            dvl_imu_data[dvl_imu_data_index],
                            dvl_imu_data[dvl_imu_data_index + 1],
                        )
                        dvl_imu_data_index += 1

                x_, y_ = mean_trajectory(particles_list[-1])
                x_list.append(x_)
                y_list.append(y_)

            # print (max_uncertainty)

            # select particle trajectory with largest overall weight
            # particles_weight_list = []
            particles_error_list = []
            for i in range(len(particles_list[-1])):
                parentID = particles_list[-1][i].parentID
                particles_error_list.append([])
                if len(particles_list[-1][i].error) != 0:
                    particles_error_list[-1] += particles_list[-1][i].error
                while parentID != "":
                    particle_list = int(parentID.split("-")[0])
                    element_list = int(parentID.split("-")[1])
                    parentID = particles_list[particle_list][
                        element_list].parentID
                    particles_error_list[-1] += particles_list[particle_list][
                        element_list].error
            for i in range(len(particles_error_list)):
                particles_error_list[i] = sum(particles_error_list[i])
            selected_particle = particles_list[-1][particles_error_list.index(
                min(particles_error_list))]
            (
                northings_trajectory,
                eastings_trajectory,
                timestamp_list,
                roll_list,
                pitch_list,
                yaw_list,
                altitude_list,
                depth_list,
            ) = extract_trajectory(selected_particle)
        else:  # do not perform resampling, only propagate
            while dvl_imu_data[dvl_imu_data_index] != dvl_imu_data[-1]:
                propagate_particles(
                    particles_list[-1],
                    dvl_imu_data[dvl_imu_data_index],
                    dvl_imu_data[dvl_imu_data_index + 1],
                )
                dvl_imu_data_index += 1

            ## select particle trajectory with least average error (maybe assign weights without resampling and compare total or average weight? actually doesn't really matter because path won't be used anyway, main purpose of this is to see the std plot) # noqa
            particles_error_list = []
            for i in range(len(particles_list[-1])):
                parentID = particles_list[-1][i].parentID
                particles_error_list.append([])
                particles_error_list[-1].append(particles_list[-1][i].error)
                while parentID != "":
                    particle_list = int(parentID.split("-")[0])
                    element_list = int(parentID.split("-")[1])
                    parentID = particles_list[particle_list][
                        element_list].parentID
                    particles_error_list[-1].append(
                        particles_list[particle_list][element_list].error)
            for i in range(len(particles_error_list)):
                particles_error_list[i] = sum(particles_error_list[i]) / len(
                    particles_error_list[i])
            selected_particle = particles_list[-1][particles_error_list.index(
                min(particles_error_list))]
            (
                northings_trajectory,
                eastings_trajectory,
                timestamp_list,
                roll_list,
                pitch_list,
                yaw_list,
                altitude_list,
                depth_list,
            ) = extract_trajectory(selected_particle)

        # calculate northings std, eastings std, yaw std of particles
        northings_std = []
        eastings_std = []
        yaw_std = []

        arr_northings = []
        arr_eastings = []
        arr_yaw = []
        for i in particles_list[0]:
            arr_northings.append([])
            arr_eastings.append([])
            arr_yaw.append([])
        for i in range(len(particles_list)):
            for j in range(len(particles_list[i])):
                if i != len(particles_list) - 1:
                    arr_northings[j] += particles_list[i][j].northings[:-1]
                    arr_eastings[j] += particles_list[i][j].eastings[:-1]
                    arr_yaw[j] += particles_list[i][j].yaw[:-1]
                else:
                    arr_northings[j] += particles_list[i][j].northings
                    arr_eastings[j] += particles_list[i][j].eastings
                    arr_yaw[j] += particles_list[i][j].yaw
        arr_northings = numpy.array(arr_northings)
        arr_eastings = numpy.array(arr_eastings)
        arr_yaw = numpy.array(arr_yaw)

        for i in numpy.std(arr_northings, axis=0):
            northings_std.append(i)
        for i in numpy.std(arr_eastings, axis=0):
            eastings_std.append(i)
        # yaw_std step check for different extreme values around 0 and 360. not sure if this method below is robust. # noqa
        arr_std_yaw = numpy.std(arr_yaw, axis=0)
        arr_yaw_change = []
        for i in range(len(arr_std_yaw)):
            if arr_std_yaw[i] > 30:
                # std above 30 deg means the yaw values straddle the 0/360
                # wrap-around, so subtract 360 from anything above 180 deg
                arr_yaw_change.append(i)
            # yaw_std.append(i)
        for i in arr_yaw:
            for j in arr_yaw_change:
                if i[j] > 180:
                    i[j] -= 360
        arr_std_yaw = numpy.std(arr_yaw, axis=0)
        for i in arr_std_yaw:
            yaw_std.append(i)
        # numpy.mean(arr, axis=0)

        pf_fusion_dvl_list = []
        for i in range(len(timestamp_list)):
            pf_fusion_dvl = SyncedOrientationBodyVelocity()
            pf_fusion_dvl.epoch_timestamp = timestamp_list[i]
            pf_fusion_dvl.northings = northings_trajectory[i]
            pf_fusion_dvl.eastings = eastings_trajectory[i]
            pf_fusion_dvl.depth = depth_list[i]
            pf_fusion_dvl.roll = roll_list[i]
            pf_fusion_dvl.pitch = pitch_list[i]
            pf_fusion_dvl.yaw = yaw_list[i]
            pf_fusion_dvl.altitude = altitude_list[i]
            pf_fusion_dvl_list.append(pf_fusion_dvl)

        # plt.scatter(x_list, y_list)

        return (
            pf_fusion_dvl_list,
            usbl_datapoints,
            particles_list,
            northings_std,
            eastings_std,
            yaw_std,
        )
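
The measurement update above resamples with the classic "resampling wheel". A
minimal, self-contained sketch of that wheel on toy weights (the names below
are illustrative and not part of the source):

import random

def resample_indices(weights, n_draws):
    """Draw n_draws indices with probability proportional to weights."""
    m = len(weights)
    index = int(random.random() * m)
    beta = 0.0
    max_weight = max(weights)
    chosen = []
    for _ in range(n_draws):
        # Step forward by a random amount of at most 2 * max_weight, then
        # walk the wheel until the accumulated step is spent
        beta += random.random() * 2.0 * max_weight
        while beta > weights[index]:
            beta -= weights[index]
            index = (index + 1) % m
        chosen.append(index)
    return chosen

print(resample_indices([0.7, 0.1, 0.1, 0.1], 4))  # mostly 0: index 0 dominates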
Example #19
    def create_output_directories(self):
        """Handle the creation of output directories for each camera"""
        self.output_dir_path = self.path_processed / "image"
        self.output_dir_path /= self.camera_name

        # Create output directories depending on the correction method
        parameters_folder_str = "params_"
        developed_folder_str = "corrected_"

        if self.correction_method == "colour_correction":
            parameters_folder_str += self.distance_metric
            developed_folder_str += self.distance_metric
            developed_folder_str += (
                "_mean_" + str(int(self.brightness)) + "_std_" + str(int(self.contrast))
            )
        elif self.correction_method == "manual_balance":
            parameters_folder_str += "manual"
            developed_folder_str += "manual"
            developed_folder_str += (
                "_gain_"
                + str(self.color_gain_matrix_rgb[0])
                + "_"
                + str(self.color_gain_matrix_rgb[4])
                + "_"
                + str(self.color_gain_matrix_rgb[8])
                + "_sub_"
                + str(self.subtractors_rgb[0])
                + "_"
                + str(self.subtractors_rgb[1])
                + "_"
                + str(self.subtractors_rgb[2])
            )
        # Accept suffixes for the output directories
        if self.suffix:
            parameters_folder_str += "_" + self.suffix
            developed_folder_str += "_" + self.suffix

        self.attenuation_parameters_folder = (
            self.output_dir_path / parameters_folder_str
        )
        self.output_images_folder = self.output_dir_path / developed_folder_str

        if not self.attenuation_parameters_folder.exists():
            self.attenuation_parameters_folder.mkdir(parents=True)
        else:
            file_list = list(self.attenuation_parameters_folder.glob("*.npy"))
            if len(file_list) > 0:
                if not self.force:
                    Console.quit(
                        "Parameters exist for current configuration.",
                        "Run parse with Force (-F flag)...",
                    )
                else:
                    Console.warn(
                        "Code will overwrite existing parameters for ",
                        "current configuration...",
                    )

        if not self.output_images_folder.exists():
            self.output_images_folder.mkdir(parents=True)
        else:
            file_list = list(self.output_images_folder.glob("*.*"))
            if len(file_list) > 0:
                if not self.force:
                    Console.quit(
                        "Corrected images exist for current configuration. ",
                        "Run process with Force (-F flag)...",
                    )
                else:
                    Console.warn(
                        "Code will overwrite existing corrected images for ",
                        "current configuration...",
                    )
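
A quick illustration of the folder-name scheme above, with assumed values for
the brightness, contrast and distance metric (these particular numbers are not
from the source):

brightness, contrast = 30.0, 7.0
distance_metric = "altitude"
parameters_folder_str = "params_" + distance_metric
developed_folder_str = (
    "corrected_" + distance_metric
    + "_mean_" + str(int(brightness)) + "_std_" + str(int(contrast))
)
print(parameters_folder_str)  # params_altitude
print(developed_folder_str)   # corrected_altitude_mean_30_std_7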
Example #20
    def generate_attenuation_correction_parameters(self):
        """Generates image stats and attenuation coefficients and saves the
        parameters for process"""

        if len(self.altitude_list) < 3:
            Console.quit(
                "Insufficient number of images to compute attenuation ",
                "parameters...",
            )

        # create empty matrices to store image correction parameters
        self.image_raw_mean = np.empty(
            (self.image_channels, self.image_height, self.image_width),
            dtype=np.float32,
        )
        self.image_raw_std = np.empty(
            (self.image_channels, self.image_height, self.image_width),
            dtype=np.float32,
        )
        self.image_attenuation_parameters = np.empty(
            (self.image_channels, self.image_height, self.image_width, 3),
            dtype=np.float32,
        )
        self.image_corrected_mean = np.empty(
            (self.image_channels, self.image_height, self.image_width),
            dtype=np.float32,
        )
        self.image_corrected_std = np.empty(
            (self.image_channels, self.image_height, self.image_width),
            dtype=np.float32,
        )
        self.correction_gains = np.empty(
            (self.image_channels, self.image_height, self.image_width),
            dtype=np.float32,
        )

        image_size_gb = (
            self.image_channels
            * self.image_height
            * self.image_width
            * 4.0
            / (1024.0 ** 3)
        )
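        # Cap how many images a single altitude bin may hold, so that one
        # bin's float32 image stack stays within max_bin_size_gb of RAM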
        max_bin_size_gb = 50.0
        max_bin_size = int(max_bin_size_gb / image_size_gb)

        self.bin_band = 0.1
        hist_bins = np.arange(self.altitude_min, self.altitude_max, self.bin_band)
        # Watch out: need to subtract 1 to get the correct number of bins,
        # because the last bin is not included in the range

        images_fn, images_map = open_memmap(
            shape=(
                len(hist_bins) - 1,
                self.image_height * self.image_width,
                self.image_channels,
            ),
            dtype=np.float32,
        )

        distances_fn, distances_map = open_memmap(
            shape=(len(hist_bins) - 1, self.image_height * self.image_width),
            dtype=np.float32,
        )

        distance_vector = None

        if self.depth_map_list and self.distance_metric == "depth_map":
            Console.info("Computing depth map histogram with", hist_bins.size, " bins")

            distance_vector = np.zeros((len(self.depth_map_list), 1))
            for i, dm_file in enumerate(self.depth_map_list):
                dm_np = depth_map.loader(dm_file, self.image_width, self.image_height)
                distance_vector[i] = dm_np.mean(axis=1)

        elif self.altitude_list and self.distance_metric == "altitude":
            Console.info("Computing altitude histogram with", hist_bins.size, " bins")
            distance_vector = np.array(self.altitude_list)

        if distance_vector is not None:
            idxs = np.digitize(distance_vector, hist_bins) - 1

            # Display histogram in console
            for idx_bin in range(hist_bins.size - 1):
                tmp_idxs = np.where(idxs == idx_bin)[0]
                Console.info(
                    "  Bin",
                    format(idx_bin, "02d"),
                    "(",
                    round(hist_bins[idx_bin], 1),
                    "m < x <",
                    round(hist_bins[idx_bin + 1], 1),
                    "m):",
                    len(tmp_idxs),
                    "images",
                )

            with tqdm_joblib(
                tqdm(desc="Computing altitude histogram", total=hist_bins.size - 1,)
            ):
                joblib.Parallel(n_jobs=-2, verbose=0)(
                    joblib.delayed(self.compute_distance_bin)(
                        idxs,
                        idx_bin,
                        images_map,
                        distances_map,
                        max_bin_size,
                        max_bin_size_gb,
                        distance_vector,
                    )
                    for idx_bin in range(hist_bins.size - 1)
                )

            # calculate attenuation parameters per channel
            self.image_attenuation_parameters = corrections.calculate_attenuation_parameters(
                images_map,
                distances_map,
                self.image_height,
                self.image_width,
                self.image_channels,
            )

            # delete memmap handles
            del images_map
            os.remove(images_fn)
            del distances_map
            os.remove(distances_fn)

            # Save attenuation parameter results.
            np.save(
                self.attenuation_params_filepath, self.image_attenuation_parameters,
            )

            corrections.save_attenuation_plots(
                self.attenuation_parameters_folder,
                attn=self.image_attenuation_parameters,
            )

            # compute correction gains per channel
            target_altitude = distance_vector.mean()
            Console.info(
                "Computing correction gains for target altitude", target_altitude,
            )
            self.correction_gains = corrections.calculate_correction_gains(
                target_altitude,
                self.image_attenuation_parameters,
                self.image_height,
                self.image_width,
                self.image_channels,
            )
            Console.warn("Saving correction gains")
            # Save correction gains
            np.save(self.correction_gains_filepath, self.correction_gains)

            corrections.save_attenuation_plots(
                self.attenuation_parameters_folder, gains=self.correction_gains,
            )

            # Useful if fails, to reload precomputed numpyfiles.
            # TODO: offer as a new step.
            # self.image_attenuation_parameters = np.load(
            #   self.attenuation_params_filepath)
            # self.correction_gains = np.load(self.correction_gains_filepath)

            # apply gains to images
            Console.info("Applying attenuation corrections to images...")
            image_properties = [
                self.image_height,
                self.image_width,
                self.image_channels,
            ]
            runner = RunningMeanStd(image_properties)

            memmap_filename, memmap_handle = open_memmap(
                shape=(
                    len(self.camera_image_list),
                    self.image_height,
                    self.image_width,
                    self.image_channels,
                ),
                dtype=np.float32,
            )

            # DEBUG: can be removed
            # Console.error("depth_map_list size", len(self.depth_map_list))
            # Console.error("camera_image_list size", len(self.camera_image_list))
            for i in trange(len(self.camera_image_list)):
                # Load the image
                img = self.loader(self.camera_image_list[i])

                # Load the distance matrix
                if not self.depth_map_list:
                    # TODO: Show the depth_map creation
                    # if self.depth_map_list is None:
                    # Generate matrices on the fly
                    distance = distance_vector[i]
                    distance_mtx = np.empty((self.image_height, self.image_width))
                    distance_mtx.fill(distance)
                else:
                    distance_mtx = depth_map.loader(
                        self.depth_map_list[i], self.image_width, self.image_height,
                    )
                # TODO: Show the size of the produced distance_mtx
                # Correct the image
                corrected_img = corrections.attenuation_correct(
                    img,
                    distance_mtx,
                    self.image_attenuation_parameters,
                    self.correction_gains,
                )
                # TODO: Inspect the corrected image after attenuation correction
                # Before calling compute, let's show the corrected_img dimensions
                # Console.error("corrected_img.shape", corrected_img.shape)
                runner.compute(corrected_img)
                memmap_handle[i] = corrected_img.reshape(
                    self.image_height, self.image_width, self.image_channels
                )

            image_corrected_mean = runner.mean.reshape(
                self.image_height, self.image_width, self.image_channels
            )
            image_corrected_std = runner.std.reshape(
                self.image_height, self.image_width, self.image_channels
            )

            # save parameters for process
            np.save(
                self.corrected_mean_filepath, image_corrected_mean
            )  # TODO: make member
            np.save(
                self.corrected_std_filepath, image_corrected_std
            )  # TODO: make member

            corrections.save_attenuation_plots(
                self.attenuation_parameters_folder,
                img_mean=image_corrected_mean,
                img_std=image_corrected_std,
            )

        else:
            Console.info(
                "No altitude or depth maps available.", "Computing raw mean and std.",
            )

            image_raw_mean, image_raw_std = running_mean_std(
                self.camera_image_list, self.loader
            )
            np.save(self.raw_mean_filepath, image_raw_mean)
            np.save(self.raw_std_filepath, image_raw_std)

            # image_raw_mean = np.load(self.raw_mean_filepath)
            # image_raw_std = np.load(self.raw_std_filepath)

            ch = image_raw_mean.shape[0]
            if ch == 3:
                image_raw_mean = image_raw_mean.transpose((1, 2, 0))
                image_raw_std = image_raw_std.transpose((1, 2, 0))

            imageio.imwrite(
                Path(self.attenuation_parameters_folder) / "image_raw_mean.png",
                image_raw_mean,
            )
            imageio.imwrite(
                Path(self.attenuation_parameters_folder) / "image_raw_std.png",
                image_raw_std,
            )

            corrections.save_attenuation_plots(
                self.attenuation_parameters_folder,
                img_mean=image_raw_mean,
                img_std=image_raw_std,
            )

        Console.info("Correction parameters saved...")
Example #21
    def get_imagelist(self):
        """Generate list of source images"""

        # Store a copy of the currently stored image list in the Corrector object
        _original_image_list = self.camera_image_list
        _original_altitude_list = self.altitude_list
        # Replaces Corrector object's image_list with the camera image list
        # OLD: self.camera_image_list = self.camera.image_list # <---- Now this is done at the end (else condition)
        self.camera_image_list = []
        self.altitude_list = []

        # If using colour_correction, we need to read in the navigation
        if self.correction_method == "colour_correction":
            if self.distance_path == "json_renav_*":
                Console.info(
                    "Picking first JSON folder as the default path to auv_nav",
                    "csv files...",
                )
                dir_ = self.path_processed
                json_list = list(dir_.glob("json_*"))
                if len(json_list) == 0:
                    Console.quit(
                        "No navigation solution could be found. Please run ",
                        "auv_nav parse and process first",
                    )
                self.distance_path = json_list[0]
            metric_path = self.path_processed / self.distance_path
            # Try if ekf exists:
            full_metric_path = metric_path / "csv" / "ekf"
            metric_file = "auv_ekf_" + self.camera_name + ".csv"

            if not full_metric_path.exists():
                full_metric_path = metric_path / "csv" / "dead_reckoning"
                metric_file = "auv_dr_" + self.camera_name + ".csv"
            self.altitude_csv_path = full_metric_path / metric_file

            # Check if file exists
            if not self.altitude_csv_path.exists():
                Console.quit(
                    "The navigation CSV file is not present. Run auv_nav first."
                )

            # read dataframe for corresponding distance csv path
            dataframe = pd.read_csv(self.altitude_csv_path)

            # Deal with bagfiles:
            # if the extension is bag, the list is a list of bagfiles
            if self.camera.extension == "bag":
                self.camera.bagfile_list = copy.deepcopy(self.camera.image_list)
                self.loader.set_bagfile_list_and_topic(
                    self.camera.bagfile_list, self.camera.topic
                )

            # get imagelist for given camera object
            if self.user_specified_image_list != "none":
                path_file_list = Path(self.path_config) / self.user_specified_image_list
                trimmed_csv_file = "trimmed_csv_" + self.camera_name + ".csv"
                self.trimmed_csv_path = Path(self.path_config) / trimmed_csv_file

                if not self.altitude_csv_path.exists():
                    message = "Path to " + metric_file + " does not exist..."
                    Console.quit(message)
                else:
                    # create trimmed csv based on user's list of images
                    dataframe = trim_csv_files(
                        path_file_list, self.altitude_csv_path, self.trimmed_csv_path,
                    )

            # Check images exist:
            valid_idx = []
            for idx, entry in enumerate(dataframe["relative_path"]):
                im_path = self.path_raw / entry
                if im_path.exists() or self.camera.extension == "bag":
                    valid_idx.append(idx)
            # Copy so the column edits below do not write into a view of the
            # original dataframe; reset_index is not in-place, so keep its result
            filtered_dataframe = dataframe.iloc[valid_idx].copy()
            filtered_dataframe = filtered_dataframe.reset_index(drop=True)
            # WARNING: if the column does not contain any 'None' entry, it will be parsed as float, and the .str accessor will fail
            filtered_dataframe["altitude [m]"] = filtered_dataframe[
                "altitude [m]"
            ].astype("string")
            filtered_dataframe = filtered_dataframe[
                ~filtered_dataframe["altitude [m]"].str.contains("None")
            ]  # drop rows with None altitude
            distance_list = filtered_dataframe["altitude [m]"].tolist()
            for _, row in filtered_dataframe.iterrows():
                alt = float(row["altitude [m]"])
                if alt > self.altitude_min and alt < self.altitude_max:
                    if self.camera.extension == "bag":
                        timestamp_from_filename = Path(row["relative_path"]).stem
                        if "\\" in timestamp_from_filename:
                            timestamp_from_filename = timestamp_from_filename.split(
                                "\\"
                            )[-1]
                        self.camera_image_list.append(timestamp_from_filename)
                    else:
                        self.camera_image_list.append(
                            self.path_raw / row["relative_path"]
                        )
                    self.altitude_list.append(alt)

            if len(distance_list) == 0:
                Console.error("No images exist / can be found!")
                Console.error(
                    "Check the file",
                    self.altitude_csv_path,
                    "and make sure that the 'relative_path' column points to",
                    "existing images relative to the raw mission folder (e.g.",
                    self.path_raw,
                    ")",
                )
                Console.error("You may need to reprocess the dive with auv_nav")
                Console.quit("No images were found.")

            # WARNING: what happens in a multidive setup when the current dive has no images (but the rest of the dive does)?
            Console.info(
                len(self.altitude_list),
                "/",
                len(distance_list),
                "Images filtered as per altitude range...",
            )
            if len(self.altitude_list) < 3:
                Console.quit(
                    "Insufficient number of images to compute attenuation ",
                    "parameters...",
                )
        else:
            # Copy the images list from the camera
            self.camera_image_list = self.camera.image_list

            # Join the current image list with the original image list (copy)
            self.camera_image_list.extend(_original_image_list)
            # Show size of the extended image list
            Console.warn(
                ">> The camera image list is now", len(self.camera_image_list)
            )  # JC: I'm leaving this as it is informative for multidive
            # Join the current image list with the original image list (copy)
            self.altitude_list.extend(_original_altitude_list)
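
The 'None'-altitude filtering above can be tried in isolation on a toy
dataframe (a sketch, not part of the source):

import pandas as pd

df = pd.DataFrame({"altitude [m]": [2.1, "None", 2.4]})
df["altitude [m]"] = df["altitude [m]"].astype("string")
df = df[~df["altitude [m]"].str.contains("None")]
print([float(a) for a in df["altitude [m]"]])  # [2.1, 2.4]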
    def fit_and_save(self, cloud, processed_folder):
        """Fit mean plane and uncertainty bounding planes to point cloud

        Parameters
        ----------
        cloud : ndarray of shape (nx3)
            Point cloud
        processed_folder : Path
            Path of the processed folder where outputs are written

        Returns
        -------
        String
            Plane parameters of the mean plane and a set of uncertainty
            bounding planes of the point cloud in yaml-file format.
        """

        total_no_points = len(cloud)

        # Fit mean plane
        Console.info("Fitting a plane to", total_no_points, "points...")
        p = Plane([1, 0, 0, 1.5])
        mean_plane, self.inliers_cloud_list = p.fit(cloud, self.mdt)
        # p.plot(cloud=cloud)

        filename = time.strftime("pointclouds_and_best_model_%Y%m%d_%H%M%S.html")
        plot_pointcloud_and_planes(
            [np.array(cloud), np.array(self.inliers_cloud_list)],
            [np.array(mean_plane)],
            str(processed_folder / filename),
        )

        scale = 1.0 / mean_plane[0]
        mean_plane = np.array(mean_plane) * scale
        mean_plane = mean_plane.tolist()

        Console.info("Least squares found", len(self.inliers_cloud_list), "inliers")

        if len(self.inliers_cloud_list) < 0.5 * len(cloud) * self.gip:
            Console.warn("The number of inliers found are off from what you expected.")
            Console.warn(" * Expected inliers:", len(cloud) * self.gip)
            Console.warn(" * Found inliers:", len(self.inliers_cloud_list))
            Console.warn(
                "Check the output cloud to see if the found plane makes sense."
            )
            Console.warn("Try to increase your distance threshold.")

        inliers_cloud = np.array(self.inliers_cloud_list)
        mean_x = np.mean(inliers_cloud[:, 0])
        mean_y = np.mean(inliers_cloud[:, 1])
        mean_z = np.mean(inliers_cloud[:, 2])
        mean_xyz = np.array([mean_x, mean_y, mean_z])

        # Determine minimum distance between points as function of inlier
        # point cloud size
        std_y = np.std(inliers_cloud[:, 1])
        std_z = np.std(inliers_cloud[:, 2])
        # print("Min y: " + str(np.min(inliers_cloud[:, 1])))
        # print("Max y: " + str(np.max(inliers_cloud[:, 1])))
        # print("Std y: " + str(std_y))
        # print("Min z: " + str(np.min(inliers_cloud[:, 2])))
        # print("Max z: " + str(np.max(inliers_cloud[:, 2])))
        # print("Std z: " + str(std_z))
        min_dist = 2 * math.sqrt(std_y ** 2 + std_z ** 2)
        Console.info("Minimum distance for poisson disc sampling: {}".format(min_dist))
        min_sin_angle = 0.866  # = sin(60°)

        # Append 1 to the points, so they can be multiplied (dot product) with
        # plane parameters to find out if they are in front, behind or on a
        # plane.
        self.inliers_1 = np.concatenate(
            [inliers_cloud, np.ones((inliers_cloud.shape[0], 1))], axis=1
        )

        planes_enclose_inliers = False
        Console.info("Generating uncertainty planes...")
        max_uncertainty_planes = 300
        tries = 0
        failed_distance = 0
        failed_angle = 0
        while (
            planes_enclose_inliers is False
            and len(self.uncertainty_planes) < max_uncertainty_planes
        ):
            tries += 1
            point_cloud_local = random.sample(self.inliers_cloud_list, 3)

            # Check if the points are sufficiently far apart and not aligned
            p0p1 = point_cloud_local[1][1:3] - point_cloud_local[0][1:3]
            p0p2 = point_cloud_local[2][1:3] - point_cloud_local[0][1:3]
            p1p2 = point_cloud_local[2][1:3] - point_cloud_local[1][1:3]
            p0p1_norm = np.linalg.norm(p0p1)
            p0p2_norm = np.linalg.norm(p0p2)
            p1p2_norm = np.linalg.norm(p1p2)

            # Poisson disc sampling: reject points that are too close together
            if p0p1_norm < min_dist or p0p2_norm < min_dist or p1p2_norm < min_dist:
                failed_distance += 1
                if failed_distance % 100000 == 0:
                    Console.info(
                        "Combinations rejected due to distance criterion",
                        "(Poisson disk sampling):",
                        failed_distance,
                        "times,",
                        "due to angle criterion:",
                        failed_angle,
                        "times",
                    )
                continue

            # Reject points that are too closely aligned: |cross| / (product of
            # norms) is the sine of the angle between the two edge vectors
            if abs(np.cross(p0p1, p0p2)) / (p0p1_norm * p0p2_norm) < min_sin_angle:
                failed_angle += 1
                if failed_angle % 100000 == 0:
                    Console.info(
                        "Combinations rejected due to distance criterion",
                        "(Poisson disk sampling):",
                        failed_distance,
                        "times,",
                        "due to angle criterion:",
                        failed_angle,
                        "times",
                    )
                continue

            # Compute plane through the 3 points and append to list
            self.triples.append(np.array(point_cloud_local))
            self.uncertainty_planes.append(plane_through_3_points(point_cloud_local))
            Console.info(
                "Number of planes:",
                len(self.uncertainty_planes),
                "- Number of tries so far:",
                tries,
                "- Combinations rejected due to distance criterion",
                "(Poisson disk sampling):",
                failed_distance,
                "times, due to angle criterion:",
                failed_angle,
                "times",
            )
            planes_enclose_inliers = self._do_planes_enclose_inliers()

        Console.info(
            "... finished generating {} uncertainty planes.".format(
                len(self.uncertainty_planes)
            )
        )

        if len(self.uncertainty_planes) >= max_uncertainty_planes:
            Console.warn("Stopped after reaching the maximum number of "
                         "uncertainty planes ({})".format(max_uncertainty_planes))

        filename = time.strftime(
            "pointclouds_and_uncertainty_planes_all_%Y%m%d_%H%M%S.html"
        )
        plot_pointcloud_and_planes(
            self.triples + [np.array(cloud), inliers_cloud],
            self.uncertainty_planes,
            str(processed_folder / filename),
        )
        # uncomment to save for debugging
        # np.save('inliers_cloud.npy', inliers_cloud)
        # for i, plane in enumerate(self.uncertainty_planes):
        #     np.save('plane' + str(i) + '.npy', plane)

        # Console.info("Removing uncertainty planes that do not contribute...")
        # self._remove_unnecessary_uncertainty_planes()
        # Console.info("... finished removing non-contributing planes. Reduced"
        #              "number of uncertainty planes to {}."
        #              .format(len(self.uncertainty_planes)))
        # assert self._check_if_planes_enclose_inliers()  # Sanity check
        filename = time.strftime(
            "pointclouds_and_uncertainty_planes_%Y%m%d_%H%M%S.html"
        )
        plot_pointcloud_and_planes(
            self.triples + [np.array(cloud), inliers_cloud],
            self.uncertainty_planes,
            str(processed_folder / filename),
        )

        yaml_msg = (
            "mean_xyz_m: "
            + str(mean_xyz.tolist())
            + "\n"
            + "mean_plane: "
            + str(mean_plane)
            + "\n"
        )

        if len(self.uncertainty_planes) > 0:
            uncertainty_planes_str = "uncertainty_planes:\n"
            for up in self.uncertainty_planes:
                uncertainty_planes_str += "  - " + str(up.tolist()) + "\n"
            yaml_msg += uncertainty_planes_str

        yaml_msg += (
            'date: "'
            + Console.get_date()
            + '" \n'
            + 'user: "'
            + Console.get_username()
            + '" \n'
            + 'host: "'
            + Console.get_hostname()
            + '" \n'
            + 'version: "'
            + Console.get_version()
            + '" \n'
        )

        return yaml_msg
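
# plane_through_3_points is defined elsewhere in the codebase and not shown
# in this listing. A minimal sketch of what such a helper could look like,
# assuming planes are represented as [a, b, c, d] with a*x + b*y + c*z + d = 0
# (consistent with the dot-product test against the homogeneous points stored
# in self.inliers_1 above):
import numpy as np

def plane_through_3_points_sketch(points):
    """Hypothetical: plane through three 3D points, returned as [a, b, c, d]."""
    p0, p1, p2 = (np.asarray(p, dtype=float) for p in points)
    normal = np.cross(p1 - p0, p2 - p0)  # plane normal via cross product
    norm = np.linalg.norm(normal)
    if norm == 0:
        raise ValueError("Points are collinear; no unique plane")
    normal /= norm  # unit normal, so dot products become signed distances
    d = -normal.dot(p0)  # offset such that p0 lies on the plane
    return np.concatenate([normal, [d]])

# With this convention, dotting a homogeneous point [x, y, z, 1] with the
# plane vector gives a signed distance: positive in front, negative behind,
# zero on the plane, which is exactly the enclosure test the loop above needs.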
Example #23
def parse_single(filepath, force_overwrite):
    # initiate data and processing flags
    filepath = Path(filepath).resolve()
    filepath = get_raw_folder(filepath)

    if not force_overwrite:
        existing_files = check_output_files_exist(
            get_processed_folder(filepath))
        if existing_files:
            msg = ("It looks like this dataset has already been parsed.\n" +
                   "The following file(s) already exist:\n" + existing_files +
                   "If you would like auv_nav to overwrite existing file," +
                   " rerun it with the flag -F.\n" +
                   "Example:   auv_nav parse -F PATH")
            Console.warn(msg)
            Console.warn("Dataset skipped")
            return

    ftype = "oplab"

    # load mission.yaml config file

    mission_file = filepath / "mission.yaml"
    vehicle_file = filepath / "vehicle.yaml"
    mission_file = get_raw_folder(mission_file)
    vehicle_file = get_raw_folder(vehicle_file)
    Console.info("Loading mission.yaml at", mission_file)
    mission = Mission(mission_file)

    Console.info("Loading vehicle.yaml at", vehicle_file)
    vehicle = Vehicle(vehicle_file)

    # copy mission.yaml and vehicle.yaml to processed folder for process step
    mission_processed = get_processed_folder(mission_file)
    vehicle_processed = get_processed_folder(vehicle_file)

    # Write mission with metadata (username, date and hostname)
    mission.write(mission_processed)
    # mission_file.copy(mission_processed)
    vehicle.write(vehicle_processed)

    # check for recognised formats and create nav file
    outpath = get_processed_folder(filepath)
    outpath = outpath / "nav"
    filename = "nav_standard.json"

    # create the output directory if it does not exist
    if not outpath.is_dir():
        try:
            outpath.mkdir()
        except Exception as e:
            print("Warning:", e)

    Console.info("Loading raw data...")

    if multiprocessing.cpu_count() < 4:
        cpu_to_use = 1
    else:
        cpu_to_use = multiprocessing.cpu_count() - 2

    try:
        pool = multiprocessing.Pool(cpu_to_use)
    except AttributeError as e:
        Console.error(
            "Error:", e,
            "\n===============\nThis error is known to happen when running",
            "the code more than once from the same console in Spyder.",
            "Please run the code from a new console to prevent this error",
            "from happening. You may close the current console.",
            "\n==============",
        )
        return
    pool_list = []

    # read in, parse data and write data
    if not mission.image.empty():
        if mission.image.format in ("acfr_standard", "unagi"):
            pool_list.append(
                pool.apply_async(
                    parse_acfr_images,
                    [mission, vehicle, "images", ftype, outpath],
                ))
        elif mission.image.format == "seaxerocks_3":
            pool_list.append(
                pool.apply_async(
                    parse_seaxerocks_images,
                    [mission, vehicle, "images", ftype, outpath],
                ))
        elif mission.image.format in ("biocam", "biocam4000_15c"):
            pool_list.append(
                pool.apply_async(
                    parse_biocam_images,
                    [mission, vehicle, "images", ftype, outpath],
                ))
        elif mission.image.format == "ntnu_stereo":
            pool_list.append(
                pool.apply_async(
                    parse_ntnu_stereo_images,
                    [mission, vehicle, "images", ftype, outpath],
                ))
        elif mission.image.format == "rosbag_extracted_images":
            pool_list.append(
                pool.apply_async(
                    parse_rosbag_extracted_images,
                    [mission, vehicle, "images", ftype, outpath],
                ))
        elif mission.image.format == "rosbag":
            pool_list.append(
                pool.apply_async(
                    parse_rosbag,
                    [mission, vehicle, "images", ftype, outpath],
                ))
        else:
            Console.quit("Mission image format", mission.image.format,
                         "not supported.")
    if not mission.usbl.empty():
        if mission.usbl.format == "gaps":
            pool_list.append(
                pool.apply_async(parse_gaps,
                                 [mission, vehicle, "usbl", ftype, outpath]))
        elif mission.usbl.format == "usbl_dump":
            pool_list.append(
                pool.apply_async(parse_usbl_dump,
                                 [mission, vehicle, "usbl", ftype, outpath]))
        elif mission.usbl.format == "NOC_nmea":
            pool_list.append(
                pool.apply_async(parse_NOC_nmea,
                                 [mission, vehicle, "usbl", ftype, outpath]))
        elif mission.usbl.format == "eiva_navipac":
            pool_list.append(
                pool.apply_async(
                    parse_eiva_navipac,
                    [mission, vehicle, "usbl", ftype, outpath],
                ))
        elif mission.usbl.format == "rosbag":
            pool_list.append(
                pool.apply_async(
                    parse_rosbag,
                    [mission, vehicle, "usbl", ftype, outpath],
                ))
        else:
            Console.quit("Mission usbl format", mission.usbl.format,
                         "not supported.")

    if not mission.velocity.empty():
        if mission.velocity.format == "phins":
            pool_list.append(
                pool.apply_async(
                    parse_phins,
                    [mission, vehicle, "velocity", ftype, outpath]))
        elif mission.velocity.format == "ae2000":
            pool_list.append(
                pool.apply_async(
                    parse_ae2000,
                    [mission, vehicle, "velocity", ftype, outpath],
                ))
        elif mission.velocity.format == "alr":
            pool_list.append(
                pool.apply_async(
                    parse_alr,
                    [mission, vehicle, "velocity", ftype, outpath],
                ))
        elif mission.velocity.format == "autosub":
            pool_list.append(
                pool.apply_async(
                    parse_autosub,
                    [mission, vehicle, "velocity", ftype, outpath],
                ))
        elif mission.velocity.format == "rdi":
            pool_list.append(
                pool.apply_async(
                    parse_rdi, [mission, vehicle, "velocity", ftype, outpath]))
        elif mission.velocity.format == "ntnu_dvl":
            pool_list.append(
                pool.apply_async(
                    parse_ntnu_dvl,
                    [mission, vehicle, "velocity", ftype, outpath],
                ))
        elif mission.usbl.format == "rosbag":
            pool_list.append(
                pool.apply_async(
                    parse_rosbag,
                    [mission, vehicle, "velocity", ftype, outpath],
                ))
        else:
            Console.quit(
                "Mission velocity format",
                mission.velocity.format,
                "not supported.",
            )

    if not mission.orientation.empty():
        if mission.orientation.format == "phins":
            pool_list.append(
                pool.apply_async(
                    parse_phins,
                    [mission, vehicle, "orientation", ftype, outpath],
                ))
        elif mission.orientation.format == "ae2000":
            pool_list.append(
                pool.apply_async(
                    parse_ae2000,
                    [mission, vehicle, "orientation", ftype, outpath],
                ))
        elif mission.orientation.format == "alr":
            pool_list.append(
                pool.apply_async(
                    parse_alr,
                    [mission, vehicle, "orientation", ftype, outpath],
                ))
        elif mission.orientation.format == "autosub":
            pool_list.append(
                pool.apply_async(
                    parse_autosub,
                    [mission, vehicle, "orientation", ftype, outpath],
                ))
        elif mission.orientation.format == "rdi":
            pool_list.append(
                pool.apply_async(
                    parse_rdi,
                    [mission, vehicle, "orientation", ftype, outpath],
                ))
        elif mission.orientation.format == "eiva_navipac":
            pool_list.append(
                pool.apply_async(
                    parse_eiva_navipac,
                    [mission, vehicle, "orientation", ftype, outpath],
                ))
        elif mission.usbl.format == "rosbag":
            pool_list.append(
                pool.apply_async(
                    parse_rosbag,
                    [mission, vehicle, "orientation", ftype, outpath],
                ))
        else:
            Console.quit(
                "Mission orientation format",
                mission.orientation.format,
                "not supported.",
            )

    if not mission.depth.empty():
        if mission.depth.format == "phins":
            pool_list.append(
                pool.apply_async(parse_phins,
                                 [mission, vehicle, "depth", ftype, outpath]))
        elif mission.depth.format == "ae2000":
            pool_list.append(
                pool.apply_async(parse_ae2000,
                                 [mission, vehicle, "depth", ftype, outpath]))
        elif mission.depth.format == "alr":
            pool_list.append(
                pool.apply_async(parse_alr,
                                 [mission, vehicle, "depth", ftype, outpath]))
        elif mission.depth.format == "autosub":
            pool_list.append(
                pool.apply_async(parse_autosub,
                                 [mission, vehicle, "depth", ftype, outpath]))
        elif mission.depth.format == "gaps":
            pool_list.append(
                pool.apply_async(parse_gaps,
                                 [mission, vehicle, "depth", ftype, outpath]))
        elif mission.depth.format == "eiva_navipac":
            pool_list.append(
                pool.apply_async(
                    parse_eiva_navipac,
                    [mission, vehicle, "depth", ftype, outpath],
                ))
        elif mission.usbl.format == "rosbag":
            pool_list.append(
                pool.apply_async(
                    parse_rosbag,
                    [mission, vehicle, "depth", ftype, outpath],
                ))
        else:
            Console.quit("Mission depth format", mission.depth.format,
                         "not supported.")

    if not mission.altitude.empty():
        if mission.altitude.format == "phins":
            pool_list.append(
                pool.apply_async(
                    parse_phins,
                    [mission, vehicle, "altitude", ftype, outpath]))
        elif mission.altitude.format == "ae2000":
            pool_list.append(
                pool.apply_async(
                    parse_ae2000,
                    [mission, vehicle, "altitude", ftype, outpath],
                ))
        elif mission.altitude.format == "alr":
            pool_list.append(
                pool.apply_async(
                    parse_alr,
                    [mission, vehicle, "altitude", ftype, outpath],
                ))
        elif mission.altitude.format == "autosub":
            pool_list.append(
                pool.apply_async(
                    parse_autosub,
                    [mission, vehicle, "altitude", ftype, outpath],
                ))
        elif mission.altitude.format == "rdi":
            pool_list.append(
                pool.apply_async(
                    parse_rdi, [mission, vehicle, "altitude", ftype, outpath]))
        elif mission.altitude.format == "ntnu_dvl":
            pool_list.append(
                pool.apply_async(
                    parse_ntnu_dvl,
                    [mission, vehicle, "altitude", ftype, outpath],
                ))
        elif mission.usbl.format == "rosbag":
            pool_list.append(
                pool.apply_async(
                    parse_rosbag,
                    [mission, vehicle, "altitude", ftype, outpath],
                ))
        else:
            Console.quit(
                "Mission altitude format",
                mission.altitude.format,
                "not supported.",
            )

    if not mission.tide.empty():
        if mission.tide.format == "NOC_polpred":
            tide_list = parse_NOC_polpred(mission, vehicle, "tide", ftype,
                                          outpath)
        else:
            Console.quit("Mission tide format", mission.tide.format,
                         "not supported.")
    else:
        tide_list = None

    pool.close()
    pool.join()

    Console.info("...done loading raw data.")
    Console.info("Compile data list...")

    data_list = [[{
        "epoch_timestamp": 0.0,
        "class": "origin",
        "category": "origin",
        "data": [{
            "latitude": mission.origin.latitude,
            "longitude": mission.origin.longitude,
            "crs": mission.origin.crs,
            "date": mission.origin.date,
        }],
    }]]

    # Set the flag once in advance so the image format is not compared for
    # every item in pool_list
    correcting_timestamps = mission.image.format == "biocam"
    for i in pool_list:
        results = i.get()
        # If current retrieved data is DEPTH
        # and if TIDE data is available
        if len(results) < 1:
            continue
        if results[0] is None:
            Console.warn(
                "Some results are empty. Please check whether this is expected."
            )
            continue
        if correcting_timestamps:
            if results[0]["category"] == "image":
                Console.info("Correction of BioCam cpu timestamps...")
                results = correct_timestamps(results)
        if (results[0]["category"] == Category.DEPTH
                or results[0]["category"] == Category.USBL):
            if not mission.tide.empty():
                # proceed to tidal correction
                Console.info("Tidal correction of depth vector...")
                # Offset depth to account for tides
                j = 0
                for k in range(len(results)):
                    while (j < len(tide_list)
                           and tide_list[j]["epoch_timestamp"] <
                           results[k]["epoch_timestamp"]):
                        j = j + 1

                    if j >= 1:
                        _result = interpolate(
                            results[k]["epoch_timestamp"],
                            tide_list[j - 1]["epoch_timestamp"],
                            tide_list[j]["epoch_timestamp"],
                            tide_list[j - 1]["data"][0]["height"],
                            tide_list[j]["data"][0]["height"],
                        )
                        if results[0]["category"] == Category.DEPTH:
                            results[k]["data"][0]["depth"] = (
                                results[k]["data"][0]["depth"] - _result)
                        elif results[0]["category"] == Category.USBL:
                            results[k]["data_target"][4]["depth"] = (
                                results[k]["data_target"][4]["depth"] -
                                _result)
        data_list.append(results)

    Console.info("...done compiling data list.")

    Console.info("Writing to output file...")
    data_list_temp = []
    for i in data_list:
        data_list_temp += i

    # create file (overwrite if exists)
    nav_file = outpath / filename
    with nav_file.open("w") as fileout:
        json.dump(data_list_temp, fileout, indent=2)
    Console.info("...done writing to output file.")

    del data_list_temp
    del data_list

    # interlace the data based on timestamps
    Console.info("Interlacing data...")
    parse_interlacer(outpath, filename)
    Console.info("...done interlacing data. Output saved to",
                 outpath / filename)
    plot_parse_data(outpath, ftype)
    Console.info("Complete parse data")
def call_parse(args):
    """Perform parsing of configuration yaml files and generate image
    correction parameters

    Parameters
    -----------
    args : parse_args object
        User provided arguments for path of source images
    """

    # Now args.path is a list of paths (str / os.PathLike objects)
    path_list = [Path(path).resolve() for path in args.path]
    if len(path_list) == 1:
        path = path_list[0]
        Console.info("Single path provided, normal single dive mode...")
    else:
        Console.warn(
            "Multiple paths provided [{}]. Checking each path...".format(
                len(path_list)))
        for path in path_list:
            # check if path is valid
            if not path.exists():
                Console.error("Path", path,
                              "does not exist! Exiting...")  # quit
                sys.exit(1)
            else:
                Console.info("\t", path, " [OK]")

    # Populating the configuration and camerasystem lists for each dive path
    # The camera system is pulled first from <config> folder if available, if not from <raw> folder
    correct_config_list, camerasystem_list = zip(*[
        load_configuration_and_camera_system(path, args.suffix)
        for path in path_list
    ])
    # correct_config <--- from correct_images.yaml  (<config> folder)
    # camerasystem   <--- from camera.yaml          (<config> folder or from <raw> folder)

    # Let's check that both lists have the same length and are not empty (same number of dives)
    if len(correct_config_list) != len(camerasystem_list):
        Console.error("Number of [camerasystem] and [configuration] differ!")
        sys.exit(1)
    if len(correct_config_list) == 0:
        Console.error("No valid camerasystem/configuration found!")
        sys.exit(1)

    # When in multidive mode, check that all camera systems are the same.
    # For this we compare camera_system.camera_system.
    # (The test is trivially true in single dive mode.)
    if len(camerasystem_list) > 1:
        camera_system = camerasystem_list[0]
        for cs in camerasystem_list:
            # the first entry will be repeated, no problem with that
            # TODO: Extend is_equivalent() method allowing checking cameras in different orders
            # WARNING: We decide not to use equivalent() here, because it is not robust enough. Enforce same camera order
            if not camera_system.camera_system == cs.camera_system:
                Console.error("Camera systems differ!")
                Console.error("\tFirst camera system (reference) ",
                              camera_system.camera_system)
                Console.error("\tWrong camera system (current)   ",
                              cs.camera_system)
                sys.exit(1)
        Console.info("Camera systems are the same for all dives.")

    # Check if the correct_config_list elements are the same (equivalent)
    correct_config = correct_config_list[0]
    if len(correct_config_list) > 1:
        for cc in correct_config_list:
            # Check if the relevant fields of the configuration are the same
            # (including per-dive camera setup)
            if not correct_config.is_equivalent(cc):
                Console.error("Configurations [correct_config] do not match!")
                sys.exit(1)
        Console.info("Configurations are equivalent for all dives.")

    camerasystem = camerasystem_list[0]
    for camera in camerasystem.cameras:
        # check if the camera also exists in the configuration
        if camera.name not in [
                c.camera_name for c in correct_config.configs.camera_configs
        ]:  # ignore if not present
            Console.warn(
                "Camera [", camera.name,
                "] defined in <camera.yaml> but not found in configuration. Skipping..."
            )
        else:
            Console.info("Parsing for camera", camera.name)
            # Create a Corrector object for each camera with empty configuration
            # The configuration and the paths will be populated later on a per-dive basis
            corrector = Corrector(args.force,
                                  args.suffix,
                                  camera,
                                  correct_config=None)
            # call new list-compatible implementation of parse()
            corrector.parse(path_list, correct_config_list)

    Console.info(
        "Parse completed for all cameras. Please run process to develop ",
        "corrected images...",
    )
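
# The zip(*[...]) idiom used above transposes the list of per-dive
# (correct_config, camerasystem) pairs into two parallel sequences. A
# standalone illustration with placeholder values:
pairs = [("config_a", "cams_a"), ("config_b", "cams_b")]
configs, camera_systems = zip(*pairs)
assert configs == ("config_a", "config_b")
assert camera_systems == ("cams_a", "cams_b")
# Note that zip returns tuples, which is sufficient for the len() and
# indexing operations performed on correct_config_list and camerasystem_list.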
Example #25
def warn_if_zero(val, name):
    if val == 0:
        Console.warn("The value for", name, "is zero. Is this expected?")
def load_configuration_and_camera_system(path, suffix=None):
    """Generate correct_config and camera system objects from input config
    yaml files

    Parameters
    -----------
    path : Path
        User provided Path of source images
    suffix : str, optional
        Suffix used to select the correct_images_<suffix>.yaml configuration
    """

    Console.info("Parsing path with suffix:", suffix)

    # resolve paths to raw, processed and config folders
    path_raw_folder = get_raw_folder(path)
    path_config_folder = get_config_folder(path)

    # resolve path to mission.yaml
    path_mission = path_raw_folder / "mission.yaml"

    # find mission and correct_images yaml files
    if path_mission.exists():
        Console.info("File mission.yaml found at", path_mission)
    else:
        Console.quit("File mission.yaml file not found at", path_raw_folder)

    # load mission parameters
    mission = Mission(path_mission)

    # resolve path to camera.yaml file
    camera_yaml_raw_path = path_raw_folder / "camera.yaml"
    camera_yaml_config_path = path_config_folder / "camera.yaml"

    camera_yaml_path = None
    default_file_path_correct_config = None

    if (not camera_yaml_raw_path.exists()
            and not camera_yaml_config_path.exists()):
        Console.info(
            "camera.yaml file not found in /raw or /config folder. Using ",
            "default camera.yaml file...",
        )
        # find out default yaml paths
        root = Path(__file__).resolve().parents[1]

        acfr_std_camera_file = "auv_nav/default_yaml/ts1/SSK17-01/camera.yaml"
        sx3_camera_file = "auv_nav/default_yaml/ae2000/YK17-23C/camera.yaml"
        biocam_camera_file = "auv_nav/default_yaml/as6/DY109/camera.yaml"
        biocam4000_15c_camera_file = "auv_nav/default_yaml/alr/jc220/camera.yaml"
        hybis_camera_file = "auv_nav/default_yaml/hybis/camera.yaml"
        ntnu_camera_file = "auv_nav/default_yaml/ntnu_stereo/tautra21/camera.yaml"
        rosbag_camera_file = "auv_nav/default_yaml/rosbag/grassmap/camera.yaml"

        acfr_std_correct_config_file = (
            "correct_images/default_yaml/acfr/correct_images.yaml")
        sx3_std_correct_config_file = (
            "correct_images/default_yaml/sx3/correct_images.yaml")
        biocam_std_correct_config_file = (
            "correct_images/default_yaml/biocam/correct_images.yaml")
        biocam4000_15c_std_correct_config_file = (
            "correct_images/default_yaml/biocam4000_15c/correct_images.yaml")
        hybis_std_correct_config_file = (
            "correct_images/default_yaml/hybis/correct_images.yaml")
        ntnu_std_correct_config_file = (
            "correct_images/default_yaml/ntnu_stereo/correct_images.yaml")
        rosbag_std_correct_config_file = (
            "correct_images/default_yaml/rosbag/correct_images.yaml")

        Console.info("Image format:", mission.image.format)

        if mission.image.format == "acfr_standard":
            camera_yaml_path = root / acfr_std_camera_file
            default_file_path_correct_config = root / acfr_std_correct_config_file
        elif mission.image.format == "seaxerocks_3":
            camera_yaml_path = root / sx3_camera_file
            default_file_path_correct_config = root / sx3_std_correct_config_file
        elif mission.image.format == "biocam":
            camera_yaml_path = root / biocam_camera_file
            default_file_path_correct_config = root / biocam_std_correct_config_file
        elif mission.image.format == "biocam4000_15c":
            camera_yaml_path = root / biocam4000_15c_camera_file
            default_file_path_correct_config = (
                root / biocam4000_15c_std_correct_config_file)
        elif mission.image.format == "hybis":
            camera_yaml_path = root / hybis_camera_file
            default_file_path_correct_config = root / hybis_std_correct_config_file
        elif mission.image.format == "ntnu_stereo":
            camera_yaml_path = root / ntnu_camera_file
            default_file_path_correct_config = root / ntnu_std_correct_config_file
        elif mission.image.format == "rosbag":
            camera_yaml_path = root / rosbag_camera_file
            default_file_path_correct_config = root / rosbag_std_correct_config_file
            camera_yaml_path.copy(camera_yaml_config_path)
            Console.info(
                "Copied camera.yaml file to config folder. Please edit it.")
            Console.info("The file is located at", camera_yaml_config_path)
        else:
            Console.quit(
                "Image system in camera.yaml does not match with mission.yaml",
                "Provide correct camera.yaml in /raw folder... ",
            )
    elif (camera_yaml_raw_path.exists()
          and not camera_yaml_config_path.exists()):
        Console.info("Found camera.yaml file in /raw folder")
        camera_yaml_path = camera_yaml_raw_path
    elif (not camera_yaml_raw_path.exists()
          and camera_yaml_config_path.exists()):
        Console.info("Found camera.yaml file in /config folder")
        camera_yaml_path = camera_yaml_config_path
    elif camera_yaml_raw_path.exists() and camera_yaml_config_path.exists():
        Console.info("Found camera.yaml both in /raw and in /config folder")
        Console.info("Using camera.yaml from /config folder")
        camera_yaml_path = camera_yaml_config_path

    # instantiate the camera system and setup cameras from mission and
    # config files / auv_nav
    camera_system = CameraSystem(camera_yaml_path, path_raw_folder)
    if camera_system.camera_system != mission.image.format:
        Console.quit(
            "Image system in camera.yaml does not match with mission.yaml",
            "Provide correct camera.yaml in /raw folder",
        )
    # check for correct_config yaml path
    path_correct_images = None
    if suffix == "" or suffix is None:
        path_correct_images = path_config_folder / "correct_images.yaml"
    else:
        path_correct_images = path_config_folder / ("correct_images_" +
                                                    suffix + ".yaml")
    if path_correct_images.exists():
        Console.info(
            "Configuration file correct_images.yaml file found at",
            path_correct_images,
        )
    else:
        if default_file_path_correct_config is None:
            Console.quit(
                "Configuration file not found at", path_correct_images,
                "and no default correct_images.yaml is available for this",
                "image format.",
            )
        default_file_path_correct_config.copy(path_correct_images)
        Console.warn(
            "Configuration file not found, copying a default one to",
            path_correct_images,
        )

    # load parameters from correct_config.yaml
    correct_config = CorrectConfig(path_correct_images)
    return correct_config, camera_system
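
# The camera.yaml lookup above boils down to a simple precedence rule:
# /config wins over /raw, and if neither exists the packaged defaults are
# used. A condensed, hypothetical equivalent of that branching:
from pathlib import Path
from typing import Optional

def resolve_camera_yaml_sketch(raw_path: Path,
                               config_path: Path) -> Optional[Path]:
    """Return the camera.yaml to use, or None to fall back to defaults."""
    if config_path.exists():
        return config_path  # /config takes precedence over /raw
    if raw_path.exists():
        return raw_path
    return None  # caller selects a packaged default per image format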
Example #27
    def compute_distance_bin(
        self,
        idxs,
        idx_bin,
        images_map,
        distances_map,
        max_bin_size,
        max_bin_size_gb,
        distance_vector,
    ):
        dimensions = [self.image_height, self.image_width, self.image_channels]
        tmp_idxs = np.where(idxs == idx_bin)[0]

        if len(tmp_idxs) > 2:
            bin_images = [self.camera_image_list[i] for i in tmp_idxs]
            bin_distances_sample = None
            bin_images_sample = None

            # Random sample if memmap has to be created
            if self.smoothing != "mean" and len(bin_images) > max_bin_size:
                Console.info(
                    "Randomly sampling altitude bin to fit in", max_bin_size_gb, "GB",
                )
                bin_images = random.sample(bin_images, max_bin_size)

            if not self.depth_map_list:
                # Generate matrices on the fly
                distance_bin = distance_vector[tmp_idxs]
                distance_bin_sample = distance_bin.mean()
                if distance_bin_sample <= 0 or np.isnan(distance_bin_sample):
                    Console.warn("The mean distance is equal or lower than zero!")
                    Console.warn("Printing the entire vector:", distance_bin)
                    Console.warn("Printing the mean:", distance_bin_sample)
                    distance_bin_sample = self.altitude_min + self.bin_band * idx_bin

                bin_distances_sample = np.empty((self.image_height, self.image_width))
                bin_distances_sample.fill(distance_bin_sample)
            else:
                bin_distances = [self.depth_map_list[i] for i in tmp_idxs]
                bin_distances_sample = running_mean_std(
                    bin_distances, loader=self.loader
                )[0]
                # distance_bin_sample is needed below for the figure filenames
                distance_bin_sample = float(np.mean(bin_distances_sample))

            if self.smoothing == "mean":
                bin_images_sample = running_mean_std(bin_images, loader=self.loader)[0]
            elif self.smoothing == "mean_trimmed":
                memmap_filename, memmap_handle = create_memmap(
                    bin_images, dimensions, loader=self.loader,
                )
                bin_images_sample = image_mean_std_trimmed(memmap_handle)[0]
                del memmap_handle
                os.remove(memmap_filename)
            elif self.smoothing == "median":
                memmap_filename, memmap_handle = create_memmap(
                    bin_images, dimensions, loader=self.loader,
                )
                bin_images_sample = median_array(memmap_handle)
                del memmap_handle
                os.remove(memmap_filename)

            base_path = Path(self.attenuation_parameters_folder)

            fig = plt.figure()
            plt.imshow(bin_images_sample)
            plt.colorbar()
            plt.title("Image bin " + str(idx_bin))
            fig_name = base_path / (
                "bin_images_sample_" + str(distance_bin_sample) + "m.png"
            )
            # Console.info("Saved figure at", fig_name)
            plt.savefig(fig_name, dpi=600)
            plt.close(fig)

            fig = plt.figure()
            plt.imshow(bin_distances_sample)
            plt.colorbar()
            plt.title("Distance bin " + str(idx_bin))
            fig_name = base_path / (
                "bin_distances_sample_" + str(distance_bin_sample) + "m.png"
            )
            # Console.info("Saved figure at", fig_name)
            plt.savefig(fig_name, dpi=600)
            plt.close(fig)

            images_map[idx_bin] = bin_images_sample.reshape(
                [self.image_height * self.image_width, self.image_channels]
            )
            distances_map[idx_bin] = bin_distances_sample.reshape(
                [self.image_height * self.image_width]
            )
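
# running_mean_std is not shown in this listing; the point of using it for
# the "mean" smoothing path is that images can be averaged one at a time
# instead of materialising a memmap. A hypothetical Welford-style sketch,
# assuming the loader is callable and returns a numpy array per image:
import numpy as np

def running_mean_std_sketch(image_list, loader):
    """Incremental mean and std over a list of images, loaded one at a time."""
    n, mean, m2 = 0, None, None
    for item in image_list:
        img = loader(item).astype(np.float64)
        n += 1
        if mean is None:
            mean = np.zeros_like(img)
            m2 = np.zeros_like(img)
        delta = img - mean
        mean += delta / n
        m2 += delta * (img - mean)  # Welford update keeps memory bounded
    std = np.sqrt(m2 / n) if n else None
    return mean, std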
Example #28
    def load_configuration(self, correct_config=None):
        if correct_config is None:  # nothing to do here, we expect an explicit call
            Console.warn("No correct_config provided. Skipping load_configuration()")
            return

        self.correct_config = correct_config

        """Load general configuration parameters"""
        self.correction_method = self.correct_config.method
        if self.correction_method == "colour_correction":
            self.distance_metric = self.correct_config.color_correction.distance_metric
            self.distance_path = self.correct_config.color_correction.metric_path
            self.altitude_max = self.correct_config.color_correction.altitude_max
            self.altitude_min = self.correct_config.color_correction.altitude_min
            self.smoothing = self.correct_config.color_correction.smoothing
            self.window_size = self.correct_config.color_correction.window_size
            self.outlier_rejection = self.correct_config.color_correction.outlier_reject
        self.cameraconfigs = self.correct_config.configs.camera_configs
        self.undistort = self.correct_config.output_settings.undistort_flag
        self.output_format = self.correct_config.output_settings.compression_parameter

        # Load camera parameters
        cam_idx = self.get_camera_idx()
        self.camera_found = False
        if cam_idx is None:
            Console.info(
                "Camera not included in correct_images.yaml. No",
                "processing will be done for this camera.",
            )
            return
        else:
            self.camera_found = True
        self.user_specified_image_list_parse = self.cameraconfigs[
            cam_idx
        ].imagefilelist_parse
        self.user_specified_image_list_process = self.cameraconfigs[
            cam_idx
        ].imagefilelist_process

        if self.correction_method == "colour_correction":
            # Brightness and contrast are percentages of 255,
            # e.g. a brightness of 30 means 30% of 255, i.e. approximately 77
            self.brightness = float(self.cameraconfigs[cam_idx].brightness)
            self.contrast = float(self.cameraconfigs[cam_idx].contrast)
        elif self.correction_method == "manual_balance":
            self.subtractors_rgb = np.array(self.cameraconfigs[cam_idx].subtractors_rgb)
            self.color_gain_matrix_rgb = np.array(
                self.cameraconfigs[cam_idx].color_gain_matrix_rgb
            )

        # Create output directories and needed attributes
        self.create_output_directories()

        # Define basic filepaths
        if self.correction_method == "colour_correction":
            p = Path(self.attenuation_parameters_folder)
            self.attenuation_params_filepath = p / "attenuation_parameters.npy"
            self.correction_gains_filepath = p / "correction_gains.npy"
            self.corrected_mean_filepath = p / "image_corrected_mean.npy"
            self.corrected_std_filepath = p / "image_corrected_std.npy"
            self.raw_mean_filepath = p / "image_raw_mean.npy"
            self.raw_std_filepath = p / "image_raw_std.npy"

        # Define image loader
        # Use default loader
        self.loader = loader.Loader()
        self.loader.bit_depth = self.camera.bit_depth
        if self.camera.extension == "raw":
            self.loader.set_loader("xviii")
        elif self.camera.extension == "bag":
            self.loader.set_loader("rosbag")
            tz_offset_s = (
                read_timezone(self.mission.image.timezone) * 60
                + self.mission.image.timeoffset
            )
            self.loader.tz_offset_s = tz_offset_s
        else:
            self.loader.set_loader("default")
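
# A small worked example of the brightness/contrast convention noted in
# load_configuration above: values are percentages of the 8-bit range
# (hypothetical helper name, for illustration only):
def percent_of_255(percent):
    return percent / 100.0 * 255.0

assert abs(percent_of_255(30) - 76.5) < 1e-9  # "brightness of 30" ~ 77 of 255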
Example #29
def interpolate_sensor_list(
    sensor_list,
    sensor_name,
    sensor_offsets,
    origin_offsets,
    latlon_reference,
    _centre_list,
):
    j = 0
    [origin_x_offset, origin_y_offset, origin_z_offset] = origin_offsets
    [latitude_reference, longitude_reference] = latlon_reference
    # Check if camera activates before dvl and orientation sensors.
    start_time = _centre_list[0].epoch_timestamp
    end_time = _centre_list[-1].epoch_timestamp
    if (sensor_list[0].epoch_timestamp > end_time
            or sensor_list[-1].epoch_timestamp < start_time):
        Console.warn(
            "{} timestamps do not overlap with dead reckoning data; "
            "check timestamp_history.pdf via the -v option.".format(sensor_name))
    else:
        sensor_overlap_flag = 0
        for i in range(len(sensor_list)):
            if sensor_list[i].epoch_timestamp < start_time:
                sensor_overlap_flag = 1
            else:
                if i > 0:
                    Console.warn(
                        "Deleted", i, "entries from sensor", sensor_name,
                        "- reason: data before start of mission",
                    )
                    del sensor_list[:i]
                break
        for i in range(len(sensor_list)):
            if j >= len(_centre_list) - 1:
                ii = len(sensor_list) - i
                if ii > 0:
                    Console.warn(
                        "Deleted", ii, "entries from sensor", sensor_name,
                        "- reason: data after end of mission",
                    )
                    del sensor_list[i:]
                sensor_overlap_flag = 1
                break
            while _centre_list[j].epoch_timestamp < sensor_list[
                    i].epoch_timestamp:
                if (j + 1 > len(_centre_list) - 1
                        or _centre_list[j + 1].epoch_timestamp >
                        sensor_list[-1].epoch_timestamp):
                    break
                j += 1

            sensor_list[i].roll = interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].roll,
                _centre_list[j].roll,
            )
            sensor_list[i].pitch = interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].pitch,
                _centre_list[j].pitch,
            )
            if abs(_centre_list[j].yaw - _centre_list[j - 1].yaw) > 180:
                if _centre_list[j].yaw > _centre_list[j - 1].yaw:
                    sensor_list[i].yaw = interpolate(
                        sensor_list[i].epoch_timestamp,
                        _centre_list[j - 1].epoch_timestamp,
                        _centre_list[j].epoch_timestamp,
                        _centre_list[j - 1].yaw,
                        _centre_list[j].yaw - 360,
                    )
                else:
                    sensor_list[i].yaw = interpolate(
                        sensor_list[i].epoch_timestamp,
                        _centre_list[j - 1].epoch_timestamp,
                        _centre_list[j].epoch_timestamp,
                        _centre_list[j - 1].yaw - 360,
                        _centre_list[j].yaw,
                    )
                if sensor_list[i].yaw < 0:
                    sensor_list[i].yaw += 360
                elif sensor_list[i].yaw > 360:
                    sensor_list[i].yaw -= 360
            else:
                sensor_list[i].yaw = interpolate(
                    sensor_list[i].epoch_timestamp,
                    _centre_list[j - 1].epoch_timestamp,
                    _centre_list[j].epoch_timestamp,
                    _centre_list[j - 1].yaw,
                    _centre_list[j].yaw,
                )
            sensor_list[i].x_velocity = interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].x_velocity,
                _centre_list[j].x_velocity,
            )
            sensor_list[i].y_velocity = interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].y_velocity,
                _centre_list[j].y_velocity,
            )
            sensor_list[i].z_velocity = interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].z_velocity,
                _centre_list[j].z_velocity,
            )
            sensor_list[i].altitude = interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].altitude,
                _centre_list[j].altitude,
            )

            [x_offset, y_offset, z_offset] = body_to_inertial(
                sensor_list[i].roll,
                sensor_list[i].pitch,
                sensor_list[i].yaw,
                origin_x_offset - sensor_offsets[0],
                origin_y_offset - sensor_offsets[1],
                origin_z_offset - sensor_offsets[2],
            )

            sensor_list[i].northings = (interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].northings,
                _centre_list[j].northings,
            ) - x_offset)
            sensor_list[i].eastings = (interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].eastings,
                _centre_list[j].eastings,
            ) - y_offset)
            sensor_list[i].altitude = (interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].altitude,
                _centre_list[j].altitude,
            ) - z_offset)
            sensor_list[i].depth = (interpolate(
                sensor_list[i].epoch_timestamp,
                _centre_list[j - 1].epoch_timestamp,
                _centre_list[j].epoch_timestamp,
                _centre_list[j - 1].depth,
                _centre_list[j].depth,
            ) - z_offset)
            [sensor_list[i].latitude,
             sensor_list[i].longitude] = metres_to_latlon(
                 latitude_reference,
                 longitude_reference,
                 sensor_list[i].eastings,
                 sensor_list[i].northings,
             )

            sensor_list[i].northings_std = interpolate_property(
                _centre_list, i, sensor_list, j, "northings_std")
            sensor_list[i].eastings_std = interpolate_property(
                _centre_list, i, sensor_list, j, "eastings_std")
            sensor_list[i].depth_std = interpolate_property(
                _centre_list, i, sensor_list, j, "depth_std")
            sensor_list[i].roll_std = interpolate_property(
                _centre_list, i, sensor_list, j, "roll_std")
            sensor_list[i].pitch_std = interpolate_property(
                _centre_list, i, sensor_list, j, "pitch_std")
            sensor_list[i].yaw_std = interpolate_property(
                _centre_list, i, sensor_list, j, "yaw_std")
            sensor_list[i].x_velocity_std = interpolate_property(
                _centre_list, i, sensor_list, j, "x_velocity_std")
            sensor_list[i].y_velocity_std = interpolate_property(
                _centre_list, i, sensor_list, j, "y_velocity_std")
            sensor_list[i].z_velocity_std = interpolate_property(
                _centre_list, i, sensor_list, j, "z_velocity_std")
            sensor_list[i].vroll_std = interpolate_property(
                _centre_list, i, sensor_list, j, "vroll_std")
            sensor_list[i].vpitch_std = interpolate_property(
                _centre_list, i, sensor_list, j, "vpitch_std")
            sensor_list[i].vyaw_std = interpolate_property(
                _centre_list, i, sensor_list, j, "vyaw_std")

            if _centre_list[j].covariance is not None:
                sensor_list[i].covariance = interpolate_covariance(
                    sensor_list[i].epoch_timestamp,
                    _centre_list[j - 1].epoch_timestamp,
                    _centre_list[j].epoch_timestamp,
                    _centre_list[j - 1].covariance,
                    _centre_list[j].covariance,
                )

        if sensor_overlap_flag == 1:
            Console.warn(
                "Sensor data from {} spans further than dead reckoning data."
                " Data outside DR is ignored.".format(sensor_name))
        Console.info("Complete interpolation and coordinate transfomations "
                     "for {}".format(sensor_name))
def rescale_camera(path, camera_system, camera):
    name = camera.camera_name
    distance_path = camera.distance_path
    interpolate_method = camera.interpolate_method
    image_path = camera.path
    target_pixel_size = camera.target_pixel_size
    maintain_pixels = bool(camera.maintain_pixels)
    output_folder = camera.output_folder

    idx = [
        i for i, camera in enumerate(camera_system.cameras)
        if camera.name == name
    ]

    if len(idx) > 0:
        Console.info("Camera found in camera.yaml file...")
    else:
        Console.warn(
            "Camera not found in camera.yaml file. Please provide a relevant",
            "camera.yaml file...")
        return False

    # obtain images to be rescaled
    path_processed = get_processed_folder(path)
    image_path = path_processed / image_path

    # obtain path to distance / altitude values
    full_distance_path = path_processed / distance_path
    full_distance_path = full_distance_path / "csv" / "ekf"
    distance_file = "auv_ekf_" + name + ".csv"
    distance_path = full_distance_path / distance_file

    # obtain focal lengths from calibration file
    camera_params_folder = path_processed / "calibration"
    camera_params_filename = "mono_" + name + ".yaml"
    camera_params_file_path = camera_params_folder / camera_params_filename

    if not camera_params_file_path.exists():
        Console.quit("Calibration file not found...")
    else:
        Console.info("Calibration file found...")

    monocam = MonoCamera(camera_params_file_path)
    focal_length_x = monocam.K[0, 0]
    focal_length_y = monocam.K[1, 1]

    # create output path
    output_directory = path_processed / output_folder
    if not output_directory.exists():
        output_directory.mkdir(parents=True)

    # call rescale function
    dataframe = pd.read_csv(distance_path)
    imagenames_list = [
        filename for filename in os.listdir(image_path)
        if filename.endswith((".jpg", ".png", ".tif"))
    ]
    Console.info("Distance values loaded...")
    rescale_images(
        imagenames_list,
        image_path,
        interpolate_method,
        target_pixel_size,
        dataframe,
        output_directory,
        focal_length_x,
        focal_length_y,
        maintain_pixels,
    )
    return True
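
# For context on rescale_images (not shown in this listing): with a pinhole
# camera the seafloor footprint of one pixel is roughly
# altitude / focal_length (focal length in pixels), so a plausible per-image
# scale factor is the ratio of the current footprint to the target pixel
# size. Hypothetical sketch, not the actual implementation:
def scale_factor_sketch(altitude_m, focal_length_px, target_pixel_size_m):
    current_pixel_size_m = altitude_m / focal_length_px  # pinhole approximation
    return current_pixel_size_m / target_pixel_size_m

# e.g. at 8 m altitude with a 1400 px focal length, a pixel covers ~5.7 mm;
# reaching a 10 mm target pixel size means downscaling by a factor of ~0.57.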