def run_matches_importer_from_kapture(colmap_binary_path: str,
                                      colmap_use_cpu: bool,
                                      colmap_gpu_index: Optional[str],
                                      colmap_db_path: str,
                                      kapture_data: kapture.Kapture,
                                      force: bool = True,
                                      clean: bool = True) -> None:
    """
    Export the list of matches from the kapture data, then run colmap matches_importer.

    :param colmap_binary_path: path to colmap executable
    :type colmap_binary_path: str
    :param colmap_use_cpu: add --SiftExtraction.use_gpu 0
    :type colmap_use_cpu: bool
    :param colmap_gpu_index: add --SiftExtraction.gpu_index {colmap_gpu_index}
    :type colmap_gpu_index: str
    :param colmap_db_path: value for --database_path
    :type colmap_db_path: str
    :param kapture_data: kapture data that contains the matches (that are already in the colmap database) to verify
    :type kapture_data: kapture.Kapture
    :param force: do not ask before overwriting match_list.txt, defaults to True
    :type force: bool, optional
    :param clean: remove match_list.txt before exiting, defaults to True
    :type clean: bool, optional
    """
    db_dir = path.dirname(colmap_db_path)
    match_list_path = path.join(db_dir, 'match_list.txt')
    safe_remove_file(match_list_path, force)
    save_match_list(kapture_data, match_list_path)
    run_matches_importer(colmap_binary_path, colmap_use_cpu, colmap_gpu_index, colmap_db_path, match_list_path)
    if clean:
        os.remove(match_list_path)
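
# Usage sketch (added for illustration, not part of the original snippet): verify matches
# that are already stored in an existing COLMAP database; all paths below are placeholders.
import kapture.io.csv  # assumed to be importable alongside this module's own imports

example_kapture = kapture.io.csv.kapture_from_dir('/path/to/kapture_dataset')
run_matches_importer_from_kapture(colmap_binary_path='colmap',
                                  colmap_use_cpu=True,
                                  colmap_gpu_index=None,
                                  colmap_db_path='/path/to/colmap/colmap.db',
                                  kapture_data=example_kapture)
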
def export_image_list(kapture_path: str, output_path: str,
                      export_camera_params: bool, force: bool) -> None:
    """
    Export image list in a text file.

    :param kapture_path: top directory of the kapture
    :param output_path: path of the image list file
    :param export_camera_params: if True, add camera parameters after every file name
    :param force: Silently overwrite the image list file if it already exists.
    """
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    safe_remove_file(output_path, force)

    kapture_to_export = kapture_from_dir(kapture_path)
    output_content = []
    logger.info('starting conversion...')
    for _, sensor_id, filename in kapture.flatten(
            kapture_to_export.records_camera, is_sorted=True):
        line = filename
        if export_camera_params:
            camera = kapture_to_export.sensors[sensor_id]
            assert isinstance(camera, kapture.Camera)
            line += ' ' + ' '.join(camera.sensor_params)
        output_content.append(line)

    logger.info('writing exported data...')
    with open(output_path, 'w') as fid:
        fid.write('\n'.join(output_content))
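
# Usage sketch (added for illustration): export the image list of a kapture dataset,
# one filename per line followed by its camera parameters. Paths are placeholders.
export_image_list(kapture_path='/path/to/kapture_dataset',
                  output_path='/path/to/output/image_list_with_intrinsics.txt',
                  export_camera_params=True,
                  force=True)
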
def write_results_to_file(output_folder: str, labels: List[str],
                          results: List[List[Tuple[str, float, float]]],
                          force: bool) -> None:
    """
    Writes evaluation results to text files. Results and labels must be synchronised.
    :param output_folder: full path of folder to write files in.
    :param labels: labels for the result files
    :param results: results to write
    :param force: Silently overwrite result files if they already exist.
    """
    for i in range(0, len(results)):
        label = labels[i]
        translation_map = label.maketrans("\\/:*?<>|", "________")
        label_name_safe = label.translate(translation_map)
        result = results[i]
        full_path = path.join(
            output_folder,
            RESULTS_FILENAME + '_{}'.format(label_name_safe) + '.txt')
        safe_remove_file(full_path, force)
        results_as_lines = [
            '{} {} {}\n'.format(name, position_error, rotation_error)
            for name, position_error, rotation_error in result
        ]
        with open(full_path, 'w') as fid:
            fid.writelines(results_as_lines)
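
# Usage sketch (added for illustration): write one result file per evaluated method; the
# (image name, position error in m, rotation error in deg) triplets are made-up values.
write_results_to_file(output_folder='/path/to/eval',
                      labels=['my_method'],
                      results=[[('query/0001.jpg', 0.12, 1.5),
                                ('query/0002.jpg', float('nan'), float('nan'))]],
                      force=True)
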
def plot_localized_over_position_threshold(output_folder: str,
                                           labels: List[str],
                                           results: List[List[Tuple[str, float, float]]],
                                           rotation_threshold: float, plot_max: int,
                                           title: str,
                                           plot_loc: str, plot_font_size: int, plot_legend_font_size: int,
                                           force: bool) -> None:
    """
    Plot the percentage of localized images as a function of the position error threshold.
    :param output_folder: full path of the folder to write the plot file in.
    :param labels: labels for the plotted curves
    :param results: results used to compute the curves to plot
    :param rotation_threshold: rotation threshold in degrees
    :param plot_max: maximum position error threshold to plot, in cm
    :param title: title of the plot
    :param plot_loc: location of the legend
    :param plot_font_size: font size of the plot
    :param plot_legend_font_size: font size of the legend
    :param force: Silently overwrite plot files if they already exist.
    """
    import matplotlib.pylab as plt
    plt.rcParams['font.size'] = plot_font_size
    plt.rcParams['legend.fontsize'] = plot_legend_font_size
    position_thresholds = np.linspace(0.0, 1.0, num=101, dtype=np.float64) * plot_max  # thresholds in cm
    bins = [(position_threshold / 100.0, rotation_threshold)
            for position_threshold in position_thresholds]  # convert to thresholds in m
    number_of_images = None
    for i in range(0, len(results)):
        label = labels[i]
        result = results[i]
        if number_of_images is None:
            number_of_images = len(result)
        else:
            # just make sure we are comparing comparable things
            assert(number_of_images == len(result))
        filled_bins = [(t[2] / number_of_images) * 100.0 for t in fill_bins(result, bins)]  # convert back to cm
        plt.plot(position_thresholds, filled_bins, lw=2, label=label)
    plt.xlabel('position error threshold (cm)')
    plt.ylabel('localized images (%)')
    plt.ylim([0, 100])  # 0 to 100 %
    plt.legend(loc=plot_loc)
    plt.xlim([position_thresholds[0], position_thresholds[-1]])
    # plot grid
    grid_step = int(plot_max / 10.0)
    for i in range(grid_step, plot_max - grid_step + 1, grid_step):
        plt.plot([i, i], [0, 100], 'k', alpha=0.1)
    for i in range(10, 91, 10):
        plt.plot([0, plot_max], [i, i], 'k', alpha=0.1)

    plot_title = title
    if rotation_threshold >= 0:
        plot_title += (' ' if title else '') + r'$\alpha_{th}$' + '={} deg'.format(rotation_threshold)
    plt.title(plot_title)

    full_path = path.join(output_folder, PLOT_FILENAME + '.png')
    safe_remove_file(full_path, force)
    plt.savefig(full_path, bbox_inches='tight')
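
# Usage sketch (added for illustration): plot the percentage of localized images against
# the position error threshold, up to 100 cm, for a 5 degree rotation threshold; the
# result triplets (image name, position error in m, rotation error in deg) are made up.
plot_localized_over_position_threshold(output_folder='/path/to/eval',
                                       labels=['my_method'],
                                       results=[[('query/0001.jpg', 0.12, 1.5),
                                                 ('query/0002.jpg', 0.85, 4.2)]],
                                       rotation_threshold=5.0, plot_max=100,
                                       title='my dataset',
                                       plot_loc='lower right',
                                       plot_font_size=12, plot_legend_font_size=10,
                                       force=True)
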
def export_openmvg(kapture_path: str, openmvg_path: str,
                   image_action: TransferAction, force: bool = False) -> None:
    """
    Export the kapture data to an openMVG JSON file.
    If the openmvg_path is a directory, it will create a JSON file (using the default name sfm_data.json)
    in that directory.

    :param kapture_path: full path to the top kapture directory
    :param openmvg_path: path of the file or directory where to store the data as JSON
    :param image_action: action to apply to the images: relative linking, absolute linking, top directory linking,
     copy, move, or skip to do nothing.
    :param force: if true, will remove existing openMVG data without prompting the user.
    """

    if path.isdir(openmvg_path):  # Existing directory
        json_file = path.join(openmvg_path, DEFAULT_JSON_FILE_NAME)
    else:
        file_ext = path.splitext(openmvg_path)[1]
        if len(file_ext) == 0:  # No extension: -> new directory
            json_file = path.join(openmvg_path, DEFAULT_JSON_FILE_NAME)
        elif file_ext.lower() != '.json':
            logger.warning(f'Creating output directory with file extension {file_ext}')
            json_file = path.join(openmvg_path, DEFAULT_JSON_FILE_NAME)
        else:  # Json file
            json_file = openmvg_path
    json_dir = path.dirname(json_file)
    safe_remove_file(json_file, force)
    if path.exists(json_file):
        raise ValueError(f'{json_file} file already exists')
    if image_action != TransferAction.skip and path.exists(json_dir) and any(pathlib.Path(json_dir).iterdir()):
        safe_remove_any_path(json_dir, force)
        if path.isdir(json_dir):
            raise ValueError(f'Images directory {json_dir} exists with remaining files')
    os.makedirs(json_dir, exist_ok=True)

    kapture_data = load_kapture(kapture_path)
    openmvg_data = kapture_to_openmvg(kapture_data, kapture_path, image_action, json_dir)
    logger.info(f'Saving to openmvg file {json_file}')
    with open(json_file, "w") as fid:
        json.dump(openmvg_data, fid, indent=4)
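
# Usage sketch (added for illustration): export a kapture dataset to an openMVG
# sfm_data.json, linking the images relative to the output directory. Paths are placeholders.
export_openmvg(kapture_path='/path/to/kapture_dataset',
               openmvg_path='/path/to/openmvg/sfm_data.json',
               image_action=TransferAction.link_relative,
               force=True)
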
def run_colmap_gv_from_loaded_data(kapture_none_matches: kapture.Kapture,
                                   kapture_colmap_matches: kapture.Kapture,
                                   kapture_none_matches_dirpath: str,
                                   kapture_colmap_matches_dirpath: str,
                                   tar_handlers_none_matches: Optional[TarCollection],
                                   tar_handlers_colmap_matches: Optional[TarCollection],
                                   colmap_binary: str,
                                   keypoints_type: Optional[str],
                                   skip_list: List[str],
                                   force: bool):
    """
    Run colmap matches_importer (geometric verification) on the matches of kapture_none_matches
    that are not yet in kapture_colmap_matches, then import the verified matches back into
    kapture_colmap_matches.
    """
    logger.info('run_colmap_gv...')
    if not (kapture_none_matches.records_camera and kapture_none_matches.sensors and
            kapture_none_matches.keypoints and kapture_none_matches.matches):
        raise ValueError('records_camera, sensors, keypoints, matches are mandatory')

    # COLMAP does not fully support rigs.
    if kapture_none_matches.rigs is not None and kapture_none_matches.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_none_matches.trajectories, kapture_none_matches.rigs)

    # Set fixed name for COLMAP database
    colmap_db_path = os.path.join(kapture_colmap_matches_dirpath, 'colmap.db')
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)

    if keypoints_type is None:
        keypoints_type = try_get_only_key_from_collection(kapture_none_matches.matches)
    assert keypoints_type is not None
    assert keypoints_type in kapture_none_matches.keypoints
    assert keypoints_type in kapture_none_matches.matches

    if 'matches_importer' not in skip_list:
        logger.debug('compute matches difference.')
        if kapture_colmap_matches.matches is not None and keypoints_type in kapture_colmap_matches.matches:
            colmap_matches = kapture_colmap_matches.matches[keypoints_type]
        else:
            colmap_matches = kapture.Matches()
        matches_to_verify = kapture.Matches(kapture_none_matches.matches[keypoints_type].difference(colmap_matches))
        kapture_data_to_export = kapture.Kapture(sensors=kapture_none_matches.sensors,
                                                 trajectories=kapture_none_matches.trajectories,
                                                 records_camera=kapture_none_matches.records_camera,
                                                 keypoints={
                                                     keypoints_type: kapture_none_matches.keypoints[keypoints_type]
                                                 },
                                                 matches={
                                                     keypoints_type: matches_to_verify
                                                 })
        # creates a new database with matches
        logger.debug('export matches difference to db.')
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.kapture_to_colmap(kapture_data_to_export, kapture_none_matches_dirpath,
                                         tar_handlers_none_matches,
                                         colmap_db,
                                         keypoints_type,
                                         None,
                                         export_two_view_geometry=False)
        # close db before running colmap processes in order to avoid locks
        colmap_db.close()

        logger.debug('run matches_importer command.')
        colmap_lib.run_matches_importer_from_kapture_matches(
            colmap_binary,
            colmap_use_cpu=True,
            colmap_gpu_index=None,
            colmap_db_path=colmap_db_path,
            kapture_matches=matches_to_verify,
            force=force
        )

    if 'import' not in skip_list:
        logger.debug('import verified matches.')
        os.umask(0o002)
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        kapture_data = kapture.Kapture()
        kapture_data.records_camera, _ = get_images_and_trajectories_from_database(colmap_db)
        kapture_data.matches = {
            keypoints_type: get_matches_from_database(colmap_db, kapture_data.records_camera,
                                                      kapture_colmap_matches_dirpath,
                                                      tar_handlers_colmap_matches,
                                                      keypoints_type,
                                                      no_geometric_filtering=False)
        }
        colmap_db.close()

        if kapture_colmap_matches.matches is None:
            kapture_colmap_matches.matches = {}
        if keypoints_type not in kapture_colmap_matches.matches:
            kapture_colmap_matches.matches[keypoints_type] = kapture.Matches()
        kapture_colmap_matches.matches[keypoints_type].update(kapture_data.matches[keypoints_type])

    if 'delete_db' not in skip_list:
        logger.debug('delete intermediate colmap db.')
        os.remove(colmap_db_path)
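
# Usage sketch (added for illustration): geometrically verify the matches of one kapture
# dataset and accumulate the verified matches into another one. Paths are placeholders and
# both datasets are assumed to be stored without tar archives (hence the None tar handlers).
import kapture.io.csv  # assumed to be importable alongside this module's own imports

kapture_raw = kapture.io.csv.kapture_from_dir('/path/to/kapture_matches_raw')
kapture_verified = kapture.io.csv.kapture_from_dir('/path/to/kapture_matches_gv')
run_colmap_gv_from_loaded_data(kapture_raw, kapture_verified,
                               '/path/to/kapture_matches_raw',
                               '/path/to/kapture_matches_gv',
                               tar_handlers_none_matches=None,
                               tar_handlers_colmap_matches=None,
                               colmap_binary='colmap',
                               keypoints_type=None,
                               skip_list=[],
                               force=True)
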
def colmap_build_sift_map(kapture_path: str,
                          colmap_path: str,
                          colmap_binary: str,
                          colmap_use_cpu: bool,
                          colmap_gpu_index: str,
                          vocab_tree_path: str,
                          point_triangulator_options: List[str],
                          skip_list: List[str],
                          force: bool) -> None:
    """
    Build a colmap model using default SIFT features with the kapture data.

    :param kapture_path: path to the kapture to use
    :param colmap_path: path to the colmap build
    :param colmap_binary: path to the colmap executable
    :param colmap_use_cpu: if True, use the CPU only (and ignore the GPU)
    :param colmap_gpu_index: gpu index for sift extractor and mapper
    :param vocab_tree_path: path to the colmap vocabulary tree file
    :param point_triangulator_options: options for the point triangulator
    :param skip_list: list of steps to skip
    :param force: Silently overwrite output files if they already exist.
    """
    os.makedirs(colmap_path, exist_ok=True)

    # Load input files first to make sure they are OK
    logger.info('loading kapture files...')
    kapture_data = kapture.io.csv.kapture_from_dir(kapture_path)

    if not (kapture_data.records_camera and kapture_data.sensors):
        raise ValueError('records_camera, sensors are mandatory')
    if not kapture_data.trajectories:
        logger.info('there are no trajectories, running mapper instead of point_triangulator')

    if not os.path.isfile(vocab_tree_path):
        raise ValueError(f'Vocabulary Tree file does not exist: {vocab_tree_path}')

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    image_list_path = path.join(colmap_path, 'images.list')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_file(image_list_path, force)
        safe_remove_any_path(reconstruction_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    if 'feature_extract' not in skip_list:
        logger.info("Step 1: Feature extraction using colmap")
        with open(image_list_path, 'w') as fid:
            for timestamp, sensor_id in sorted(kapture_data.records_camera.key_pairs()):
                fid.write(kapture_data.records_camera[timestamp][sensor_id] + "\n")

        colmap_lib.run_feature_extractor(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            get_image_fullpath(kapture_path),
            image_list_path
        )

    # Update cameras in COLMAP:
    # - use only one camera for all images taken with the same camera (update all camera IDs)
    # - import camera intrinsics
    # - import camera pose
    if 'update_db_cameras' not in skip_list:
        logger.info("Step 2: Populate COLMAP DB with cameras and poses")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.update_DB_cameras_and_poses(colmap_db, kapture_data)
        # close db before running colmap processes in order to avoid locks
        colmap_db.close()

    # Extract matches with COLMAP
    if 'matches' not in skip_list:
        logger.info("Step 3: Compute matches with colmap")

        colmap_lib.run_vocab_tree_matcher(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            vocab_tree_path)

    if kapture_data.trajectories is not None:
        # Generate priors for reconstruction
        txt_path = path.join(colmap_path, "priors_for_reconstruction")
        os.makedirs(txt_path, exist_ok=True)
        if 'priors_for_reconstruction' not in skip_list:
            logger.info('Step 4: Exporting priors for reconstruction.')
            colmap_db = COLMAPDatabase.connect(colmap_db_path)
            database_extra.generate_priors_for_reconstruction(kapture_data, colmap_db, txt_path)
            colmap_db.close()

        # Point triangulator
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 5: Triangulation")
            colmap_lib.run_point_triangulator(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                txt_path,
                reconstruction_path,
                point_triangulator_options
            )
    else:
        # mapper
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 5: Triangulation")
            colmap_lib.run_mapper(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                None,
                reconstruction_path,
                point_triangulator_options
            )
            # use reconstruction 0 as main
            first_reconstruction = os.path.join(reconstruction_path, '0')
            files = os.listdir(first_reconstruction)
            for f in files:
                shutil.move(os.path.join(first_reconstruction, f), os.path.join(reconstruction_path, f))
            shutil.rmtree(first_reconstruction)

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 6: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
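
# Usage sketch (added for illustration): build a SIFT-based COLMAP map from a kapture
# dataset; the vocabulary tree file and all other paths are placeholders.
colmap_build_sift_map(kapture_path='/path/to/kapture_mapping',
                      colmap_path='/path/to/colmap-sfm',
                      colmap_binary='colmap',
                      colmap_use_cpu=False,
                      colmap_gpu_index='0',
                      vocab_tree_path='/path/to/vocab_tree.bin',
                      point_triangulator_options=[],
                      skip_list=[],
                      force=True)
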
def colmap_localize_sift(kapture_path: str,
                         colmap_path: str,
                         input_database_path: str,
                         input_reconstruction_path: str,
                         colmap_binary: str,
                         colmap_use_cpu: bool,
                         colmap_gpu_index: str,
                         vocab_tree_path: str,
                         image_registrator_options: List[str],
                         skip_list: List[str],
                         force: bool) -> None:
    """
    Localize images on a colmap model using default SIFT features with the kapture data.

    :param kapture_path: path to the kapture to use
    :param colmap_path: path to the colmap build
    :param input_database_path: path to the map colmap.db
    :param input_reconstruction_path: path to the map reconstruction folder
    :param colmap_binary: path to the colmap executable
    :param colmap_use_cpu: if True, use the CPU only (and ignore the GPU)
    :param colmap_gpu_index: gpu index for sift extractor and mapper
    :param vocab_tree_path: path to the colmap vocabulary tree file
    :param image_registrator_options: options for the image registrator
    :param skip_list: list of steps to skip
    :param force: Silently overwrite output files if they already exist.
    """
    os.makedirs(colmap_path, exist_ok=True)
    # Set fixed name for COLMAP database

    # Load input files first to make sure they are OK
    logger.info('loading kapture files...')
    kapture_data = kapture.io.csv.kapture_from_dir(kapture_path)

    if not (kapture_data.records_camera and kapture_data.sensors):
        raise ValueError('records_camera, sensors are mandatory')

    if kapture_data.trajectories:
        logger.warning("Input data contains trajectories: they will be ignored")
        kapture_data.trajectories.clear()
    else:
        kapture_data.trajectories = kapture.Trajectories()

    if not os.path.isfile(vocab_tree_path):
        raise ValueError(f'Vocabulary Tree file does not exist: {vocab_tree_path}')

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Prepare output
    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    image_list_path = path.join(colmap_path, 'images.list')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_file(image_list_path, force)
        safe_remove_any_path(reconstruction_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    # Copy colmap db to output
    if not os.path.exists(colmap_db_path):
        shutil.copy(input_database_path, colmap_db_path)

    # find correspondences between the colmap db and the kapture data
    images_all = {image_path: (ts, cam_id)
                  for ts, shot in kapture_data.records_camera.items()
                  for cam_id, image_path in shot.items()}

    colmap_db = COLMAPDatabase.connect(colmap_db_path)
    colmap_image_ids = database_extra.get_colmap_image_ids_from_db(colmap_db)
    colmap_cameras = database_extra.get_camera_ids_from_database(colmap_db)
    colmap_images = database_extra.get_images_from_database(colmap_db)
    colmap_db.close()

    # dict ( kapture_camera -> colmap_camera_id )
    colmap_camera_ids = {images_all[image_path][1]: colmap_cam_id
                         for image_path, colmap_cam_id in colmap_images if image_path in images_all}

    images_to_add = {image_path: value
                     for image_path, value in images_all.items()
                     if image_path not in colmap_image_ids}

    flatten_images_to_add = [(ts, kapture_cam_id, image_path)
                             for image_path, (ts, kapture_cam_id) in images_to_add.items()]

    if 'feature_extract' not in skip_list:
        logger.info("Step 1: Feature extraction using colmap")
        with open(image_list_path, 'w') as fid:
            for image in images_to_add.keys():
                fid.write(image + "\n")

        colmap_lib.run_feature_extractor(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            get_image_fullpath(kapture_path),
            image_list_path
        )

    if 'matches' not in skip_list:
        logger.info("Step 2: Compute matches with colmap")
        colmap_lib.run_vocab_tree_matcher(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            vocab_tree_path,
            image_list_path
        )

    if 'fix_db_cameras' not in skip_list:
        logger.info("Step 3: Replace colmap generated cameras with kapture cameras")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.foreign_keys_off(colmap_db)

        # remove colmap generated cameras
        after_feature_extraction_colmap_cameras = database_extra.get_camera_ids_from_database(colmap_db)
        colmap_cameras_to_remove = [cam_id
                                    for cam_id in after_feature_extraction_colmap_cameras
                                    if cam_id not in colmap_cameras]
        for cam_id in colmap_cameras_to_remove:
            database_extra.remove_camera(colmap_db, cam_id)

        # put the correct cameras and image extrinsic back into the database
        cameras_to_add = kapture.Sensors()
        for image_path, (ts, kapture_cam_id) in images_to_add.items():
            if kapture_cam_id not in colmap_camera_ids:
                kapture_cam = kapture_data.sensors[kapture_cam_id]
                cameras_to_add[kapture_cam_id] = kapture_cam
        colmap_added_camera_ids = database_extra.add_cameras_to_database(cameras_to_add, colmap_db)
        colmap_camera_ids.update(colmap_added_camera_ids)

        database_extra.update_images_in_database_from_flatten(
            colmap_db,
            flatten_images_to_add,
            kapture_data.trajectories,
            colmap_camera_ids
        )

        database_extra.foreign_keys_on(colmap_db)
        colmap_db.commit()
        colmap_db.close()

    if 'image_registrator' not in skip_list:
        logger.info("Step 4: Run image_registrator")
        # run image_registrator
        colmap_lib.run_image_registrator(
            colmap_binary,
            colmap_db_path,
            input_reconstruction_path,
            reconstruction_path,
            image_registrator_options
        )

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 5: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
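
# Usage sketch (added for illustration): register query images into an existing SIFT-based
# COLMAP map; the database, reconstruction and vocabulary tree paths are placeholders.
colmap_localize_sift(kapture_path='/path/to/kapture_query',
                     colmap_path='/path/to/colmap-localization',
                     input_database_path='/path/to/colmap-sfm/colmap.db',
                     input_reconstruction_path='/path/to/colmap-sfm/reconstruction',
                     colmap_binary='colmap',
                     colmap_use_cpu=False,
                     colmap_gpu_index='0',
                     vocab_tree_path='/path/to/vocab_tree.bin',
                     image_registrator_options=[],
                     skip_list=[],
                     force=True)
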
def write_statistics_to_file(output_folder: str,
                             labels: List[str],
                             title: str,
                             results: List[List[Tuple[str, float, float]]],
                             bins_as_str: List[str],
                             force: bool) -> None:
    """
    Writes evaluation statistics to a text file. Results and labels must be synchronised.
    :param output_folder: full path of the folder to write the statistics file in.
    :param labels: labels for the evaluated results
    :param title: title for the summary table
    :param results: results to compute statistics from
    :param bins_as_str: list of bin names
    :param force: Silently overwrite the statistics file if it already exists.
    """
    full_path = path.join(output_folder, STATISTICS_FILENAME + '.txt')
    safe_remove_file(full_path, force)

    bins = [(float(split_bin[0]), float(split_bin[1])) for split_bin in map(lambda x: x.split(), bins_as_str)]
    print_line = ''
    bins_table_data = []
    for i in range(0, len(results)):
        label = labels[i]
        result = results[i]
        number_of_images = len(result)

        positions_errors_all = [position_error if not isnan(position_error) else float("inf")
                                for name, position_error, rotation_error in result]
        rotation_errors_all = [rotation_error if not isnan(rotation_error) else float("inf")
                               for name, position_error, rotation_error in result]

        positions_errors = [position_error
                            for name, position_error, rotation_error in result if not isnan(position_error)]
        rotation_errors = [rotation_error
                           for name, position_error, rotation_error in result if not isnan(rotation_error)]

        print_line += 'Model: {}\n\n'.format(label)
        print_line += 'Found {} / {} image positions ({:.2f} %).\n'.format(
            len(positions_errors), number_of_images, float(100.0*len(positions_errors)/number_of_images))
        print_line += 'Found {} / {} image rotations ({:.2f} %).\n'.format(
            len(rotation_errors), number_of_images, float(100.0*len(rotation_errors)/number_of_images))

        print_line += 'Localized images: mean=({:.4f}m, {:.4f} deg) / median=({:.4f}m, {:.4f} deg)\n'.format(
            mean(positions_errors),
            mean(rotation_errors),
            median(positions_errors),
            median(rotation_errors))
        print_line += 'All: median=({:.4f}m, {:.4f} deg)\n'.format(median(positions_errors_all),
                                                                   median(rotation_errors_all))
        print_line += 'Min: {:.4f}m; {:.4f} deg\n'.format(min(positions_errors), min(rotation_errors))
        print_line += 'Max: {:.4f}m; {:.4f} deg\n\n'.format(max(positions_errors), max(rotation_errors))

        filled_bins = fill_bins(result, bins)
        bins_lines = ['({}m, {} deg): {:.2f}%\n'.format(
            position_error,
            rotation_error,
            (number_of_images_in_bin / number_of_images) * 100.0)
            for position_error, rotation_error, number_of_images_in_bin in filled_bins]
        print_line += ''.join(bins_lines)
        print_line += '\n'
        bins_table_data.append([label,
                                bins_lines[0].rstrip().split(': ')[1],
                                bins_lines[1].rstrip().split(': ')[1],
                                bins_lines[2].rstrip().split(': ')[1]])
    if len(results) > 1:
        print_line += '\n'
        print_line += tabulate(bins_table_data, headers=(title, bins[0], bins[1], bins[2]), tablefmt='latex')
        print_line += '\n\n'
        print_line += tabulate(bins_table_data, headers=(title, bins[0], bins[1], bins[2]))
    print(print_line)
    with open(full_path, 'w') as fid:
        fid.write(print_line)
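
# Usage sketch (added for illustration): write a statistics summary for made-up results,
# with (position in m, rotation in deg) bins given as strings, as expected by fill_bins.
write_statistics_to_file(output_folder='/path/to/eval',
                         labels=['my_method'],
                         title='my dataset',
                         results=[[('query/0001.jpg', 0.12, 1.5),
                                   ('query/0002.jpg', 0.85, 4.2),
                                   ('query/0003.jpg', 3.0, 12.0)]],
                         bins_as_str=['0.25 2', '0.5 5', '5 10'],
                         force=True)
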
def export_openmvg(kapture_path: str,
                   openmvg_sfm_data_file_path: str,
                   openmvg_image_root_path: str = None,
                   openmvg_regions_dir_path: str = None,
                   openmvg_matches_file_path: str = None,
                   image_action: TransferAction = TransferAction.skip,
                   image_path_flatten: bool = False,
                   force: bool = False) -> None:
    """
    Export the kapture data to openMVG files (sfm_data, and optionally regions and matches).

    :param kapture_path: full path to the input kapture directory
    :param openmvg_sfm_data_file_path: path of the sfm_data JSON file to be written.
    :param openmvg_image_root_path: optional path to the openMVG image directory to be created.
    :param openmvg_regions_dir_path: optional path to the openMVG regions (feat, desc) directory to be created.
    :param openmvg_matches_file_path: optional path to the openMVG matches file to be created.
    :param image_action: action to apply to the images: relative linking, absolute linking, top directory linking,
     copy, move, or skip to do nothing. If not "skip", requires openmvg_image_root_path to be defined.
    :param image_path_flatten: flatten the image paths (e.g. to avoid image name collisions in openMVG regions).
    :param force: if true, will remove existing openMVG data without prompting the user.
    """

    if any(arg is not None and not isinstance(arg, str) for arg in [
            kapture_path, openmvg_image_root_path, openmvg_regions_dir_path,
            openmvg_matches_file_path
    ]):
        raise ValueError('expect str (or None) as path argument.')

    # clean before export
    safe_remove_file(openmvg_sfm_data_file_path, force)
    if path.exists(openmvg_sfm_data_file_path):
        raise ValueError(f'{openmvg_sfm_data_file_path} file already exists')

    # load kapture
    logger.info(f'loading kapture {kapture_path}...')
    kapture_data = kapture.io.csv.kapture_from_dir(kapture_path)
    if kapture_data is None or not isinstance(kapture_data, kapture.Kapture):
        raise ValueError(f'unable to load kapture from {kapture_path}')
    kapture_to_openmvg_view_ids = {}

    logger.info(f'exporting sfm data to {openmvg_sfm_data_file_path} ...')
    export_openmvg_sfm_data(
        kapture_data=kapture_data,
        kapture_path=kapture_path,
        openmvg_sfm_data_file_path=openmvg_sfm_data_file_path,
        openmvg_image_root_path=openmvg_image_root_path,
        image_action=image_action,
        image_path_flatten=image_path_flatten,
        force=force,
        kapture_to_openmvg_view_ids=kapture_to_openmvg_view_ids)

    if openmvg_regions_dir_path is not None:
        try:
            logger.info(f'exporting regions to {openmvg_regions_dir_path} ...')
            export_openmvg_regions(
                kapture_path=kapture_path,
                kapture_keypoints=kapture_data.keypoints,
                kapture_descriptors=kapture_data.descriptors,
                openmvg_regions_dir_path=openmvg_regions_dir_path,
                image_path_flatten=image_path_flatten)
        except ValueError as e:
            logger.error(e)

    if openmvg_matches_file_path is not None:
        try:
            logger.info(
                f'exporting matches to {openmvg_matches_file_path} ...')
            export_openmvg_matches(
                kapture_path=kapture_path,
                kapture_data=kapture_data,
                openmvg_matches_file_path=openmvg_matches_file_path,
                kapture_to_openmvg_view_ids=kapture_to_openmvg_view_ids)
        except ValueError as e:
            logger.error(e)
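
# Usage sketch (added for illustration): export a kapture dataset to openMVG sfm_data,
# regions and matches files. All output paths are placeholders.
export_openmvg(kapture_path='/path/to/kapture_dataset',
               openmvg_sfm_data_file_path='/path/to/openmvg/sfm_data.json',
               openmvg_image_root_path='/path/to/openmvg/images',
               openmvg_regions_dir_path='/path/to/openmvg/matches',
               openmvg_matches_file_path='/path/to/openmvg/matches/matches.f.bin',
               image_action=TransferAction.link_relative,
               image_path_flatten=True,
               force=True)
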
def localize_pipeline(kapture_map_path: str,
                      kapture_query_path: str,
                      merge_path: Optional[str],
                      keypoints_path: str,
                      descriptors_path: str,
                      global_features_path: str,
                      matches_path: str,
                      matches_gv_path: str,
                      colmap_map_path: str,
                      localization_output_path: str,
                      colmap_binary: str,
                      python_binary: Optional[str],
                      topk: int,
                      config: int,
                      prepend_cam: bool,
                      bins_as_str: List[str],
                      skip_list: List[str],
                      force_overwrite_existing: bool) -> None:
    """
    Localize on colmap map

    :param kapture_map_path: path to the kapture map directory
    :type kapture_map_path: str
    :param kapture_query_path: path to the kapture query directory
    :type kapture_query_path: str
    :param merge_path: path to the kapture map+query directory
    :type merge_path: Optional[str]
    :param keypoints_path: input path to the orphan keypoints folder
    :type keypoints_path: str
    :param descriptors_path: input path to the orphan descriptors folder
    :type descriptors_path: str
    :param global_features_path: input path to the orphan global_features folder
    :type global_features_path: str
    :param matches_path: input path to the orphan matches (not verified) folder
    :type matches_path: str
    :param matches_gv_path: input path to the orphan matches (verified) folder
    :type matches_gv_path: str
    :param colmap_map_path: input path to the colmap reconstruction folder
    :type colmap_map_path: str
    :param localization_output_path: output path to the localization results
    :type localization_output_path: str
    :param colmap_binary: path to the colmap executable
    :type colmap_binary: str
    :param python_binary: path to the python executable
    :type python_binary: Optional[str]
    :param topk: the max number of top retained images when computing image pairs from global features
    :type topk: int
    :param config: index of the config parameters to use for image registrator
    :type config: int
    :param prepend_cam: prepend camera names to filename in LTVL2020 formatted output
    :type prepend_cam: bool
    :param bins_as_str: list of bin names
    :type bins_as_str: List[str]
    :param skip_list: list of steps to ignore
    :type skip_list: List[str]
    :param force_overwrite_existing: silently overwrite files if they already exist
    :type force_overwrite_existing: bool
    """
    os.makedirs(localization_output_path, exist_ok=True)
    pairfile_path = path.join(localization_output_path, f'pairs_localization_{topk}.txt')
    map_plus_query_path = path.join(localization_output_path,
                                    'kapture_inputs/map_plus_query') if merge_path is None else merge_path
    colmap_localize_path = path.join(localization_output_path, 'colmap_localized')
    os.makedirs(colmap_localize_path, exist_ok=True)
    kapture_localize_import_path = path.join(localization_output_path, 'kapture_localized')
    eval_path = path.join(localization_output_path, 'eval')
    LTVL2020_output_path = path.join(localization_output_path, 'LTVL2020_style_result.txt')

    if not path.isdir(matches_path):
        os.makedirs(matches_path)
    if not path.isdir(matches_gv_path):
        os.makedirs(matches_gv_path)

    # build proxy kapture map in output folder
    proxy_kapture_map_path = path.join(localization_output_path, 'kapture_inputs/proxy_mapping')
    create_kapture_proxy(proxy_kapture_map_path,
                         kapture_map_path,
                         keypoints_path,
                         descriptors_path,
                         global_features_path,
                         matches_path,
                         force_overwrite_existing)

    # build proxy kapture query in output folder
    proxy_kapture_query_path = path.join(localization_output_path, 'kapture_inputs/proxy_query')
    create_kapture_proxy(proxy_kapture_query_path,
                         kapture_query_path,
                         keypoints_path,
                         descriptors_path,
                         global_features_path,
                         matches_path,
                         force_overwrite_existing)

    # kapture_compute_image_pairs.py
    if 'compute_image_pairs' not in skip_list:
        local_image_pairs_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_compute_image_pairs.py')
        if os.path.isfile(pairfile_path):
            safe_remove_file(pairfile_path, force_overwrite_existing)
        compute_image_pairs_args = ['-v', str(logger.level),
                                    '--mapping', proxy_kapture_map_path,
                                    '--query', proxy_kapture_query_path,
                                    '--topk', str(topk),
                                    '-o', pairfile_path]
        run_python_command(local_image_pairs_path, compute_image_pairs_args, python_binary)

    # kapture_merge.py
    if merge_path is None:
        local_merge_path = path.join(pipeline_import_paths.HERE_PATH, '../../kapture/tools/kapture_merge.py')
        merge_args = ['-v', str(logger.level),
                      '-i', proxy_kapture_map_path, proxy_kapture_query_path,
                      '-o', map_plus_query_path,
                      '-s', 'keypoints', 'descriptors', 'global_features', 'matches',
                      '--image_transfer', 'link_absolute']
        if force_overwrite_existing:
            merge_args.append('-f')
        run_python_command(local_merge_path, merge_args, python_binary)

    # build proxy kapture map+query in output folder
    proxy_kapture_map_plus_query_path = path.join(localization_output_path, 'kapture_inputs/proxy_map_plus_query')
    create_kapture_proxy(proxy_kapture_map_plus_query_path,
                         map_plus_query_path,
                         keypoints_path,
                         descriptors_path,
                         global_features_path,
                         matches_path,
                         force_overwrite_existing)

    # kapture_compute_matches.py
    if 'compute_matches' not in skip_list:
        local_compute_matches_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_compute_matches.py')
        compute_matches_args = ['-v', str(logger.level),
                                '-i', proxy_kapture_map_plus_query_path,
                                '--pairsfile-path', pairfile_path]
        run_python_command(local_compute_matches_path, compute_matches_args, python_binary)

    # build proxy gv kapture in output folder
    proxy_kapture_map_plus_query_gv_path = path.join(localization_output_path, 'kapture_inputs/proxy_map_plus_query_gv')
    create_kapture_proxy(proxy_kapture_map_plus_query_gv_path,
                         map_plus_query_path,
                         keypoints_path,
                         descriptors_path,
                         global_features_path,
                         matches_gv_path,
                         force_overwrite_existing)

    # kapture_run_colmap_gv.py
    if 'geometric_verification' not in skip_list:
        local_run_colmap_gv_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_run_colmap_gv.py')
        run_colmap_gv_args = ['-v', str(logger.level),
                              '-i', proxy_kapture_map_plus_query_path,
                              '-o', proxy_kapture_map_plus_query_gv_path,
                              '--pairsfile-path', pairfile_path,
                              '-colmap', colmap_binary]
        if force_overwrite_existing:
            run_colmap_gv_args.append('-f')
        run_python_command(local_run_colmap_gv_path, run_colmap_gv_args, python_binary)

    # kapture_colmap_localize.py
    if 'colmap_localize' not in skip_list:
        local_localize_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_colmap_localize.py')
        localize_args = ['-v', str(logger.level),
                         '-i', proxy_kapture_map_plus_query_gv_path,
                         '-o', colmap_localize_path,
                         '-colmap', colmap_binary,
                         '--pairs-file-path', pairfile_path,
                         '-db', path.join(colmap_map_path, 'colmap.db'),
                         '-txt', path.join(colmap_map_path, 'reconstruction')]
        if force_overwrite_existing:
            localize_args.append('-f')
        localize_args += CONFIGS[config]
        run_python_command(local_localize_path, localize_args, python_binary)

    # kapture_import_colmap.py
    if 'import_colmap' not in skip_list:
        local_import_colmap_path = path.join(pipeline_import_paths.HERE_PATH,
                                             '../../kapture/tools/kapture_import_colmap.py')
        import_colmap_args = ['-v', str(logger.level),
                              '-db', path.join(colmap_localize_path, 'colmap.db'),
                              '-txt', path.join(colmap_localize_path, 'reconstruction'),
                              '-o', kapture_localize_import_path,
                              '--skip_reconstruction']
        if force_overwrite_existing:
            import_colmap_args.append('-f')
        run_python_command(local_import_colmap_path, import_colmap_args, python_binary)

    # kapture_evaluate.py
    if 'evaluate' not in skip_list and path.isfile(path.join(kapture_query_path, 'sensors/trajectories.txt')):
        local_evaluate_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_evaluate.py')
        evaluate_args = ['-v', str(logger.level),
                         '-i', kapture_localize_import_path,
                         '--labels', f'colmap_config_{config}',
                         '-gt', kapture_query_path,
                         '-o', eval_path]
        evaluate_args += ['--bins'] + bins_as_str
        if force_overwrite_existing:
            evaluate_args.append('-f')
        run_python_command(local_evaluate_path, evaluate_args, python_binary)

    # kapture_export_LTVL2020.py
    if 'export_LTVL2020' not in skip_list:
        local_export_LTVL2020_path = path.join(pipeline_import_paths.HERE_PATH,
                                               '../../kapture/tools/kapture_export_LTVL2020.py')
        export_LTVL2020_args = ['-v', str(logger.level),
                                '-i', kapture_localize_import_path,
                                '-o', LTVL2020_output_path]
        if prepend_cam:
            export_LTVL2020_args.append('-p')
        if force_overwrite_existing:
            export_LTVL2020_args.append('-f')
        run_python_command(local_export_LTVL2020_path, export_LTVL2020_args, python_binary)
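
# Usage sketch (added for illustration): localize query images on a prebuilt COLMAP map
# using precomputed local and global features. All paths are placeholders; config is an
# index into the CONFIGS list of image_registrator presets used above.
localize_pipeline(kapture_map_path='/path/to/kapture_mapping',
                  kapture_query_path='/path/to/kapture_query',
                  merge_path=None,
                  keypoints_path='/path/to/local_features/keypoints',
                  descriptors_path='/path/to/local_features/descriptors',
                  global_features_path='/path/to/global_features',
                  matches_path='/path/to/matches',
                  matches_gv_path='/path/to/matches_gv',
                  colmap_map_path='/path/to/colmap-sfm',
                  localization_output_path='/path/to/colmap-localization',
                  colmap_binary='colmap',
                  python_binary=None,
                  topk=20,
                  config=1,
                  prepend_cam=False,
                  bins_as_str=['0.25 2', '0.5 5', '5 10'],
                  skip_list=[],
                  force_overwrite_existing=True)
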
    def reconstruct(self, kapture_data):
        """
        Build a COLMAP model (triangulated 3D points) from the given kapture data,
        then import the resulting points and observations back into kapture_data.
        """
        os.makedirs(self._colmap_path, exist_ok=True)

        if not (kapture_data.records_camera and kapture_data.sensors
                and kapture_data.keypoints and kapture_data.matches
                and kapture_data.trajectories):
            raise ValueError(
                'records_camera, sensors, keypoints, matches, trajectories are mandatory'
            )

        # Set fixed name for COLMAP database
        colmap_db_path = path.join(self._colmap_path, 'colmap.db')
        reconstruction_path = path.join(self._colmap_path, "reconstruction")
        priors_txt_path = path.join(self._colmap_path,
                                    "priors_for_reconstruction")

        safe_remove_file(colmap_db_path, True)
        safe_remove_any_path(reconstruction_path, True)
        safe_remove_any_path(priors_txt_path, True)
        os.makedirs(reconstruction_path, exist_ok=True)

        # COLMAP does not fully support rigs.
        print("Step 1. Remove rigs")
        if kapture_data.rigs is not None and kapture_data.trajectories is not None:
            # make sure, rigs are not used in trajectories.
            rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
            kapture_data.rigs.clear()

        print("Step 2. Kapture to colmap")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.kapture_to_colmap(kapture_data,
                                         kapture_data.kapture_path,
                                         colmap_db,
                                         export_two_view_geometry=True)
        colmap_db.close()

        os.makedirs(priors_txt_path, exist_ok=True)

        print("Step 3. Generate priors for reconstruction")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.generate_priors_for_reconstruction(
            kapture_data, colmap_db, priors_txt_path)
        colmap_db.close()

        # Point triangulator
        print("Step 4. Point triangulator")
        reconstruction_path = path.join(self._colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        run_point_triangulator(self._colmap_binary, colmap_db_path,
                               kapture_data.image_path, priors_txt_path,
                               reconstruction_path,
                               self._point_triangulator_options)
        print("Step 5. Model converter")
        run_model_converter(self._colmap_binary, reconstruction_path,
                            reconstruction_path)
        print("Step 5. Reconstruction import")
        points3d, observations = import_from_colmap_points3d_txt(
            os.path.join(reconstruction_path, "points3D.txt"),
            kapture_data.image_names)
        kapture_data.observations = observations
        kapture_data.points3d = points3d
def local_sfm(map_plus_query_path: str, map_plus_query_gv_path: str,
              query_path: str, pairsfile_path: str, output_path_root: str,
              colmap_binary: str, force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param map_plus_query_path: path to the kapture data consisting of mapping and query data (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data after geometric verification (sensors and reconstruction)
    :param query_path: path to the query kapture data (sensors)
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """

    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    kdata_query = kapture_from_dir(query_path)
    if kdata_query.trajectories:
        logger.warning(
            "Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera and len(
                kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    # load kapture maps
    kdata_map = kapture_from_dir(map_plus_query_path)
    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    kdata_map_gv = kapture_from_dir(map_plus_query_gv_path)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, score in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root,
                                              'tmp_pairs_map.txt')
    query_img_kapture_pairsfile_path = os.path.join(output_path_root,
                                                    'tmp_pairs_query.txt')

    # loop over query images
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            logger.info(f'{img_query} already processed, skipping...')
            continue
        else:
            logger.info(f'processing {img_query}')

        # write pairsfile for sub-kapture
        map_pairs = write_pairfile_from_img_list(img_list_map,
                                                 sub_kapture_pairsfile_path)

        # write pairsfile for query_img_kapture
        query_pairs = write_pairfile_img_vs_img_list(
            img_query, img_list_map, query_img_kapture_pairsfile_path)

        # create sub-kapture
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path,
                                              img_list_map, map_pairs)
        kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv,
                                                 map_plus_query_gv_path,
                                                 img_list_map, map_pairs)

        # match missing pairs for mapping
        compute_matches_from_loaded_data(map_plus_query_path, kdata_sub,
                                         map_pairs)

        # kdata_sub needs to be re-created to add the new matches
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path,
                                              img_list_map, map_pairs)

        # run colmap gv on missing pairs
        if len(kdata_sub.matches) != len(kdata_sub_gv.matches):
            run_colmap_gv_from_loaded_data(kdata_sub, kdata_sub_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           colmap_binary, [], True)
            # kdata_sub_gv needs to be re-created to add the new matches
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv,
                                                     map_plus_query_gv_path,
                                                     img_list_map, map_pairs)

        # sanity check
        if len(map_pairs) != len(kdata_sub_gv.matches):
            logger.info('not all mapping matches available')

        # build COLMAP map
        try:
            colmap_build_map_from_loaded_data(kdata_sub_gv,
                                              map_plus_query_gv_path,
                                              kdata_sub_colmap_path,
                                              colmap_binary, False, [],
                                              ['model_converter'], True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(
                os.path.join(kdata_sub_colmap_path,
                             'reconstruction/images.bin')):
            logger.info(
                f'colmap mapping for {img_query} did not work, image was not localized'
            )
            continue

        # create single image kapture (kdata_sub needs to be recreated because descriptors are deleted in build_colmap_model)
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path,
                                              img_list_map, map_pairs)
        kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv,
                                                 map_plus_query_gv_path,
                                                 img_list_map, map_pairs)
        query_img_kapture = add_image_to_kapture(kdata_map,
                                                 map_plus_query_path,
                                                 kdata_sub, img_query,
                                                 query_pairs)
        query_img_kapture_gv = add_image_to_kapture(kdata_map_gv,
                                                    map_plus_query_gv_path,
                                                    kdata_sub_gv, img_query,
                                                    query_pairs)

        # match missing pairs for localization
        compute_matches_from_loaded_data(map_plus_query_path,
                                         query_img_kapture, query_pairs)

        # query_img_kapture needs to be re-created to add the new matches
        query_img_kapture = add_image_to_kapture(kdata_map,
                                                 map_plus_query_path,
                                                 kdata_sub, img_query,
                                                 query_pairs)

        # run colmap gv on missing pairs
        if len(query_img_kapture.matches) != len(query_img_kapture_gv.matches):
            run_colmap_gv_from_loaded_data(query_img_kapture,
                                           query_img_kapture_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           colmap_binary, [], True)
            # query_img_kapture_gv needs to be re-created to add the new matches
            query_img_kapture_gv = add_image_to_kapture(
                kdata_map_gv, map_plus_query_gv_path, kdata_sub_gv, img_query,
                query_pairs)

        # sanity check
        if len(query_pairs) != len(query_img_kapture_gv.matches):
            logger.info('not all query matches available')

        # localize in COLMAP map
        try:
            colmap_localize_from_loaded_data(
                query_img_kapture_gv, map_plus_query_gv_path,
                os.path.join(kdata_sub_colmap_path, 'registered'),
                os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                colmap_binary, False, [
                    '--Mapper.ba_refine_focal_length', '0',
                    '--Mapper.ba_refine_principal_point', '0',
                    '--Mapper.ba_refine_extra_params', '0',
                    '--Mapper.min_num_matches', '4',
                    '--Mapper.init_min_num_inliers', '4',
                    '--Mapper.abs_pose_min_num_inliers', '4',
                    '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                    '--Mapper.ba_local_max_num_iterations', '50',
                    '--Mapper.abs_pose_max_error', '20',
                    '--Mapper.filter_max_reproj_error', '12'
                ], [], True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(
                os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                             'reconstruction/images.txt')):
            logger.info(
                f'colmap localization of {img_query} did not work, image was not localized'
            )
            continue

        # add to results kapture
        kdata_reg_query = import_colmap(
            kdata_reg_query_path,
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                         'colmap.db'),
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                         'reconstruction'), None, None, True, True, True,
            TransferAction.skip)

        if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
            logger.info('successfully localized')

        # write results (after each image to see the progress)
        kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporal files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)
    safe_remove_file(query_img_kapture_pairsfile_path, True)

    logger.info('all done')
def mapping_pipeline(kapture_path: str, keypoints_path: str,
                     descriptors_path: str,
                     global_features_path: Optional[str],
                     input_pairsfile_path: Optional[str], matches_path: str,
                     matches_gv_path: str, keypoints_type: Optional[str],
                     descriptors_type: Optional[str],
                     global_features_type: Optional[str], colmap_map_path: str,
                     colmap_binary: str, python_binary: Optional[str],
                     topk: int, config: int, skip_list: List[str],
                     force_overwrite_existing: bool) -> None:
    """
    Build a colmap model using pre computed features with the kapture data.

    :param kapture_path: path to the kapture map directory
    :param keypoints_path: input path to the orphan keypoints folder
    :param descriptors_path: input path to the orphan descriptors folder
    :param global_features_path: input path to the orphan global_features folder
    :param input_pairsfile_path: text file in the csv format; where each line is image_name1, image_name2, score
    :param matches_path: input path to the orphan matches (not verified) folder
    :param matches_gv_path: input path to the orphan matches (verified) folder
    :param keypoints_type: type of keypoints, name of the keypoints subfolder
    :param descriptors_type: type of descriptors, name of the descriptors subfolder
    :param global_features_type: type of global features, name of the global_features subfolder
    :param colmap_map_path: path to the colmap output folder
    :param colmap_binary: path to the colmap executable
    :param python_binary: path to the python executable
    :param topk: the max number of top retained images when computing image pairs from global features
    :param config: index of the config parameters to use for point triangulator
    :param skip_list: list of steps to ignore
    :param force_overwrite_existing: silently overwrite files if already exists
    """
    os.makedirs(colmap_map_path, exist_ok=True)
    if input_pairsfile_path is None:
        pairsfile_path = path.join(colmap_map_path,
                                   f'pairs_mapping_{topk}.txt')
    else:
        pairsfile_path = input_pairsfile_path

    if not path.isdir(matches_path):
        os.makedirs(matches_path)
    if not path.isdir(matches_gv_path):
        os.makedirs(matches_gv_path)

    # build proxy kapture in output folder
    proxy_kapture_path = path.join(colmap_map_path,
                                   'kapture_inputs/proxy_mapping')
    create_kapture_proxy_single_features(proxy_kapture_path, kapture_path,
                                         keypoints_path, descriptors_path,
                                         global_features_path, matches_path,
                                         keypoints_type, descriptors_type,
                                         global_features_type,
                                         force_overwrite_existing)

    # kapture_compute_image_pairs.py
    if global_features_path is not None and 'compute_image_pairs' not in skip_list:
        local_image_pairs_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_compute_image_pairs.py')
        if os.path.isfile(pairsfile_path):
            safe_remove_file(pairsfile_path, force_overwrite_existing)

        compute_image_pairs_args = [
            '-v',
            str(logger.level), '--mapping', proxy_kapture_path, '--query',
            proxy_kapture_path, '--topk',
            str(topk), '-o', pairsfile_path
        ]
        run_python_command(local_image_pairs_path, compute_image_pairs_args,
                           python_binary)

    # kapture_compute_matches.py
    if 'compute_matches' not in skip_list:
        local_compute_matches_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_compute_matches.py')
        compute_matches_args = [
            '-v',
            str(logger.level), '-i', proxy_kapture_path, '--pairsfile-path',
            pairsfile_path
        ]
        run_python_command(local_compute_matches_path, compute_matches_args,
                           python_binary)

    # build proxy gv kapture in output folder
    proxy_kapture_gv_path = path.join(colmap_map_path,
                                      'kapture_inputs/proxy_mapping_gv')
    create_kapture_proxy_single_features(proxy_kapture_gv_path, kapture_path,
                                         keypoints_path, descriptors_path,
                                         global_features_path, matches_gv_path,
                                         keypoints_type, descriptors_type,
                                         global_features_type,
                                         force_overwrite_existing)

    # kapture_run_colmap_gv.py
    if 'geometric_verification' not in skip_list:
        local_run_colmap_gv_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_run_colmap_gv.py')
        run_colmap_gv_args = [
            '-v',
            str(logger.level), '-i', proxy_kapture_path, '-o',
            proxy_kapture_gv_path, '--pairsfile-path', pairsfile_path,
            '-colmap', colmap_binary
        ]
        if force_overwrite_existing:
            run_colmap_gv_args.append('-f')
        run_python_command(local_run_colmap_gv_path, run_colmap_gv_args,
                           python_binary)

    # kapture_colmap_build_map.py
    if 'colmap_build_map' not in skip_list:
        local_build_map_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_colmap_build_map.py')
        build_map_args = [
            '-v',
            str(logger.level), '-i', proxy_kapture_gv_path, '-o',
            colmap_map_path, '-colmap', colmap_binary, '--pairs-file-path',
            pairsfile_path
        ]
        if force_overwrite_existing:
            build_map_args.append('-f')
        build_map_args += CONFIGS[config]
        run_python_command(local_build_map_path, build_map_args, python_binary)
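
# Minimal usage sketch for the mapping_pipeline above. All paths, the topk value and the
# config index are hypothetical placeholders, not values from the original pipeline; the
# orphan feature folders are expected to follow the layout documented in the docstring.
if __name__ == '__main__':
    mapping_pipeline(kapture_path='/data/mapping',
                     keypoints_path='/data/local_features/keypoints',
                     descriptors_path='/data/local_features/descriptors',
                     global_features_path='/data/global_features',
                     input_pairsfile_path=None,  # pairs are computed from global features
                     matches_path='/data/local_features/matches',
                     matches_gv_path='/data/local_features/matches_gv',
                     keypoints_type=None,  # optional type names, left to the defaults
                     descriptors_type=None,
                     global_features_type=None,
                     colmap_map_path='/data/colmap-sfm',
                     colmap_binary='colmap',
                     python_binary=None,
                     topk=20,
                     config=1,  # assumed valid index into the CONFIGS table defined elsewhere
                     skip_list=[],
                     force_overwrite_existing=True)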
Exemple #15
def colmap_localize_from_loaded_data(kapture_data: kapture.Kapture,
                                     kapture_path: str,
                                     tar_handlers: Optional[TarCollection],
                                     colmap_path: str,
                                     input_database_path: str,
                                     input_reconstruction_path: str,
                                     colmap_binary: str,
                                     keypoints_type: Optional[str],
                                     use_colmap_matches_importer: bool,
                                     image_registrator_options: List[str],
                                     skip_list: List[str],
                                     force: bool) -> None:
    """
    Localize images on a colmap model with the kapture data.

    :param kapture_data: kapture data to use
    :param kapture_path: path to the kapture to use
    :param tar_handlers: collection of preloaded tar archives
    :param colmap_path: path to the colmap build
    :param input_database_path: path to the map colmap.db
    :param input_reconstruction_path: path to the map reconstruction folder
    :param colmap_binary: path to the colmap binary executable
    :param keypoints_type: type of keypoints, name of the keypoints subfolder
    :param use_colmap_matches_importer: if True, run colmap matches_importer for geometric verification instead of directly importing the precomputed two-view geometry
    :param image_registrator_options: options for the image registrator
    :param skip_list: list of steps to skip
    :param force: Silently overwrite kapture files if already exists.
    """
    os.makedirs(colmap_path, exist_ok=True)

    if not (kapture_data.records_camera and kapture_data.sensors and kapture_data.keypoints and kapture_data.matches):
        raise ValueError('records_camera, sensors, keypoints, matches are mandatory')

    if kapture_data.trajectories:
        logger.warning("Input data contains trajectories: they will be ignored")
        kapture_data.trajectories.clear()
    else:
        kapture_data.trajectories = kapture.Trajectories()

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Prepare output
    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    image_list_path = path.join(colmap_path, 'images.list')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_file(image_list_path, force)
        safe_remove_any_path(reconstruction_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    # Copy colmap db to output
    if not os.path.exists(colmap_db_path):
        shutil.copy(input_database_path, colmap_db_path)

    # find correspondences between the colmap db and the kapture data
    images_all = {image_path: (ts, cam_id)
                  for ts, shot in kapture_data.records_camera.items()
                  for cam_id, image_path in shot.items()}

    colmap_db = COLMAPDatabase.connect(colmap_db_path)
    colmap_image_ids = database_extra.get_colmap_image_ids_from_db(colmap_db)
    colmap_images = database_extra.get_images_from_database(colmap_db)
    colmap_db.close()

    # dict ( kapture_camera -> colmap_camera_id )
    colmap_camera_ids = {images_all[image_path][1]: colmap_cam_id
                         for image_path, colmap_cam_id in colmap_images if image_path in images_all}

    images_to_add = {image_path: value
                     for image_path, value in images_all.items()
                     if image_path not in colmap_image_ids}

    flatten_images_to_add = [(ts, kapture_cam_id, image_path)
                             for image_path, (ts, kapture_cam_id) in images_to_add.items()]

    if 'import_to_db' not in skip_list:
        logger.info("Step 1: Add precomputed keypoints and matches to colmap db")

        if keypoints_type is None:
            keypoints_type = try_get_only_key_from_collection(kapture_data.keypoints)
        assert keypoints_type is not None
        assert keypoints_type in kapture_data.keypoints
        assert keypoints_type in kapture_data.matches

        cameras_to_add = kapture.Sensors()
        for _, (_, kapture_cam_id) in images_to_add.items():
            if kapture_cam_id not in colmap_camera_ids:
                kapture_cam = kapture_data.sensors[kapture_cam_id]
                cameras_to_add[kapture_cam_id] = kapture_cam
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        colmap_added_camera_ids = database_extra.add_cameras_to_database(cameras_to_add, colmap_db)
        colmap_camera_ids.update(colmap_added_camera_ids)

        colmap_added_image_ids = database_extra.add_images_to_database_from_flatten(
            colmap_db, flatten_images_to_add, kapture_data.trajectories, colmap_camera_ids)
        colmap_image_ids.update(colmap_added_image_ids)

        colmap_image_ids_reversed = {v: k for k, v in colmap_image_ids.items()}  # colmap_id : name

        # add new features
        colmap_keypoints = database_extra.get_keypoints_set_from_database(colmap_db, colmap_image_ids_reversed)

        keypoints_all = kapture_data.keypoints[keypoints_type]
        keypoints_to_add = {name for name in keypoints_all if name not in colmap_keypoints}
        keypoints_to_add = kapture.Keypoints(keypoints_all.type_name, keypoints_all.dtype, keypoints_all.dsize,
                                             keypoints_to_add)
        database_extra.add_keypoints_to_database(colmap_db, keypoints_to_add,
                                                 keypoints_type, kapture_path,
                                                 tar_handlers,
                                                 colmap_image_ids)

        # add new matches
        colmap_matches = kapture.Matches(database_extra.get_matches_set_from_database(colmap_db,
                                                                                      colmap_image_ids_reversed))
        colmap_matches.normalize()

        matches_all = kapture_data.matches[keypoints_type]
        matches_to_add = kapture.Matches({pair for pair in matches_all if pair not in colmap_matches})
        database_extra.add_matches_to_database(colmap_db, matches_to_add,
                                               keypoints_type, kapture_path,
                                               tar_handlers,
                                               colmap_image_ids,
                                               export_two_view_geometry=not use_colmap_matches_importer)
        colmap_db.close()

    if use_colmap_matches_importer:
        logger.info('Step 2: Run geometric verification')
        logger.debug('running colmap matches_importer...')

        if keypoints_type is None:
            keypoints_type = try_get_only_key_from_collection(kapture_data.matches)
        assert keypoints_type is not None
        assert keypoints_type in kapture_data.matches

        # compute two view geometry
        colmap_lib.run_matches_importer_from_kapture_matches(
            colmap_binary,
            colmap_use_cpu=True,
            colmap_gpu_index=None,
            colmap_db_path=colmap_db_path,
            kapture_matches=kapture_data.matches[keypoints_type],
            force=force)
    else:
        logger.info('Step 2: Run geometric verification - skipped')
    if 'image_registrator' not in skip_list:
        logger.info("Step 3: Run image_registrator")
        # run image_registrator
        colmap_lib.run_image_registrator(
            colmap_binary,
            colmap_db_path,
            input_reconstruction_path,
            reconstruction_path,
            image_registrator_options
        )

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 4: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
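
# Minimal usage sketch for colmap_localize_from_loaded_data above. Paths are hypothetical
# placeholders; the query kapture must already contain records_camera, sensors, keypoints
# and matches, and the COLMAP map given by the db/reconstruction paths must exist beforehand.
if __name__ == '__main__':
    from kapture.io.csv import kapture_from_dir  # same loader as used elsewhere in this file

    query_kapture_path = '/data/map_plus_query'  # hypothetical kapture holding the query features
    kdata = kapture_from_dir(query_kapture_path)
    colmap_localize_from_loaded_data(kdata,
                                     query_kapture_path,
                                     tar_handlers=None,  # features stored as plain files, no tars
                                     colmap_path='/data/colmap-localized',
                                     input_database_path='/data/colmap-sfm/colmap.db',
                                     input_reconstruction_path='/data/colmap-sfm/reconstruction',
                                     colmap_binary='colmap',
                                     keypoints_type=None,  # resolved from the kapture data
                                     use_colmap_matches_importer=False,
                                     image_registrator_options=[],
                                     skip_list=[],
                                     force=True)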
def mapping_pipeline(kapture_path: str,
                     keypoints_path: str,
                     descriptors_path: str,
                     global_features_path: str,
                     matches_path: str,
                     matches_gv_path: str,
                     colmap_map_path: str,
                     colmap_binary: str,
                     python_binary: Optional[str],
                     topk: int,
                     skip_list: List[str],
                     force_overwrite_existing: bool) -> None:
    """
    Build a colmap model using pre computed features with the kapture data.

    :param kapture_path: path to the kapture map directory
    :type kapture_path: str
    :param keypoints_path: input path to the orphan keypoints folder
    :type keypoints_path: str
    :param descriptors_path: input path to the orphan descriptors folder
    :type descriptors_path: str
    :param global_features_path: input path to the orphan global_features folder
    :type global_features_path: str
    :param matches_path: input path to the orphan matches (not verified) folder
    :type matches_path: str
    :param matches_gv_path: input path to the orphan matches (verified) folder
    :type matches_gv_path: str
    :param colmap_map_path: path to the colmap output folder
    :type colmap_map_path: str
    :param colmap_binary: path to the colmap executable
    :type colmap_binary: str
    :param python_binary: path to the python executable
    :type python_binary: Optional[str]
    :param topk: the max number of top retained images when computing image pairs from global features
    :type topk: int
    :param skip_list: list of steps to ignore
    :type skip_list: List[str]
    :param force_overwrite_existing: silently overwrite files if already exists
    :type force_overwrite_existing: bool
    """
    os.makedirs(colmap_map_path, exist_ok=True)
    pairfile_path = path.join(colmap_map_path, f'pairs_mapping_{topk}.txt')

    if not path.isdir(matches_path):
        os.makedirs(matches_path)
    if not path.isdir(matches_gv_path):
        os.makedirs(matches_gv_path)

    # build proxy kapture in output folder
    proxy_kapture_path = path.join(colmap_map_path, 'kapture_inputs/proxy_mapping')
    create_kapture_proxy(proxy_kapture_path,
                         kapture_path,
                         keypoints_path,
                         descriptors_path,
                         global_features_path,
                         matches_path,
                         force_overwrite_existing)

    # kapture_compute_image_pairs.py
    if 'compute_image_pairs' not in skip_list:
        local_image_pairs_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_compute_image_pairs.py')
        if os.path.isfile(pairfile_path):
            safe_remove_file(pairfile_path, force_overwrite_existing)

        compute_image_pairs_args = ['-v', str(logger.level),
                                    '--mapping', proxy_kapture_path,
                                    '--query', proxy_kapture_path,
                                    '--topk', str(topk),
                                    '-o', pairfile_path]
        run_python_command(local_image_pairs_path, compute_image_pairs_args, python_binary)

    # kapture_compute_matches.py
    if 'compute_matches' not in skip_list:
        local_compute_matches_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_compute_matches.py')
        compute_matches_args = ['-v', str(logger.level),
                                '-i', proxy_kapture_path,
                                '--pairsfile-path', pairfile_path]
        run_python_command(local_compute_matches_path, compute_matches_args, python_binary)

    # build proxy gv kapture in output folder
    proxy_kapture_gv_path = path.join(colmap_map_path, 'kapture_inputs/proxy_mapping_gv')
    create_kapture_proxy(proxy_kapture_gv_path,
                         kapture_path,
                         keypoints_path,
                         descriptors_path,
                         global_features_path,
                         matches_gv_path,
                         force_overwrite_existing)

    # kapture_run_colmap_gv.py
    if 'geometric_verification' not in skip_list:
        local_run_colmap_gv_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_run_colmap_gv.py')
        run_colmap_gv_args = ['-v', str(logger.level),
                              '-i', proxy_kapture_path,
                              '-o', proxy_kapture_gv_path,
                              '--pairsfile-path', pairfile_path,
                              '-colmap', colmap_binary]
        if force_overwrite_existing:
            run_colmap_gv_args.append('-f')
        run_python_command(local_run_colmap_gv_path, run_colmap_gv_args, python_binary)

    # kapture_colmap_build_map.py
    if 'colmap_build_map' not in skip_list:
        local_build_map_path = path.join(pipeline_import_paths.HERE_PATH, '../tools/kapture_colmap_build_map.py')
        build_map_args = ['-v', str(logger.level),
                          '-i', proxy_kapture_gv_path,
                          '-o', colmap_map_path,
                          '-colmap', colmap_binary,
                          '--pairs-file-path', pairfile_path]
        if force_overwrite_existing:
            build_map_args.append('-f')
        build_map_args += ['--Mapper.ba_refine_focal_length', '0',
                           '--Mapper.ba_refine_principal_point', '0',
                           '--Mapper.ba_refine_extra_params', '0']
        run_python_command(local_build_map_path, build_map_args, python_binary)
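
# Minimal usage sketch for this older mapping_pipeline variant. Paths and topk are
# hypothetical placeholders; it relies on the same orphan feature folder layout as above.
if __name__ == '__main__':
    mapping_pipeline(kapture_path='/data/mapping',
                     keypoints_path='/data/local_features/keypoints',
                     descriptors_path='/data/local_features/descriptors',
                     global_features_path='/data/global_features',
                     matches_path='/data/local_features/matches',
                     matches_gv_path='/data/local_features/matches_gv',
                     colmap_map_path='/data/colmap-sfm',
                     colmap_binary='colmap',
                     python_binary=None,
                     topk=20,
                     skip_list=[],
                     force_overwrite_existing=True)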
Exemple #17
def image_retrieval_benchmark(kapture_map_path: str, kapture_query_path: str,
                              merge_path: Optional[str], keypoints_path: str,
                              descriptors_path: str, global_features_path: str,
                              matches_path: str, matches_gv_path: str,
                              colmap_map_path: str,
                              localization_output_path: str,
                              colmap_binary: str, python_binary: Optional[str],
                              topk: int, config: int, prepend_cam: bool,
                              skip_list: List[str],
                              force_overwrite_existing: bool) -> None:
    """
    Image retrieval benchmark

    :param kapture_map_path: path to the kapture map directory
    :type kapture_map_path: str
    :param kapture_query_path: path to the kapture query directory
    :type kapture_query_path: str
    :param merge_path: path to the kapture map+query directory
    :type merge_path: Optional[str]
    :param keypoints_path: input path to the orphan keypoints folder
    :type keypoints_path: str
    :param descriptors_path: input path to the orphan descriptors folder
    :type descriptors_path: str
    :param global_features_path: input path to the orphan global_features folder
    :type global_features_path: str
    :param matches_path: input path to the orphan matches (not verified) folder
    :type matches_path: str
    :param matches_gv_path: input path to the orphan matches (verified) folder
    :type matches_gv_path: str
    :param colmap_map_path: input path to the colmap reconstruction folder
    :type colmap_map_path: str
    :param localization_output_path: output path to the localization results
    :type localization_output_path: str
    :param colmap_binary: path to the colmap executable
    :type colmap_binary: str
    :param python_binary: path to the python executable
    :type python_binary: Optional[str]
    :param topk: the max number of top retained images when computing image pairs from global features
    :type topk: int
    :param config: index of the config parameters to use for image registrator
    :type config: int
    :param prepend_cam: prepend camera names to filename in LTVL2020 formatted output
    :type prepend_cam: bool
    :param skip_list: list of steps to ignore
    :type skip_list: List[str]
    :param force_overwrite_existing: silently overwrite files if already exists
    :type force_overwrite_existing: bool
    """
    os.makedirs(localization_output_path, exist_ok=True)
    pairfile_path = path.join(localization_output_path,
                              f'pairs_localization_{topk}.txt')
    map_plus_query_path = path.join(
        localization_output_path,
        'kapture_inputs/map_plus_query') if merge_path is None else merge_path
    eval_path = path.join(localization_output_path, 'eval')

    # global sfm results
    global_sfm_path = path.join(localization_output_path, 'global_sfm')
    global_sfm_colmap_localize_path = path.join(global_sfm_path,
                                                'colmap_localized')
    os.makedirs(global_sfm_colmap_localize_path, exist_ok=True)
    global_sfm_kapture_localize_import_path = path.join(
        global_sfm_path, 'kapture_localized')
    global_sfm_LTVL2020_output_path = path.join(
        localization_output_path, 'global_sfm_LTVL2020_style_result.txt')

    # local sfm results
    local_sfm_path = path.join(localization_output_path, 'local_sfm')
    os.makedirs(local_sfm_path, exist_ok=True)
    local_sfm_localize_path = path.join(local_sfm_path, 'localized')
    local_sfm_LTVL2020_output_path = path.join(
        localization_output_path, 'local_sfm_LTVL2020_style_result.txt')

    # pose approximation results
    pose_approx_path = path.join(localization_output_path, 'pose_approx')
    pose_approx_EWB_path = path.join(pose_approx_path, 'EWB')
    pose_approx_BDI_path = path.join(pose_approx_path, 'BDI')
    pose_approx_CSI_path = path.join(pose_approx_path, 'CSI')

    pose_approx_EWB_LTVL2020_output_path = path.join(
        localization_output_path, 'EWB_LTVL2020_style_result.txt')
    pose_approx_BDI_LTVL2020_output_path = path.join(
        localization_output_path, 'BDI_LTVL2020_style_result.txt')
    pose_approx_CSI_LTVL2020_output_path = path.join(
        localization_output_path, 'CSI_LTVL2020_style_result.txt')

    if not path.isdir(matches_path):
        os.makedirs(matches_path)
    if not path.isdir(matches_gv_path):
        os.makedirs(matches_gv_path)

    # build proxy kapture map in output folder
    proxy_kapture_map_path = path.join(localization_output_path,
                                       'kapture_inputs/proxy_mapping')
    create_kapture_proxy(proxy_kapture_map_path, kapture_map_path,
                         keypoints_path, descriptors_path,
                         global_features_path, matches_path,
                         force_overwrite_existing)

    # build proxy kapture query in output folder
    proxy_kapture_query_path = path.join(localization_output_path,
                                         'kapture_inputs/proxy_query')
    create_kapture_proxy(proxy_kapture_query_path, kapture_query_path,
                         keypoints_path, descriptors_path,
                         global_features_path, matches_path,
                         force_overwrite_existing)

    # kapture_compute_image_pairs.py
    if 'compute_image_pairs' not in skip_list:
        local_image_pairs_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_compute_image_pairs.py')
        if os.path.isfile(pairfile_path):
            safe_remove_file(pairfile_path, force_overwrite_existing)
        compute_image_pairs_args = [
            '-v',
            str(logger.level), '--mapping', proxy_kapture_map_path, '--query',
            proxy_kapture_query_path, '--topk',
            str(topk), '-o', pairfile_path
        ]
        run_python_command(local_image_pairs_path, compute_image_pairs_args,
                           python_binary)

    # kapture_merge.py
    if merge_path is None:
        local_merge_path = path.join(pipeline_import_paths.HERE_PATH,
                                     '../../kapture/tools/kapture_merge.py')
        merge_args = [
            '-v',
            str(logger.level), '-i', proxy_kapture_map_path,
            proxy_kapture_query_path, '-o', map_plus_query_path, '-s',
            'keypoints', 'descriptors', 'global_features', 'matches',
            '--image_transfer', 'link_absolute'
        ]
        if force_overwrite_existing:
            merge_args.append('-f')
        run_python_command(local_merge_path, merge_args, python_binary)

    # build proxy kapture map+query in output folder
    proxy_kapture_map_plus_query_path = path.join(
        localization_output_path, 'kapture_inputs/proxy_map_plus_query')
    create_kapture_proxy(proxy_kapture_map_plus_query_path,
                         map_plus_query_path, keypoints_path, descriptors_path,
                         global_features_path, matches_path,
                         force_overwrite_existing)

    # kapture_compute_matches.py
    if 'compute_matches' not in skip_list:
        local_compute_matches_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_compute_matches.py')
        compute_matches_args = [
            '-v',
            str(logger.level), '-i', proxy_kapture_map_plus_query_path,
            '--pairsfile-path', pairfile_path
        ]
        run_python_command(local_compute_matches_path, compute_matches_args,
                           python_binary)

    # build proxy gv kapture in output folder
    proxy_kapture_map_plus_query_gv_path = path.join(
        localization_output_path, 'kapture_inputs/proxy_map_plus_query_gv')
    create_kapture_proxy(proxy_kapture_map_plus_query_gv_path,
                         map_plus_query_path, keypoints_path, descriptors_path,
                         global_features_path, matches_gv_path,
                         force_overwrite_existing)

    # kapture_run_colmap_gv.py
    if 'geometric_verification' not in skip_list:
        local_run_colmap_gv_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_run_colmap_gv.py')
        run_colmap_gv_args = [
            '-v',
            str(logger.level), '-i', proxy_kapture_map_plus_query_path, '-o',
            proxy_kapture_map_plus_query_gv_path, '--pairsfile-path',
            pairfile_path, '-colmap', colmap_binary
        ]
        if force_overwrite_existing:
            run_colmap_gv_args.append('-f')
        run_python_command(local_run_colmap_gv_path, run_colmap_gv_args,
                           python_binary)

    # -------- GLOBAL MAP LOCALIZATION --------
    if 'global_sfm' not in skip_list:
        # kapture_colmap_localize.py
        local_localize_path = path.join(pipeline_import_paths.HERE_PATH,
                                        '../tools/kapture_colmap_localize.py')
        localize_args = [
            '-v',
            str(logger.level), '-i', proxy_kapture_map_plus_query_gv_path,
            '-o', global_sfm_colmap_localize_path, '-colmap', colmap_binary,
            '--pairs-file-path', pairfile_path, '-db',
            path.join(colmap_map_path, 'colmap.db'), '-txt',
            path.join(colmap_map_path, 'reconstruction')
        ]
        if force_overwrite_existing:
            localize_args.append('-f')
        localize_args += CONFIGS[config]
        run_python_command(local_localize_path, localize_args, python_binary)

        # kapture_import_colmap.py
        local_import_colmap_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../../kapture/tools/kapture_import_colmap.py')
        import_colmap_args = [
            '-v',
            str(logger.level), '-db',
            path.join(global_sfm_colmap_localize_path, 'colmap.db'), '-txt',
            path.join(global_sfm_colmap_localize_path, 'reconstruction'), '-o',
            global_sfm_kapture_localize_import_path, '--skip_reconstruction'
        ]
        if force_overwrite_existing:
            import_colmap_args.append('-f')
        run_python_command(local_import_colmap_path, import_colmap_args,
                           python_binary)

        # kapture_export_LTVL2020.py
        if 'export_LTVL2020' not in skip_list:
            local_export_LTVL2020_path = path.join(
                pipeline_import_paths.HERE_PATH,
                '../../kapture/tools/kapture_export_LTVL2020.py')
            export_LTVL2020_args = [
                '-v',
                str(logger.level), '-i',
                global_sfm_kapture_localize_import_path, '-o',
                global_sfm_LTVL2020_output_path
            ]
            if prepend_cam:
                export_LTVL2020_args.append('-p')
            if force_overwrite_existing:
                export_LTVL2020_args.append('-f')
            run_python_command(local_export_LTVL2020_path,
                               export_LTVL2020_args, python_binary)

    # -------- LOCAL SFM LOCALIZATION --------
    if 'local_sfm' not in skip_list:
        # kapture_colmap_localize_localsfm
        local_colmap_localize_localsfm_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_colmap_localize_localsfm.py')
        colmap_localize_localsfm_args = [
            '-v',
            str(logger.level), '--map_plus_query',
            proxy_kapture_map_plus_query_path, '--map_plus_query_gv',
            proxy_kapture_map_plus_query_gv_path, '--query',
            proxy_kapture_query_path, '-o', local_sfm_path, '-colmap',
            colmap_binary, '--pairsfile-path', pairfile_path
        ]
        if force_overwrite_existing:
            colmap_localize_localsfm_args.append('-f')
        run_python_command(local_colmap_localize_localsfm_path,
                           colmap_localize_localsfm_args, python_binary)

        # kapture_export_LTVL2020.py
        if 'export_LTVL2020' not in skip_list:
            local_export_LTVL2020_path = path.join(
                pipeline_import_paths.HERE_PATH,
                '../../kapture/tools/kapture_export_LTVL2020.py')
            export_LTVL2020_args = [
                '-v',
                str(logger.level), '-i', local_sfm_localize_path, '-o',
                local_sfm_LTVL2020_output_path
            ]
            if prepend_cam:
                export_LTVL2020_args.append('-p')
            if force_overwrite_existing:
                export_LTVL2020_args.append('-f')
            run_python_command(local_export_LTVL2020_path,
                               export_LTVL2020_args, python_binary)

    # -------- POSE APPROXIMATION LOCALIZATION --------
    if 'pose_approximation' not in skip_list:
        # kapture_pose_approximation.py
        local_pose_approximation_path = path.join(
            pipeline_import_paths.HERE_PATH,
            '../tools/kapture_pose_approximation.py')
        pose_approximation_args = [
            '-v',
            str(logger.level), '--mapping', proxy_kapture_map_path, '--query',
            proxy_kapture_query_path, '--topk',
            str(topk)
        ]
        if force_overwrite_existing:
            pose_approximation_args.append('-f')

        # EWB
        EWB_pose_approximation_args = pose_approximation_args + [
            '-o', pose_approx_EWB_path, 'equal_weighted_barycenter'
        ]
        run_python_command(local_pose_approximation_path,
                           EWB_pose_approximation_args, python_binary)

        # BDI
        BDI_pose_approximation_args = pose_approximation_args + [
            '-o', pose_approx_BDI_path, 'barycentric_descriptor_interpolation'
        ]
        run_python_command(local_pose_approximation_path,
                           BDI_pose_approximation_args, python_binary)

        # CSI
        CSI_pose_approximation_args = pose_approximation_args + [
            '-o', pose_approx_CSI_path, 'cosine_similarity'
        ]
        run_python_command(local_pose_approximation_path,
                           CSI_pose_approximation_args, python_binary)

        # kapture_export_LTVL2020.py
        if 'export_LTVL2020' not in skip_list:
            local_export_LTVL2020_path = path.join(
                pipeline_import_paths.HERE_PATH,
                '../../kapture/tools/kapture_export_LTVL2020.py')
            args_append_array = []
            if prepend_cam:
                args_append_array.append('-p')
            if force_overwrite_existing:
                args_append_array.append('-f')

            EWB_export_LTVL2020_args = [
                '-v',
                str(logger.level), '-i', pose_approx_EWB_path, '-o',
                pose_approx_EWB_LTVL2020_output_path
            ] + args_append_array
            run_python_command(local_export_LTVL2020_path,
                               EWB_export_LTVL2020_args, python_binary)

            BDI_export_LTVL2020_args = [
                '-v',
                str(logger.level), '-i', pose_approx_BDI_path, '-o',
                pose_approx_BDI_LTVL2020_output_path
            ] + args_append_array
            run_python_command(local_export_LTVL2020_path,
                               BDI_export_LTVL2020_args, python_binary)

            CSI_export_LTVL2020_args = [
                '-v',
                str(logger.level), '-i', pose_approx_CSI_path, '-o',
                pose_approx_CSI_LTVL2020_output_path
            ] + args_append_array
            run_python_command(local_export_LTVL2020_path,
                               CSI_export_LTVL2020_args, python_binary)

    # -------- EVALUATE ALL AT ONCE --------
    # kapture_evaluate.py
    if 'evaluate' not in skip_list and path.isfile(
            path.join(kapture_query_path, 'sensors/trajectories.txt')):
        local_evaluate_path = path.join(pipeline_import_paths.HERE_PATH,
                                        '../tools/kapture_evaluate.py')
        input_list = []
        label_list = []
        if 'global_sfm' not in skip_list:
            input_list.append(global_sfm_kapture_localize_import_path)
            label_list.append(f'global_sfm_config_{config}')
        if 'local_sfm' not in skip_list:
            input_list.append(local_sfm_localize_path)
            label_list.append('local_sfm')
        if 'pose_approximation' not in skip_list:
            input_list.append(pose_approx_EWB_path)
            label_list.append('EWB')
            input_list.append(pose_approx_BDI_path)
            label_list.append('BDI')
            input_list.append(pose_approx_CSI_path)
            label_list.append('CSI')
        evaluate_args = ['-v', str(logger.level), '-i'] + input_list + [
            '--labels'
        ] + label_list + ['-gt', kapture_query_path, '-o', eval_path]
        if force_overwrite_existing:
            evaluate_args.append('-f')
        run_python_command(local_evaluate_path, evaluate_args, python_binary)
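
# Minimal usage sketch for image_retrieval_benchmark above. All paths, topk and the config
# index are hypothetical placeholders; evaluation only runs if the query kapture ships
# ground-truth trajectories, as checked at the end of the function.
if __name__ == '__main__':
    image_retrieval_benchmark(kapture_map_path='/data/mapping',
                              kapture_query_path='/data/query',
                              merge_path=None,  # map+query kapture is built by kapture_merge.py
                              keypoints_path='/data/local_features/keypoints',
                              descriptors_path='/data/local_features/descriptors',
                              global_features_path='/data/global_features',
                              matches_path='/data/local_features/matches',
                              matches_gv_path='/data/local_features/matches_gv',
                              colmap_map_path='/data/colmap-sfm',
                              localization_output_path='/data/benchmark',
                              colmap_binary='colmap',
                              python_binary=None,
                              topk=20,
                              config=1,  # assumed valid index into the CONFIGS table
                              prepend_cam=False,
                              skip_list=[],
                              force_overwrite_existing=True)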
Exemple #18
def colmap_build_map_from_loaded_data(kapture_data: kapture.Kapture,
                                      kapture_path: str,
                                      colmap_path: str,
                                      colmap_binary: str,
                                      pairsfile_path: Optional[str],
                                      use_colmap_matches_importer: bool,
                                      point_triangulator_options: List[str],
                                      skip_list: List[str],
                                      force: bool) -> None:
    """
    Build a colmap model using custom features with the kapture data.

    :param kapture_data: kapture data to use
    :param kapture_path: path to the kapture to use
    :param colmap_path: path to the colmap build
    :param colmap_binary: path to the colmap executable
    :param pairsfile_path: optional path to a text file listing the image pairs, one 'image_name1, image_name2, score' per line
    :param use_colmap_matches_importer: if True, run colmap matches_importer for geometric verification instead of directly exporting the precomputed two-view geometry
    :param point_triangulator_options: options for the point triangulator
    :param skip_list: list of steps to skip
    :param force: Silently overwrite kapture files if already exists.
    """
    os.makedirs(colmap_path, exist_ok=True)

    if not (kapture_data.records_camera and kapture_data.sensors and kapture_data.keypoints and kapture_data.matches):
        raise ValueError('records_camera, sensors, keypoints, matches are mandatory')
    if not kapture_data.trajectories:
        logger.info('there are no trajectories, running mapper instead of point_triangulator')

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    priors_txt_path = path.join(colmap_path, "priors_for_reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_any_path(reconstruction_path, force)
        safe_remove_any_path(priors_txt_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    if 'colmap_db' not in skip_list:
        logger.info('Using precomputed keypoints and matches')
        logger.info('Step 1: Export kapture format to colmap')

        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        if kapture_data.descriptors is not None:
            kapture_data.descriptors.clear()
        database_extra.kapture_to_colmap(kapture_data, kapture_path, colmap_db,
                                         export_two_view_geometry=not use_colmap_matches_importer)
        # close db before running colmap processes in order to avoid locks
        colmap_db.close()

        if use_colmap_matches_importer:
            logger.info('Step 2: Run geometric verification')
            logger.debug('running colmap matches_importer...')
            colmap_lib.run_matches_importer_from_kapture(
                colmap_binary,
                colmap_use_cpu=True,
                colmap_gpu_index=None,
                colmap_db_path=colmap_db_path,
                kapture_data=kapture_data,
                force=force
            )
        else:
            logger.info('Step 2: Run geometric verification - skipped')

    if kapture_data.trajectories is not None:
        # Generate priors for reconstruction
        os.makedirs(priors_txt_path, exist_ok=True)
        if 'priors_for_reconstruction' not in skip_list:
            logger.info('Step 3: Exporting priors for reconstruction.')
            colmap_db = COLMAPDatabase.connect(colmap_db_path)
            database_extra.generate_priors_for_reconstruction(kapture_data, colmap_db, priors_txt_path)
            colmap_db.close()

        # Point triangulator
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 4: Triangulation")
            colmap_lib.run_point_triangulator(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                priors_txt_path,
                reconstruction_path,
                point_triangulator_options
            )
    else:
        # mapper
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 4: Triangulation")
            colmap_lib.run_mapper(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                None,
                reconstruction_path,
                point_triangulator_options
            )
            # use reconstruction 0 as main
            first_reconstruction = os.path.join(reconstruction_path, '0')
            files = os.listdir(first_reconstruction)
            for f in files:
                shutil.move(os.path.join(first_reconstruction, f), os.path.join(reconstruction_path, f))
            shutil.rmtree(first_reconstruction)

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 5: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
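
# Minimal usage sketch for colmap_build_map_from_loaded_data above. Paths are hypothetical
# placeholders; the kapture must already hold keypoints and matches, and the --Mapper.*
# flags mirror the triangulator options used elsewhere in this file.
if __name__ == '__main__':
    from kapture.io.csv import kapture_from_dir  # same loader as used elsewhere in this file

    mapping_kapture_path = '/data/mapping'  # hypothetical kapture with features and trajectories
    kdata_mapping = kapture_from_dir(mapping_kapture_path)
    colmap_build_map_from_loaded_data(kdata_mapping,
                                      mapping_kapture_path,
                                      colmap_path='/data/colmap-sfm',
                                      colmap_binary='colmap',
                                      pairsfile_path=None,
                                      use_colmap_matches_importer=False,
                                      point_triangulator_options=[
                                          '--Mapper.ba_refine_focal_length', '0',
                                          '--Mapper.ba_refine_principal_point', '0',
                                          '--Mapper.ba_refine_extra_params', '0'],
                                      skip_list=[],
                                      force=True)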
def local_sfm_from_loaded_data(kdata_map: kapture.Kapture,
                               kdata_map_gv: kapture.Kapture,
                               kdata_query: kapture.Kapture,
                               map_plus_query_path: str,
                               map_plus_query_gv_path: str,
                               tar_handlers_map: Optional[TarCollection],
                               tar_handlers_map_gv: Optional[TarCollection],
                               descriptors_type: Optional[str],
                               pairsfile_path: str,
                               output_path_root: str,
                               colmap_binary: str,
                               force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param map_plus_query_path: path to the kapture data consisting of mapping and query data (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data after geometric verification (sensors and reconstruction)
    :param query_path: path to the query kapture data (sensors)
    :param descriptors_type: type of descriptors, name of the descriptors subfolder
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """

    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    if kdata_query.trajectories:
        logger.warning("Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # clear query trajectories in map_plus_query
    kdata_map_cleared_trajectories = kapture.Trajectories()
    query_image_list = set(kdata_query.records_camera.data_list())
    for timestamp, subdict in kdata_map.records_camera.items():
        for sensor_id, image_name in subdict.items():
            if image_name in query_image_list:
                continue
            if (timestamp, sensor_id) in kdata_map.trajectories:
                pose = kdata_map.trajectories.get(timestamp)[sensor_id]
                kdata_map_cleared_trajectories.setdefault(timestamp, {})[sensor_id] = pose
    kdata_map.trajectories = kdata_map_cleared_trajectories

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera and len(
                kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, _ in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root, 'tmp_pairs.txt')

    if descriptors_type is None:
        descriptors_type = try_get_only_key_from_collection(kdata_map.descriptors)
    assert descriptors_type is not None
    assert descriptors_type in kdata_map.descriptors
    keypoints_type = kdata_map.descriptors[descriptors_type].keypoints_type

    # init matches for kdata_map and kdata_map_gv
    if kdata_map.matches is None:
        kdata_map.matches = {}
    if keypoints_type not in kdata_map.matches:
        kdata_map.matches[keypoints_type] = kapture.Matches()
    if kdata_map_gv.matches is None:
        kdata_map_gv.matches = {}
    if keypoints_type not in kdata_map_gv.matches:
        kdata_map_gv.matches[keypoints_type] = kapture.Matches()

    # run all matching
    # loop over query images
    img_skip_list = set()
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            logger.info(f'{img_query} already processed, skipping...')
            img_skip_list.add(img_query)
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'matching for {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)

            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]
            # match missing pairs
            # kdata_map.matches is being updated by compute_matches_from_loaded_data
            compute_matches_from_loaded_data(map_plus_query_path,
                                             tar_handlers_map,
                                             kdata_map,
                                             descriptors_type,
                                             pairs_all)

    # if kdata_map have matches in tar, they need to be switched to read mode
    matches_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map)
    if matches_handler is not None:
        matches_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_path)
        tar_handlers_map.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # run all gv
    # loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            # recompute the pairs
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'geometric verification of {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)

            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]

            if all(pair in kdata_map_gv.matches[keypoints_type] for pair in pairs_all):
                continue

            # create a sub kapture in order to minimize the amount of data exported to colmap
            # kdata_sub needs to be re-created to add the new matches
            kdata_sub = sub_kapture_from_img_list(kdata_map, img_list_map + [img_query], pairs_all,
                                                  keypoints_type, descriptors_type)

            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map + [img_query], pairs_all,
                                                     keypoints_type, descriptors_type)
            # run colmap gv on missing pairs
            run_colmap_gv_from_loaded_data(kdata_sub,
                                           kdata_sub_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           tar_handlers_map,
                                           tar_handlers_map_gv,
                                           colmap_binary,
                                           keypoints_type,
                                           [],
                                           True)
            # update kdata_map_gv.matches
            kdata_map_gv.matches[keypoints_type].update(kdata_sub_gv.matches[keypoints_type])

    # if kdata_map_gv have matches in tar, they need to be switched to read mode
    matches_gv_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map_gv)
    if matches_gv_handler is not None:
        matches_gv_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_gv_path)
        tar_handlers_map_gv.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'mapping and localization for {img_query}')
                table_to_file(fid, map_pairs)
            map_pairs = [(i, j) for i, j, _ in map_pairs]
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map, map_pairs,
                                                     keypoints_type, descriptors_type)
            # sanity check
            if len(map_pairs) != len(kdata_sub_gv.matches[keypoints_type]):
                logger.info('not all mapping matches available')

            # build COLMAP map
            try:
                colmap_build_map_from_loaded_data(
                    kdata_sub_gv,
                    map_plus_query_gv_path,
                    tar_handlers_map_gv,
                    kdata_sub_colmap_path,
                    colmap_binary,
                    keypoints_type,
                    False,
                    [],
                    ['model_converter'],
                    True)
            except ValueError:
                logger.info(f'{img_query} was not localized')
                continue

        if not os.path.exists(os.path.join(kdata_sub_colmap_path, 'reconstruction/images.bin')):
            logger.info(f'colmap mapping for {img_query} did not work, image was not localized')
            continue

        query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
        with open(sub_kapture_pairsfile_path, 'w') as fid:
            table_to_file(fid, query_pairs)
        query_pairs = [(i, j) for i, j, _ in query_pairs]
        query_img_kapture_gv = add_image_to_kapture(kdata_map_gv,
                                                    kdata_sub_gv, img_query, query_pairs,
                                                    keypoints_type, descriptors_type)
        # sanity check
        if len(query_pairs) != len(query_img_kapture_gv.matches[keypoints_type]):
            logger.info('not all query matches available')

        # localize in COLMAP map
        try:
            colmap_localize_from_loaded_data(
                query_img_kapture_gv,
                map_plus_query_gv_path,
                tar_handlers_map_gv,
                os.path.join(kdata_sub_colmap_path, 'registered'),
                os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                colmap_binary,
                keypoints_type,
                False,
                ['--Mapper.ba_refine_focal_length', '0',
                 '--Mapper.ba_refine_principal_point', '0',
                 '--Mapper.ba_refine_extra_params', '0',
                 '--Mapper.min_num_matches', '4',
                 '--Mapper.init_min_num_inliers', '4',
                 '--Mapper.abs_pose_min_num_inliers', '4',
                 '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                 '--Mapper.ba_local_max_num_iterations', '50',
                 '--Mapper.abs_pose_max_error', '20',
                 '--Mapper.filter_max_reproj_error', '12'],
                [],
                True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                                           'reconstruction/images.txt')):
            logger.info(f'colmap localization of {img_query} did not work, image was not localized')
            continue

        # add to results kapture
        kdata_reg_query = import_colmap(
            kdata_reg_query_path,
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'colmap.db'),
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                         'reconstruction'),
            None,
            None,
            True,
            True,
            True,
            TransferAction.skip)

        if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
            logger.info('successfully localized')

        # write results (after each image to see the progress)
        kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporal files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)

    logger.info('all done')
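
# Minimal usage sketch for local_sfm_from_loaded_data above. Paths are hypothetical
# placeholders; the map+query kaptures (raw and geometrically verified) share the query
# images, no tar archives are used, and the descriptors type is resolved automatically.
if __name__ == '__main__':
    from kapture.io.csv import kapture_from_dir  # same loader as used elsewhere in this file

    map_plus_query = '/data/map_plus_query'        # hypothetical merged map+query kapture
    map_plus_query_gv = '/data/map_plus_query_gv'  # same data after geometric verification
    query = '/data/query'                          # hypothetical query kapture (sensors only)

    local_sfm_from_loaded_data(kdata_map=kapture_from_dir(map_plus_query),
                               kdata_map_gv=kapture_from_dir(map_plus_query_gv),
                               kdata_query=kapture_from_dir(query),
                               map_plus_query_path=map_plus_query,
                               map_plus_query_gv_path=map_plus_query_gv,
                               tar_handlers_map=None,
                               tar_handlers_map_gv=None,
                               descriptors_type=None,  # resolved from kdata_map.descriptors
                               pairsfile_path='/data/pairs_localization_20.txt',
                               output_path_root='/data/local_sfm',
                               colmap_binary='colmap',
                               force=True)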