Code example #1
def create_kapture_proxy(output_path: str, source_path: str,
                         keypoints_path: Optional[str],
                         descriptors_path: Optional[str],
                         global_features_path: Optional[str],
                         matches_path: Optional[str], force: bool):
    """
    Creates a kapture proxy directory based on another one,
    optionally gathering multiple sources of reconstruction data.
    It heavily uses symlinks to minimize the amount of copied data.
    A source kapture directory is mandatory for the sensors part of kapture.
    All other reconstruction data, if given, are added.

    :param output_path: root path where to save the proxy. It will be cleaned if it already exists.
    :param source_path: root path of the input kapture directory containing sensors.
    :param keypoints_path: path to keypoints root directory. Remapped to output_path/reconstruction/keypoints
    :param descriptors_path: path to descriptors root directory. Remapped to output_path/reconstruction/descriptors
    :param global_features_path: path to global features root directory.
                                 Remapped to output_path/reconstruction/global_features
    :param matches_path: path to matches root directory. Remapped to output_path/reconstruction/matches
    :param force: force to clean the output (if needed) without user prompt.
    """
    if path.exists(output_path):
        safe_remove_any_path(output_path, force)
    assert not path.exists(output_path)
    os.makedirs(output_path)

    sensors_in_path = path.join(source_path, 'sensors')
    assert path.exists(sensors_in_path)

    sensors_out_path = path.join(output_path, 'sensors')
    absolute_symlink(sensors_in_path, sensors_out_path)

    reconstruction_out_path = path.join(output_path, 'reconstruction')
    os.makedirs(reconstruction_out_path)
    if keypoints_path is not None:
        assert path.exists(keypoints_path)
        absolute_symlink(keypoints_path,
                         os.path.join(reconstruction_out_path, 'keypoints'))

    if descriptors_path is not None:
        assert path.exists(descriptors_path)
        absolute_symlink(descriptors_path,
                         os.path.join(reconstruction_out_path, 'descriptors'))

    if global_features_path is not None:
        assert path.exists(global_features_path)
        absolute_symlink(
            global_features_path,
            os.path.join(reconstruction_out_path, 'global_features'))

    if matches_path is not None:
        assert path.exists(matches_path)
        absolute_symlink(matches_path,
                         os.path.join(reconstruction_out_path, 'matches'))
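
A minimal usage sketch for create_kapture_proxy above; every path is a hypothetical placeholder (assuming precomputed R2D2 keypoints, descriptors and NN matches), not part of the original code:

create_kapture_proxy(
    output_path='/data/proxy_kapture',            # cleaned and rebuilt on every call
    source_path='/data/mapping_kapture',          # must contain a 'sensors' subfolder
    keypoints_path='/data/r2d2/keypoints',
    descriptors_path='/data/r2d2/descriptors',
    global_features_path=None,                    # optional inputs can simply be omitted
    matches_path='/data/r2d2/NN_no_gv/matches',
    force=True)                                   # remove any existing output without prompting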
Code example #2
def export_openmvg(kapture_path: str, openmvg_path: str,
                   image_action: TransferAction, force: bool = False) -> None:
    """
    Export the kapture data to an openMVG JSON file.
    If the openmvg_path is a directory, it will create a JSON file (using the default name sfm_data.json)
    in that directory.

    :param kapture_path: full path to the top kapture directory
    :param openmvg_path: path of the file or directory where to store the data as JSON
    :param image_action: an action to apply to the images: relative or absolute linking, copy, move,
     top directory linking, or skip to do nothing.
    :param force: if true, will remove existing openMVG data without prompting the user.
    """

    if path.isdir(openmvg_path):  # Existing directory
        json_file = path.join(openmvg_path, DEFAULT_JSON_FILE_NAME)
    else:
        file_ext = path.splitext(openmvg_path)[1]
        if len(file_ext) == 0:  # No extension: -> new directory
            json_file = path.join(openmvg_path, DEFAULT_JSON_FILE_NAME)
        elif file_ext.lower() != '.json':
            logger.warning(f'Creating output directory with file extension {file_ext}')
            json_file = path.join(openmvg_path, DEFAULT_JSON_FILE_NAME)
        else:  # Json file
            json_file = openmvg_path
    json_dir = path.dirname(json_file)
    safe_remove_file(json_file, force)
    if path.exists(json_file):
        raise ValueError(f'{json_file} file already exists')
    if image_action != TransferAction.skip and path.exists(json_dir) and any(pathlib.Path(json_dir).iterdir()):
        safe_remove_any_path(json_dir, force)
        if path.isdir(json_dir):
            raise ValueError(f'Images directory {json_dir} exists with remaining files')
    os.makedirs(json_dir, exist_ok=True)

    kapture_data = load_kapture(kapture_path)
    openmvg_data = kapture_to_openmvg(kapture_data, kapture_path, image_action, json_dir)
    logger.info(f'Saving to openmvg file {json_file}')
    with open(json_file, "w") as fid:
        json.dump(openmvg_data, fid, indent=4)
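
A hedged usage sketch for export_openmvg; the paths are placeholders, and TransferAction.link_relative is just one of the available transfer modes (skip, copy, move, absolute or relative linking):

export_openmvg(
    kapture_path='/data/mapping_kapture',
    openmvg_path='/data/openmvg/sfm_data.json',   # a directory also works: the default file name is used
    image_action=TransferAction.link_relative,    # how image files are transferred next to the JSON
    force=True)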
Code example #3
def colmap_build_map_from_loaded_data(kapture_data: kapture.Kapture,
                                      kapture_path: str,
                                      colmap_path: str,
                                      colmap_binary: str,
                                      pairsfile_path: Optional[str],
                                      use_colmap_matches_importer: bool,
                                      point_triangulator_options: List[str],
                                      skip_list: List[str],
                                      force: bool) -> None:
    """
    Build a colmap model using custom features with the kapture data.

    :param kapture_data: kapture data to use
    :param kapture_path: path to the kapture to use
    :param colmap_path: path to the colmap build
    :param colmap_binary: path to the colmap executable
    :param pairsfile_path: optional path to a pairs file listing the image pairs to use
    :param use_colmap_matches_importer: if true, run the colmap matches_importer for geometric verification
                                        instead of exporting the precomputed two-view geometry
    :param point_triangulator_options: options for the point triangulator
    :param skip_list: list of steps to skip
    :param force: silently overwrite output files if they already exist.
    """
    os.makedirs(colmap_path, exist_ok=True)

    if not (kapture_data.records_camera and kapture_data.sensors and kapture_data.keypoints and kapture_data.matches):
        raise ValueError('records_camera, sensors, keypoints, matches are mandatory')
    if not kapture_data.trajectories:
        logger.info('there are no trajectories, running mapper instead of point_triangulator')

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    priors_txt_path = path.join(colmap_path, "priors_for_reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_any_path(reconstruction_path, force)
        safe_remove_any_path(priors_txt_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    if 'colmap_db' not in skip_list:
        logger.info('Using precomputed keypoints and matches')
        logger.info('Step 1: Export kapture format to colmap')

        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        if kapture_data.descriptors is not None:
            kapture_data.descriptors.clear()
        database_extra.kapture_to_colmap(kapture_data, kapture_path, colmap_db,
                                         export_two_view_geometry=not use_colmap_matches_importer)
        # close db before running colmap processes in order to avoid locks
        colmap_db.close()

        if use_colmap_matches_importer:
            logger.info('Step 2: Run geometric verification')
            logger.debug('running colmap matches_importer...')
            colmap_lib.run_matches_importer_from_kapture(
                colmap_binary,
                colmap_use_cpu=True,
                colmap_gpu_index=None,
                colmap_db_path=colmap_db_path,
                kapture_data=kapture_data,
                force=force
            )
        else:
            logger.info('Step 2: Run geometric verification - skipped')

    if kapture_data.trajectories is not None:
        # Generate priors for reconstruction
        os.makedirs(priors_txt_path, exist_ok=True)
        if 'priors_for_reconstruction' not in skip_list:
            logger.info('Step 3: Exporting priors for reconstruction.')
            colmap_db = COLMAPDatabase.connect(colmap_db_path)
            database_extra.generate_priors_for_reconstruction(kapture_data, colmap_db, priors_txt_path)
            colmap_db.close()

        # Point triangulator
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 4: Triangulation")
            colmap_lib.run_point_triangulator(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                priors_txt_path,
                reconstruction_path,
                point_triangulator_options
            )
    else:
        # mapper
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 4: Triangulation")
            colmap_lib.run_mapper(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                None,
                reconstruction_path,
                point_triangulator_options
            )
            # use reconstruction 0 as main
            first_reconstruction = os.path.join(reconstruction_path, '0')
            files = os.listdir(first_reconstruction)
            for f in files:
                shutil.move(os.path.join(first_reconstruction, f), os.path.join(reconstruction_path, f))
            shutil.rmtree(first_reconstruction)

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 5: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
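
A usage sketch, assuming the kapture dataset already contains custom keypoints and matches; loading via kapture.io.csv.kapture_from_dir mirrors the SIFT variant further below, and all paths and options are placeholders:

kapture_data = kapture.io.csv.kapture_from_dir('/data/mapping_kapture')
colmap_build_map_from_loaded_data(
    kapture_data=kapture_data,
    kapture_path='/data/mapping_kapture',
    colmap_path='/data/colmap-sfm',
    colmap_binary='colmap',
    pairsfile_path=None,                          # no pre-selected image pairs
    use_colmap_matches_importer=False,            # keep the precomputed two-view geometry
    point_triangulator_options=['--Mapper.ba_refine_focal_length', '0'],
    skip_list=[],
    force=True)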
Code example #4
def colmap_localize_from_loaded_data(kapture_data: kapture.Kapture,
                                     kapture_path: str,
                                     tar_handlers: Optional[TarCollection],
                                     colmap_path: str,
                                     input_database_path: str,
                                     input_reconstruction_path: str,
                                     colmap_binary: str,
                                     keypoints_type: Optional[str],
                                     use_colmap_matches_importer: bool,
                                     image_registrator_options: List[str],
                                     skip_list: List[str],
                                     force: bool) -> None:
    """
    Localize images on a colmap model with the kapture data.

    :param kapture_data: kapture data to use
    :param kapture_path: path to the kapture to use
    :param tar_handlers: collection of preloaded tar archives
    :param colmap_path: path to the colmap build
    :param input_database_path: path to the map colmap.db
    :param input_reconstruction_path: path to the map reconstruction folder
    :param colmap_binary: path to the colmap binary executable
    :param keypoints_type: type of keypoints, name of the keypoints subfolder
    :param use_colmap_matches_importer: if true, run the colmap matches_importer for geometric verification
                                        instead of exporting the precomputed two-view geometry
    :param image_registrator_options: options for the image registrator
    :param skip_list: list of steps to skip
    :param force: silently overwrite output files if they already exist.
    """
    os.makedirs(colmap_path, exist_ok=True)

    if not (kapture_data.records_camera and kapture_data.sensors and kapture_data.keypoints and kapture_data.matches):
        raise ValueError('records_camera, sensors, keypoints, matches are mandatory')

    if kapture_data.trajectories:
        logger.warning("Input data contains trajectories: they will be ignored")
        kapture_data.trajectories.clear()
    else:
        kapture_data.trajectories = kapture.Trajectories()

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Prepare output
    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    image_list_path = path.join(colmap_path, 'images.list')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_file(image_list_path, force)
        safe_remove_any_path(reconstruction_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    # Copy colmap db to output
    if not os.path.exists(colmap_db_path):
        shutil.copy(input_database_path, colmap_db_path)

    # find correspondences between the colmap db and the kapture data
    images_all = {image_path: (ts, cam_id)
                  for ts, shot in kapture_data.records_camera.items()
                  for cam_id, image_path in shot.items()}

    colmap_db = COLMAPDatabase.connect(colmap_db_path)
    colmap_image_ids = database_extra.get_colmap_image_ids_from_db(colmap_db)
    colmap_images = database_extra.get_images_from_database(colmap_db)
    colmap_db.close()

    # dict ( kapture_camera -> colmap_camera_id )
    colmap_camera_ids = {images_all[image_path][1]: colmap_cam_id
                         for image_path, colmap_cam_id in colmap_images if image_path in images_all}

    images_to_add = {image_path: value
                     for image_path, value in images_all.items()
                     if image_path not in colmap_image_ids}

    flatten_images_to_add = [(ts, kapture_cam_id, image_path)
                             for image_path, (ts, kapture_cam_id) in images_to_add.items()]

    if 'import_to_db' not in skip_list:
        logger.info("Step 1: Add precomputed keypoints and matches to colmap db")

        if keypoints_type is None:
            keypoints_type = try_get_only_key_from_collection(kapture_data.keypoints)
        assert keypoints_type is not None
        assert keypoints_type in kapture_data.keypoints
        assert keypoints_type in kapture_data.matches

        cameras_to_add = kapture.Sensors()
        for _, (_, kapture_cam_id) in images_to_add.items():
            if kapture_cam_id not in colmap_camera_ids:
                kapture_cam = kapture_data.sensors[kapture_cam_id]
                cameras_to_add[kapture_cam_id] = kapture_cam
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        colmap_added_camera_ids = database_extra.add_cameras_to_database(cameras_to_add, colmap_db)
        colmap_camera_ids.update(colmap_added_camera_ids)

        colmap_added_image_ids = database_extra.add_images_to_database_from_flatten(
            colmap_db, flatten_images_to_add, kapture_data.trajectories, colmap_camera_ids)
        colmap_image_ids.update(colmap_added_image_ids)

        colmap_image_ids_reversed = {v: k for k, v in colmap_image_ids.items()}  # colmap_id : name

        # add new features
        colmap_keypoints = database_extra.get_keypoints_set_from_database(colmap_db, colmap_image_ids_reversed)

        keypoints_all = kapture_data.keypoints[keypoints_type]
        keypoints_to_add = {name for name in keypoints_all if name not in colmap_keypoints}
        keypoints_to_add = kapture.Keypoints(keypoints_all.type_name, keypoints_all.dtype, keypoints_all.dsize,
                                             keypoints_to_add)
        database_extra.add_keypoints_to_database(colmap_db, keypoints_to_add,
                                                 keypoints_type, kapture_path,
                                                 tar_handlers,
                                                 colmap_image_ids)

        # add new matches
        colmap_matches = kapture.Matches(database_extra.get_matches_set_from_database(colmap_db,
                                                                                      colmap_image_ids_reversed))
        colmap_matches.normalize()

        matches_all = kapture_data.matches[keypoints_type]
        matches_to_add = kapture.Matches({pair for pair in matches_all if pair not in colmap_matches})
        # print(list(matches_to_add))
        database_extra.add_matches_to_database(colmap_db, matches_to_add,
                                               keypoints_type, kapture_path,
                                               tar_handlers,
                                               colmap_image_ids,
                                               export_two_view_geometry=not use_colmap_matches_importer)
        colmap_db.close()

    if use_colmap_matches_importer:
        logger.info('Step 2: Run geometric verification')
        logger.debug('running colmap matches_importer...')

        if keypoints_type is None:
            keypoints_type = try_get_only_key_from_collection(kapture_data.matches)
        assert keypoints_type is not None
        assert keypoints_type in kapture_data.matches

        # compute two view geometry
        colmap_lib.run_matches_importer_from_kapture_matches(
            colmap_binary,
            colmap_use_cpu=True,
            colmap_gpu_index=None,
            colmap_db_path=colmap_db_path,
            kapture_matches=kapture_data.matches[keypoints_type],
            force=force)
    else:
        logger.info('Step 2: Run geometric verification - skipped')
    if 'image_registrator' not in skip_list:
        logger.info("Step 3: Run image_registrator")
        # run image_registrator
        colmap_lib.run_image_registrator(
            colmap_binary,
            colmap_db_path,
            input_reconstruction_path,
            reconstruction_path,
            image_registrator_options
        )

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 4: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
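
A usage sketch, assuming a map previously built in /data/colmap-sfm and a query kapture that already contains keypoints and matches; all paths are placeholders:

query_data = kapture.io.csv.kapture_from_dir('/data/query_kapture')
colmap_localize_from_loaded_data(
    kapture_data=query_data,
    kapture_path='/data/query_kapture',
    tar_handlers=None,                            # features stored as plain files, no tar archives
    colmap_path='/data/colmap-localization',
    input_database_path='/data/colmap-sfm/colmap.db',
    input_reconstruction_path='/data/colmap-sfm/reconstruction',
    colmap_binary='colmap',
    keypoints_type=None,                          # auto-detected when only one keypoints type is present
    use_colmap_matches_importer=False,
    image_registrator_options=['--Mapper.ba_refine_focal_length', '0'],
    skip_list=[],
    force=True)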
Code example #5
def colmap_build_sift_map(kapture_path: str,
                          colmap_path: str,
                          colmap_binary: str,
                          colmap_use_cpu: bool,
                          colmap_gpu_index: str,
                          vocab_tree_path: str,
                          point_triangulator_options: List[str],
                          skip_list: List[str],
                          force: bool) -> None:
    """
    Build a colmap model using default SIFT features with the kapture data.

    :param kapture_path: path to the kapture to use
    :param colmap_path: path to the colmap build
    :param colmap_binary: path to the colmap executable
    :param colmap_use_cpu: if true, use the cpu only (and ignore the gpu)
    :param colmap_gpu_index: gpu index for sift extractor and mapper
    :param vocab_tree_path: path to the colmap vocabulary tree file
    :param point_triangulator_options: options for the point triangulator
    :param skip_list: list of steps to skip
    :param force: silently overwrite output files if they already exist.
    """
    os.makedirs(colmap_path, exist_ok=True)

    # Load input files first to make sure they are OK
    logger.info('loading kapture files...')
    kapture_data = kapture.io.csv.kapture_from_dir(kapture_path)

    if not (kapture_data.records_camera and kapture_data.sensors):
        raise ValueError('records_camera, sensors are mandatory')
    if not kapture_data.trajectories:
        logger.info('there are no trajectories, running mapper instead of point_triangulator')

    if not os.path.isfile(vocab_tree_path):
        raise ValueError(f'Vocabulary Tree file does not exist: {vocab_tree_path}')

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    image_list_path = path.join(colmap_path, 'images.list')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_file(image_list_path, force)
        safe_remove_any_path(reconstruction_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    if 'feature_extract' not in skip_list:
        logger.info("Step 1: Feature extraction using colmap")
        with open(image_list_path, 'w') as fid:
            for timestamp, sensor_id in sorted(kapture_data.records_camera.key_pairs()):
                fid.write(kapture_data.records_camera[timestamp][sensor_id] + "\n")

        colmap_lib.run_feature_extractor(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            get_image_fullpath(kapture_path),
            image_list_path
        )

    # Update cameras in COLMAP:
    # - use only one camera for all images taken with the same camera (update all camera IDs)
    # - import camera intrinsics
    # - import camera pose
    if 'update_db_cameras' not in skip_list:
        logger.info("Step 2: Populate COLMAP DB with cameras and poses")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.update_DB_cameras_and_poses(colmap_db, kapture_data)
        # close db before running colmap processes in order to avoid locks
        colmap_db.close()

    # Extract matches with COLMAP
    if 'matches' not in skip_list:
        logger.info("Step 3: Compute matches with colmap")

        colmap_lib.run_vocab_tree_matcher(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            vocab_tree_path)

    if kapture_data.trajectories is not None:
        # Generate priors for reconstruction
        txt_path = path.join(colmap_path, "priors_for_reconstruction")
        os.makedirs(txt_path, exist_ok=True)
        if 'priors_for_reconstruction' not in skip_list:
            logger.info('Step 4: Exporting priors for reconstruction.')
            colmap_db = COLMAPDatabase.connect(colmap_db_path)
            database_extra.generate_priors_for_reconstruction(kapture_data, colmap_db, txt_path)
            colmap_db.close()

        # Point triangulator
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 5: Triangulation")
            colmap_lib.run_point_triangulator(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                txt_path,
                reconstruction_path,
                point_triangulator_options
            )
    else:
        # mapper
        reconstruction_path = path.join(colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        if 'triangulation' not in skip_list:
            logger.info("Step 5: Triangulation")
            colmap_lib.run_mapper(
                colmap_binary,
                colmap_db_path,
                get_image_fullpath(kapture_path),
                None,
                reconstruction_path,
                point_triangulator_options
            )
            # use reconstruction 0 as main
            first_reconstruction = os.path.join(reconstruction_path, '0')
            files = os.listdir(first_reconstruction)
            for f in files:
                shutil.move(os.path.join(first_reconstruction, f), os.path.join(reconstruction_path, f))
            shutil.rmtree(first_reconstruction)

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 6: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
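
A usage sketch; the vocabulary tree file name refers to one of the trees distributed on the COLMAP website, and the other paths are placeholders:

colmap_build_sift_map(
    kapture_path='/data/mapping_kapture',
    colmap_path='/data/sift_vocab_tree/colmap-sfm',
    colmap_binary='colmap',
    colmap_use_cpu=False,                         # use the GPU when available
    colmap_gpu_index='0',
    vocab_tree_path='/data/vocab_tree_flickr100K_words256K.bin',
    point_triangulator_options=[],
    skip_list=[],
    force=True)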
Code example #6
def colmap_localize_sift(kapture_path: str,
                         colmap_path: str,
                         input_database_path: str,
                         input_reconstruction_path: str,
                         colmap_binary: str,
                         colmap_use_cpu: bool,
                         colmap_gpu_index: str,
                         vocab_tree_path: str,
                         image_registrator_options: List[str],
                         skip_list: List[str],
                         force: bool) -> None:
    """
    Localize images on a colmap model using default SIFT features with the kapture data.

    :param kapture_path: path to the kapture to use
    :param colmap_path: path to the colmap build
    :param input_database_path: path to the map colmap.db
    :param input_reconstruction_path: path to the map reconstruction folder
    :param colmap_binary: path to the colmap executable
    :param colmap_use_cpu: if true, use the cpu only (and ignore the gpu)
    :param colmap_gpu_index: gpu index for sift extractor and mapper
    :param vocab_tree_path: path to the colmap vocabulary tree file
    :param image_registrator_options: options for the image registrator
    :param skip_list: list of steps to skip
    :param force: silently overwrite output files if they already exist.
    """
    os.makedirs(colmap_path, exist_ok=True)
    # Set fixed name for COLMAP database

    # Load input files first to make sure they are OK
    logger.info('loading kapture files...')
    kapture_data = kapture.io.csv.kapture_from_dir(kapture_path)

    if not (kapture_data.records_camera and kapture_data.sensors):
        raise ValueError('records_camera, sensors are mandatory')

    if kapture_data.trajectories:
        logger.warning("Input data contains trajectories: they will be ignored")
        kapture_data.trajectories.clear()
    else:
        kapture_data.trajectories = kapture.Trajectories()

    if not os.path.isfile(vocab_tree_path):
        raise ValueError(f'Vocabulary Tree file does not exist: {vocab_tree_path}')

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Prepare output
    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    image_list_path = path.join(colmap_path, 'images.list')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_file(image_list_path, force)
        safe_remove_any_path(reconstruction_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    # Copy colmap db to output
    if not os.path.exists(colmap_db_path):
        shutil.copy(input_database_path, colmap_db_path)

    # find correspondences between the colmap db and the kapture data
    images_all = {image_path: (ts, cam_id)
                  for ts, shot in kapture_data.records_camera.items()
                  for cam_id, image_path in shot.items()}

    colmap_db = COLMAPDatabase.connect(colmap_db_path)
    colmap_image_ids = database_extra.get_colmap_image_ids_from_db(colmap_db)
    colmap_cameras = database_extra.get_camera_ids_from_database(colmap_db)
    colmap_images = database_extra.get_images_from_database(colmap_db)
    colmap_db.close()

    # dict ( kapture_camera -> colmap_camera_id )
    colmap_camera_ids = {images_all[image_path][1]: colmap_cam_id
                         for image_path, colmap_cam_id in colmap_images if image_path in images_all}

    images_to_add = {image_path: value
                     for image_path, value in images_all.items()
                     if image_path not in colmap_image_ids}

    flatten_images_to_add = [(ts, kapture_cam_id, image_path)
                             for image_path, (ts, kapture_cam_id) in images_to_add.items()]

    if 'feature_extract' not in skip_list:
        logger.info("Step 1: Feature extraction using colmap")
        with open(image_list_path, 'w') as fid:
            for image in images_to_add.keys():
                fid.write(image + "\n")

        colmap_lib.run_feature_extractor(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            get_image_fullpath(kapture_path),
            image_list_path
        )

    if 'matches' not in skip_list:
        logger.info("Step 2: Compute matches with colmap")
        colmap_lib.run_vocab_tree_matcher(
            colmap_binary,
            colmap_use_cpu,
            colmap_gpu_index,
            colmap_db_path,
            vocab_tree_path,
            image_list_path
        )

    if 'fix_db_cameras' not in skip_list:
        logger.info("Step 3: Replace colmap generated cameras with kapture cameras")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.foreign_keys_off(colmap_db)

        # remove colmap generated cameras
        after_feature_extraction_colmap_cameras = database_extra.get_camera_ids_from_database(colmap_db)
        colmap_cameras_to_remove = [cam_id
                                    for cam_id in after_feature_extraction_colmap_cameras
                                    if cam_id not in colmap_cameras]
        for cam_id in colmap_cameras_to_remove:
            database_extra.remove_camera(colmap_db, cam_id)

        # put the correct cameras and image extrinsic back into the database
        cameras_to_add = kapture.Sensors()
        for image_path, (ts, kapture_cam_id) in images_to_add.items():
            if kapture_cam_id not in colmap_camera_ids:
                kapture_cam = kapture_data.sensors[kapture_cam_id]
                cameras_to_add[kapture_cam_id] = kapture_cam
        colmap_added_camera_ids = database_extra.add_cameras_to_database(cameras_to_add, colmap_db)
        colmap_camera_ids.update(colmap_added_camera_ids)

        database_extra.update_images_in_database_from_flatten(
            colmap_db,
            flatten_images_to_add,
            kapture_data.trajectories,
            colmap_camera_ids
        )

        database_extra.foreign_keys_on(colmap_db)
        colmap_db.commit()
        colmap_db.close()

    if 'image_registrator' not in skip_list:
        logger.info("Step 4: Run image_registrator")
        # run image_registrator
        colmap_lib.run_image_registrator(
            colmap_binary,
            colmap_db_path,
            input_reconstruction_path,
            reconstruction_path,
            image_registrator_options
        )

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 5: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
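
A usage sketch reusing the database and reconstruction produced by colmap_build_sift_map above; paths are placeholders:

colmap_localize_sift(
    kapture_path='/data/query_kapture',
    colmap_path='/data/sift_vocab_tree/colmap-localization',
    input_database_path='/data/sift_vocab_tree/colmap-sfm/colmap.db',
    input_reconstruction_path='/data/sift_vocab_tree/colmap-sfm/reconstruction',
    colmap_binary='colmap',
    colmap_use_cpu=False,
    colmap_gpu_index='0',
    vocab_tree_path='/data/vocab_tree_flickr100K_words256K.bin',
    image_registrator_options=['--Mapper.ba_refine_focal_length', '0'],
    skip_list=[],
    force=True)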
Code example #7
def create_kapture_proxy(output_path: str, source_path: str,
                         keypoints_paths: Optional[List[Optional[str]]],
                         descriptors_paths: Optional[List[str]],
                         global_features_paths: Optional[List[str]],
                         matches_paths: Optional[List[Optional[str]]],
                         keypoints_types: Optional[List[Optional[str]]],
                         descriptors_types: Optional[List[Optional[str]]],
                         global_features_types: Optional[List[Optional[str]]],
                         force: bool):
    """
    Creates a kapture proxy directory based on another one,
    optionally gathering multiple sources of reconstruction data.
    It heavily uses symlinks to minimize the amount of copied data.
    A source kapture directory is mandatory for the sensors part of kapture.
    All other reconstruction data, if given, are added.

    :param output_path: root path where to save the proxy. It will be cleaned if it already exists.
    :param source_path: root path of the input kapture directory containing sensors.
    :param keypoints_paths: paths to keypoints root directories. Each is remapped to
                            output_path/reconstruction/keypoints/{keypoints_type}
    :param descriptors_paths: paths to descriptors root directories. Each is remapped to
                              output_path/reconstruction/descriptors/{descriptors_type}
    :param global_features_paths: paths to global features root directories.
                                  Each is remapped to output_path/reconstruction/global_features/{global_features_type}
    :param matches_paths: paths to matches root directories. Each is remapped to
                          output_path/reconstruction/matches/{keypoints_type}
    :param keypoints_types: types of the keypoints, used as subfolder names; guessed from the paths when not given
    :param descriptors_types: types of the descriptors, used as subfolder names; guessed from the paths when not given
    :param global_features_types: types of the global features, used as subfolder names;
                                  guessed from the paths when not given
    :param force: force to clean the output (if needed) without user prompt.
    """
    if path.exists(output_path):
        safe_remove_any_path(output_path, force)
    assert not path.exists(output_path)
    os.makedirs(output_path)

    sensors_in_path = path.join(source_path, 'sensors')
    assert path.exists(sensors_in_path)

    sensors_out_path = path.join(output_path, 'sensors')
    absolute_symlink(sensors_in_path, sensors_out_path)

    reconstruction_out_path = path.join(output_path, 'reconstruction')
    os.makedirs(reconstruction_out_path)

    if keypoints_paths is not None and matches_paths is not None:
        assert len(keypoints_paths) == len(matches_paths)
    if keypoints_paths is not None and keypoints_types is not None:
        assert len(keypoints_paths) == len(keypoints_types)
    if matches_paths is not None and keypoints_types is not None:
        assert len(matches_paths) == len(keypoints_types)
    if descriptors_paths is not None and descriptors_types is not None:
        assert len(descriptors_paths) == len(descriptors_types)
    if global_features_paths is not None and global_features_types is not None:
        assert len(global_features_paths) == len(global_features_types)

    if keypoints_paths is not None:
        if keypoints_types is None:
            keypoints_types = [None for _ in range(len(keypoints_paths))]
        for i, keypoints_path in enumerate(keypoints_paths):
            if not keypoints_path:
                continue
            assert path.exists(keypoints_path)
            if keypoints_types[i] is None:
                keypoints_types[i] = guess_feature_name_from_path(
                    keypoints_path)
            os.makedirs(os.path.join(reconstruction_out_path, 'keypoints'),
                        exist_ok=True)
            absolute_symlink(
                keypoints_path,
                os.path.join(reconstruction_out_path, 'keypoints',
                             keypoints_types[i]))

    if descriptors_paths is not None:
        if descriptors_types is None:
            descriptors_types = [None for _ in range(len(descriptors_paths))]
        for i, descriptors_path in enumerate(descriptors_paths):
            assert path.exists(descriptors_path)
            if descriptors_types[i] is None:
                descriptors_types[i] = guess_feature_name_from_path(
                    descriptors_path)
            os.makedirs(os.path.join(reconstruction_out_path, 'descriptors'),
                        exist_ok=True)
            absolute_symlink(
                descriptors_path,
                os.path.join(reconstruction_out_path, 'descriptors',
                             descriptors_types[i]))

    if global_features_paths is not None:
        if global_features_types is None:
            global_features_types = [
                None for _ in range(len(global_features_paths))
            ]
        for i, global_features_path in enumerate(global_features_paths):
            assert path.exists(global_features_path)
            if global_features_types[i] is None:
                global_features_types[i] = guess_feature_name_from_path(
                    global_features_path)
            os.makedirs(os.path.join(reconstruction_out_path,
                                     'global_features'),
                        exist_ok=True)
            absolute_symlink(
                global_features_path,
                os.path.join(reconstruction_out_path, 'global_features',
                             global_features_types[i]))

    if matches_paths is not None:
        if keypoints_types is None:
            keypoints_types = [None for _ in range(len(matches_paths))]
        for i, matches_path in enumerate(matches_paths):
            if not matches_path:
                continue
            assert path.exists(matches_path)
            if keypoints_types[i] is None:
                keypoints_types[i] = guess_feature_name_from_path(matches_path)
            os.makedirs(os.path.join(reconstruction_out_path, 'matches'),
                        exist_ok=True)
            absolute_symlink(
                matches_path,
                os.path.join(reconstruction_out_path, 'matches',
                             keypoints_types[i]))
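
A usage sketch of the multi-feature variant above, merging two hypothetical local feature types into one proxy; leaving the *_types arguments as None lets guess_feature_name_from_path infer the subfolder names:

create_kapture_proxy(
    output_path='/data/proxy_kapture',
    source_path='/data/mapping_kapture',
    keypoints_paths=['/data/r2d2/keypoints', '/data/d2net/keypoints'],
    descriptors_paths=['/data/r2d2/descriptors', '/data/d2net/descriptors'],
    global_features_paths=None,
    matches_paths=['/data/r2d2/NN_no_gv/matches', '/data/d2net/NN_no_gv/matches'],
    keypoints_types=None,                         # guessed from the keypoints/matches paths
    descriptors_types=None,                       # guessed from the descriptors paths
    global_features_types=None,
    force=True)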
Code example #8
#!/usr/bin/env python3
# Copyright 2020-present NAVER Corp. Under BSD 3-clause license
import os.path as path
import path_to_kapture_localization  # noqa: F401
import kapture_localization.utils.path_to_kapture  # noqa: F401
from kapture.utils.paths import safe_remove_any_path

HERE_PATH = path.normpath(path.dirname(__file__))
colmap_sfm_folder = path.join(HERE_PATH, 'colmap-sfm')
colmap_localization_folder = path.join(HERE_PATH, 'colmap-localization')
sift_colmap_vocab_tree_folder = path.join(HERE_PATH, 'sift_colmap_vocab_tree')
ir_bench_folder = path.join(HERE_PATH, 'image_retrieval_benchmark')

if path.isdir(colmap_sfm_folder):
    safe_remove_any_path(colmap_sfm_folder, force=False)
if path.isdir(colmap_localization_folder):
    safe_remove_any_path(colmap_localization_folder, force=False)
if path.isdir(sift_colmap_vocab_tree_folder):
    safe_remove_any_path(sift_colmap_vocab_tree_folder, force=False)
if path.isdir(ir_bench_folder):
    safe_remove_any_path(ir_bench_folder, force=False)

matches_no_gv_folder = path.join(HERE_PATH, 'local_features/r2d2_500/NN_no_gv')
if path.isdir(matches_no_gv_folder):
    safe_remove_any_path(matches_no_gv_folder, force=False)

matches_colmap_gv_folder = path.join(HERE_PATH,
                                     'local_features/r2d2_500/NN_colmap_gv')
if path.isdir(matches_colmap_gv_folder):
    safe_remove_any_path(matches_colmap_gv_folder, force=False)
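
The same cleanup can be written as a loop over the folder list; an equivalent sketch using the variables defined above:

folders_to_clean = [colmap_sfm_folder, colmap_localization_folder,
                    sift_colmap_vocab_tree_folder, ir_bench_folder,
                    matches_no_gv_folder, matches_colmap_gv_folder]
for folder in folders_to_clean:
    if path.isdir(folder):
        safe_remove_any_path(folder, force=False)   # asks for confirmation since force=False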
Code example #9
    def reconstruct(self, kapture_data):
        os.makedirs(self._colmap_path, exist_ok=True)

        if not (kapture_data.records_camera and kapture_data.sensors
                and kapture_data.keypoints and kapture_data.matches
                and kapture_data.trajectories):
            raise ValueError(
                'records_camera, sensors, keypoints, matches, trajectories are mandatory'
            )

        # Set fixed name for COLMAP database
        colmap_db_path = path.join(self._colmap_path, 'colmap.db')
        reconstruction_path = path.join(self._colmap_path, "reconstruction")
        priors_txt_path = path.join(self._colmap_path,
                                    "priors_for_reconstruction")

        safe_remove_file(colmap_db_path, True)
        safe_remove_any_path(reconstruction_path, True)
        safe_remove_any_path(priors_txt_path, True)
        os.makedirs(reconstruction_path, exist_ok=True)

        # COLMAP does not fully support rigs.
        print("Step 1. Remove rigs")
        if kapture_data.rigs is not None and kapture_data.trajectories is not None:
            # make sure, rigs are not used in trajectories.
            rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
            kapture_data.rigs.clear()

        print("Step 2. Kapture to colmap")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.kapture_to_colmap(kapture_data,
                                         kapture_data.kapture_path,
                                         colmap_db,
                                         export_two_view_geometry=True)
        colmap_db.close()

        os.makedirs(priors_txt_path, exist_ok=True)

        print("Step 3. Generate priors for reconstruction")
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.generate_priors_for_reconstruction(
            kapture_data, colmap_db, priors_txt_path)
        colmap_db.close()

        # Point triangulator
        print("Step 4. Point triangulator")
        reconstruction_path = path.join(self._colmap_path, "reconstruction")
        os.makedirs(reconstruction_path, exist_ok=True)
        run_point_triangulator(self._colmap_binary, colmap_db_path,
                               kapture_data.image_path, priors_txt_path,
                               reconstruction_path,
                               self._point_triangulator_options)
        print("Step 5. Model converter")
        run_model_converter(self._colmap_binary, reconstruction_path,
                            reconstruction_path)
        print("Step 5. Reconstruction import")
        points3d, observations = import_from_colmap_points3d_txt(
            os.path.join(reconstruction_path, "points3D.txt"),
            kapture_data.image_names)
        kapture_data.observations = observations
        kapture_data.points3d = points3d
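
reconstruct() above is a method of a pipeline class that is not shown here; a hypothetical minimal host class (names assumed, not from the original code) illustrating the attributes the method relies on:

class LocalReconstructionPipeline:
    """Hypothetical host class: only the attributes used by reconstruct() are shown."""

    def __init__(self, colmap_path: str, colmap_binary: str, point_triangulator_options: list):
        self._colmap_path = colmap_path                                # holds colmap.db and reconstruction/
        self._colmap_binary = colmap_binary                            # path to the colmap executable
        self._point_triangulator_options = point_triangulator_options  # extra point_triangulator flags

    # reconstruct(self, kapture_data) as defined above goes here; kapture_data is expected to
    # expose kapture_path, image_path and image_names in addition to the usual kapture fields.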
Code example #10
def local_sfm(map_plus_query_path: str, map_plus_query_gv_path: str,
              query_path: str, pairsfile_path: str, output_path_root: str,
              colmap_binary: str, force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param map_plus_query_path: path to the kapture data consisting of mapping and query data (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data after geometric verification (sensors and reconstruction)
    :param query_path: path to the query kapture data (sensors)
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """

    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    kdata_query = kapture_from_dir(query_path)
    if kdata_query.trajectories:
        logger.warning(
            "Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera and len(
                kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    # load kapture maps
    kdata_map = kapture_from_dir(map_plus_query_path)
    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    kdata_map_gv = kapture_from_dir(map_plus_query_gv_path)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, score in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root,
                                              'tmp_pairs_map.txt')
    query_img_kapture_pairsfile_path = os.path.join(output_path_root,
                                                    'tmp_pairs_query.txt')

    # loop over query images
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            logger.info(f'{img_query} already processed, skipping...')
            continue
        else:
            logger.info(f'processing {img_query}')

        # write pairsfile for sub-kapture
        map_pairs = write_pairfile_from_img_list(img_list_map,
                                                 sub_kapture_pairsfile_path)

        # write pairsfile for query_img_kapture
        query_pairs = write_pairfile_img_vs_img_list(
            img_query, img_list_map, query_img_kapture_pairsfile_path)

        # create sub-kapture
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path,
                                              img_list_map, map_pairs)
        kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv,
                                                 map_plus_query_gv_path,
                                                 img_list_map, map_pairs)

        # match missing pairs for mapping
        compute_matches_from_loaded_data(map_plus_query_path, kdata_sub,
                                         map_pairs)

        # kdata_sub needs to be re-created to add the new matches
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path,
                                              img_list_map, map_pairs)

        # run colmap gv on missing pairs
        if len(kdata_sub.matches) != len(kdata_sub_gv.matches):
            run_colmap_gv_from_loaded_data(kdata_sub, kdata_sub_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           colmap_binary, [], True)
            # kdata_sub_gv needs to be re-created to add the new matches
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv,
                                                     map_plus_query_gv_path,
                                                     img_list_map, map_pairs)

        # sanity check
        if len(map_pairs) != len(kdata_sub_gv.matches):
            logger.info(f'not all mapping matches available')

        # build COLMAP map
        try:
            colmap_build_map_from_loaded_data(kdata_sub_gv,
                                              map_plus_query_gv_path,
                                              kdata_sub_colmap_path,
                                              colmap_binary, False, [],
                                              ['model_converter'], True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(
                os.path.join(kdata_sub_colmap_path,
                             'reconstruction/images.bin')):
            logger.info(
                f'colmap mapping for {img_query} did not work, image was not localized'
            )
            continue

        # create single image kapture (kdata_sub needs to be recreated because descriptors are deleted in build_colmap_model)
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path,
                                              img_list_map, map_pairs)
        kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv,
                                                 map_plus_query_gv_path,
                                                 img_list_map, map_pairs)
        query_img_kapture = add_image_to_kapture(kdata_map,
                                                 map_plus_query_path,
                                                 kdata_sub, img_query,
                                                 query_pairs)
        query_img_kapture_gv = add_image_to_kapture(kdata_map_gv,
                                                    map_plus_query_gv_path,
                                                    kdata_sub_gv, img_query,
                                                    query_pairs)

        # match missing pairs for localization
        compute_matches_from_loaded_data(map_plus_query_path,
                                         query_img_kapture, query_pairs)

        # query_img_kapture needs to be re-created to add the new matches
        query_img_kapture = add_image_to_kapture(kdata_map,
                                                 map_plus_query_path,
                                                 kdata_sub, img_query,
                                                 query_pairs)

        # run colmap gv on missing pairs
        if len(query_img_kapture.matches) != len(query_img_kapture_gv.matches):
            run_colmap_gv_from_loaded_data(query_img_kapture,
                                           query_img_kapture_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           colmap_binary, [], True)
            # query_img_kapture_gv needs to be re-created to add the new matches
            query_img_kapture_gv = add_image_to_kapture(
                kdata_map_gv, map_plus_query_gv_path, kdata_sub_gv, img_query,
                query_pairs)

        # sanity check
        if len(query_pairs) != len(query_img_kapture_gv.matches):
            logger.info(f'not all query matches available')

        # localize in COLMAP map
        try:
            colmap_localize_from_loaded_data(
                query_img_kapture_gv, map_plus_query_gv_path,
                os.path.join(kdata_sub_colmap_path, 'registered'),
                os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                colmap_binary, False, [
                    '--Mapper.ba_refine_focal_length', '0',
                    '--Mapper.ba_refine_principal_point', '0',
                    '--Mapper.ba_refine_extra_params', '0',
                    '--Mapper.min_num_matches', '4',
                    '--Mapper.init_min_num_inliers', '4',
                    '--Mapper.abs_pose_min_num_inliers', '4',
                    '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                    '--Mapper.ba_local_max_num_iterations', '50',
                    '--Mapper.abs_pose_max_error', '20',
                    '--Mapper.filter_max_reproj_error', '12'
                ], [], True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(
                os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                             'reconstruction/images.txt')):
            logger.info(
                f'colmap localization of {img_query} did not work, image was not localized'
            )
            continue

        # add to results kapture
        kdata_reg_query = import_colmap(
            kdata_reg_query_path,
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                         'colmap.db'),
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                         'reconstruction'), None, None, True, True, True,
            TransferAction.skip)

        if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
            logger.info('successfully localized')

        # write results (after each image to see the progress)
        kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporal files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)
    safe_remove_file(query_img_kapture_pairsfile_path, True)

    logger.info('all done')
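
A usage sketch for local_sfm; the paths are placeholders and assume a pairs file with the top-k retrieved mapping images per query:

local_sfm(
    map_plus_query_path='/data/map_plus_query_kapture',
    map_plus_query_gv_path='/data/map_plus_query_kapture_gv',
    query_path='/data/query_kapture',
    pairsfile_path='/data/pairs_topk20.txt',
    output_path_root='/data/local_sfm_output',
    colmap_binary='colmap',
    force=False)                                  # keep poses already localized in a previous run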
Code example #11
def local_sfm_from_loaded_data(kdata_map: kapture.Kapture,
                               kdata_map_gv: kapture.Kapture,
                               kdata_query: kapture.Kapture,
                               map_plus_query_path: str,
                               map_plus_query_gv_path: str,
                               tar_handlers_map: Optional[TarCollection],
                               tar_handlers_map_gv: Optional[TarCollection],
                               descriptors_type: Optional[str],
                               pairsfile_path: str,
                               output_path_root: str,
                               colmap_binary: str,
                               force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param kdata_map: kapture data of the mapping plus query images (sensors and reconstruction)
    :param kdata_map_gv: kapture data of the mapping plus query images after geometric verification
    :param kdata_query: kapture data of the query images (sensors)
    :param map_plus_query_path: path to the kapture data consisting of mapping and query data (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data after geometric verification (sensors and reconstruction)
    :param tar_handlers_map: collection of preloaded tar archives for the mapping data
    :param tar_handlers_map_gv: collection of preloaded tar archives for the geometrically verified mapping data
    :param descriptors_type: type of descriptors, name of the descriptors subfolder
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """

    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    if kdata_query.trajectories:
        logger.warning("Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # clear query trajectories in map_plus_query
    kdata_map_cleared_trajectories = kapture.Trajectories()
    query_image_list = set(kdata_query.records_camera.data_list())
    for timestamp, subdict in kdata_map.records_camera.items():
        for sensor_id, image_name in subdict.items():
            if image_name in query_image_list:
                continue
            if (timestamp, sensor_id) in kdata_map.trajectories:
                pose = kdata_map.trajectories.get(timestamp)[sensor_id]
                kdata_map_cleared_trajectories.setdefault(timestamp, {})[sensor_id] = pose
    kdata_map.trajectories = kdata_map_cleared_trajectories

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera and len(
                kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, _ in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root, 'tmp_pairs.txt')

    if descriptors_type is None:
        descriptors_type = try_get_only_key_from_collection(kdata_map.descriptors)
    assert descriptors_type is not None
    assert descriptors_type in kdata_map.descriptors
    keypoints_type = kdata_map.descriptors[descriptors_type].keypoints_type

    # init matches for kdata_map and kdata_map_gv
    if kdata_map.matches is None:
        kdata_map.matches = {}
    if keypoints_type not in kdata_map.matches:
        kdata_map.matches[keypoints_type] = kapture.Matches()
    if kdata_map_gv.matches is None:
        kdata_map_gv.matches = {}
    if keypoints_type not in kdata_map_gv.matches:
        kdata_map_gv.matches[keypoints_type] = kapture.Matches()

    # run all matching
    # loop over query images
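    # queries that already have a pose (e.g. results reloaded from a previous run above) are skipped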
    img_skip_list = set()
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            logger.info(f'{img_query} already processed, skipping...')
            img_skip_list.add(img_query)
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'matching for {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)

            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]
            # match missing pairs
            # kdata_map.matches is being updated by compute_matches_from_loaded_data
            compute_matches_from_loaded_data(map_plus_query_path,
                                             tar_handlers_map,
                                             kdata_map,
                                             descriptors_type,
                                             pairs_all)

    # if kdata_map has matches stored in a tar, the tar handler needs to be switched to read mode
    matches_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map)
    if matches_handler is not None:
        matches_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_path)
        tar_handlers_map.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # run all gv
    # loop over query images
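    # queries already localized are skipped, as are those whose pairs were all verified before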
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            # recompute the pairs
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'geometric verification of {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)

            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]

            if all(pair in kdata_map_gv.matches[keypoints_type] for pair in pairs_all):
                continue

            # create a sub kapture in order to minimize the amount of data exported to colmap
            # kdata_sub needs to be re-created to add the new matches
            kdata_sub = sub_kapture_from_img_list(kdata_map, img_list_map + [img_query], pairs_all,
                                                  keypoints_type, descriptors_type)

            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map + [img_query], pairs_all,
                                                     keypoints_type, descriptors_type)
            # run colmap gv on missing pairs
            run_colmap_gv_from_loaded_data(kdata_sub,
                                           kdata_sub_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           tar_handlers_map,
                                           tar_handlers_map_gv,
                                           colmap_binary,
                                           keypoints_type,
                                           [],
                                           True)
            # update kdata_map_gv.matches
            kdata_map_gv.matches[keypoints_type].update(kdata_sub_gv.matches[keypoints_type])

    # if kdata_map_gv has matches stored in a tar, the tar handler needs to be switched to read mode
    matches_gv_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map_gv)
    if matches_gv_handler is not None:
        logger.debug(matches_gv_handler)
        matches_gv_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_gv_path)
        tar_handlers_map_gv.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # loop over query images
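    # third pass: for each remaining query, build a small COLMAP model from its retrieved mapping
    # images only, then register the query image in that model and keep the resulting pose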
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'mapping and localization for {img_query}')
                table_to_file(fid, map_pairs)
            map_pairs = [(i, j) for i, j, _ in map_pairs]
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map, map_pairs,
                                                     keypoints_type, descriptors_type)
            # sanity check
            if len(map_pairs) != len(kdata_sub_gv.matches[keypoints_type]):
                logger.info('not all mapping matches available')

            # build COLMAP map
            try:
                colmap_build_map_from_loaded_data(
                    kdata_sub_gv,
                    map_plus_query_gv_path,
                    tar_handlers_map_gv,
                    kdata_sub_colmap_path,
                    colmap_binary,
                    keypoints_type,
                    False,
                    [],
                    ['model_converter'],
                    True)
            except ValueError:
                logger.info(f'{img_query} was not localized')
                continue

        if not os.path.exists(os.path.join(kdata_sub_colmap_path, 'reconstruction/images.bin')):
            logger.info(f'colmap mapping for {img_query} did not work, image was not localized')
            continue

        query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
        with open(sub_kapture_pairsfile_path, 'w') as fid:
            table_to_file(fid, query_pairs)
        query_pairs = [(i, j) for i, j, _ in query_pairs]
        query_img_kapture_gv = add_image_to_kapture(kdata_map_gv,
                                                    kdata_sub_gv, img_query, query_pairs,
                                                    keypoints_type, descriptors_type)
        # sanity check
        if len(query_pairs) != len(query_img_kapture_gv.matches[keypoints_type]):
            logger.info('not all query matches available')

        # localize in COLMAP map
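        # the Mapper thresholds below are relaxed (few required matches/inliers, generous error
        # tolerances), presumably so that the single query image registers even with sparse verified matches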
        try:
            colmap_localize_from_loaded_data(
                query_img_kapture_gv,
                map_plus_query_gv_path,
                tar_handlers_map_gv,
                os.path.join(kdata_sub_colmap_path, 'registered'),
                os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                colmap_binary,
                keypoints_type,
                False,
                ['--Mapper.ba_refine_focal_length', '0',
                 '--Mapper.ba_refine_principal_point', '0',
                 '--Mapper.ba_refine_extra_params', '0',
                 '--Mapper.min_num_matches', '4',
                 '--Mapper.init_min_num_inliers', '4',
                 '--Mapper.abs_pose_min_num_inliers', '4',
                 '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                 '--Mapper.ba_local_max_num_iterations', '50',
                 '--Mapper.abs_pose_max_error', '20',
                 '--Mapper.filter_max_reproj_error', '12'],
                [],
                True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                                           'reconstruction/images.txt')):
            logger.info(f'colmap localization of {img_query} did not work, image was not localized')
            continue

        # add to results kapture
        kdata_reg_query = import_colmap(
            kdata_reg_query_path,
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'colmap.db'),
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                         'reconstruction'),
            None,
            None,
            True,
            True,
            True,
            TransferAction.skip)

        if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
            logger.info('successfully localized')

        # write results (after each image to see the progress)
        kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporary files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)

    logger.info('all done')