Code example #1
def compute_sequence_pairs(mapping_path: str, output_path: str,
                           window_size: int, loop: bool, expand_window: bool,
                           max_interval: int):
    """
    compute image pairs from sequence, and write the result in a text file
    """
    skip_heavy = [
        kapture.RecordsLidar, kapture.RecordsWifi, kapture.Keypoints,
        kapture.Descriptors, kapture.GlobalFeatures, kapture.Matches,
        kapture.Points3d, kapture.Observations
    ]

    logger.info(f'compute_sequence_pairs. loading mapping: {mapping_path}')
    kdata = kapture_from_dir(mapping_path, skip_list=skip_heavy)
    assert kdata.sensors is not None
    assert kdata.records_camera is not None

    os.umask(0o002)
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)

    with open(output_path, 'w') as fid:
        image_pairs = get_pairs_sequence(kdata, window_size, loop,
                                         expand_window, max_interval)
        table_to_file(fid,
                      image_pairs,
                      header='# query_image, map_image, score')
    logger.info('all done')
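get_pairs_sequence itself is not shown above. As a rough illustration only, here is a minimal, self-contained sketch of sequence-window pairing in plain Python (hypothetical helper name, no kapture dependency; the expand_window and max_interval options are ignored and the 1/offset score is an arbitrary choice): each image is paired with the next window_size images in acquisition order, wrapping around when loop is set.

# Rough sketch of sequence-window pairing (hypothetical helper, not the
# kapture implementation): pair each image with the next `window_size`
# images in order; wrap around when `loop` is True.
def sequence_pairs_sketch(image_names, window_size, loop=False):
    pairs = []
    n = len(image_names)
    for i, query in enumerate(image_names):
        for offset in range(1, window_size + 1):
            j = i + offset
            if j >= n:
                if not loop:
                    break
                j %= n
            if j == i:
                break
            # 1.0 / offset is an arbitrary decreasing score for nearby frames
            pairs.append((query, image_names[j], 1.0 / offset))
    return pairs


if __name__ == '__main__':
    print(sequence_pairs_sketch(['im0.jpg', 'im1.jpg', 'im2.jpg'], 2, loop=True))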
Code example #2
def compute_observations_pairs(mapping_path: str,
                               query_path: Optional[str],
                               output_path: str,
                               topk: int,
                               keypoints_type: Optional[str],
                               iou: bool,
                               max_number_of_threads: Optional[int] = None):
    """
    compute image pairs from observations, and write the result in a text file
    """
    skip_heavy_features = [
        kapture.Descriptors, kapture.GlobalFeatures, kapture.Matches
    ]
    skip_heavy = [kapture.RecordsLidar, kapture.RecordsWifi
                  ] + skip_heavy_features

    logger.info(f'compute_observations_pairs. loading mapping: {mapping_path}')
    # the content of the keypoints is not important, we do not need to keep a reference to the tar
    with get_all_tar_handlers(mapping_path,
                              skip_list=skip_heavy_features) as tar_handlers:
        kdata = kapture_from_dir(mapping_path,
                                 skip_list=skip_heavy,
                                 tar_handlers=tar_handlers)
    assert kdata.sensors is not None
    assert kdata.records_camera is not None
    if keypoints_type is None:
        keypoints_type = try_get_only_key_from_collection(kdata.keypoints)
    assert keypoints_type is not None
    assert kdata.observations is not None
    assert kdata.keypoints is not None
    assert keypoints_type in kdata.keypoints
    assert kdata.points3d is not None

    if query_path is None or mapping_path == query_path:
        logger.info('computing mapping pairs from observations...')
        kdata_query = None
    else:
        logger.info('computing query pairs from observations...')
        with get_all_tar_handlers(
                query_path,
                skip_list=skip_heavy_features) as query_tar_handlers:
            kdata_query = kapture_from_dir(query_path,
                                           skip_list=skip_heavy,
                                           tar_handlers=query_tar_handlers)
        assert kdata_query.sensors is not None
        assert kdata_query.records_camera is not None

    os.umask(0o002)
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)

    with open(output_path, 'w') as fid:
        image_pairs = get_pairs_observations(kdata, kdata_query,
                                             keypoints_type,
                                             max_number_of_threads, iou, topk)
        table_to_file(fid,
                      image_pairs,
                      header='# query_image, map_image, score')
    logger.info('all done')
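get_pairs_observations scores image pairs by shared map observations. The following is a minimal sketch of that idea on a toy, hypothetical data layout (a dict mapping each 3D point id to the set of images observing it); it counts co-observed points per pair and keeps the topk map images per query. The iou option of the real function would additionally normalize this count by the union of observed points, which is not done here.

# Sketch only: count, per (query, map) pair, how many 3D points both images
# observe, then keep the topk best-scoring map images for every query.
from collections import defaultdict


def observation_pairs_sketch(point_to_images, topk):
    counts = defaultdict(int)
    for images in point_to_images.values():
        images = sorted(images)
        for a in range(len(images)):
            for b in range(a + 1, len(images)):
                counts[(images[a], images[b])] += 1
                counts[(images[b], images[a])] += 1
    per_query = defaultdict(list)
    for (query, mapping), score in counts.items():
        per_query[query].append((mapping, score))
    pairs = []
    for query in sorted(per_query):
        ranked = sorted(per_query[query], key=lambda x: x[1], reverse=True)
        pairs.extend((query, mapping, score) for mapping, score in ranked[:topk])
    return pairs


if __name__ == '__main__':
    observations = {0: {'q.jpg', 'a.jpg'},
                    1: {'q.jpg', 'a.jpg', 'b.jpg'},
                    2: {'q.jpg', 'b.jpg'}}
    print(observation_pairs_sketch(observations, topk=1))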
Code example #3
def pairsfile_fusion(input_path: List[str], output_path: str,
                     topk: Optional[int], method: LateFusionMethod,
                     additional_parameters: dict):
    """
    fuse pairsfile scores and write a pairsfile with the fused scores
    """
    assert len(input_path) > 1

    logger.info(f'pairsfile_fusion. loading {input_path}')

    similarity_dicts: List[dict] = []
    for file_path in input_path:
        loaded_pairs = get_ordered_pairs_from_file(file_path)
        similarity_dicts.append(loaded_pairs)

    if method == LateFusionMethod.round_robin:
        image_pairs = round_robin_from_similarity_dicts(similarity_dicts, topk)
    else:
        pairs = {}
        for loaded_pairs in similarity_dicts:
            for query_name, pairlist in loaded_pairs.items():
                for map_name, score in pairlist:
                    pairtuple = (query_name, map_name)
                    if pairtuple not in pairs:
                        pairs[pairtuple] = []
                    pairs[pairtuple].append(score)

        # keep entries with correct count
        similarity_dict = {}
        for pairtuple, scores in pairs.items():
            (query_name, map_name) = pairtuple
            if len(scores) != len(similarity_dicts):
                logger.warning(
                    f'pair {pairtuple} did not have a line in all pairsfiles, skipped'
                )
                continue
            scores_as_matrices = [
                np.array([[score]], dtype=np.float64) for score in scores
            ]
            final_score = fuse_similarities(scores_as_matrices, method,
                                            additional_parameters)[0, 0]
            if query_name not in similarity_dict:
                similarity_dict[query_name] = []
            similarity_dict[query_name].append((map_name, final_score))
        image_pairs = get_image_pairs(similarity_dict, topk)

    logger.info('saving to file ...')
    os.umask(0o002)
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)
    with open(output_path, 'w') as fid:
        table_to_file(fid,
                      image_pairs,
                      header='# query_image, map_image, score')
    logger.info('all done')
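For illustration, a minimal sketch of the non-round-robin branch on already-loaded pairs (hypothetical in-memory layout: dict query_name -> list of (map_name, score)); plain mean fusion stands in for fuse_similarities, and pairs missing from any input are skipped, mirroring the filtering above.

# Sketch only: mean fusion over several loaded pairsfiles, keeping only the
# pairs that appear in every input, then topk per query.
def mean_fusion_sketch(similarity_dicts, topk=None):
    scores = {}
    for sim in similarity_dicts:
        for query, pairlist in sim.items():
            for map_name, score in pairlist:
                scores.setdefault((query, map_name), []).append(score)
    fused = {}
    for (query, map_name), values in scores.items():
        if len(values) != len(similarity_dicts):
            continue  # pair missing from at least one pairsfile
        fused.setdefault(query, []).append((map_name, sum(values) / len(values)))
    pairs = []
    for query in sorted(fused):
        ranked = sorted(fused[query], key=lambda x: x[1], reverse=True)
        pairs.extend((query, m, s) for m, s in ranked[:topk])
    return pairs


if __name__ == '__main__':
    a = {'q.jpg': [('m1.jpg', 0.9), ('m2.jpg', 0.4)]}
    b = {'q.jpg': [('m1.jpg', 0.5), ('m2.jpg', 0.8)]}
    print(mean_fusion_sketch([a, b], topk=2))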
Code example #4
def slice_pairsfile(pairsfile_path: str, output_path: str, topk: int,
                    threshold: float, startk: int, skip_if_na: bool):
    logger.info('slice_pairsfile...')
    similarity_dict = get_ordered_pairs_from_file(pairsfile_path)

    # apply topk override + skip_if_na
    image_pairs = []
    for name_query, paired_images in sorted(similarity_dict.items()):
        paired_images_threshold = [
            x for x in paired_images if x[1] >= threshold
        ]
        if (math.isfinite(topk)
                and startk + topk > len(paired_images_threshold)):
            logger.debug(
                f'image {name_query} has {len(paired_images_threshold)} pairs, '
                f'less than topk={topk} (with startk={startk})')
            if skip_if_na:
                logger.debug(f'skipping {name_query}')
                continue
        if math.isinf(topk):
            paired_images_threshold = paired_images_threshold[startk:]
        else:
            paired_images_threshold = paired_images_threshold[startk:startk + topk]
        for name_map, score in paired_images_threshold:
            image_pairs.append((name_query, name_map, score))

    if len(image_pairs) > 0:
        os.umask(0o002)
        p = pathlib.Path(output_path)
        os.makedirs(str(p.parent.resolve()), exist_ok=True)
        with open(output_path, 'w') as fid:
            table_to_file(fid,
                          image_pairs,
                          header='# query_image, map_image, score')
    else:
        logger.info('no pairs written')
    logger.info('all done')
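The slicing logic above, reduced to a minimal, self-contained sketch on a toy similarity dict (hypothetical helper name; same threshold, startk and topk semantics, without the logging):

# Sketch only: keep pairs above the score threshold, then take the
# [startk, startk + topk) window of the best-first list for each query.
import math


def slice_pairs_sketch(similarity_dict, topk, threshold, startk=0, skip_if_na=False):
    result = []
    for query in sorted(similarity_dict):
        kept = [p for p in similarity_dict[query] if p[1] >= threshold]
        if math.isfinite(topk) and startk + topk > len(kept) and skip_if_na:
            continue  # not enough pairs left for this query
        window = kept[startk:] if math.isinf(topk) else kept[startk:startk + topk]
        result.extend((query, map_name, score) for map_name, score in window)
    return result


if __name__ == '__main__':
    sim = {'q.jpg': [('m1.jpg', 0.9), ('m2.jpg', 0.6), ('m3.jpg', 0.2)]}
    print(slice_pairs_sketch(sim, topk=2, threshold=0.5))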
Code example #5
def _image_retrieval_late_fusion_from_loaded_data(
        input_path: str, map_tar_handlers: TarCollection,
        kdata_map: kapture.Kapture, query_path: str,
        query_tar_handlers: TarCollection, kdata_query: kapture.Kapture,
        global_features_types: List[str], output_path: str,
        topk: Optional[int], method: LateFusionMethod,
        additional_parameters: dict):
    image_list_map = [
        name for _, _, name in kapture.flatten(kdata_map.records_camera,
                                               is_sorted=True)
    ]
    image_list_query = [
        name for _, _, name in kapture.flatten(kdata_query.records_camera,
                                               is_sorted=True)
    ]

    if len(global_features_types) == 0:
        global_features_types = list(
            set(kdata_map.global_features.keys()).intersection(
                kdata_query.global_features.keys()))

    similarity_matrices = []
    stacked_query_index = None
    stacked_map_index = None

    for global_features_type in global_features_types:
        if global_features_type not in kdata_map.global_features:
            logger.warning(
                f'could not use {global_features_type}, it was missing in kdata_map'
            )
            continue
        if global_features_type not in kdata_query.global_features:
            logger.warning(
                f'could not use {global_features_type}, it was missing in kdata_query'
            )
            continue
        mapping_gfeats = kdata_map.global_features[global_features_type]
        query_gfeats = kdata_query.global_features[global_features_type]
        assert mapping_gfeats.dtype == query_gfeats.dtype
        assert mapping_gfeats.dsize == query_gfeats.dsize
        assert mapping_gfeats.metric_type == query_gfeats.metric_type

        global_features_config = GlobalFeaturesConfig(
            mapping_gfeats.type_name, mapping_gfeats.dtype,
            mapping_gfeats.dsize, mapping_gfeats.metric_type)

        # force the same order for all global features
        mapping_global_features_to_filepaths = [
            (image_filename,
             get_features_fullpath(kapture.GlobalFeatures,
                                   global_features_type, input_path,
                                   image_filename, map_tar_handlers))
            for image_filename in image_list_map
        ]
        mapping_stacked_features = stack_global_features(
            global_features_config, mapping_global_features_to_filepaths)

        if input_path == query_path:
            query_stacked_features = mapping_stacked_features
        else:
            query_global_features_to_filepaths = [
                (image_filename,
                 get_features_fullpath(kapture.GlobalFeatures,
                                       global_features_type, query_path,
                                       image_filename, query_tar_handlers))
                for image_filename in image_list_query
            ]
            query_stacked_features = stack_global_features(
                global_features_config, query_global_features_to_filepaths)

        # additional step to really make sure the order of the matrix is the same, and to remember it
        if stacked_map_index is None:
            stacked_map_index = mapping_stacked_features.index
        else:
            assert (stacked_map_index.tolist()
                    == mapping_stacked_features.index.tolist())

        if stacked_query_index is None:
            stacked_query_index = query_stacked_features.index
        else:
            assert (stacked_query_index.tolist()
                    == query_stacked_features.index.tolist())

        similarity_matrices.append(
            get_similarity_matrix(query_stacked_features,
                                  mapping_stacked_features))

    if method == LateFusionMethod.round_robin:
        logger.info('Compute fused similarity from round_robin')
        similarity_dicts = [
            get_similarity_dict_from_similarity_matrix(similarity,
                                                       stacked_query_index,
                                                       stacked_map_index)
            for similarity in similarity_matrices
        ]
        image_pairs = round_robin_from_similarity_dicts(similarity_dicts, topk)
    else:
        logger.info(f'Compute fused similarity from {method.value} ...')
        similarity = fuse_similarities(similarity_matrices, method,
                                       additional_parameters)
        similarity_dict = get_similarity_dict_from_similarity_matrix(
            similarity, stacked_query_index, stacked_map_index)
        image_pairs = get_image_pairs(similarity_dict, topk)

    logger.info('saving to file ...')
    os.umask(0o002)
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)
    with open(output_path, 'w') as fid:
        table_to_file(fid,
                      image_pairs,
                      header='# query_image, map_image, score')
    logger.info('all done')
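A compact numpy sketch of the fusion path above on toy data (assumed, not the kapture API): one L2-normalized dot-product similarity matrix per global-feature type, a plain mean standing in for fuse_similarities, and a per-query topk extraction. The feature-type names and dimensions are made up for the example.

# Sketch only: fuse per-type similarity matrices and extract topk pairs.
import numpy as np


def fused_pairs_sketch(query_feats_per_type, map_feats_per_type,
                       query_names, map_names, topk):
    matrices = []
    for gtype, query_feats in query_feats_per_type.items():
        map_feats = map_feats_per_type[gtype]
        q = query_feats / np.linalg.norm(query_feats, axis=1, keepdims=True)
        m = map_feats / np.linalg.norm(map_feats, axis=1, keepdims=True)
        matrices.append(q @ m.T)  # one similarity matrix per feature type
    similarity = np.mean(matrices, axis=0)  # late fusion (here: plain mean)
    pairs = []
    for qi, query in enumerate(query_names):
        order = np.argsort(-similarity[qi])[:topk]
        pairs.extend((query, map_names[mi], float(similarity[qi, mi])) for mi in order)
    return pairs


if __name__ == '__main__':
    rng = np.random.default_rng(0)
    qf = {'netvlad': rng.normal(size=(2, 8)), 'apgem': rng.normal(size=(2, 8))}
    mf = {'netvlad': rng.normal(size=(3, 8)), 'apgem': rng.normal(size=(3, 8))}
    print(fused_pairs_sketch(qf, mf, ['q0', 'q1'], ['m0', 'm1', 'm2'], topk=2))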
Code example #6
def compute_image_pairs(mapping_path: str,
                        query_path: str,
                        output_path: str,
                        topk: int):
    """
    compute image pairs between query -> mapping from global features, and write the result in a text file

    :param mapping_path: input path to kapture input root directory
    :type mapping_path: str
    :param query_path: input path to a kapture root directory
    :type query_path: str
    :param output_path: output path to pairsfile
    :type output_path: str
    :param topk: the max number of top retained images
    :type topk: int
    """
    logger.info(f'compute_image_pairs. loading mapping: {mapping_path}')
    kdata_mapping = kapture_from_dir(mapping_path, None, skip_list=[kapture.Keypoints,
                                                                    kapture.Descriptors,
                                                                    kapture.Matches,
                                                                    kapture.Observations,
                                                                    kapture.Points3d])
    assert kdata_mapping.sensors is not None
    assert kdata_mapping.records_camera is not None
    assert kdata_mapping.global_features is not None

    if mapping_path == query_path:
        kdata_query = kdata_mapping
    else:
        logger.info(f'compute_image_pairs. loading query: {query_path}')
        kdata_query = kapture_from_dir(query_path, None, skip_list=[kapture.Keypoints,
                                                                    kapture.Descriptors,
                                                                    kapture.Matches,
                                                                    kapture.Observations,
                                                                    kapture.Points3d])
        assert kdata_query.sensors is not None
        assert kdata_query.records_camera is not None
        assert kdata_query.global_features is not None

    assert kdata_mapping.global_features is not None
    assert kdata_query.global_features is not None
    assert kdata_mapping.global_features.type_name == kdata_query.global_features.type_name
    assert kdata_mapping.global_features.dtype == kdata_query.global_features.dtype
    assert kdata_mapping.global_features.dsize == kdata_query.global_features.dsize
    global_features_config = ImageFeatureConfig(kdata_mapping.global_features.type_name,
                                                kdata_mapping.global_features.dtype,
                                                kdata_mapping.global_features.dsize)

    logger.info(f'computing pairs with {kdata_mapping.global_features.type_name}...')

    mapping_global_features_to_filepaths = global_features_to_filepaths(kdata_mapping.global_features,
                                                                        mapping_path)
    mapping_list = list(kapture.flatten(mapping_global_features_to_filepaths, is_sorted=True))
    mapping_stacked_features = stack_global_features(global_features_config, mapping_list)

    if mapping_path == query_path:
        query_stacked_features = mapping_stacked_features
    else:
        query_global_features_to_filepaths = global_features_to_filepaths(kdata_query.global_features,
                                                                          query_path)
        query_list = list(kapture.flatten(query_global_features_to_filepaths, is_sorted=True))
        query_stacked_features = stack_global_features(global_features_config, query_list)

    similarity = get_similarity(query_stacked_features, mapping_stacked_features)

    # get list of image pairs
    image_pairs = get_image_pairs(similarity, topk)

    logger.info('saving to file  ...')
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)
    with open(output_path, 'w') as fid:
        table_to_file(fid, image_pairs, header='# query_image, map_image, score')
    logger.info('all done')
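The pairsfile itself is written by table_to_file under the '# query_image, map_image, score' header. As a hypothetical stand-in (the exact separators used by kapture may differ), here is a small writer/reader pair illustrating the kind of file this tool produces and how a consumer might load it back into a per-query dict:

# Hypothetical stand-in for the pairsfile I/O (not kapture's table_to_file):
# one "query, map, score" row per pair under a comment header.
import csv


def write_pairsfile_sketch(path, image_pairs):
    with open(path, 'w', newline='') as fid:
        fid.write('# query_image, map_image, score\n')
        csv.writer(fid, delimiter=',').writerows(image_pairs)


def read_pairsfile_sketch(path):
    pairs = {}
    with open(path, 'r', newline='') as fid:
        for row in csv.reader(fid):
            if not row or row[0].startswith('#'):
                continue  # skip the header comment and blank lines
            query, mapping, score = row[0].strip(), row[1].strip(), float(row[2])
            pairs.setdefault(query, []).append((mapping, score))
    return pairs


if __name__ == '__main__':
    write_pairsfile_sketch('pairs_example.txt', [('q.jpg', 'm.jpg', 0.87)])
    print(read_pairsfile_sketch('pairs_example.txt'))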
Code example #7
def compute_distance_pairs(mapping_path: str, query_path: Optional[str],
                           output_path: str, topk: int, block_size: int,
                           min_distance: float, max_distance: float,
                           max_angle: float, keep_rejected: bool):
    """
    compute image pairs from distance, and write the result in a text file
    """
    skip_heavy = [
        kapture.RecordsLidar, kapture.RecordsWifi, kapture.Keypoints,
        kapture.Descriptors, kapture.GlobalFeatures, kapture.Matches,
        kapture.Points3d, kapture.Observations
    ]

    logger.info(f'compute_distance_pairs. loading mapping: {mapping_path}')
    kdata = kapture_from_dir(mapping_path, skip_list=skip_heavy)
    assert kdata.sensors is not None
    assert kdata.records_camera is not None
    assert kdata.trajectories is not None

    if query_path is None or mapping_path == query_path:
        logger.info('computing mapping pairs from distance...')
        kdata_query = None
    else:
        logger.info('computing query pairs from distance...')
        kdata_query = kapture_from_dir(query_path, skip_list=skip_heavy)
        assert kdata_query.sensors is not None
        assert kdata_query.records_camera is not None
        assert kdata_query.trajectories is not None

    os.umask(0o002)
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)

    with open(output_path, 'w') as fid:
        if kdata_query is None:
            kdata_query = kdata
        if kdata_query.rigs is not None:
            assert kdata_query.trajectories is not None  # for ide
            kapture.rigs_remove_inplace(kdata_query.trajectories,
                                        kdata_query.rigs)
        records_camera_list = sorted(kapture.flatten(kdata_query.records_camera),
                                     key=lambda x: x[2])
        number_of_iteration = math.ceil(len(records_camera_list) / block_size)
        table_to_file(fid, [], header='# query_image, map_image, score')
        for i in tqdm(range(number_of_iteration),
                      disable=logging.getLogger().level >= logging.CRITICAL):
            sliced_records = kapture.RecordsCamera()
            block = records_camera_list[i * block_size:(i + 1) * block_size]
            for ts, sensor_id, img_name in block:
                if (ts, sensor_id) not in kdata_query.trajectories:
                    continue
                sliced_records[(ts, sensor_id)] = img_name
            kdata_slice_query = kapture.Kapture(
                sensors=kdata_query.sensors,
                records_camera=sliced_records,
                trajectories=kdata_query.trajectories)
            image_pairs = get_pairs_distance(kdata, kdata_slice_query, topk,
                                             min_distance, max_distance,
                                             max_angle, keep_rejected)
            table_to_file(fid, image_pairs)
    logger.info('all done')
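get_pairs_distance is not shown. As a simplified, assumed model (camera centers plus unit viewing directions instead of kapture pose objects, and 1/(1+distance) as an arbitrary score), here is a sketch of distance/angle-gated pairing:

# Sketch only: keep map images whose camera lies within the distance band
# and whose viewing direction differs by at most max_angle_deg.
import numpy as np


def distance_pairs_sketch(query_poses, map_poses, topk,
                          min_distance, max_distance, max_angle_deg):
    pairs = []
    for query, (q_center, q_dir) in sorted(query_poses.items()):
        candidates = []
        for mapping, (m_center, m_dir) in map_poses.items():
            distance = float(np.linalg.norm(q_center - m_center))
            cos_angle = float(np.clip(np.dot(q_dir, m_dir), -1.0, 1.0))
            angle = float(np.degrees(np.arccos(cos_angle)))
            if min_distance <= distance <= max_distance and angle <= max_angle_deg:
                candidates.append((mapping, distance))
        candidates.sort(key=lambda x: x[1])  # closest cameras first
        pairs.extend((query, mapping, 1.0 / (1.0 + d))
                     for mapping, d in candidates[:topk])
    return pairs


if __name__ == '__main__':
    q = {'q.jpg': (np.array([0.0, 0.0, 0.0]), np.array([0.0, 0.0, 1.0]))}
    m = {'m1.jpg': (np.array([0.5, 0.0, 0.0]), np.array([0.0, 0.0, 1.0])),
         'm2.jpg': (np.array([10.0, 0.0, 0.0]), np.array([0.0, 0.0, 1.0]))}
    print(distance_pairs_sketch(q, m, topk=5, min_distance=0.0,
                                max_distance=2.0, max_angle_deg=45.0))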
Code example #8
def compute_image_pairs(mapping_path: str, query_path: str, output_path: str,
                        topk: int):
    """
    compute image pairs between query -> mapping from global features, and write the result in a text file

    :param mapping_path: input path to kapture input root directory
    :type mapping_path: str
    :param query_path: input path to a kapture root directory
    :type query_path: str
    :param output_path: output path to pairsfile
    :type output_path: str
    :param topk: the max number of top retained images
    :type topk: int
    """
    logger.info(f'compute_image_pairs. loading mapping: {mapping_path}')
    kdata_mapping = kapture_from_dir(mapping_path)
    assert kdata_mapping.sensors is not None
    assert kdata_mapping.records_camera is not None
    assert kdata_mapping.global_features is not None

    if mapping_path == query_path:
        kdata_query = kdata_mapping
    else:
        logger.info(f'compute_image_pairs. loading query: {query_path}')
        kdata_query = kapture_from_dir(query_path)
        assert kdata_query.sensors is not None
        assert kdata_query.records_camera is not None
        assert kdata_query.global_features is not None

    assert kdata_mapping.global_features is not None
    assert kdata_query.global_features is not None
    assert kdata_mapping.global_features.type_name == kdata_query.global_features.type_name
    assert kdata_mapping.global_features.dtype == kdata_query.global_features.dtype
    assert kdata_mapping.global_features.dsize == kdata_query.global_features.dsize
    global_features_config = ImageFeatureConfig(
        kdata_mapping.global_features.type_name,
        kdata_mapping.global_features.dtype,
        kdata_mapping.global_features.dsize)

    logger.info(
        f'computing pairs with {kdata_mapping.global_features.type_name}...'
    )

    mapping_global_features_to_filepaths = global_features_to_filepaths(
        kdata_mapping.global_features, mapping_path)
    mapping_list = list(
        kapture.flatten(mapping_global_features_to_filepaths, is_sorted=True))
    mapping_indexes, mapping_features = stack_global_features(
        global_features_config, mapping_list)

    if mapping_path == query_path:
        query_indexes, query_features = mapping_indexes, mapping_features
    else:
        query_global_features_to_filepaths = global_features_to_filepaths(
            kdata_query.global_features, query_path)
        query_list = list(
            kapture.flatten(query_global_features_to_filepaths,
                            is_sorted=True))
        query_indexes, query_features = stack_global_features(
            global_features_config, query_list)
    # compute similarity matrix
    similarity_matrix = query_features.dot(mapping_features.T)

    # convert similarity matrix to dictionary query_name -> sorted (high score first) list [(mapping_name, score), ...]
    similarity_dict = {}
    for i, line in enumerate(similarity_matrix):
        scores = line
        indexes = np.argsort(-scores)
        query_name = query_indexes[i]
        similarity_dict[query_name] = list(
            zip(mapping_indexes[indexes], scores[indexes]))

    # get list of image pairs
    image_pairs = []
    for query_image_name, images_to_match in sorted(similarity_dict.items()):
        for mapping_image_name, score in images_to_match[:topk]:
            image_pairs.append([query_image_name, mapping_image_name, score])

    logger.info('saving to file  ...')
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)
    with open(output_path, 'w') as fid:
        table_to_file(fid,
                      image_pairs,
                      header='# query_image, map_image, score')
    logger.info('all done')
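A tiny standalone check of the ranking step used above: np.argsort(-scores) orders each row of the similarity matrix from best to worst map image (toy names and values, not kapture data).

# Sketch only: descending ranking of a hand-written similarity matrix.
import numpy as np

query_indexes = np.array(['q0.jpg', 'q1.jpg'])
mapping_indexes = np.array(['m0.jpg', 'm1.jpg', 'm2.jpg'])
similarity_matrix = np.array([[0.2, 0.9, 0.5],
                              [0.7, 0.1, 0.3]])

similarity_dict = {}
for i, scores in enumerate(similarity_matrix):
    order = np.argsort(-scores)  # highest score first
    similarity_dict[query_indexes[i]] = list(zip(mapping_indexes[order], scores[order]))

# expected: q0.jpg ranks m1.jpg first (0.9), q1.jpg ranks m0.jpg first (0.7)
for query, ranked in sorted(similarity_dict.items()):
    print(query, ranked)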
Code example #9
def compute_image_pairs(mapping_path: str, query_path: str, output_path: str,
                        global_features_type: Optional[str], topk: int):
    """
    compute image pairs between query -> mapping from global features, and write the result in a text file

    :param mapping_path: input path to kapture input root directory
    :type mapping_path: str
    :param query_path: input path to a kapture root directory
    :type query_path: str
    :param output_path: output path to pairsfile
    :type output_path: str
    :param global_features_type: type of global_features, name of the global_features subfolder
    :param topk: the max number of top retained images
    :type topk: int
    """
    logger.info(f'compute_image_pairs. loading mapping: {mapping_path}')
    with get_all_tar_handlers(mapping_path) as mapping_tar_handlers:
        kdata_mapping = kapture_from_dir(mapping_path,
                                         None,
                                         skip_list=[
                                             kapture.Keypoints,
                                             kapture.Descriptors,
                                             kapture.Matches,
                                             kapture.Observations,
                                             kapture.Points3d
                                         ],
                                         tar_handlers=mapping_tar_handlers)
        assert kdata_mapping.sensors is not None
        assert kdata_mapping.records_camera is not None
        assert kdata_mapping.global_features is not None
        if global_features_type is None:
            global_features_type = try_get_only_key_from_collection(
                kdata_mapping.global_features)
        assert global_features_type is not None
        assert global_features_type in kdata_mapping.global_features

        global_features_config = GlobalFeaturesConfig(
            kdata_mapping.global_features[global_features_type].type_name,
            kdata_mapping.global_features[global_features_type].dtype,
            kdata_mapping.global_features[global_features_type].dsize,
            kdata_mapping.global_features[global_features_type].metric_type)

        logger.info(f'computing pairs with {global_features_type}...')

        mapping_global_features_to_filepaths = global_features_to_filepaths(
            kdata_mapping.global_features[global_features_type],
            global_features_type, mapping_path, mapping_tar_handlers)
        mapping_list = list(
            sorted(mapping_global_features_to_filepaths.items()))
        mapping_stacked_features = stack_global_features(
            global_features_config, mapping_list)

    if mapping_path == query_path:
        kdata_query = kdata_mapping
        query_stacked_features = mapping_stacked_features
    else:
        logger.info(f'compute_image_pairs. loading query: {query_path}')
        with get_all_tar_handlers(query_path) as query_tar_handlers:
            kdata_query = kapture_from_dir(query_path,
                                           None,
                                           skip_list=[
                                               kapture.Keypoints,
                                               kapture.Descriptors,
                                               kapture.Matches,
                                               kapture.Observations,
                                               kapture.Points3d
                                           ],
                                           tar_handlers=query_tar_handlers)
            assert kdata_query.sensors is not None
            assert kdata_query.records_camera is not None
            assert kdata_query.global_features is not None
            assert global_features_type in kdata_query.global_features

            kdata_mapping_gfeat = kdata_mapping.global_features[
                global_features_type]
            kdata_query_gfeat = kdata_query.global_features[
                global_features_type]
            assert kdata_mapping_gfeat.type_name == kdata_query_gfeat.type_name
            assert kdata_mapping_gfeat.dtype == kdata_query_gfeat.dtype
            assert kdata_mapping_gfeat.dsize == kdata_query_gfeat.dsize

            query_global_features_to_filepaths = global_features_to_filepaths(
                kdata_query_gfeat, global_features_type, query_path,
                query_tar_handlers)
            query_list = list(
                sorted(query_global_features_to_filepaths.items()))
            query_stacked_features = stack_global_features(
                global_features_config, query_list)

    similarity = get_similarity(query_stacked_features,
                                mapping_stacked_features)

    # get list of image pairs
    image_pairs = get_image_pairs(similarity, topk)

    logger.info('saving to file  ...')
    p = pathlib.Path(output_path)
    os.makedirs(str(p.parent.resolve()), exist_ok=True)
    with open(output_path, 'w') as fid:
        table_to_file(fid,
                      image_pairs,
                      header='# query_image, map_image, score')
    logger.info('all done')
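try_get_only_key_from_collection is used above to auto-select the global-features type when exactly one is present. Below is a hypothetical helper with the behavior the surrounding assertions rely on (not the kapture implementation):

# Sketch only: return the single key of a collection, or None when the
# choice is ambiguous and the caller must pass the type explicitly.
from typing import Optional


def only_key_or_none(collection: dict) -> Optional[str]:
    keys = list(collection.keys())
    return keys[0] if len(keys) == 1 else None


if __name__ == '__main__':
    print(only_key_or_none({'netvlad': object()}))       # netvlad (auto-selected)
    print(only_key_or_none({'netvlad': 1, 'apgem': 2}))  # None (ambiguous)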
Code example #10
def local_sfm_from_loaded_data(kdata_map: kapture.Kapture,
                               kdata_map_gv: kapture.Kapture,
                               kdata_query: kapture.Kapture,
                               map_plus_query_path: str,
                               map_plus_query_gv_path: str,
                               tar_handlers_map: Optional[TarCollection],
                               tar_handlers_map_gv: Optional[TarCollection],
                               descriptors_type: Optional[str],
                               pairsfile_path: str,
                               output_path_root: str,
                               colmap_binary: str,
                               force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param map_plus_query_path: path to the kapture data consisting of mapping and query data (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data after geometric verification (sensors and reconstruction)
    :param query_path: path to the query kapture data (sensors)
    :param descriptors_type: type of descriptors, name of the descriptors subfolder
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """

    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    if kdata_query.trajectories:
        logger.warning("Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # clear query trajectories in map_plus_query
    kdata_map_cleared_trajectories = kapture.Trajectories()
    query_image_list = set(kdata_query.records_camera.data_list())
    for timestamp, subdict in kdata_map.records_camera.items():
        for sensor_id, image_name in subdict.items():
            if image_name in query_image_list:
                continue
            if (timestamp, sensor_id) in kdata_map.trajectories:
                pose = kdata_map.trajectories.get(timestamp)[sensor_id]
                kdata_map_cleared_trajectories.setdefault(timestamp, {})[sensor_id] = pose
    kdata_map.trajectories = kdata_map_cleared_trajectories

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera and len(
                kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, _ in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root, 'tmp_pairs.txt')

    if descriptors_type is None:
        descriptors_type = try_get_only_key_from_collection(kdata_map.descriptors)
    assert descriptors_type is not None
    assert descriptors_type in kdata_map.descriptors
    keypoints_type = kdata_map.descriptors[descriptors_type].keypoints_type

    # init matches for kdata_map and kdata_map_gv
    if kdata_map.matches is None:
        kdata_map.matches = {}
    if keypoints_type not in kdata_map.matches:
        kdata_map.matches[keypoints_type] = kapture.Matches()
    if kdata_map_gv.matches is None:
        kdata_map_gv.matches = {}
    if keypoints_type not in kdata_map_gv.matches:
        kdata_map_gv.matches[keypoints_type] = kapture.Matches()

    # run all matching
    # loop over query images
    img_skip_list = set()
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            logger.info(f'{img_query} already processed, skipping...')
            img_skip_list.add(img_query)
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'matching for {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)

            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]
            # match missing pairs
            # kdata_map.matches is being updated by compute_matches_from_loaded_data
            compute_matches_from_loaded_data(map_plus_query_path,
                                             tar_handlers_map,
                                             kdata_map,
                                             descriptors_type,
                                             pairs_all)

    # if kdata_map has matches in a tar, they need to be switched to read mode
    matches_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map)
    if matches_handler is not None:
        matches_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_path)
        tar_handlers_map.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # run all gv
    # loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            # recompute the pairs
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'geometric verification of {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)

            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]

            if all(pair in kdata_map_gv.matches[keypoints_type] for pair in pairs_all):
                continue

            # create a sub kapture in order to minimize the amount of data exported to colmap
            # kdata_sub needs to be re-created to add the new matches
            kdata_sub = sub_kapture_from_img_list(kdata_map, img_list_map + [img_query], pairs_all,
                                                  keypoints_type, descriptors_type)

            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map + [img_query], pairs_all,
                                                     keypoints_type, descriptors_type)
            # run colmap gv on missing pairs
            run_colmap_gv_from_loaded_data(kdata_sub,
                                           kdata_sub_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           tar_handlers_map,
                                           tar_handlers_map_gv,
                                           colmap_binary,
                                           keypoints_type,
                                           [],
                                           True)
            # update kdata_map_gv.matches
            kdata_map_gv.matches[keypoints_type].update(kdata_sub_gv.matches[keypoints_type])

    # if kdata_map_gv has matches in a tar, they need to be switched to read mode
    matches_gv_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map_gv)
    if matches_gv_handler is not None:
        print(matches_gv_handler)
        matches_gv_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_gv_path)
        tar_handlers_map_gv.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'mapping and localization for {img_query}')
                table_to_file(fid, map_pairs)
            map_pairs = [(i, j) for i, j, _ in map_pairs]
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map, map_pairs,
                                                     keypoints_type, descriptors_type)
            # sanity check
            if len(map_pairs) != len(kdata_sub_gv.matches[keypoints_type]):
                logger.info('not all mapping matches available')

            # build COLMAP map
            try:
                colmap_build_map_from_loaded_data(
                    kdata_sub_gv,
                    map_plus_query_gv_path,
                    tar_handlers_map_gv,
                    kdata_sub_colmap_path,
                    colmap_binary,
                    keypoints_type,
                    False,
                    [],
                    ['model_converter'],
                    True)
            except ValueError:
                logger.info(f'{img_query} was not localized')
                continue

        if not os.path.exists(os.path.join(kdata_sub_colmap_path, 'reconstruction/images.bin')):
            logger.info(f'colmap mapping for {img_query} did not work, image was not localized')
            continue

        query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
        with open(sub_kapture_pairsfile_path, 'w') as fid:
            table_to_file(fid, query_pairs)
        query_pairs = [(i, j) for i, j, _ in query_pairs]
        query_img_kapture_gv = add_image_to_kapture(kdata_map_gv,
                                                    kdata_sub_gv, img_query, query_pairs,
                                                    keypoints_type, descriptors_type)
        # sanity check
        if len(query_pairs) != len(query_img_kapture_gv.matches[keypoints_type]):
            logger.info('not all query matches available')

        # localize in COLMAP map
        try:
            colmap_localize_from_loaded_data(
                query_img_kapture_gv,
                map_plus_query_gv_path,
                tar_handlers_map_gv,
                os.path.join(kdata_sub_colmap_path, 'registered'),
                os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                colmap_binary,
                keypoints_type,
                False,
                ['--Mapper.ba_refine_focal_length', '0',
                 '--Mapper.ba_refine_principal_point', '0',
                 '--Mapper.ba_refine_extra_params', '0',
                 '--Mapper.min_num_matches', '4',
                 '--Mapper.init_min_num_inliers', '4',
                 '--Mapper.abs_pose_min_num_inliers', '4',
                 '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                 '--Mapper.ba_local_max_num_iterations', '50',
                 '--Mapper.abs_pose_max_error', '20',
                 '--Mapper.filter_max_reproj_error', '12'],
                [],
                True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                                           'reconstruction/images.txt')):
            logger.info(f'colmap localization of {img_query} did not work, image was not localized')
            continue

        # add to results kapture
        kdata_reg_query = import_colmap(
            kdata_reg_query_path,
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'colmap.db'),
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                         'reconstruction'),
            None,
            None,
            True,
            True,
            True,
            TransferAction.skip)

        if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
            logger.info('successfully localized')

        # write results (after each image to see the progress)
        kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporal files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)

    logger.info('all done')
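Stripping away the kapture and COLMAP specifics, the per-query control flow above reduces to the following schematic, self-contained sketch (all helpers are hypothetical stubs standing in for the matching, geometric-verification, mapping and localization calls): skip queries that already have a pose, localize the rest one by one, and keep going when a query fails so a single bad image does not stop the batch.

# Sketch only: per-query driver skeleton with stubbed-out localization.
def localize_queries_sketch(pairs, already_localized, localize_one):
    localized, failed = {}, []
    for img_query, img_list_map in sorted(pairs.items()):
        if img_query in already_localized:
            continue  # pose already known from a previous run
        try:
            localized[img_query] = localize_one(img_query, img_list_map)
        except ValueError:
            failed.append(img_query)  # mirrors the try/except around COLMAP calls
            continue
    return localized, failed


if __name__ == '__main__':
    pairs = {'q0.jpg': ['m0.jpg', 'm1.jpg'], 'q1.jpg': ['m2.jpg']}

    def fake_localize(query, map_images):
        if not map_images:
            raise ValueError('no retrieved images')
        return f'pose_of_{query}_from_{len(map_images)}_images'

    print(localize_queries_sketch(pairs, already_localized=set(),
                                  localize_one=fake_localize))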