def convert_pairs_to_hloc_format(pairsfile_path_kapture: str, pairsfile_path_hloc: str):
    """ convert kapture pairsfile to hloc pairsfile """
    with open(pairsfile_path_kapture, 'r') as fid:
        table = list(table_from_file(fid))
    os.makedirs(os.path.dirname(os.path.abspath(pairsfile_path_hloc)), exist_ok=True)
    with open(pairsfile_path_hloc, 'w') as fid:
        for query_name, map_name, _ in table:
            fid.write(f'{query_name} {map_name}\n')
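# Usage sketch, not part of the library: file names are hypothetical and it assumes the
# kapture package (table_from_file) is available. A kapture pairsfile row
# "query/q1.jpg, mapping/m3.jpg, 0.87" becomes the hloc line "query/q1.jpg mapping/m3.jpg"
# (the score column is dropped).
def _example_convert_pairs_to_hloc():
    import tempfile
    work_dir = tempfile.mkdtemp()
    kapture_pairs = os.path.join(work_dir, 'pairs_kapture.txt')
    hloc_pairs = os.path.join(work_dir, 'pairs_hloc.txt')
    with open(kapture_pairs, 'w') as fid:
        fid.write('query/q1.jpg, mapping/m3.jpg, 0.87\n'
                  'query/q1.jpg, mapping/m7.jpg, 0.42\n')
    convert_pairs_to_hloc_format(kapture_pairs, hloc_pairs)
    with open(hloc_pairs, 'r') as fid:
        # 'query/q1.jpg mapping/m3.jpg\nquery/q1.jpg mapping/m7.jpg\n'
        return fid.read()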
def get_pairs_from_file(pairsfile_path: str) -> List[Tuple[str, str]]:
    """
    read a pairs file (csv with 3 fields: name1, name2, score) and return the list of matches

    :param pairsfile_path: path to pairsfile
    :type pairsfile_path: str
    """
    logger.info('reading pairs from pairsfile')
    image_pairs = []
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for query_name, map_name, _ in table:  # last field score is not used
            if query_name != map_name:
                image_pairs.append((query_name, map_name) if query_name < map_name else (map_name, query_name))
    # remove duplicates without breaking order
    image_pairs = list(OrderedDict.fromkeys(image_pairs))
    return image_pairs
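# Illustrative sketch, not part of the library (hypothetical file contents): get_pairs_from_file
# drops self-pairs, stores each pair in lexicographic order and removes duplicates while keeping
# the first occurrence, so "b.jpg, a.jpg" and "a.jpg, b.jpg" collapse to the single pair
# ('a.jpg', 'b.jpg').
def _example_get_pairs_from_file():
    import tempfile
    pairsfile_path = os.path.join(tempfile.mkdtemp(), 'pairs.txt')
    with open(pairsfile_path, 'w') as fid:
        fid.write('b.jpg, a.jpg, 0.9\n'
                  'a.jpg, b.jpg, 0.3\n'
                  'a.jpg, a.jpg, 1.0\n')
    return get_pairs_from_file(pairsfile_path)  # [('a.jpg', 'b.jpg')]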
def get_ordered_pairs_from_file(pairsfile_path: str,
                                query_records: kapture.RecordsCamera = None,
                                map_records: kapture.RecordsCamera = None,
                                topk_override=None) -> Dict[str, List[Tuple[str, float]]]:
    """
    read a pairsfile and return a dict mapping each query image to its (map image, score) pairs,
    sorted by decreasing score (duplicates are kept; pair order is query, map)

    :param pairsfile_path: path to pairsfile
    :param query_records: optional, if given, only keep pairs whose query image is in these records
    :param map_records: optional, if given, only keep pairs whose map image is in these records
    :param topk_override: optional, keep at most topk_override pairs per query image
    """
    getLogger().info('reading pairs from pairsfile')
    if query_records is not None:
        query_images = set(query_records.data_list())
    else:
        query_images = None
    if map_records is not None:
        map_images = set(map_records.data_list())
    else:
        map_images = None

    image_pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for query_name, map_name, score in table:
            if query_images is not None and query_name not in query_images:
                continue
            if map_images is not None and map_name not in map_images:
                continue
            if query_name not in image_pairs:
                image_pairs[query_name] = []
            image_pairs[query_name].append((map_name, float(score)))

    for k in image_pairs.keys():
        sorted_by_score = list(sorted(image_pairs[k], key=lambda x: x[1], reverse=True))
        if topk_override is not None and topk_override > len(sorted_by_score):
            getLogger().debug(f'image {k} has {len(sorted_by_score)} pairs, less than topk={topk_override}')
        elif topk_override is not None:
            sorted_by_score = sorted_by_score[:topk_override]
        image_pairs[k] = sorted_by_score
    return image_pairs
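# Illustrative sketch, not part of the library (hypothetical file contents): shows the shape of
# the dict returned by get_ordered_pairs_from_file and the effect of topk_override. Pairs per
# query are sorted by decreasing score and truncated to topk_override; queries with fewer pairs
# than topk_override are kept as-is (only a debug message is emitted).
def _example_get_ordered_pairs_from_file():
    import tempfile
    pairsfile_path = os.path.join(tempfile.mkdtemp(), 'pairs.txt')
    with open(pairsfile_path, 'w') as fid:
        fid.write('q1.jpg, m1.jpg, 0.2\n'
                  'q1.jpg, m2.jpg, 0.9\n'
                  'q2.jpg, m1.jpg, 0.5\n')
    pairs = get_ordered_pairs_from_file(pairsfile_path, topk_override=1)
    # {'q1.jpg': [('m2.jpg', 0.9)], 'q2.jpg': [('m1.jpg', 0.5)]}
    return pairs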
def get_pairs_from_file(pairsfile_path: str,
                        query_records: kapture.RecordsCamera = None,
                        map_records: kapture.RecordsCamera = None) -> List[Tuple[str, str]]:
    """
    read a pairs file (csv with 3 fields: name1, name2, score) and return the list of matches

    :param pairsfile_path: path to pairsfile
    :type pairsfile_path: str
    :param query_records: optional, if given, only keep pairs whose first image is in these records
    :param map_records: optional, if given, only keep pairs whose second image is in these records
    """
    getLogger().info('reading pairs from pairsfile')
    if query_records is not None:
        query_images = set(query_records.data_list())
    else:
        query_images = None
    if map_records is not None:
        map_images = set(map_records.data_list())
    else:
        map_images = None

    image_pairs = []
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for query_name, map_name, _ in table:  # last field score is not used
            if query_images is not None and query_name not in query_images:
                continue
            if map_images is not None and map_name not in map_images:
                continue
            if query_name != map_name:
                image_pairs.append((query_name, map_name) if query_name < map_name else (map_name, query_name))
    # remove duplicates without breaking order
    image_pairs = list(OrderedDict.fromkeys(image_pairs))
    return image_pairs
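# Illustrative sketch, not part of the library: when query_records / map_records are given,
# get_pairs_from_file keeps only pairs whose images appear in those records. The RecordsCamera
# contents below are hypothetical and assume the usual records[timestamp, sensor_id] = image_name
# kapture API; pairs referencing images outside these records are silently dropped.
def _example_get_filtered_pairs_from_file(pairsfile_path: str):
    query_records = kapture.RecordsCamera()
    query_records[0, 'cam0'] = 'q1.jpg'
    map_records = kapture.RecordsCamera()
    map_records[0, 'cam0'] = 'm1.jpg'
    return get_pairs_from_file(pairsfile_path, query_records, map_records)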
def evaluate_command_line() -> None:
    """
    Do the evaluation using the parameters given on the command line.
    """
    parser = argparse.ArgumentParser(description='Evaluation script for kapture data.')
    parser_verbosity = parser.add_mutually_exclusive_group()
    parser_verbosity.add_argument('-v', '--verbose', nargs='?', default=logging.WARNING, const=logging.INFO,
                                  action=kapture.utils.logging.VerbosityParser,
                                  help='verbosity level (debug, info, warning, critical, ... or int value) [warning]')
    parser_verbosity.add_argument('-q', '--silent', '--quiet', action='store_const',
                                  dest='verbose', const=logging.CRITICAL)
    parser.add_argument('-i', '--inputs', nargs='+',
                        help='input path to kapture data root directory. You can compare multiple models')
    parser.add_argument('--labels', nargs='+', default=[],
                        help='labels for inputs. must be of same length as inputs')
    parser.add_argument('-gt', '--ground-truth', required=True,
                        help='input path to data ground truth root directory in kapture format')
    parser.add_argument('-o', '--output', required=True, help='output directory.')
    parser.add_argument('-l', '--image-list', default="",
                        help='optional, path to a text file containing the list of images to consider'
                             ' (1 line per image or a pairsfile). if not present, all gt images are used')
    parser.add_argument('--bins', nargs='+', default=["0.25 2", "0.5 5", "5 10"],
                        help='the desired position/rotation thresholds for bins. '
                             'format is a string: position_threshold_in_m space rotation_threshold_in_degree')
    parser.add_argument('-p', '--plot-rotation-threshold', default=-1, type=float,
                        help='rotation threshold for position error threshold plot. '
                             'negative values -> ignore rotation')
    parser.add_argument('--plot-max', default=100, type=int,
                        help='maximum distance in cm shown in plot')
    parser.add_argument('--plot-title', default="",
                        help='title for position error threshold plot')
    parser.add_argument('--plot-loc', default="best",
                        choices=['best', 'upper right', 'upper left', 'lower left', 'lower right', 'right',
                                 'center left', 'center right', 'lower center', 'upper center', 'center'],
                        help='position of plot legend. loc param for plt.legend.')
    parser.add_argument('--plot-font-size', default=15, type=int,
                        help='value for plt.rcParams[\'font.size\']')
    parser.add_argument('--plot-legend-font-size', default=8, type=int,
                        help='value for plt.rcParams[\'legend.fontsize\']')
    parser.add_argument('-f', '-y', '--force', action='store_true', default=False,
                        help='force delete output directory if it already exists')
    args = parser.parse_args()

    logger.setLevel(args.verbose)
    if args.verbose <= logging.DEBUG:
        # also let kapture express its logs
        kapture.utils.logging.getLogger().setLevel(args.verbose)
        kapture_localization.utils.logging.getLogger().setLevel(args.verbose)

    assert len(args.inputs) > 0
    if len(args.labels) == 0:
        args.labels = [f'input{i}' for i in range(1, len(args.inputs) + 1)]
    assert len(args.labels) == len(args.inputs)

    try:
        logger.debug(''.join(['\n\t{:13} = {}'.format(k, v) for k, v in vars(args).items()]))
        os.makedirs(args.output, exist_ok=True)

        logger.debug('loading: {}'.format(args.inputs))
        all_kapture_to_eval = [csv.kapture_from_dir(folder) for folder in args.inputs]

        logger.info('loading ground truth data')
        gt_kapture = csv.kapture_from_dir(args.ground_truth)
        assert gt_kapture.records_camera is not None
        assert gt_kapture.trajectories is not None

        if args.image_list:
            with open(args.image_list, 'r') as fid:
                table = table_from_file(fid)
                image_set = {line[0] for line in table}
        else:
            if gt_kapture.rigs is not None:
                gt_trajectories = kapture.rigs_remove(gt_kapture.trajectories, gt_kapture.rigs)
            else:
                gt_trajectories = gt_kapture.trajectories
            image_set = set(image_name
                            for ts, sensor_id, image_name in kapture.flatten(gt_kapture.records_camera)
                            if (ts, sensor_id) in gt_trajectories)
        if len(image_set) == 0:
            logger.info('image_set is empty: could not find any image to evaluate')
            exit(0)

        results = [evaluate(kapture_to_eval, gt_kapture, image_set)
                   for kapture_to_eval in all_kapture_to_eval]
        save_evaluation(results, args.output, args.labels, args.bins,
                        args.plot_rotation_threshold, args.plot_max, args.plot_title,
                        args.plot_loc, args.plot_font_size, args.plot_legend_font_size,
                        args.force)
    except Exception as e:
        logger.critical(e)
        if args.verbose > 1:
            raise
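# Hedged sketch, not part of the script: each --bins entry is a single string
# "position_threshold_in_m rotation_threshold_in_degree" (e.g. "0.25 2", per the default values
# above); a consumer such as save_evaluation would need to split it into the two float
# thresholds. The helper name below is hypothetical and only illustrates that format.
def _parse_bin_threshold(bin_string: str) -> Tuple[float, float]:
    position_threshold_m, rotation_threshold_deg = bin_string.split()
    return float(position_threshold_m), float(rotation_threshold_deg)
# _parse_bin_threshold("0.25 2") -> (0.25, 2.0)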
def local_sfm(map_plus_query_path: str,
              map_plus_query_gv_path: str,
              query_path: str,
              pairsfile_path: str,
              output_path_root: str,
              colmap_binary: str,
              force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param map_plus_query_path: path to the kapture data consisting of mapping and query data
                                (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data
                                   after geometric verification (sensors and reconstruction)
    :param query_path: path to the query kapture data (sensors)
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images
                           for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """
    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    kdata_query = kapture_from_dir(query_path)
    if kdata_query.trajectories:
        logger.warning("Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera \
                and len(kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    # load kapture maps
    kdata_map = kapture_from_dir(map_plus_query_path)
    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    kdata_map_gv = kapture_from_dir(map_plus_query_gv_path)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, _ in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root, 'tmp_pairs_map.txt')
    query_img_kapture_pairsfile_path = os.path.join(output_path_root, 'tmp_pairs_query.txt')

    # loop over query images
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            logger.info(f'{img_query} already processed, skipping...')
            continue
        else:
            logger.info(f'processing {img_query}')

        # write pairsfile for sub-kapture
        map_pairs = write_pairfile_from_img_list(img_list_map, sub_kapture_pairsfile_path)

        # write pairsfile for query_img_kapture
        query_pairs = write_pairfile_img_vs_img_list(img_query, img_list_map, query_img_kapture_pairsfile_path)

        # create sub-kapture
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path, img_list_map, map_pairs)
        kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, map_plus_query_gv_path, img_list_map, map_pairs)

        # match missing pairs for mapping
        compute_matches_from_loaded_data(map_plus_query_path, kdata_sub, map_pairs)

        # kdata_sub needs to be re-created to add the new matches
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path, img_list_map, map_pairs)

        # run colmap gv on missing pairs
        if len(kdata_sub.matches) != len(kdata_sub_gv.matches):
            run_colmap_gv_from_loaded_data(kdata_sub, kdata_sub_gv,
                                           map_plus_query_path, map_plus_query_gv_path,
                                           colmap_binary, [], True)
            # kdata_sub_gv needs to be re-created to add the new matches
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, map_plus_query_gv_path, img_list_map, map_pairs)

        # sanity check
        if len(map_pairs) != len(kdata_sub_gv.matches):
            logger.info('not all mapping matches available')

        # build COLMAP map
        try:
            colmap_build_map_from_loaded_data(kdata_sub_gv, map_plus_query_gv_path,
                                              kdata_sub_colmap_path, colmap_binary,
                                              False, [], ['model_converter'], True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(os.path.join(kdata_sub_colmap_path, 'reconstruction/images.bin')):
            logger.info(f'colmap mapping for {img_query} did not work, image was not localized')
            continue

        # create single image kapture
        # (kdata_sub needs to be recreated because descriptors are deleted in build_colmap_model)
        kdata_sub = sub_kapture_from_img_list(kdata_map, map_plus_query_path, img_list_map, map_pairs)
        kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, map_plus_query_gv_path, img_list_map, map_pairs)
        query_img_kapture = add_image_to_kapture(kdata_map, map_plus_query_path,
                                                 kdata_sub, img_query, query_pairs)
        query_img_kapture_gv = add_image_to_kapture(kdata_map_gv, map_plus_query_gv_path,
                                                    kdata_sub_gv, img_query, query_pairs)

        # match missing pairs for localization
        compute_matches_from_loaded_data(map_plus_query_path, query_img_kapture, query_pairs)

        # query_img_kapture needs to be re-created to add the new matches
        query_img_kapture = add_image_to_kapture(kdata_map, map_plus_query_path,
                                                 kdata_sub, img_query, query_pairs)

        # run colmap gv on missing pairs
        if len(query_img_kapture.matches) != len(query_img_kapture_gv.matches):
            run_colmap_gv_from_loaded_data(query_img_kapture, query_img_kapture_gv,
                                           map_plus_query_path, map_plus_query_gv_path,
                                           colmap_binary, [], True)
            # query_img_kapture_gv needs to be re-created to add the new matches
            query_img_kapture_gv = add_image_to_kapture(kdata_map_gv, map_plus_query_gv_path,
                                                        kdata_sub_gv, img_query, query_pairs)

        # sanity check
        if len(query_pairs) != len(query_img_kapture_gv.matches):
            logger.info('not all query matches available')

        # localize in COLMAP map
        try:
            colmap_localize_from_loaded_data(query_img_kapture_gv, map_plus_query_gv_path,
                                             os.path.join(kdata_sub_colmap_path, 'registered'),
                                             os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                                             os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                                             colmap_binary, False,
                                             ['--Mapper.ba_refine_focal_length', '0',
                                              '--Mapper.ba_refine_principal_point', '0',
                                              '--Mapper.ba_refine_extra_params', '0',
                                              '--Mapper.min_num_matches', '4',
                                              '--Mapper.init_min_num_inliers', '4',
                                              '--Mapper.abs_pose_min_num_inliers', '4',
                                              '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                                              '--Mapper.ba_local_max_num_iterations', '50',
                                              '--Mapper.abs_pose_max_error', '20',
                                              '--Mapper.filter_max_reproj_error', '12'],
                                             [], True)
        except ValueError:
            logger.info(f'{img_query} was not localized')
            continue

        if not os.path.exists(os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                                           'reconstruction/images.txt')):
            logger.info(f'colmap localization of {img_query} did not work, image was not localized')
            continue

        # add to results kapture
        kdata_reg_query = import_colmap(
            kdata_reg_query_path,
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'colmap.db'),
            os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'reconstruction'),
            None, None, True, True, True, TransferAction.skip)

        if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
            logger.info('successfully localized')

        # write results (after each image to see the progress)
        kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporary files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)
    safe_remove_file(query_img_kapture_pairsfile_path, True)

    logger.info('all done')
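# Illustrative sketch, not part of the library: local_sfm groups the pairsfile rows by query
# image before looping, then builds one COLMAP sub-model per query from its retrieved mapping
# images and registers the query into it. The standalone helper below only mirrors that
# grouping step on an in-memory table; its name is hypothetical.
def _example_group_pairs_by_query(table):
    pairs = {}
    for img_query, img_map, _ in table:
        pairs.setdefault(img_query, []).append(img_map)
    return pairs
# _example_group_pairs_by_query([('q1.jpg', 'm1.jpg', 0.9),
#                                ('q1.jpg', 'm2.jpg', 0.8),
#                                ('q2.jpg', 'm1.jpg', 0.7)])
# -> {'q1.jpg': ['m1.jpg', 'm2.jpg'], 'q2.jpg': ['m1.jpg']}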
def local_sfm_from_loaded_data(kdata_map: kapture.Kapture,
                               kdata_map_gv: kapture.Kapture,
                               kdata_query: kapture.Kapture,
                               map_plus_query_path: str,
                               map_plus_query_gv_path: str,
                               tar_handlers_map: Optional[TarCollection],
                               tar_handlers_map_gv: Optional[TarCollection],
                               descriptors_type: Optional[str],
                               pairsfile_path: str,
                               output_path_root: str,
                               colmap_binary: str,
                               force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param kdata_map: loaded kapture data consisting of mapping and query data (sensors and reconstruction)
    :param kdata_map_gv: loaded kapture data consisting of mapping and query data after geometric verification
    :param kdata_query: loaded query kapture data (sensors)
    :param map_plus_query_path: path to the kapture data consisting of mapping and query data
                                (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data
                                   after geometric verification (sensors and reconstruction)
    :param tar_handlers_map: tar handlers for the map_plus_query data, if its features are stored in tar files
    :param tar_handlers_map_gv: tar handlers for the geometrically verified map_plus_query data
    :param descriptors_type: type of descriptors, name of the descriptors subfolder
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images
                           for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """
    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    if kdata_query.trajectories:
        logger.warning("Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # clear query trajectories in map_plus_query
    kdata_map_cleared_trajectories = kapture.Trajectories()
    query_image_list = set(kdata_query.records_camera.data_list())
    for timestamp, subdict in kdata_map.records_camera.items():
        for sensor_id, image_name in subdict.items():
            if image_name in query_image_list:
                continue
            if (timestamp, sensor_id) in kdata_map.trajectories:
                pose = kdata_map.trajectories.get(timestamp)[sensor_id]
                kdata_map_cleared_trajectories.setdefault(timestamp, {})[sensor_id] = pose
    kdata_map.trajectories = kdata_map_cleared_trajectories

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera \
                and len(kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, _ in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root, 'tmp_pairs.txt')

    if descriptors_type is None:
        descriptors_type = try_get_only_key_from_collection(kdata_map.descriptors)
    assert descriptors_type is not None
    assert descriptors_type in kdata_map.descriptors
    keypoints_type = kdata_map.descriptors[descriptors_type].keypoints_type

    # init matches for kdata_map and kdata_map_gv
    if kdata_map.matches is None:
        kdata_map.matches = {}
    if keypoints_type not in kdata_map.matches:
        kdata_map.matches[keypoints_type] = kapture.Matches()
    if kdata_map_gv.matches is None:
        kdata_map_gv.matches = {}
    if keypoints_type not in kdata_map_gv.matches:
        kdata_map_gv.matches[keypoints_type] = kapture.Matches()

    # run all matching: loop over query images
    img_skip_list = set()
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            logger.info(f'{img_query} already processed, skipping...')
            img_skip_list.add(img_query)
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'matching for {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)
            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]
            # match missing pairs
            # kdata_map.matches is being updated by compute_matches_from_loaded_data
            compute_matches_from_loaded_data(map_plus_query_path,
                                             tar_handlers_map,
                                             kdata_map,
                                             descriptors_type,
                                             pairs_all)

    # if kdata_map has matches in a tar, they need to be switched to read mode
    matches_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map)
    if matches_handler is not None:
        matches_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_path)
        tar_handlers_map.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # run all gv: loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            # recompute the pairs
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'geometric verification of {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)
            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]
            if all(pair in kdata_map_gv.matches[keypoints_type] for pair in pairs_all):
                continue
            # create a sub kapture in order to minimize the amount of data exported to colmap
            # kdata_sub needs to be re-created to add the new matches
            kdata_sub = sub_kapture_from_img_list(kdata_map, img_list_map + [img_query], pairs_all,
                                                  keypoints_type, descriptors_type)
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map + [img_query], pairs_all,
                                                     keypoints_type, descriptors_type)
            # run colmap gv on missing pairs
            run_colmap_gv_from_loaded_data(kdata_sub, kdata_sub_gv,
                                           map_plus_query_path, map_plus_query_gv_path,
                                           tar_handlers_map, tar_handlers_map_gv,
                                           colmap_binary, keypoints_type, [], True)
            # update kdata_map_gv.matches
            kdata_map_gv.matches[keypoints_type].update(kdata_sub_gv.matches[keypoints_type])

    # if kdata_map_gv has matches in a tar, they need to be switched to read mode
    matches_gv_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map_gv)
    if matches_gv_handler is not None:
        matches_gv_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_gv_path)
        tar_handlers_map_gv.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # run mapping and localization: loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'mapping and localization for {img_query}')
                table_to_file(fid, map_pairs)
            map_pairs = [(i, j) for i, j, _ in map_pairs]
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map, map_pairs,
                                                     keypoints_type, descriptors_type)
            # sanity check
            if len(map_pairs) != len(kdata_sub_gv.matches[keypoints_type]):
                logger.info('not all mapping matches available')

            # build COLMAP map
            try:
                colmap_build_map_from_loaded_data(kdata_sub_gv,
                                                  map_plus_query_gv_path,
                                                  tar_handlers_map_gv,
                                                  kdata_sub_colmap_path, colmap_binary,
                                                  keypoints_type,
                                                  False, [], ['model_converter'], True)
            except ValueError:
                logger.info(f'{img_query} was not localized')
                continue

            if not os.path.exists(os.path.join(kdata_sub_colmap_path, 'reconstruction/images.bin')):
                logger.info(f'colmap mapping for {img_query} did not work, image was not localized')
                continue

            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                table_to_file(fid, query_pairs)
            query_pairs = [(i, j) for i, j, _ in query_pairs]

            query_img_kapture_gv = add_image_to_kapture(kdata_map_gv, kdata_sub_gv, img_query, query_pairs,
                                                        keypoints_type, descriptors_type)
            # sanity check
            if len(query_pairs) != len(query_img_kapture_gv.matches[keypoints_type]):
                logger.info('not all query matches available')

            # localize in COLMAP map
            try:
                colmap_localize_from_loaded_data(query_img_kapture_gv,
                                                 map_plus_query_gv_path,
                                                 tar_handlers_map_gv,
                                                 os.path.join(kdata_sub_colmap_path, 'registered'),
                                                 os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                                                 os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                                                 colmap_binary,
                                                 keypoints_type,
                                                 False,
                                                 ['--Mapper.ba_refine_focal_length', '0',
                                                  '--Mapper.ba_refine_principal_point', '0',
                                                  '--Mapper.ba_refine_extra_params', '0',
                                                  '--Mapper.min_num_matches', '4',
                                                  '--Mapper.init_min_num_inliers', '4',
                                                  '--Mapper.abs_pose_min_num_inliers', '4',
                                                  '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                                                  '--Mapper.ba_local_max_num_iterations', '50',
                                                  '--Mapper.abs_pose_max_error', '20',
                                                  '--Mapper.filter_max_reproj_error', '12'],
                                                 [], True)
            except ValueError:
                logger.info(f'{img_query} was not localized')
                continue

            if not os.path.exists(os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                                               'reconstruction/images.txt')):
                logger.info(f'colmap localization of {img_query} did not work, image was not localized')
                continue

            # add to results kapture
            kdata_reg_query = import_colmap(
                kdata_reg_query_path,
                os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'colmap.db'),
                os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'reconstruction'),
                None, None, True, True, True, TransferAction.skip)

            if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
                logger.info('successfully localized')

            # write results (after each image to see the progress)
            kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporary files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)

    logger.info('all done')
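# Hedged sketch, not part of the library: the "clear query trajectories in map_plus_query" step
# above keeps only the poses of non-query images so that known query poses cannot leak into the
# sub-models. The standalone helper below mirrors that filtering with the same kapture calls
# used in the function; its name is illustrative.
def _keep_non_query_poses(records_camera: kapture.RecordsCamera,
                          trajectories: kapture.Trajectories,
                          query_image_names: set) -> kapture.Trajectories:
    kept = kapture.Trajectories()
    for timestamp, sensor_id, image_name in kapture.flatten(records_camera):
        if image_name in query_image_names:
            continue
        if (timestamp, sensor_id) in trajectories:
            pose = trajectories.get(timestamp)[sensor_id]
            kept.setdefault(timestamp, {})[sensor_id] = pose
    return kept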