def add_image_to_kapture(kdata_src, kdata_src_path, kdata_trg, img_name, pairs, add_pose=False):
    """
    Copy one image record (sensor, camera record, features, optional pose) from
    kdata_src into kdata_trg, and register the matches from pairs whose match
    file exists under kdata_src_path.

    :param kdata_src: source kapture data
    :param kdata_src_path: root path of the source kapture (where match files live)
    :param kdata_trg: target kapture data, modified in place
    :param img_name: name of the image to transfer
    :param pairs: iterable of image-name pairs whose matches should be registered
    :param add_pose: when True, also copy the pose from the source trajectories
    :return: the modified kdata_trg
    """
    # reverse index: image name -> (timestamp, sensor_id)
    timestamp_sensor_id_from_image_name = {img_name: (timestamp, sensor_id)
                                           for timestamp, sensor_id, img_name
                                           in kapture.flatten(kdata_src.records_camera)}
    timestamp, sensor_id = timestamp_sensor_id_from_image_name[img_name]
    kdata_trg.sensors[sensor_id] = kdata_src.sensors[sensor_id]
    kdata_trg.records_camera[timestamp, sensor_id] = img_name
    kdata_trg.keypoints.add(img_name)
    # fixed: identity comparison with None (was `!= None`, PEP 8 violation)
    if kdata_trg.descriptors is not None:
        kdata_trg.descriptors.add(img_name)
    if add_pose:
        kdata_trg.trajectories[timestamp, sensor_id] = kdata_src.trajectories[timestamp, sensor_id]
    if os.path.exists(kdata_src_path) and len(pairs) != 0:
        kdata_trg.matches = kapture.Matches()
        for i in pairs:
            image_matches_filepath = get_matches_fullpath((i[0], i[1]), kdata_src_path)
            # only register pairs whose match file actually exists on disk
            if os.path.exists(image_matches_filepath):
                kdata_trg.matches.add(i[0], i[1])
        kdata_trg.matches.normalize()
    return kdata_trg
def merge_matches(matches_list: List[Optional[kapture.Matches]], matches_paths: List[str], output_path: str) -> kapture.Matches:
    """
    Merge several matches lists in one.

    :param matches_list: list of matches to merge
    :param matches_paths: matches files paths
    :param output_path: root path of the merged matches files
    :return: merged matches
    """
    assert len(matches_list) > 0
    assert len(matches_paths) == len(matches_list)

    merged_matches = kapture.Matches()
    for current_matches, current_path in zip(matches_list, matches_paths):
        if current_matches is None:
            continue
        for image_pair in current_matches:
            # duplicated pairs are kept once; warn so the caller can investigate
            if image_pair in merged_matches:
                getLogger().warning(f'{image_pair} was found multiple times.')
                continue
            merged_matches.add(image_pair[0], image_pair[1])
            if not output_path:
                continue
            source_file = kapture.io.features.get_matches_fullpath(image_pair, current_path)
            destination_file = kapture.io.features.get_matches_fullpath(image_pair, output_path)
            if source_file != destination_file:
                # skip actual copy if file does not actually move.
                os.makedirs(os.path.dirname(destination_file), exist_ok=True)
                shutil.copy(source_file, destination_file)
    return merged_matches
def add_image_to_kapture(kdata_src, kdata_trg, img_name, pairs, keypoints_type, descriptors_type, add_pose=False):
    """
    Copy one image record (sensor, camera record, typed features, optional pose)
    from kdata_src into kdata_trg, and register the pairs that are present in
    the source matches of the given keypoints type.

    :param kdata_src: source kapture data
    :param kdata_trg: target kapture data, modified in place
    :param img_name: name of the image to transfer
    :param pairs: iterable of image-name pairs to register as matches
    :param keypoints_type: name of the keypoints subfolder / matches key
    :param descriptors_type: name of the descriptors subfolder
    :param add_pose: when True, also copy the pose from the source trajectories
    :return: the modified kdata_trg
    """
    # reverse index: image name -> (timestamp, sensor_id)
    timestamp_sensor_id_from_image_name = {img_name: (timestamp, sensor_id) for timestamp, sensor_id, img_name in kapture.flatten(kdata_src.records_camera)}
    timestamp, sensor_id = timestamp_sensor_id_from_image_name[img_name]
    kdata_trg.sensors[sensor_id] = kdata_src.sensors[sensor_id]
    kdata_trg.records_camera[timestamp, sensor_id] = img_name
    kdata_trg.keypoints[keypoints_type].add(img_name)
    if kdata_trg.descriptors is not None and descriptors_type in kdata_trg.descriptors:
        kdata_trg.descriptors[descriptors_type].add(img_name)
    if add_pose:
        kdata_trg.trajectories[timestamp, sensor_id] = kdata_src.trajectories[timestamp, sensor_id]
    if len(pairs) != 0:
        if kdata_trg.matches is None:
            kdata_trg.matches = {}
        # NOTE: any pre-existing matches of this keypoints_type in the target are replaced
        kdata_trg.matches[keypoints_type] = kapture.Matches()
        for i in pairs:
            # only keep pairs known to the source matches
            if i in kdata_src.matches[keypoints_type]:
                kdata_trg.matches[keypoints_type].add(i[0], i[1])
        kdata_trg.matches[keypoints_type].normalize()
    return kdata_trg
def match_features(self, kapture_data):
    """
    Match descriptors of sequential image pairs and write both the match files
    and the kapture Matches index into kapture_data (modified in place).

    Pairs (i, j) are formed with j in [i+1, i+self._sequential_length); when
    self._sequential_length is None, every image is matched against all
    following images. Matches with score <= self._minimal_score are filtered
    out when a minimal score is set.

    :param kapture_data: kapture data with records_camera and descriptors;
                         also expected to carry a kapture_path attribute
    """
    image_list = [filename for _, _, filename in kapture.flatten(kapture_data.records_camera)]
    descriptors = []
    descriptor_type = kapture_data.descriptors.dtype
    descriptor_size = kapture_data.descriptors.dsize
    # preload all descriptors in memory
    for image_path in image_list:
        descriptors_full_path = get_descriptors_fullpath(kapture_data.kapture_path, image_path)
        descriptors.append(image_descriptors_from_file(descriptors_full_path, descriptor_type, descriptor_size))
    kapture_data.matches = kapture.Matches()
    if self._sequential_length is None:
        self._sequential_length = len(image_list)
    for i in tqdm(range(len(image_list))):
        for j in range(i + 1, min(len(image_list), i + self._sequential_length)):
            matches = self._matcher.match_descriptors(descriptors[i], descriptors[j])
            if self._minimal_score is not None:
                # assumes column 2 of the matches array holds the score — TODO confirm
                mask = matches[:, 2] > self._minimal_score
                matches = matches[mask]
            kapture_data.matches.add(image_list[i], image_list[j])
            matches_full_path = get_matches_fullpath((image_list[i], image_list[j]), kapture_data.kapture_path)
            image_matches_to_file(matches_full_path, matches)
def test_matches_from_dir(self):
    """Check matching pairs listed from the sample kapture dir and the Matches constructor."""
    # fixed: removed a stray no-op expression statement (`self._samples_dirpath`)
    image_pairs_expected = {('00.jpg', '01.jpg'), ('00.jpg', '02.jpg'), ('00.jpg', '03.jpg'),
                            ('01.jpg', '02.jpg'), ('01.jpg', '03.jpg'), ('02.jpg', '03.jpg')}
    image_pairs_actual = set(kapture.io.features.matching_pairs_from_dirpath(self._kapture_dirpath))
    self.assertEqual(6, len(image_pairs_actual))
    self.assertEqual(image_pairs_expected, image_pairs_actual)
    # test matches constructor
    matches = kapture.Matches(image_pairs_expected)
    self.assertEqual(6, len(matches))
    self.assertEqual(image_pairs_expected, matches)
def compute_matches(input_path: str, pairsfile_path: str):
    """
    compute matches from descriptors. images to match are selected from a pairsfile (csv with name1, name2, score)

    :param input_path: input path to kapture input root directory
    :type input_path: str
    :param pairsfile_path: path to pairs file (csv with 3 fields, name1, name2, score)
    :type pairsfile_path: str
    """
    logger.info(f'compute_matches. loading input: {input_path}')
    kdata = kapture_from_dir(input_path)
    assert kdata.sensors is not None
    assert kdata.records_camera is not None
    assert kdata.descriptors is not None
    image_pairs = get_pairs_from_file(pairsfile_path)
    matcher = MatchPairNnTorch(use_cuda=torch.cuda.is_available())
    new_matches = kapture.Matches()
    logger.info('compute_matches. entering main loop...')
    hide_progress_bar = logger.getEffectiveLevel() > logging.INFO
    for image_path1, image_path2 in tqdm(image_pairs, disable=hide_progress_bar):
        # skip self-matching
        if image_path1 == image_path2:
            continue
        # enforce lexicographic order of the pair, as expected by kapture matches
        if image_path1 > image_path2:
            image_path1, image_path2 = image_path2, image_path1
        if image_path1 not in kdata.descriptors or image_path2 not in kdata.descriptors:
            logger.warning('unable to find descriptors for image pair : '
                           '\n\t{} \n\t{}'.format(image_path1, image_path2))
            continue
        descriptor1 = load_descriptors(input_path, image_path1, kdata.descriptors.dtype, kdata.descriptors.dsize)
        descriptor2 = load_descriptors(input_path, image_path2, kdata.descriptors.dtype, kdata.descriptors.dsize)
        matches = matcher.match_descriptors(descriptor1, descriptor2)
        # write the match file, then register the pair
        matches_path = get_matches_fullpath((image_path1, image_path2), input_path)
        image_matches_to_file(matches_path, matches)
        new_matches.add(image_path1, image_path2)
    if not matches_check_dir(new_matches, input_path):
        logger.critical('matching ended successfully but not all files were saved')
    logger.info('all done')
def test_init_matches(self):
    """Matches.normalize() must reorder each pair lexicographically, both for
    pairs given at construction time and for pairs added afterwards."""
    matches = kapture.Matches({('bb', 'aa'), ('cc', 'dd')})
    matches.normalize()
    for first, second in [('aa', 'bb'), ('cc', 'dd')]:
        self.assertTrue((first, second) in matches)
        self.assertFalse((second, first) in matches)
    matches.add('ee', 'ff')
    matches.add('hh', 'gg')
    matches.normalize()
    for first, second in [('ee', 'ff'), ('gg', 'hh')]:
        self.assertTrue((first, second) in matches)
        self.assertFalse((second, first) in matches)
def compute_matches_from_loaded_data(input_path: str,
                                     kdata: kapture.Kapture,
                                     image_pairs: list,
                                     overwrite_existing: bool = False):
    """
    Compute descriptor matches for the given image pairs and write one match
    file per pair under input_path.

    :param input_path: root path of the kapture data (descriptors and matches)
    :param kdata: loaded kapture data; requires sensors, records_camera, descriptors
    :param image_pairs: list of (image_name1, image_name2) pairs to match
    :param overwrite_existing: when False, pairs already present in kdata.matches
                               are not recomputed
    """
    assert kdata.sensors is not None
    assert kdata.records_camera is not None
    assert kdata.descriptors is not None
    matcher = MatchPairNnTorch(use_cuda=torch.cuda.is_available())
    new_matches = kapture.Matches()
    logger.info('compute_matches. entering main loop...')
    hide_progress_bar = logger.getEffectiveLevel() > logging.INFO
    skip_count = 0
    for image_path1, image_path2 in tqdm(image_pairs, disable=hide_progress_bar):
        # skip self-matching
        if image_path1 == image_path2:
            continue
        # enforce lexicographic order of the pair, as expected by kapture matches
        if image_path1 > image_path2:
            image_path1, image_path2 = image_path2, image_path1
        # skip existing matches
        if (not overwrite_existing) and (kdata.matches is not None) and ((image_path1, image_path2) in kdata.matches):
            new_matches.add(image_path1, image_path2)
            skip_count += 1
            continue
        if image_path1 not in kdata.descriptors or image_path2 not in kdata.descriptors:
            logger.warning('unable to find descriptors for image pair : '
                           '\n\t{} \n\t{}'.format(image_path1, image_path2))
            continue
        descriptor1 = load_descriptors(input_path, image_path1, kdata.descriptors.dtype, kdata.descriptors.dsize)
        descriptor2 = load_descriptors(input_path, image_path2, kdata.descriptors.dtype, kdata.descriptors.dsize)
        matches = matcher.match_descriptors(descriptor1, descriptor2)
        matches_path = get_matches_fullpath((image_path1, image_path2), input_path)
        image_matches_to_file(matches_path, matches)
        new_matches.add(image_path1, image_path2)
    if not overwrite_existing:
        logger.debug(f'{skip_count} pairs were skipped because the match file already existed')
    if not matches_check_dir(new_matches, input_path):
        logger.critical('matching ended successfully but not all files were saved')
    logger.info('all done')
def sub_kapture_from_img_list(kdata, kdata_path, img_list, pairs):
    """
    Build a minimal kapture restricted to the given images and pairs.

    :param kdata: source kapture data
    :param kdata_path: root path of the source kapture (where match files live)
    :param img_list: names of the images to keep
    :param pairs: image-name pairs whose match files should be registered
    :return: a new kapture.Kapture containing only the requested subset
    """
    trajectories = kapture.Trajectories()
    sensors = kapture.Sensors()
    records = kapture.RecordsCamera()
    # mirror the feature containers of the source (same type/dtype/dsize)
    keypoints = kapture.Keypoints(kdata.keypoints._tname, kdata.keypoints._dtype, kdata.keypoints._dsize)
    # fixed: identity comparison with None (was `!= None`, PEP 8 violation)
    if kdata.descriptors is not None:
        descriptors = kapture.Descriptors(kdata.descriptors._tname, kdata.descriptors._dtype, kdata.descriptors._dsize)
    else:
        descriptors = None
    matches = kapture.Matches()
    # reverse index: image name -> (timestamp, sensor_id)
    timestamp_sensor_id_from_image_name = {img_name: (timestamp, sensor_id)
                                           for timestamp, sensor_id, img_name
                                           in kapture.flatten(kdata.records_camera)}
    for img in img_list:
        timestamp, sensor_id = timestamp_sensor_id_from_image_name[img]
        sensors[sensor_id] = kdata.sensors[sensor_id]
        records[timestamp, sensor_id] = img
        # robustness fix: copy the pose only when one exists, so images without
        # a trajectory entry no longer raise KeyError (consistent with the
        # keypoints_type variant of this helper)
        if (timestamp, sensor_id) in kdata.trajectories:
            trajectories[timestamp, sensor_id] = kdata.trajectories[timestamp][sensor_id]
        keypoints.add(img)
        # use the local container (fixes the second `!= None` as well)
        if descriptors is not None:
            descriptors.add(img)
    for i in pairs:
        image_matches_filepath = get_matches_fullpath((i[0], i[1]), kdata_path)
        # only register pairs whose match file actually exists on disk
        if os.path.exists(image_matches_filepath):
            matches.add(i[0], i[1])
    matches.normalize()
    return kapture.Kapture(sensors=sensors, trajectories=trajectories, records_camera=records,
                           descriptors=descriptors, keypoints=keypoints, matches=matches)
def sub_kapture_from_img_list(kdata, img_list, pairs, keypoints_type, descriptors_type):
    """
    Build a minimal kapture restricted to the given images and pairs, for the
    given keypoints/descriptors types.

    :param kdata: source kapture data
    :param img_list: names of the images to keep
    :param pairs: image-name pairs to register as matches (if present in the source)
    :param keypoints_type: name of the keypoints subfolder / matches key
    :param descriptors_type: name of the descriptors subfolder
    :return: a new kapture.Kapture containing only the requested subset
    """
    trajectories = kapture.Trajectories()
    sensors = kapture.Sensors()
    records = kapture.RecordsCamera()
    # mirror the feature containers of the source (same type/dtype/dsize)
    keypoints = kapture.Keypoints(kdata.keypoints[keypoints_type].type_name,
                                  kdata.keypoints[keypoints_type].dtype,
                                  kdata.keypoints[keypoints_type].dsize)
    if kdata.descriptors is not None and descriptors_type in kdata.descriptors:
        descriptors = kapture.Descriptors(kdata.descriptors[descriptors_type].type_name,
                                          kdata.descriptors[descriptors_type].dtype,
                                          kdata.descriptors[descriptors_type].dsize,
                                          kdata.descriptors[descriptors_type].keypoints_type,
                                          kdata.descriptors[descriptors_type].metric_type)
    else:
        descriptors = None
    matches = kapture.Matches()
    # reverse index: image name -> (timestamp, sensor_id)
    timestamp_sensor_id_from_image_name = {img_name: (timestamp, sensor_id)
                                           for timestamp, sensor_id, img_name
                                           in kapture.flatten(kdata.records_camera)}
    for img in img_list:
        timestamp, sensor_id = timestamp_sensor_id_from_image_name[img]
        sensors[sensor_id] = kdata.sensors[sensor_id]
        records[timestamp, sensor_id] = img
        # copy the pose only when one exists
        if (timestamp, sensor_id) in kdata.trajectories:
            pose = kdata.trajectories[timestamp][sensor_id]
            trajectories[timestamp, sensor_id] = pose
        keypoints.add(img)
        # fixed: test the local container, not kdata.descriptors — when
        # descriptors_type is absent from kdata.descriptors, `descriptors` is
        # None and the old check (`kdata.descriptors is not None`) crashed with
        # AttributeError on descriptors.add(img)
        if descriptors is not None:
            descriptors.add(img)
    for i in pairs:
        # only keep pairs known to the source matches
        if i in kdata.matches[keypoints_type]:
            matches.add(i[0], i[1])
    matches.normalize()
    return kapture.Kapture(sensors=sensors, trajectories=trajectories, records_camera=records,
                           descriptors={descriptors_type: descriptors},
                           keypoints={keypoints_type: keypoints},
                           matches={keypoints_type: matches})
def matches_from_dir(kapture_dirpath: str,
                     image_filenames: Optional[Set[str]] = None,
                     matches_pairsfile_path: Optional[str] = None) -> kapture.Matches:
    """
    Reads and builds Matches from images_filenames if given, or directly from actual files in kapture_dirpath.

    :param kapture_dirpath: root path of kapture
    :param image_filenames: optional list of image file names
    :param matches_pairsfile_path: text file in the csv format; where each line is image_name1, image_name2, score
    :return: Matches
    """
    if matches_pairsfile_path is None:
        # populate files from disk
        match_pairs_generator = kapture.io.features.matching_pairs_from_dirpath(kapture_dirpath)
    else:
        with open(matches_pairsfile_path, 'r') as fid:
            table = table_from_file(fid)
            # get matches list from pairsfile; pairs are normalized to lexicographic order
            match_pairs_generator = ((query_name, map_name)
                                     if query_name < map_name else (map_name, query_name)
                                     for query_name, map_name, _ in table)
            # keeps only the one that actually exists on disk
            match_pairs_generator = (image_pair
                                     for image_pair in match_pairs_generator
                                     if path.isfile(kapture.io.features.get_matches_fullpath(image_pair, kapture_dirpath))
                                     )
    if image_filenames is not None:
        # retains only files that correspond to known images
        match_pairs_generator = (image_pair
                                 for image_pair in match_pairs_generator
                                 if image_pair[0] in image_filenames and image_pair[1] in image_filenames
                                 )
    # materialize the generator (note: in the pairsfile branch the generator is
    # consumed here, after the file is closed, because table_from_file already
    # produced the rows — presumably it reads eagerly; verify against its implementation
    match_pairs = set(match_pairs_generator)
    return kapture.Matches(match_pairs)
def local_sfm_from_loaded_data(kdata_map: kapture.Kapture,
                               kdata_map_gv: kapture.Kapture,
                               kdata_query: kapture.Kapture,
                               map_plus_query_path: str,
                               map_plus_query_gv_path: str,
                               tar_handlers_map: Optional[TarCollection],
                               tar_handlers_map_gv: Optional[TarCollection],
                               descriptors_type: Optional[str],
                               pairsfile_path: str,
                               output_path_root: str,
                               colmap_binary: str,
                               force: bool):
    """
    Localize query images in a COLMAP model built from topk retrieved images.

    :param map_plus_query_path: path to the kapture data consisting of mapping and query data (sensors and reconstruction)
    :param map_plus_query_gv_path: path to the kapture data consisting of mapping and query data after geometric verification (sensors and reconstruction)
    :param query_path: path to the query kapture data (sensors)
    :param descriptors_type: type of descriptors, name of the descriptors subfolder
    :param pairsfile_path: path to the pairsfile that contains the topk retrieved mapping images for each query image
    :param output_path_root: root path where outputs should be stored
    :param colmap_binary: path to the COLMAP binary
    :param force: silently overwrite already existing results
    """
    # load query kapture (we use query kapture to reuse sensor_ids etc.)
    if kdata_query.trajectories:
        logger.warning("Query data contains trajectories: they will be ignored")
        kdata_query.trajectories.clear()
    else:
        kdata_query.trajectories = kapture.Trajectories()

    # clear query trajectories in map_plus_query
    kdata_map_cleared_trajectories = kapture.Trajectories()
    query_image_list = set(kdata_query.records_camera.data_list())
    for timestamp, subdict in kdata_map.records_camera.items():
        for sensor_id, image_name in subdict.items():
            # drop poses of query images; keep only mapping-image poses
            if image_name in query_image_list:
                continue
            if (timestamp, sensor_id) in kdata_map.trajectories:
                pose = kdata_map.trajectories.get(timestamp)[sensor_id]
                kdata_map_cleared_trajectories.setdefault(timestamp, {})[sensor_id] = pose
    kdata_map.trajectories = kdata_map_cleared_trajectories

    # load output kapture
    output_path = os.path.join(output_path_root, 'localized')
    if os.path.exists(os.path.join(output_path, 'sensors/trajectories.txt')):
        # resume: reuse previously localized poses when records match and not forced
        kdata_output = kapture_from_dir(output_path)
        if kdata_query.records_camera == kdata_output.records_camera and len(kdata_output.trajectories) != 0 and not force:
            kdata_query.trajectories = kdata_output.trajectories

    # COLMAP does not handle rigs: flatten rig poses into per-camera poses
    if kdata_map.rigs is not None:
        rigs_remove_inplace(kdata_map.trajectories, kdata_map.rigs)
    if kdata_map_gv.rigs is not None:
        rigs_remove_inplace(kdata_map_gv.trajectories, kdata_map_gv.rigs)

    # load pairsfile: img_query -> list of retrieved mapping images
    pairs = {}
    with open(pairsfile_path, 'r') as fid:
        table = table_from_file(fid)
        for img_query, img_map, _ in table:
            if img_query not in pairs:
                pairs[img_query] = []
            pairs[img_query].append(img_map)

    kdata_sub_colmap_path = os.path.join(output_path_root, 'colmap')
    kdata_reg_query_path = os.path.join(output_path_root, 'query_registered')
    sub_kapture_pairsfile_path = os.path.join(output_path_root, 'tmp_pairs.txt')

    if descriptors_type is None:
        descriptors_type = try_get_only_key_from_collection(kdata_map.descriptors)
    assert descriptors_type is not None
    assert descriptors_type in kdata_map.descriptors
    keypoints_type = kdata_map.descriptors[descriptors_type].keypoints_type

    # init matches for kdata_map and kdata_map_gv
    if kdata_map.matches is None:
        kdata_map.matches = {}
    if keypoints_type not in kdata_map.matches:
        kdata_map.matches[keypoints_type] = kapture.Matches()
    if kdata_map_gv.matches is None:
        kdata_map_gv.matches = {}
    if keypoints_type not in kdata_map_gv.matches:
        kdata_map_gv.matches[keypoints_type] = kapture.Matches()

    # run all matching
    # loop over query images
    img_skip_list = set()
    for img_query, img_list_map in pairs.items():
        if pose_found(kdata_query, img_query):
            # already localized in a previous (resumed) run
            logger.info(f'{img_query} already processed, skipping...')
            img_skip_list.add(img_query)
            continue
        else:
            # pairs among mapping images + pairs query-vs-mapping images
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'matching for {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)
            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]
            # match missing pairs
            # kdata_map.matches is being updated by compute_matches_from_loaded_data
            compute_matches_from_loaded_data(map_plus_query_path,
                                             tar_handlers_map,
                                             kdata_map,
                                             descriptors_type,
                                             pairs_all)

    # if kdata_map have matches in tar, they need to be switched to read mode
    matches_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map)
    if matches_handler is not None:
        matches_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_path)
        tar_handlers_map.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # run all gv
    # loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            # recompute the pairs
            map_pairs = get_pairfile_from_img_list(img_list_map)
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'geometric verification of {img_query}')
                table_to_file(fid, map_pairs)
                table_to_file(fid, query_pairs)
            pairs_all = map_pairs + query_pairs
            pairs_all = [(i, j) for i, j, _ in pairs_all]
            # skip if every pair was already geometrically verified
            if all(pair in kdata_map_gv.matches[keypoints_type] for pair in pairs_all):
                continue
            # create a sub kapture in order to minimize the amount of data exported to colmap
            # kdata_sub needs to be re-created to add the new matches
            kdata_sub = sub_kapture_from_img_list(kdata_map, img_list_map + [img_query], pairs_all,
                                                  keypoints_type, descriptors_type)
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map + [img_query], pairs_all,
                                                     keypoints_type, descriptors_type)
            # run colmap gv on missing pairs
            run_colmap_gv_from_loaded_data(kdata_sub,
                                           kdata_sub_gv,
                                           map_plus_query_path,
                                           map_plus_query_gv_path,
                                           tar_handlers_map,
                                           tar_handlers_map_gv,
                                           colmap_binary,
                                           keypoints_type,
                                           [],
                                           True)
            # update kdata_map_gv.matches
            kdata_map_gv.matches[keypoints_type].update(kdata_sub_gv.matches[keypoints_type])

    # if kdata_map_gv have matches in tar, they need to be switched to read mode
    matches_gv_handler = retrieve_tar_handler_from_collection(kapture.Matches, keypoints_type, tar_handlers_map_gv)
    if matches_gv_handler is not None:
        # NOTE(review): looks like a leftover debug print — consider removing
        print(matches_gv_handler)
        matches_gv_handler.close()
        tarfile_path = get_feature_tar_fullpath(kapture.Matches, keypoints_type, map_plus_query_gv_path)
        tar_handlers_map_gv.matches[keypoints_type] = TarHandler(tarfile_path, 'r')

    # loop over query images
    for img_query, img_list_map in pairs.items():
        if img_query in img_skip_list:
            continue
        else:
            map_pairs = get_pairfile_from_img_list(img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                logger.info(f'mapping and localization for {img_query}')
                table_to_file(fid, map_pairs)
            map_pairs = [(i, j) for i, j, _ in map_pairs]
            kdata_sub_gv = sub_kapture_from_img_list(kdata_map_gv, img_list_map, map_pairs,
                                                     keypoints_type, descriptors_type)
            # sanity check
            if len(map_pairs) != len(kdata_sub_gv.matches[keypoints_type]):
                logger.info(f'not all mapping matches available')
            # build COLMAP map
            try:
                colmap_build_map_from_loaded_data(
                    kdata_sub_gv,
                    map_plus_query_gv_path,
                    tar_handlers_map_gv,
                    kdata_sub_colmap_path, colmap_binary,
                    keypoints_type,
                    False, [], ['model_converter'], True)
            except ValueError:
                logger.info(f'{img_query} was not localized')
                continue
            if not os.path.exists(os.path.join(kdata_sub_colmap_path, 'reconstruction/images.bin')):
                logger.info(f'colmap mapping for {img_query} did not work, image was not localized')
                continue
            query_pairs = get_pairfile_img_vs_img_list(img_query, img_list_map)
            with open(sub_kapture_pairsfile_path, 'w') as fid:
                table_to_file(fid, query_pairs)
            query_pairs = [(i, j) for i, j, _ in query_pairs]
            # add the query image (records, features, matches) to the sub kapture
            query_img_kapture_gv = add_image_to_kapture(kdata_map_gv,
                                                        kdata_sub_gv, img_query, query_pairs,
                                                        keypoints_type, descriptors_type)
            # sanity check
            if len(query_pairs) != len(query_img_kapture_gv.matches[keypoints_type]):
                logger.info(f'not all query matches available')
            # localize in COLMAP map
            try:
                colmap_localize_from_loaded_data(
                    query_img_kapture_gv,
                    map_plus_query_gv_path,
                    tar_handlers_map_gv,
                    os.path.join(kdata_sub_colmap_path, 'registered'),
                    os.path.join(kdata_sub_colmap_path, 'colmap.db'),
                    os.path.join(kdata_sub_colmap_path, 'reconstruction'),
                    colmap_binary,
                    keypoints_type,
                    False,
                    ['--Mapper.ba_refine_focal_length', '0',
                     '--Mapper.ba_refine_principal_point', '0',
                     '--Mapper.ba_refine_extra_params', '0',
                     '--Mapper.min_num_matches', '4',
                     '--Mapper.init_min_num_inliers', '4',
                     '--Mapper.abs_pose_min_num_inliers', '4',
                     '--Mapper.abs_pose_min_inlier_ratio', '0.05',
                     '--Mapper.ba_local_max_num_iterations', '50',
                     '--Mapper.abs_pose_max_error', '20',
                     '--Mapper.filter_max_reproj_error', '12'],
                    [], True)
            except ValueError:
                logger.info(f'{img_query} was not localized')
                continue
            if not os.path.exists(os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'),
                                               'reconstruction/images.txt')):
                logger.info(f'colmap localization of {img_query} did not work, image was not localized')
                continue
            # add to results kapture
            kdata_reg_query = import_colmap(
                kdata_reg_query_path,
                os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'colmap.db'),
                os.path.join(os.path.join(kdata_sub_colmap_path, 'registered'), 'reconstruction'),
                None, None,
                True, True, True,
                TransferAction.skip)
            if add_pose_to_query_kapture(kdata_reg_query, kdata_query, img_query):
                logger.info('successfully localized')
            # write results (after each image to see the progress)
            kapture_to_dir(output_path, kdata_query)

    # clean up (e.g. remove temporal files and folders)
    safe_remove_any_path(kdata_sub_colmap_path, True)
    safe_remove_any_path(kdata_reg_query_path, True)
    safe_remove_file(sub_kapture_pairsfile_path, True)

    logger.info('all done')
def _import_features_and_matches(opensfm_root_dir, kapture_root_dir, disable_tqdm)\
        -> Tuple[kapture.Descriptors, kapture.Keypoints, kapture.Matches]:
    """
    Import OpenSfM features (keypoints + descriptors) and matches into kapture files.

    :param opensfm_root_dir: root of the OpenSfM project ('features/' and 'matches/' subdirs)
    :param kapture_root_dir: root of the target kapture where feature/match files are written
    :param disable_tqdm: when True, hide the progress bars
    :return: (descriptors, keypoints, matches); descriptors/keypoints are None
             when no 'features' directory was found
    """
    # import features (keypoints + descriptors)
    kapture_keypoints = None  # kapture.Keypoints(type_name='opensfm', dsize=4, dtype=np.float64)
    kapture_descriptors = None  # kapture.Descriptors(type_name='opensfm', dsize=128, dtype=np.uint8)
    opensfm_features_dir_path = path.join(opensfm_root_dir, 'features')
    opensfm_features_suffix = '.features.npz'
    if path.isdir(opensfm_features_dir_path):
        logger.info('importing keypoints and descriptors ...')
        # recursively list *.features.npz files
        opensfm_features_file_list = (path.join(dp, fn)
                                      for dp, _, fs in os.walk(opensfm_features_dir_path) for fn in fs)
        opensfm_features_file_list = (filepath
                                      for filepath in opensfm_features_file_list
                                      if filepath.endswith(opensfm_features_suffix))
        for opensfm_feature_filename in tqdm(opensfm_features_file_list, disable=disable_tqdm):
            # image name = feature file path relative to features dir, suffix stripped
            image_filename = path.relpath(opensfm_feature_filename,
                                          opensfm_features_dir_path)[:-len(opensfm_features_suffix)]
            opensfm_image_features = np.load(opensfm_feature_filename)
            opensfm_image_keypoints = opensfm_image_features['points']
            opensfm_image_descriptors = opensfm_image_features['descriptors']
            logger.debug(f'parsing keypoints and descriptors in {opensfm_feature_filename}')
            # lazily create the containers from the first file's shapes/dtypes
            if kapture_keypoints is None:
                # print(type(opensfm_image_keypoints.dtype))
                # HAHOG = Hessian Affine feature point detector + HOG descriptor
                kapture_keypoints = kapture.Keypoints(
                    type_name='HessianAffine',
                    dsize=opensfm_image_keypoints.shape[1],
                    dtype=opensfm_image_keypoints.dtype)
            if kapture_descriptors is None:
                kapture_descriptors = kapture.Descriptors(
                    type_name='HOG',
                    dsize=opensfm_image_descriptors.shape[1],
                    dtype=opensfm_image_descriptors.dtype)
            # convert keypoints file
            keypoint_file_path = kapture.io.features.get_features_fullpath(
                data_type=kapture.Keypoints,
                kapture_dirpath=kapture_root_dir,
                image_filename=image_filename)
            kapture.io.features.image_keypoints_to_file(
                filepath=keypoint_file_path,
                image_keypoints=opensfm_image_keypoints)
            # register the file
            kapture_keypoints.add(image_filename)
            # convert descriptors file
            descriptor_file_path = kapture.io.features.get_features_fullpath(
                data_type=kapture.Descriptors,
                kapture_dirpath=kapture_root_dir,
                image_filename=image_filename)
            kapture.io.features.image_descriptors_to_file(
                filepath=descriptor_file_path,
                image_descriptors=opensfm_image_descriptors)
            # register the file
            kapture_descriptors.add(image_filename)

    # import matches
    kapture_matches = kapture.Matches()
    opensfm_matches_suffix = '_matches.pkl.gz'
    opensfm_matches_dir_path = path.join(opensfm_root_dir, 'matches')
    if path.isdir(opensfm_matches_dir_path):
        logger.info('importing matches ...')
        opensfm_matches_file_list = (path.join(dp, fn)
                                     for dp, _, fs in os.walk(opensfm_matches_dir_path) for fn in fs)
        opensfm_matches_file_list = (filepath
                                     for filepath in opensfm_matches_file_list
                                     if filepath.endswith(opensfm_matches_suffix))
        for opensfm_matches_filename in tqdm(opensfm_matches_file_list, disable=disable_tqdm):
            image_filename_1 = path.relpath(opensfm_matches_filename,
                                            opensfm_matches_dir_path)[:-len(opensfm_matches_suffix)]
            logger.debug(f'parsing matches in {image_filename_1}')
            # OpenSfM stores, per image, a pickled dict {image2: matches array}
            with gzip.open(opensfm_matches_filename, 'rb') as f:
                opensfm_matches = pickle.load(f)
                for image_filename_2, opensfm_image_matches in opensfm_matches.items():
                    image_pair = (image_filename_1, image_filename_2)
                    # register the pair to kapture
                    kapture_matches.add(*image_pair)
                    # convert the bin file to kapture
                    kapture_matches_filepath = kapture.io.features.get_matches_fullpath(
                        image_filename_pair=image_pair,
                        kapture_dirpath=kapture_root_dir)
                    kapture_image_matches = np.hstack([
                        opensfm_image_matches.astype(np.float64),
                        # no matches scoring = assume all to one
                        np.ones(shape=(opensfm_image_matches.shape[0], 1), dtype=np.float64)])
                    kapture.io.features.image_matches_to_file(kapture_matches_filepath, kapture_image_matches)
    return kapture_descriptors, kapture_keypoints, kapture_matches
def compute_matches_from_loaded_data(input_path: str,
                                     tar_handlers: Optional[TarCollection],
                                     kdata: kapture.Kapture,
                                     descriptors_type: Optional[str],
                                     image_pairs: list,
                                     overwrite_existing: bool = False):
    """
    Compute descriptor matches for the given image pairs and register them in
    kdata.matches (modified in place); one match file per pair is written under
    input_path (possibly inside a tar via tar_handlers).

    :param input_path: root path of the kapture data
    :param tar_handlers: collection of preloaded tar archives, or None
    :param kdata: loaded kapture data; requires sensors, records_camera, descriptors
    :param descriptors_type: name of the descriptors subfolder; when None, the
                             single key of kdata.descriptors is used
    :param image_pairs: list of (image_name1, image_name2) pairs to match
    :param overwrite_existing: when False, pairs already present in kdata.matches
                               are not recomputed
    """
    assert kdata.sensors is not None
    assert kdata.records_camera is not None
    assert kdata.descriptors is not None
    os.umask(0o002)
    if descriptors_type is None:
        descriptors_type = try_get_only_key_from_collection(kdata.descriptors)
    assert descriptors_type is not None
    assert descriptors_type in kdata.descriptors
    # matches are indexed by the keypoints type the descriptors refer to
    keypoints_type = kdata.descriptors[descriptors_type].keypoints_type
    # assert kdata.descriptors[descriptors_type].metric_type == "L2"
    matcher = MatchPairNnTorch(use_cuda=torch.cuda.is_available())
    new_matches = kapture.Matches()
    logger.info('compute_matches. entering main loop...')
    hide_progress_bar = logger.getEffectiveLevel() > logging.INFO
    skip_count = 0
    for image_path1, image_path2 in tqdm(image_pairs, disable=hide_progress_bar):
        # skip self-matching
        if image_path1 == image_path2:
            continue
        # enforce lexicographic order of the pair, as expected by kapture matches
        if image_path1 > image_path2:
            image_path1, image_path2 = image_path2, image_path1
        # skip existing matches
        if (not overwrite_existing) \
                and (kdata.matches is not None) \
                and keypoints_type in kdata.matches \
                and ((image_path1, image_path2) in kdata.matches[keypoints_type]):
            new_matches.add(image_path1, image_path2)
            skip_count += 1
            continue
        if image_path1 not in kdata.descriptors[descriptors_type] \
                or image_path2 not in kdata.descriptors[descriptors_type]:
            logger.warning('unable to find descriptors for image pair : '
                           '\n\t{} \n\t{}'.format(image_path1, image_path2))
            continue
        descriptor1 = load_descriptors(descriptors_type, input_path, tar_handlers,
                                       image_path1, kdata.descriptors[descriptors_type].dtype,
                                       kdata.descriptors[descriptors_type].dsize)
        descriptor2 = load_descriptors(descriptors_type, input_path, tar_handlers,
                                       image_path2, kdata.descriptors[descriptors_type].dtype,
                                       kdata.descriptors[descriptors_type].dsize)
        matches = matcher.match_descriptors(descriptor1, descriptor2)
        matches_path = get_matches_fullpath((image_path1, image_path2), keypoints_type, input_path, tar_handlers)
        image_matches_to_file(matches_path, matches)
        new_matches.add(image_path1, image_path2)
    if not overwrite_existing:
        logger.debug(f'{skip_count} pairs were skipped because the match file already existed')
    if not matches_check_dir(new_matches, keypoints_type, input_path, tar_handlers):
        logger.critical('matching ended successfully but not all files were saved')
    # update kapture matches
    if kdata.matches is None:
        kdata.matches = {}
    if keypoints_type not in kdata.matches:
        kdata.matches[keypoints_type] = kapture.Matches()
    kdata.matches[keypoints_type].update(new_matches)
    logger.info('all done')
def get_matches_from_database(database: COLMAPDatabase,
                              images: kapture.RecordsCamera,
                              kapture_dirpath: str,
                              no_geometric_filtering: bool) -> kapture.Matches:
    """
    Writes Matches files and return the list in kapture format from the colmap database.

    :param database: input colmap database.
    :param images: input list of images (as RecordsCamera).
    :param kapture_dirpath: input root path to kapture.
    :param no_geometric_filtering: only retrieve matches with geometric consistency.
    :return: kapture matches
    """
    kapture_matches = kapture.Matches()
    # check there is geometric matches available
    matches_table_name = 'matches'
    if not no_geometric_filtering:
        if not exists_table('two_view_geometries', database):
            logger.warning("No table: two_view_geometries: skipping geometric filtering")
        else:
            request = database.execute('SELECT COUNT (*) FROM two_view_geometries')
            nb_verified_matches = next(request)[0]
            if nb_verified_matches > 0:
                request = database.execute('SELECT COUNT (*) FROM matches')
                nb_total_matches = next(request)[0]
                logger.info('keeps {}% of verified matches ({}/{}) ...'.format(
                    nb_verified_matches / nb_total_matches * 100,
                    nb_verified_matches, nb_total_matches))
                matches_table_name = 'two_view_geometries'

    # pull all pairs at once; pairs with 0 rows get an empty uint32 array
    colmap_matches = [(pair_id_to_image_ids(pair_id), blob_to_array(data, np.uint32, (rows, cols)))
                      if rows > 0 else
                      (pair_id_to_image_ids(pair_id), np.empty((rows, cols), dtype=np.uint32))
                      for pair_id, rows, cols, data in database.execute(
                          'SELECT pair_id, rows, cols, data FROM {}'.format(matches_table_name))]

    logger.debug('matches: {}'.format(len(colmap_matches)))
    hide_progressbar = logger.getEffectiveLevel() > logging.INFO
    for (image_id1, image_id2), image_matches in tqdm(colmap_matches, disable=hide_progressbar):
        if image_id1 not in images or image_id2 not in images:
            logger.critical('inconsistent image ID {} or {}'.format(image_id1, image_id2))
            continue
        filename1 = next((v for v in images[image_id1].values()), None)
        filename2 = next((v for v in images[image_id2].values()), None)
        assert filename1 and filename2
        if (filename1, filename2) != kapture.Matches.lexical_order(filename1, filename2):
            # have to swap matches (keypoint image1, keypoint image2) become (keypoint image2, keypoint image1)
            image_matches = image_matches[:, ::-1]
            filename1, filename2 = kapture.Matches.lexical_order(filename1, filename2)
        # actually write the file
        # convert colmap image matches into kapture (cast to float and add a score column)
        # fixed: np.float (deprecated in NumPy 1.20, removed in 1.24) -> np.float64,
        # which is what the alias resolved to
        image_matches = image_matches.astype(np.float64)
        image_matches = np.hstack([image_matches, np.zeros((image_matches.shape[0], 1))])
        image_matches_filepath = kapture.io.features.get_matches_fullpath((filename1, filename2), kapture_dirpath)
        kapture.io.features.image_matches_to_file(image_matches_filepath, image_matches)
        # register the matching in kapture
        kapture_matches.add(filename1, filename2)
    return kapture_matches
def run_colmap_gv_from_loaded_data(kapture_none_matches: kapture.Kapture,
                                   kapture_colmap_matches: kapture.Kapture,
                                   kapture_none_matches_dirpath: str,
                                   kapture_colmap_matches_dirpath: str,
                                   tar_handlers_none_matches: Optional[TarCollection],
                                   tar_handlers_colmap_matches: Optional[TarCollection],
                                   colmap_binary: str,
                                   keypoints_type: Optional[str],
                                   skip_list: List[str],
                                   force: bool):
    """
    Run geometric verification (colmap matches_importer) on the matches of
    kapture_none_matches that are not already present in kapture_colmap_matches,
    then import the verified matches back into kapture_colmap_matches (in place).

    :param kapture_none_matches: kapture data with the raw (unverified) matches
    :param kapture_colmap_matches: kapture data holding already-verified matches; updated in place
    :param kapture_none_matches_dirpath: root path of the raw-matches kapture
    :param kapture_colmap_matches_dirpath: root path of the verified-matches kapture (colmap.db is created here)
    :param tar_handlers_none_matches: collection of preloaded tar archives for the raw-matches kapture
    :param tar_handlers_colmap_matches: collection of preloaded tar archives for the verified-matches kapture
    :param colmap_binary: path to the colmap binary executable
    :param keypoints_type: type of keypoints; if None, deduced when the matches hold a single type
    :param skip_list: list of steps to skip ('delete_existing', 'matches_importer', 'import', 'delete_db')
    :param force: silently overwrite files if they already exist
    """
    logger.info('run_colmap_gv...')
    if not (kapture_none_matches.records_camera and kapture_none_matches.sensors and
            kapture_none_matches.keypoints and kapture_none_matches.matches):
        raise ValueError('records_camera, sensors, keypoints, matches are mandatory')

    # COLMAP does not fully support rigs.
    if kapture_none_matches.rigs is not None and kapture_none_matches.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_none_matches.trajectories, kapture_none_matches.rigs)

    # Set fixed name for COLMAP database
    colmap_db_path = os.path.join(kapture_colmap_matches_dirpath, 'colmap.db')
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)

    # keypoints_type can only be deduced automatically when there is exactly one type present
    if keypoints_type is None:
        keypoints_type = try_get_only_key_from_collection(kapture_none_matches.matches)
    assert keypoints_type is not None
    assert keypoints_type in kapture_none_matches.keypoints
    assert keypoints_type in kapture_none_matches.matches

    if 'matches_importer' not in skip_list:
        logger.debug('compute matches difference.')
        # only verify the pairs not already present in kapture_colmap_matches
        if kapture_colmap_matches.matches is not None and keypoints_type in kapture_colmap_matches.matches:
            colmap_matches = kapture_colmap_matches.matches[keypoints_type]
        else:
            colmap_matches = kapture.Matches()
        matches_to_verify = kapture.Matches(
            kapture_none_matches.matches[keypoints_type].difference(colmap_matches))
        kapture_data_to_export = kapture.Kapture(
            sensors=kapture_none_matches.sensors,
            trajectories=kapture_none_matches.trajectories,
            records_camera=kapture_none_matches.records_camera,
            keypoints={keypoints_type: kapture_none_matches.keypoints[keypoints_type]},
            matches={keypoints_type: matches_to_verify})
        # creates a new database with matches
        logger.debug('export matches difference to db.')
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        database_extra.kapture_to_colmap(kapture_data_to_export, kapture_none_matches_dirpath,
                                         tar_handlers_none_matches,
                                         colmap_db,
                                         keypoints_type,
                                         None,
                                         export_two_view_geometry=False)
        # close db before running colmap processes in order to avoid locks
        colmap_db.close()

        logger.debug('run matches_importer command.')
        colmap_lib.run_matches_importer_from_kapture_matches(
            colmap_binary,
            colmap_use_cpu=True,
            colmap_gpu_index=None,
            colmap_db_path=colmap_db_path,
            kapture_matches=matches_to_verify,
            force=force
        )

    if 'import' not in skip_list:
        logger.debug('import verified matches.')
        # relax umask so the written match files are group-writable
        os.umask(0o002)
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        kapture_data = kapture.Kapture()
        kapture_data.records_camera, _ = get_images_and_trajectories_from_database(colmap_db)
        kapture_data.matches = {
            keypoints_type: get_matches_from_database(colmap_db, kapture_data.records_camera,
                                                      kapture_colmap_matches_dirpath,
                                                      tar_handlers_colmap_matches,
                                                      keypoints_type,
                                                      no_geometric_filtering=False)
        }
        colmap_db.close()

        # merge the freshly verified matches into the caller's kapture (in place)
        if kapture_colmap_matches.matches is None:
            kapture_colmap_matches.matches = {}
        if keypoints_type not in kapture_colmap_matches.matches:
            kapture_colmap_matches.matches[keypoints_type] = kapture.Matches()
        kapture_colmap_matches.matches[keypoints_type].update(kapture_data.matches[keypoints_type])

    if 'delete_db' not in skip_list:
        logger.debug('delete intermediate colmap db.')
        os.remove(colmap_db_path)
def colmap_localize_from_loaded_data(kapture_data: kapture.Kapture,
                                     kapture_path: str,
                                     tar_handlers: Optional[TarCollection],
                                     colmap_path: str,
                                     input_database_path: str,
                                     input_reconstruction_path: str,
                                     colmap_binary: str,
                                     keypoints_type: Optional[str],
                                     use_colmap_matches_importer: bool,
                                     image_registrator_options: List[str],
                                     skip_list: List[str],
                                     force: bool) -> None:
    """
    Localize images on a colmap model with the kapture data.

    :param kapture_data: kapture data to use
    :param kapture_path: path to the kapture to use
    :param tar_handlers: collection of preloaded tar archives
    :param colmap_path: path to the colmap build
    :param input_database_path: path to the map colmap.db
    :param input_reconstruction_path: path to the map reconstruction folder
    :param colmap_binary: path to the colmap binary executable
    :param keypoints_type: type of keypoints, name of the keypoints subfolder
    :param use_colmap_matches_importer: if True, run colmap matches_importer for geometric verification
    :param image_registrator_options: options for the image registrator
    :param skip_list: list of steps to skip
    :param force: Silently overwrite kapture files if already exists.
    """
    os.makedirs(colmap_path, exist_ok=True)

    if not (kapture_data.records_camera and kapture_data.sensors and kapture_data.keypoints and kapture_data.matches):
        raise ValueError('records_camera, sensors, keypoints, matches are mandatory')
    # localization re-estimates poses: any input trajectory would be stale
    if kapture_data.trajectories:
        logger.warning("Input data contains trajectories: they will be ignored")
        kapture_data.trajectories.clear()
    else:
        kapture_data.trajectories = kapture.Trajectories()

    # COLMAP does not fully support rigs.
    if kapture_data.rigs is not None and kapture_data.trajectories is not None:
        # make sure, rigs are not used in trajectories.
        logger.info('remove rigs notation.')
        rigs_remove_inplace(kapture_data.trajectories, kapture_data.rigs)
        kapture_data.rigs.clear()

    # Prepare output
    # Set fixed name for COLMAP database
    colmap_db_path = path.join(colmap_path, 'colmap.db')
    image_list_path = path.join(colmap_path, 'images.list')
    reconstruction_path = path.join(colmap_path, "reconstruction")
    if 'delete_existing' not in skip_list:
        safe_remove_file(colmap_db_path, force)
        safe_remove_file(image_list_path, force)
        safe_remove_any_path(reconstruction_path, force)
    os.makedirs(reconstruction_path, exist_ok=True)

    # Copy colmap db to output
    if not os.path.exists(colmap_db_path):
        shutil.copy(input_database_path, colmap_db_path)

    # find correspondences between the colmap db and the kapture data
    images_all = {image_path: (ts, cam_id)
                  for ts, shot in kapture_data.records_camera.items()
                  for cam_id, image_path in shot.items()}

    colmap_db = COLMAPDatabase.connect(colmap_db_path)
    colmap_image_ids = database_extra.get_colmap_image_ids_from_db(colmap_db)
    colmap_images = database_extra.get_images_from_database(colmap_db)
    colmap_db.close()

    # dict ( kapture_camera -> colmap_camera_id )
    colmap_camera_ids = {images_all[image_path][1]: colmap_cam_id
                         for image_path, colmap_cam_id in colmap_images
                         if image_path in images_all}

    # query images are the ones not already registered in the map database
    images_to_add = {image_path: value
                     for image_path, value in images_all.items()
                     if image_path not in colmap_image_ids}

    flatten_images_to_add = [(ts, kapture_cam_id, image_path)
                             for image_path, (ts, kapture_cam_id) in images_to_add.items()]

    if 'import_to_db' not in skip_list:
        logger.info("Step 1: Add precomputed keypoints and matches to colmap db")

        # keypoints_type can only be deduced automatically when there is exactly one type present
        if keypoints_type is None:
            keypoints_type = try_get_only_key_from_collection(kapture_data.keypoints)
        assert keypoints_type is not None
        assert keypoints_type in kapture_data.keypoints
        assert keypoints_type in kapture_data.matches

        # only add cameras the map db does not know yet
        cameras_to_add = kapture.Sensors()
        for _, (_, kapture_cam_id) in images_to_add.items():
            if kapture_cam_id not in colmap_camera_ids:
                kapture_cam = kapture_data.sensors[kapture_cam_id]
                cameras_to_add[kapture_cam_id] = kapture_cam
        colmap_db = COLMAPDatabase.connect(colmap_db_path)
        colmap_added_camera_ids = database_extra.add_cameras_to_database(cameras_to_add, colmap_db)
        colmap_camera_ids.update(colmap_added_camera_ids)

        colmap_added_image_ids = database_extra.add_images_to_database_from_flatten(
            colmap_db, flatten_images_to_add, kapture_data.trajectories, colmap_camera_ids)
        colmap_image_ids.update(colmap_added_image_ids)

        colmap_image_ids_reversed = {v: k for k, v in colmap_image_ids.items()}  # colmap_id : name

        # add new features: only keypoints not already present in the db
        colmap_keypoints = database_extra.get_keypoints_set_from_database(colmap_db, colmap_image_ids_reversed)
        keypoints_all = kapture_data.keypoints[keypoints_type]
        keypoints_to_add = {name for name in keypoints_all if name not in colmap_keypoints}
        keypoints_to_add = kapture.Keypoints(keypoints_all.type_name, keypoints_all.dtype, keypoints_all.dsize,
                                             keypoints_to_add)
        database_extra.add_keypoints_to_database(colmap_db, keypoints_to_add,
                                                 keypoints_type, kapture_path,
                                                 tar_handlers,
                                                 colmap_image_ids)

        # add new matches: only pairs not already present in the db
        colmap_matches = kapture.Matches(database_extra.get_matches_set_from_database(colmap_db,
                                                                                      colmap_image_ids_reversed))
        colmap_matches.normalize()

        matches_all = kapture_data.matches[keypoints_type]
        matches_to_add = kapture.Matches({pair for pair in matches_all if pair not in colmap_matches})
        # print(list(matches_to_add))
        # when matches_importer runs later, it computes two-view geometry itself
        database_extra.add_matches_to_database(colmap_db, matches_to_add,
                                               keypoints_type, kapture_path,
                                               tar_handlers,
                                               colmap_image_ids,
                                               export_two_view_geometry=not use_colmap_matches_importer)
        colmap_db.close()

    if use_colmap_matches_importer:
        logger.info('Step 2: Run geometric verification')
        logger.debug('running colmap matches_importer...')

        # keypoints_type may still be None when 'import_to_db' was skipped
        if keypoints_type is None:
            keypoints_type = try_get_only_key_from_collection(kapture_data.matches)
        assert keypoints_type is not None
        assert keypoints_type in kapture_data.matches

        # compute two view geometry
        colmap_lib.run_matches_importer_from_kapture_matches(
            colmap_binary,
            colmap_use_cpu=True,
            colmap_gpu_index=None,
            colmap_db_path=colmap_db_path,
            kapture_matches=kapture_data.matches[keypoints_type],
            force=force)
    else:
        logger.info('Step 2: Run geometric verification - skipped')
    if 'image_registrator' not in skip_list:
        logger.info("Step 3: Run image_registrator")
        # run image_registrator
        colmap_lib.run_image_registrator(
            colmap_binary,
            colmap_db_path,
            input_reconstruction_path,
            reconstruction_path,
            image_registrator_options
        )

    # run model_converter
    if 'model_converter' not in skip_list:
        logger.info("Step 4: Export reconstruction results to txt")
        colmap_lib.run_model_converter(
            colmap_binary,
            reconstruction_path,
            reconstruction_path
        )
def import_opensfm(
        opensfm_rootdir: str,
        kapture_rootdir: str,
        force_overwrite_existing: bool = False,
        images_import_method: TransferAction = TransferAction.copy) -> None:
    """
    Import an OpenSfM project (reconstruction.json + images/exif/features/matches dirs)
    into a kapture directory.

    :param opensfm_rootdir: root path of the OpenSfM project
    :param kapture_rootdir: root path of the kapture to write
    :param force_overwrite_existing: silently erase pre-existing kapture files if True
    :param images_import_method: transfer strategy for the image files (copy, link, ...)
    """
    # NOTE(review): sibling code uses `> logging.INFO` to hide progress bars; this `!=`
    # also hides them at DEBUG level — confirm whether that is intended.
    disable_tqdm = logger.getEffectiveLevel() != logging.INFO
    # load reconstruction
    opensfm_reconstruction_filepath = path.join(opensfm_rootdir, 'reconstruction.json')
    with open(opensfm_reconstruction_filepath, 'rt') as f:
        opensfm_reconstruction = json.load(f)
    # remove the single list @ root
    opensfm_reconstruction = opensfm_reconstruction[0]

    # prepare space for output
    os.makedirs(kapture_rootdir, exist_ok=True)
    delete_existing_kapture_files(kapture_rootdir, force_erase=force_overwrite_existing)

    # import cameras
    kapture_sensors = kapture.Sensors()
    assert 'cameras' in opensfm_reconstruction
    # import cameras
    for osfm_camera_id, osfm_camera in opensfm_reconstruction['cameras'].items():
        camera = import_camera(osfm_camera, name=osfm_camera_id)
        kapture_sensors[osfm_camera_id] = camera

    # import shots
    logger.info('importing images and trajectories ...')
    kapture_images = kapture.RecordsCamera()
    kapture_trajectories = kapture.Trajectories()
    opensfm_image_dirpath = path.join(opensfm_rootdir, 'images')
    assert 'shots' in opensfm_reconstruction
    image_timestamps, image_sensors = {}, {}  # used later to retrieve the timestamp of an image.
    # the enumeration index serves as a synthetic timestamp
    for timestamp, (image_filename, shot) in enumerate(
            opensfm_reconstruction['shots'].items()):
        sensor_id = shot['camera']
        image_timestamps[image_filename] = timestamp
        image_sensors[image_filename] = sensor_id
        # in OpenSfm, (sensor, timestamp) is not unique.
        # OpenSfM stores rotation as an axis-angle vector; convert to quaternion
        rotation_vector = shot['rotation']
        q = quaternion.from_rotation_vector(rotation_vector)
        translation = shot['translation']
        # capture_time = shot['capture_time'] # may be invalid
        # gps_position = shot['gps_position']
        kapture_images[timestamp, sensor_id] = image_filename
        kapture_trajectories[timestamp, sensor_id] = kapture.PoseTransform(r=q, t=translation)

    # copy image files
    filename_list = [f for _, _, f in kapture.flatten(kapture_images)]
    import_record_data_from_dir_auto(
        source_record_dirpath=opensfm_image_dirpath,
        destination_kapture_dirpath=kapture_rootdir,
        filename_list=filename_list,
        copy_strategy=images_import_method)

    # gps from pre-extracted exif, in exif/image_name.jpg.exif
    kapture_gnss = None
    opensfm_exif_dirpath = path.join(opensfm_rootdir, 'exif')
    opensfm_exif_suffix = '.exif'
    if path.isdir(opensfm_exif_dirpath):
        logger.info('importing GNSS from exif ...')
        camera_ids = set(image_sensors.values())
        # add a gps sensor for each camera
        map_cam_to_gnss_sensor = {cam_id: 'GPS_' + cam_id for cam_id in camera_ids}
        for gnss_id in map_cam_to_gnss_sensor.values():
            kapture_sensors[gnss_id] = kapture.Sensor(
                sensor_type='gnss', sensor_params=['EPSG:4326'])  # build epsg_code for all cameras
        kapture_gnss = kapture.RecordsGnss()
        opensfm_exif_filepath_list = (
            path.join(dirpath, filename)
            for dirpath, _, filename_list in os.walk(opensfm_exif_dirpath)
            for filename in filename_list
            if filename.endswith(opensfm_exif_suffix))
        for opensfm_exif_filepath in tqdm(opensfm_exif_filepath_list, disable=disable_tqdm):
            # recover the image name by stripping the exif dir prefix and '.exif' suffix
            image_filename = path.relpath(
                opensfm_exif_filepath, opensfm_exif_dirpath)[:-len(opensfm_exif_suffix)]
            image_timestamp = image_timestamps[image_filename]
            image_sensor_id = image_sensors[image_filename]
            gnss_timestamp = image_timestamp
            gnss_sensor_id = map_cam_to_gnss_sensor[image_sensor_id]
            with open(opensfm_exif_filepath, 'rt') as f:
                js_root = json.load(f)
                if 'gps' not in js_root:
                    logger.warning(f'NO GPS data in "{opensfm_exif_filepath}"')
                    continue
                # EPSG:4326 convention: x = longitude, y = latitude, z = altitude
                gps_coords = {
                    'x': js_root['gps']['longitude'],
                    'y': js_root['gps']['latitude'],
                    'z': js_root['gps'].get('altitude', 0.0),
                    'dop': js_root['gps'].get('dop', 0),
                    'utc': 0,
                }
                logger.debug(
                    f'found GPS data for ({gnss_timestamp}, {gnss_sensor_id}) in "{opensfm_exif_filepath}"'
                )
                kapture_gnss[gnss_timestamp, gnss_sensor_id] = kapture.RecordGnss(**gps_coords)

    # import features (keypoints + descriptors)
    # both are created lazily from the first npz file, to pick up dsize/dtype from the data
    kapture_keypoints = None  # kapture.Keypoints(type_name='opensfm', dsize=4, dtype=np.float64)
    kapture_descriptors = None  # kapture.Descriptors(type_name='opensfm', dsize=128, dtype=np.uint8)
    opensfm_features_dirpath = path.join(opensfm_rootdir, 'features')
    opensfm_features_suffix = '.features.npz'
    if path.isdir(opensfm_features_dirpath):
        logger.info('importing keypoints and descriptors ...')
        opensfm_features_file_list = (
            path.join(dp, fn)
            for dp, _, fs in os.walk(opensfm_features_dirpath) for fn in fs)
        opensfm_features_file_list = (
            filepath
            for filepath in opensfm_features_file_list
            if filepath.endswith(opensfm_features_suffix))
        for opensfm_feature_filename in tqdm(opensfm_features_file_list, disable=disable_tqdm):
            image_filename = path.relpath(
                opensfm_feature_filename, opensfm_features_dirpath)[:-len(opensfm_features_suffix)]
            opensfm_image_features = np.load(opensfm_feature_filename)
            opensfm_image_keypoints = opensfm_image_features['points']
            opensfm_image_descriptors = opensfm_image_features['descriptors']
            logger.debug(
                f'parsing keypoints and descriptors in {opensfm_feature_filename}'
            )
            if kapture_keypoints is None:
                # print(type(opensfm_image_keypoints.dtype))
                # HAHOG = Hessian Affine feature point detector + HOG descriptor
                kapture_keypoints = kapture.Keypoints(
                    type_name='HessianAffine',
                    dsize=opensfm_image_keypoints.shape[1],
                    dtype=opensfm_image_keypoints.dtype)
            if kapture_descriptors is None:
                kapture_descriptors = kapture.Descriptors(
                    type_name='HOG',
                    dsize=opensfm_image_descriptors.shape[1],
                    dtype=opensfm_image_descriptors.dtype)
            # convert keypoints file
            keypoint_filpath = kapture.io.features.get_features_fullpath(
                data_type=kapture.Keypoints,
                kapture_dirpath=kapture_rootdir,
                image_filename=image_filename)
            kapture.io.features.image_keypoints_to_file(
                filepath=keypoint_filpath, image_keypoints=opensfm_image_keypoints)
            # register the file
            kapture_keypoints.add(image_filename)
            # convert descriptors file
            descriptor_filpath = kapture.io.features.get_features_fullpath(
                data_type=kapture.Descriptors,
                kapture_dirpath=kapture_rootdir,
                image_filename=image_filename)
            kapture.io.features.image_descriptors_to_file(
                filepath=descriptor_filpath, image_descriptors=opensfm_image_descriptors)
            # register the file
            kapture_descriptors.add(image_filename)

    # import matches
    kapture_matches = kapture.Matches()
    opensfm_matches_suffix = '_matches.pkl.gz'
    opensfm_matches_dirpath = path.join(opensfm_rootdir, 'matches')
    if path.isdir(opensfm_matches_dirpath):
        logger.info('importing matches ...')
        opensfm_matches_file_list = (
            path.join(dp, fn)
            for dp, _, fs in os.walk(opensfm_matches_dirpath) for fn in fs)
        opensfm_matches_file_list = (
            filepath
            for filepath in opensfm_matches_file_list
            if filepath.endswith(opensfm_matches_suffix))
        for opensfm_matches_filename in tqdm(opensfm_matches_file_list, disable=disable_tqdm):
            image_filename_1 = path.relpath(
                opensfm_matches_filename, opensfm_matches_dirpath)[:-len(opensfm_matches_suffix)]
            logger.debug(f'parsing mathes in {image_filename_1}')
            # each pickle maps second-image filename -> match index array
            with gzip.open(opensfm_matches_filename, 'rb') as f:
                opensfm_matches = pickle.load(f)
                for image_filename_2, opensfm_image_matches in opensfm_matches.items():
                    image_pair = (image_filename_1, image_filename_2)
                    # register the pair to kapture
                    kapture_matches.add(*image_pair)
                    # convert the bin file to kapture
                    kapture_matches_filepath = kapture.io.features.get_matches_fullpath(
                        image_filename_pair=image_pair,
                        kapture_dirpath=kapture_rootdir)
                    kapture_image_matches = np.hstack([
                        opensfm_image_matches.astype(np.float64),
                        # no matches scoring = assume all to one
                        np.ones(shape=(opensfm_image_matches.shape[0], 1),
                                dtype=np.float64)
                    ])
                    kapture.io.features.image_matches_to_file(
                        kapture_matches_filepath, kapture_image_matches)

    # import 3-D points
    if 'points' in opensfm_reconstruction:
        logger.info('importing points 3-D')
        opensfm_points = opensfm_reconstruction['points']
        points_data = []
        for point_id in sorted(opensfm_points):
            point_data = opensfm_points[point_id]
            # kapture Points3d rows are [x, y, z, r, g, b]
            point_data = point_data['coordinates'] + point_data['color']
            points_data.append(point_data)
        kapture_points = kapture.Points3d(points_data)
    else:
        kapture_points = None

    # saving kapture csv files
    logger.info('saving kapture files')
    kapture_data = kapture.Kapture(sensors=kapture_sensors,
                                   records_camera=kapture_images,
                                   records_gnss=kapture_gnss,
                                   trajectories=kapture_trajectories,
                                   keypoints=kapture_keypoints,
                                   descriptors=kapture_descriptors,
                                   matches=kapture_matches,
                                   points3d=kapture_points)
    kapture.io.csv.kapture_to_dir(dirpath=kapture_rootdir, kapture_data=kapture_data)