import os
import random
from collections import defaultdict

import numpy as np
from joblib import Parallel, delayed
from tqdm import tqdm

# Project-local helpers (load_h5, save_h5, load_calib, compute_model, and
# the get_* path utilities) are assumed to be imported from the repo's
# utility modules.


def main(cfg):
    '''Main function to compute model.

    Parameters
    ----------
    cfg: Namespace
        Configurations for running this part of the code.
    '''

    if os.path.exists(get_geom_file(cfg)):
        print(' -- already exists, skipping model computation')
        return

    # Load keypoints and filtered matches
    keypoints_dict = load_h5(get_kp_file(cfg))
    matches_dict = load_h5(get_filter_match_file_for_computing_model(cfg))

    print('Computing model')
    # Use the configured number of OpenCV threads, or ~90% of the cores
    # available to this process
    num_cores = cfg.num_opencv_threads if cfg.num_opencv_threads > 0 else int(
        len(os.sched_getaffinity(0)) * 0.9)

    # Load image lists and camera information
    data_dir = get_data_path(cfg)
    images_list = get_fullpath_list(data_dir, 'images')
    image_names = get_item_name_list(images_list)
    calib_list = get_fullpath_list(data_dir, 'calibration')
    calib_dict = load_calib(calib_list)
    pairs_per_th = get_pairs_per_threshold(data_dir)

    # Load optional per-image data (descriptors, affine shapes, orientations,
    # scales), falling back to empty defaultdicts when a file is missing so
    # that downstream lookups simply yield empty lists
    try:
        desc_dict = defaultdict(list)
        for k, v in load_h5(get_desc_file(cfg)).items():
            desc_dict[k] = v
    except Exception:
        desc_dict = defaultdict(list)

    try:
        aff_dict = defaultdict(list)
        for k, v in load_h5(get_affine_file(cfg)).items():
            aff_dict[k] = v
    except Exception:
        aff_dict = defaultdict(list)

    try:
        ori_dict = defaultdict(list)
        for k, v in load_h5(get_angle_file(cfg)).items():
            ori_dict[k] = v
    except Exception:
        ori_dict = defaultdict(list)

    try:
        scale_dict = defaultdict(list)
        for k, v in load_h5(get_scale_file(cfg)).items():
            scale_dict[k] = v
    except Exception:
        scale_dict = defaultdict(list)

    # Compute a model for every pair at threshold '0.0', in random order
    random.shuffle(pairs_per_th['0.0'])
    result = Parallel(n_jobs=num_cores)(delayed(compute_model)(
        cfg,
        np.asarray(matches_dict[pair]),
        np.asarray(keypoints_dict[pair.split('-')[0]]),
        np.asarray(keypoints_dict[pair.split('-')[1]]),
        calib_dict[pair.split('-')[0]],
        calib_dict[pair.split('-')[1]],
        images_list[image_names.index(pair.split('-')[0])],
        images_list[image_names.index(pair.split('-')[1])],
        np.asarray(scale_dict[pair.split('-')[0]]),
        np.asarray(scale_dict[pair.split('-')[1]]),
        np.asarray(ori_dict[pair.split('-')[0]]),
        np.asarray(ori_dict[pair.split('-')[1]]),
        np.asarray(aff_dict[pair.split('-')[0]]),
        np.asarray(aff_dict[pair.split('-')[1]]),
        np.asarray(desc_dict[pair.split('-')[0]]),
        np.asarray(desc_dict[pair.split('-')[1]]))
        for pair in tqdm(pairs_per_th['0.0']))

    # Unpack the results: each entry is (model, inliers, elapsed time)
    model_dict = {}
    inl_dict = {}
    timings_list = []
    for i, pair in enumerate(pairs_per_th['0.0']):
        model_dict[pair] = result[i][0]
        inl_dict[pair] = result[i][1]
        timings_list.append(result[i][2])

    # Create the model directory if it does not exist
    if not os.path.exists(get_geom_path(cfg)):
        os.makedirs(get_geom_path(cfg))

    # Finally, save the packed models and inliers
    save_h5(model_dict, get_geom_file(cfg))
    save_h5(inl_dict, get_geom_inl_file(cfg))

    # Save the computational cost
    save_h5({'cost': np.mean(timings_list)}, get_geom_cost_file(cfg))
    print('Geometry cost (averaged over image pairs): {:0.2f} sec'.format(
        np.mean(timings_list)))
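# The unpacking loop above fixes the contract for the per-pair worker: each
# compute_model() call must return a 3-tuple (model, inlier_mask,
# elapsed_time). A minimal sketch of that contract follows; it is NOT the
# benchmark's solver. The helper name, the 2xN index layout assumed for
# `matches`, and the use of cv2.findFundamentalMat are all illustrative
# assumptions.

import time

import cv2
import numpy as np


def compute_model_sketch(cfg, matches, kp1, kp2):
    '''Hypothetical stand-in for compute_model(), with the per-pair
    metadata arguments omitted for brevity.'''
    t_start = time.time()
    # Assumption: matches is a 2xN array of keypoint indices and the
    # keypoint arrays carry (x, y) in their first two columns
    pts1 = np.asarray(kp1)[matches[0], :2].astype(np.float64)
    pts2 = np.asarray(kp2)[matches[1], :2].astype(np.float64)
    # RANSAC fundamental-matrix estimation stands in for the actual
    # geometric solver, purely to show the expected return values
    F, mask = cv2.findFundamentalMat(pts1, pts2, cv2.FM_RANSAC,
                                     ransacReprojThreshold=1.0,
                                     confidence=0.999)
    inliers = mask.ravel().astype(bool) if mask is not None else np.array([])
    # result[i][0] -> model, result[i][1] -> inliers, result[i][2] -> timing
    return F, inliers, time.time() - t_start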
import os

from tqdm import tqdm

# Project-local helpers (compute_per_img_file, save_h5, and the get_* path
# utilities) are assumed to be imported from the repo's utility modules.


def main(cfg):
    '''Main function to compute features.

    Parameters
    ----------
    cfg: Namespace
        Configurations for running this part of the code.
    '''

    if os.path.exists(get_kp_file(cfg)) and os.path.exists(get_desc_file(cfg)):
        print(' -- already exists, skipping feature extraction')
        return

    # Get data directory
    data_dir = get_data_path(cfg)

    # Get a list of all images in the 'set_100' subset
    images_list = get_fullpath_list(data_dir, 'images')

    # Also create a list with only the image names, to use as dictionary
    # keys later
    image_names = get_item_name_list(images_list)

    # Create the output folder
    save_dir = get_feature_path(cfg)
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # Compute and save keypoints and descriptors.
    #
    # Parallel processing actually slows things down here, because OpenCV
    # already uses multiple threads, so for now we simply process the
    # images one by one.
    print('Extracting Keypoints and Descriptors:')
    result = []
    for img_path in tqdm(images_list):
        result.append(compute_per_img_file(img_path, cfg))
    # num_cores = int(multiprocessing.cpu_count() * 0.9)
    # print('Extracting Keypoints and Descriptors:')
    # result = Parallel(n_jobs=num_cores)(delayed(compute_per_img_file)(
    #     img_path, cfg) for img_path in tqdm(images_list))

    # Pack keypoints and descriptors, plus any optional per-keypoint data
    # (scale, angle, score, affine), into per-image dictionaries
    kp_dict = {}
    scale_dict = {}
    angle_dict = {}
    score_dict = {}
    descs_dict = {}
    affine_dict = {}
    for _i in range(len(image_names)):
        assert 'kp' in result[_i], 'Must provide keypoints'
        assert 'descs' in result[_i], 'Must provide descriptors'
        kp_dict[image_names[_i]] = result[_i]['kp']
        descs_dict[image_names[_i]] = result[_i]['descs']
        if 'scale' in result[_i]:
            scale_dict[image_names[_i]] = result[_i]['scale']
        if 'angle' in result[_i]:
            angle_dict[image_names[_i]] = result[_i]['angle']
        if 'affine' in result[_i]:
            affine_dict[image_names[_i]] = result[_i]['affine']
        if 'score' in result[_i]:
            score_dict[image_names[_i]] = result[_i]['score']

    # Finally, save the packed keypoints and descriptors
    save_h5(kp_dict, get_kp_file(cfg))
    save_h5(scale_dict, get_scale_file(cfg))
    save_h5(angle_dict, get_angle_file(cfg))
    save_h5(score_dict, get_score_file(cfg))
    save_h5(descs_dict, get_desc_file(cfg))
    save_h5(affine_dict, get_affine_file(cfg))
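# The packing loop above defines the contract for compute_per_img_file():
# it must return a dict with mandatory 'kp' and 'descs' entries and may add
# 'scale', 'angle', 'score', and 'affine'. A minimal sketch of that
# contract, assuming OpenCV SIFT as the detector/descriptor; the real
# extractor is selected through cfg, and this helper name is hypothetical.

import cv2
import numpy as np


def compute_per_img_file_sketch(img_path, cfg):
    '''Hypothetical per-image feature extractor using OpenCV SIFT.'''
    img = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
    sift = cv2.SIFT_create()
    keypoints, descriptors = sift.detectAndCompute(img, None)
    return {
        'kp': np.array([k.pt for k in keypoints]),           # N x 2, (x, y)
        'scale': np.array([k.size for k in keypoints]),      # keypoint size
        'angle': np.array([k.angle for k in keypoints]),     # degrees
        'score': np.array([k.response for k in keypoints]),  # detector response
        'descs': descriptors,                                # N x 128 for SIFT
    }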