import os
import logging
from pathlib import Path

import numpy as np
import torch
import yaml
from tqdm import tqdm
from tensorboardX import SummaryWriter  # assumed; torch.utils.tensorboard's SummaryWriter also works

from models.model_wrap import PointTracker
from utils.utils import getWriterPath  # assumed module path for getWriterPath


def getMatches(data):
    """Match two sets of descriptors with the nearest-neighbor tracker.

    data: dict with 'keypoints'/'warped_keypoints' (one keypoint per row)
    and 'desc'/'warped_desc' (one 256-d descriptor per row).
    Returns (matches, mscores).
    """
    desc = data["desc"]
    warped_desc = data["warped_desc"]
    # keypoints are assumed to be provided alongside the descriptors;
    # the original referenced them without reading them from `data`
    keypoints = data["keypoints"]
    warped_keypoints = data["warped_keypoints"]

    nn_thresh = 1.2
    print("nn threshold: ", nn_thresh)
    tracker = PointTracker(max_length=2, nn_thresh=nn_thresh)
    # matches = tracker.nn_match_two_way(desc, warped_desc, nn_thresh)
    tracker.update(keypoints.T, desc.T)
    tracker.update(warped_keypoints.T, warped_desc.T)
    matches = tracker.get_matches().T
    mscores = tracker.get_mscores().T

    # mAP
    # matches = data['matches']
    print("matches: ", matches.shape)
    print("mscores: ", mscores.shape)
    try:
        print("mscore max: ", mscores.max(axis=0))
        print("mscore min: ", mscores.min(axis=0))
    except ValueError:
        pass  # no matches, so mscores is empty

    return matches, mscores
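
# Usage sketch (added for illustration, not part of the original module):
# drive getMatches from a prediction .npz written by export_descriptor
# below. The file path is hypothetical; the 'keypoints'/'warped_keypoints'
# keys simply mirror the saved 'prob'/'warped_prob' arrays.
def _demo_getMatches(npz_path="predictions/0.npz"):
    loaded = np.load(npz_path)
    data = {
        "keypoints": loaded["prob"],  # one keypoint per row
        "warped_keypoints": loaded["warped_prob"],
        "desc": loaded["desc"],  # one 256-d descriptor per row
        "warped_desc": loaded["warped_desc"],
    }
    return getMatches(data)  # matches: np (N3, 4), plus a score per match
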
def export_descriptor(config, output_dir, args):
    """Take two images as input; output keypoints and correspondences.

    Saves prediction:
        pred:
            'image': np (320, 240)
            'prob' (keypoints): np (N1, 2)
            'desc': np (N1, 256)
            'warped_image': np (320, 240)
            'warped_prob' (keypoints): np (N2, 2)
            'warped_desc': np (N2, 256)
            'homography': np (3, 3)
            'matches': np (N3, 4)
    """
    from utils.loader import get_save_path
    from utils.var_dim import squeezeToNumpy

    # basic settings
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    logging.info("train on device: %s", device)
    with open(os.path.join(output_dir, "config.yml"), "w") as f:
        yaml.dump(config, f, default_flow_style=False)
    writer = SummaryWriter(getWriterPath(task=args.command, date=True))
    save_path = get_save_path(output_dir)
    save_output = save_path / "../predictions"
    os.makedirs(save_output, exist_ok=True)

    ## parameters
    outputMatches = True
    subpixel = config["model"]["subpixel"]["enable"]
    patch_size = config["model"]["subpixel"]["patch_size"]

    # data loading
    from utils.loader import dataLoader_test as dataLoader

    task = config["data"]["dataset"]
    data = dataLoader(config, dataset=task)
    test_set, test_loader = data["test_set"], data["test_loader"]
    from utils.print_tool import datasize

    datasize(test_loader, config, tag="test")

    # model loading
    from utils.loader import get_module

    Val_model_heatmap = get_module("", config["front_end_model"])
    ## load pretrained
    val_agent = Val_model_heatmap(config["model"], device=device)
    val_agent.loadModel()

    ## tracker
    tracker = PointTracker(max_length=2, nn_thresh=val_agent.nn_thresh)

    count = 0
    for i, sample in tqdm(enumerate(test_loader)):
        img_0, img_1 = sample["image"], sample["warped_image"]

        def get_pts_desc_from_agent(val_agent, img, device="cpu"):
            """
            pts: list [numpy (3, N)]
            desc: list [numpy (256, N)]
            """
            heatmap_batch = val_agent.run(
                img.to(device)
            )  # heatmap: numpy [batch, 1, H, W]
            # heatmap to pts
            pts = val_agent.heatmap_to_pts()
            if subpixel:
                pts = val_agent.soft_argmax_points(pts, patch_size=patch_size)
            # heatmap, pts to desc
            desc_sparse = val_agent.desc_to_sparseDesc()
            outs = {"pts": pts[0], "desc": desc_sparse[0]}
            return outs

        def transpose_np_dict(outs):
            # note: unused helper kept from the original; transposes in place
            for entry in list(outs):
                outs[entry] = outs[entry].transpose()

        # first image, no matches
        outs = get_pts_desc_from_agent(val_agent, img_0, device=device)
        pts, desc = outs["pts"], outs["desc"]  # pts: np [3, N]
        if outputMatches:
            tracker.update(pts, desc)
        # save keypoints
        pred = {"image": squeezeToNumpy(img_0)}
        pred.update({"prob": pts.transpose(), "desc": desc.transpose()})

        # second image, output matches
        outs = get_pts_desc_from_agent(val_agent, img_1, device=device)
        pts, desc = outs["pts"], outs["desc"]
        if outputMatches:
            tracker.update(pts, desc)
        pred.update({"warped_image": squeezeToNumpy(img_1)})
        pred.update(
            {
                "warped_prob": pts.transpose(),
                "warped_desc": desc.transpose(),
                "homography": squeezeToNumpy(sample["homography"]),
            }
        )

        if outputMatches:
            matches = tracker.get_matches()
            print("matches: ", matches.transpose().shape)
            pred.update({"matches": matches.transpose()})
        print("pts: ", pts.shape, ", desc: ", desc.shape)

        # clear the tracker's stored descriptors before the next pair
        tracker.clear_desc()

        filename = str(count)
        path = Path(save_output, "{}.npz".format(filename))
        np.savez_compressed(path, **pred)
        count += 1
    print("output pairs: ", count)
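
# Invocation sketch (added for illustration, not part of the original
# module): a minimal way to run export_descriptor directly. The config
# path is hypothetical, and args only needs the .command attribute used
# by getWriterPath above.
if __name__ == "__main__":
    import argparse

    with open("configs/example_export.yaml", "r") as f:  # hypothetical config
        config = yaml.safe_load(f)
    args = argparse.Namespace(command="export_descriptor")
    export_descriptor(config, "logs/export_example", args)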