def AffNetHardNet_describeFromKeys(img_np, KPlist):
    # Build a 1x1xHxW tensor from the grayscale image. Gradients are never
    # needed here, so the deprecated Variable(..., volatile=True) wrapper is
    # replaced by torch.no_grad() around the forward passes below.
    img = torch.from_numpy(img_np.astype(np.float32))
    img = img.view(1, 1, img.size(0), img.size(1))
    HessianAffine = ScaleSpaceAffinePatchExtractor(mrSize=5.192,
                                                   num_features=0,
                                                   border=0,
                                                   num_Baum_iters=0)
    if USE_CUDA:
        HessianAffine = HessianAffine.cuda()
        img = img.cuda()
    with torch.no_grad():
        HessianAffine.createScaleSpace(img)  # build the scale-space pyramid
    descriptors = []
    Alist = []
    n = 0
    for kp in KPlist:
        x, y = np.float32(kp.pt)
        # Start from an upright, fixed-scale LAF centred on the keypoint.
        LAFs = normalizeLAFs(
            torch.tensor([[AffNetPix.PS / 2, 0, x],
                          [0, AffNetPix.PS / 2, y]]).reshape(1, 2, 3),
            img.size(3), img.size(2))
        with torch.no_grad():
            patch = HessianAffine.extract_patches_from_pyr(
                denormalizeLAFs(LAFs, img.size(3), img.size(2)),
                PS=AffNetPix.PS)
        if WRITE_IMGS_DEBUG:
            SaveImageWithKeys(patch.detach().cpu().numpy().reshape([32, 32]),
                              [], 'p2/' + str(n) + '.png')
        # Estimate the local affine shape with AffNet.
        if USE_CUDA:
            # equivalent single-shot alternative: A = AffNetPix(patch.cuda()).cpu()
            with torch.no_grad():
                A = batched_forward(AffNetPix, patch.cuda(), 256).cpu()
        else:
            with torch.no_grad():
                A = AffNetPix(patch)
        # Compose the estimated affine shape with the initial LAF and
        # re-extract a shape-normalized 32x32 patch for HardNet.
        new_LAFs = torch.cat([torch.bmm(A, LAFs[:, :, 0:2]), LAFs[:, :, 2:]],
                             dim=2)
        dLAFs = denormalizeLAFs(new_LAFs, img.size(3), img.size(2))
        with torch.no_grad():
            patchaff = HessianAffine.extract_patches_from_pyr(dLAFs, PS=32)
            if WRITE_IMGS_DEBUG:
                SaveImageWithKeys(
                    patchaff.detach().cpu().numpy().reshape([32, 32]), [],
                    'p1/' + str(n) + '.png')
                SaveImageWithKeys(img_np, [kp], 'im1/' + str(n) + '.png')
            descriptors.append(
                HardNetDescriptor(patchaff).cpu().numpy().astype(np.float32))
            Alist.append(
                convertLAFs_to_A23format(LAFs.detach().cpu().numpy().astype(
                    np.float32)))
        n = n + 1
    return descriptors, Alist
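# --- Hedged usage sketch (not part of the original pipeline) -----------------
# AffNetHardNet_describeFromKeys only reads kp.pt from each keypoint, so it can
# describe keypoints produced by any detector, e.g. OpenCV SIFT. The helper
# below is illustrative only: 'img_gray' is assumed to be a 2D grayscale array
# with intensities in [0, 255], and the module-level globals used above
# (AffNetPix, HardNetDescriptor, USE_CUDA, ...) are assumed to be initialized.
def describe_external_keypoints_example(img_gray, n_keypoints=500):
    sift = cv2.SIFT_create(nfeatures=n_keypoints)        # any cv2 detector works
    kps = sift.detect(img_gray.astype(np.uint8), None)   # keypoints only
    descs, A23s = AffNetHardNet_describeFromKeys(img_gray.astype(np.float32),
                                                 kps)
    return np.vstack(descs), A23s  # (N, 128) HardNet descriptors + affine maps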
def HessAff_Detect(img, PatchSize=60, Nfeatures=500):
    # Build a 1x1xHxW tensor; inference only, so the deprecated
    # Variable(..., volatile=True) wrapper is replaced by torch.no_grad().
    var_image = torch.from_numpy(img.astype(np.float32))
    var_image_reshape = var_image.view(1, 1, var_image.size(0),
                                       var_image.size(1))
    # Hessian-Affine detector with one Baumberg iteration for affine shape.
    # border must be an integer number of pixels, hence the floor division.
    HessianAffine = ScaleSpaceAffinePatchExtractor(mrSize=5.192,
                                                   num_features=Nfeatures,
                                                   border=PatchSize // 2,
                                                   num_Baum_iters=1)
    # if USE_CUDA:
    #     HessianAffine = HessianAffine.cuda()
    #     var_image_reshape = var_image_reshape.cuda()
    with torch.no_grad():
        LAFs, responses = HessianAffine(var_image_reshape, do_ori=True)
        patches = HessianAffine.extract_patches_from_pyr(LAFs,
                                                         PS=PatchSize).cpu()
    # These are the affine maps (2x3) of the detected local features.
    Alist = convertLAFs_to_A23format(LAFs).cpu().numpy().astype(np.float32)
    # Keyword names follow OpenCV >= 4.5.3; older bindings used the
    # underscore-prefixed form (_size, _angle, ...).
    KPlist = [
        cv2.KeyPoint(x=float(A[0, 2]),
                     y=float(A[1, 2]),
                     size=10,
                     angle=0.0,
                     response=1,
                     octave=packSIFTOctave(0, 0),
                     class_id=1) for A in Alist
    ]
    return KPlist, np.array(patches), Alist, responses.cpu()
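# --- Hedged end-to-end sketch (illustrative, not part of the original file) --
# Detect Hessian-Affine keypoints with HessAff_Detect, then re-describe them
# with AffNet + HardNet via AffNetHardNet_describeFromKeys. 'query.png' is a
# hypothetical path; the module-level models/globals used above are assumed to
# be loaded already.
def detect_and_describe_example(img_path='query.png'):
    gray = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
    kps, patches, As, resp = HessAff_Detect(gray, PatchSize=60, Nfeatures=500)
    descs, A23s = AffNetHardNet_describeFromKeys(gray.astype(np.float32), kps)
    # One 128-D HardNet descriptor per keypoint, stacked into an (N, 128) matrix.
    return kps, np.vstack(descs)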