def test_sift_patchmatch_scores(data, labels):
    """
    Score patch pairs by normalized squared SIFT descriptor distance.

    data = X_test
    labels = y_test

    Returns:
        tuple: (sift_scores, sift_list) where ``sift_scores`` holds one
        normalized distance per consecutive patch pair and ``sift_list``
        holds the raw descriptors for every patch.
    """
    import pyhesaff
    import numpy as np
    # Normalize input to grayscale patches: drop a trailing singleton
    # channel, or convert 3-channel images to gray.
    if len(data.shape) == 4 and data.shape[-1] == 1:
        data = data.reshape(data.shape[0:3])
    elif len(data.shape) == 4 and data.shape[-1] == 3:
        import vtool as vt
        # TODO use dataset to infer data colorspace
        data = vt.convert_image_list_colorspace(data, 'GRAY',
                                                src_colorspace='BGR')
    patch_list = data
    print('Extract SIFT descr')
    vecs_list = pyhesaff.extract_desc_from_patches(patch_list)
    print('Compute SIFT dist')
    # Consecutive patches form pairs: (0, 1), (2, 3), ...
    evens = vecs_list[0::2].astype(np.float32)
    odds = vecs_list[1::2].astype(np.float32)
    sqrddist = ((evens - odds) ** 2).sum(axis=1)
    sqrddist_ = sqrddist[None, :].T
    # Pseudo-maximum squared L2 distance between two 512-normalized
    # SIFT vectors, used to squash distances into a unit-ish range.
    VEC_PSEUDO_MAX_DISTANCE_SQRD = 2.0 * (512.0 ** 2.0)
    sift_scores = sqrddist_.flatten() / VEC_PSEUDO_MAX_DISTANCE_SQRD
    sift_list = vecs_list
    return sift_scores, sift_list
def extract_patches(ibs, aid_list, fxs_list=None, patch_size=None, colorspace=None):
    """
    Extract warped keypoint patches from annotation chips.

    Args:
        ibs: controller exposing a ``depc`` dependency cache
        aid_list (list): annotation rowids
        fxs_list (list): per-annotation feature indices to take;
            defaults to all keypoints of each annotation
        patch_size (int): side length of the warped patches; defaults to 64
        colorspace (str): if given, convert chips to this colorspace first

    Returns:
        list: one list of warped patches per annotation

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis_cnn.ingest_ibeis import *  # NOQA
        >>> ut.show_if_requested()
    """
    depc = ibs.depc
    kpts_list = depc.d.get_feat_kpts(aid_list)
    if fxs_list is None:
        fxs_list = [slice(None)] * len(kpts_list)
    kpts_list_ = ut.ziptake(kpts_list, fxs_list)
    chip_list = depc.d.get_chips_img(aid_list)
    # convert to approprate colorspace
    if colorspace is not None:
        chip_list = vt.convert_image_list_colorspace(chip_list, colorspace)
    # ut.print_object_size(chip_list, 'chip_list')
    # BUGFIX: the patch_size argument used to be unconditionally
    # overwritten with 64; now 64 is only the default.
    if patch_size is None:
        patch_size = 64
    patches_list = [
        vt.get_warped_patches(chip, kpts, patch_size=patch_size)[0]
        for chip, kpts in ut.ProgIter(zip(chip_list, kpts_list_),
                                      nTotal=len(aid_list),
                                      lbl='warping patches')
    ]
    return patches_list
def test_cv2_flann():
    """
    Interactive smoke test exercising assorted cv2 features:
    stereo block matching, DualTVL1 optical flow, ORB, SIFT and
    FLANN-based matching. Displays results via plottool.

    Ignore:
        [name for name in dir(cv2) if 'create' in name.lower()]
        [name for name in dir(cv2) if 'stereo' in name.lower()]

        ut.grab_zipped_url('https://priithon.googlecode.com/archive/a6117f5e81ec00abcfb037f0f9da2937bb2ea47f.tar.gz', download_dir='.')
    """
    import cv2
    from vtool.tests import dummy
    import plottool as pt
    import vtool as vt
    img1 = vt.imread(ut.grab_test_imgpath('easy1.png'))
    img2 = vt.imread(ut.grab_test_imgpath('easy2.png'))

    # Stereo disparity between the two test images
    stereo = cv2.StereoBM_create(numDisparities=16, blockSize=15)
    disparity = stereo.compute(img1, img2)
    pt.imshow(disparity)
    pt.show()

    # Dense optical flow (requires grayscale, same-size inputs)
    flow = cv2.createOptFlow_DualTVL1()
    img1, img2 = vt.convert_image_list_colorspace(
        [img1, img2], 'gray', src_colorspace='bgr')
    img2 = vt.resize(img2, img1.shape[0:2][::-1])
    out = img1.copy()
    flow.calc(img1, img2, out)

    # ORB keypoints + descriptors
    orb = cv2.ORB_create()
    kp1, vecs1 = orb.detectAndCompute(img1, None)
    kp2, vecs2 = orb.detectAndCompute(img2, None)

    # NOTE(review): FeatureDetector_create/DescriptorExtractor_create were
    # removed in cv2 3.x; this path only works on the 2.4 bindings.
    detector = cv2.FeatureDetector_create("SIFT")
    descriptor = cv2.DescriptorExtractor_create("SIFT")
    skp = detector.detect(img1)
    skp, sd = descriptor.compute(img1, skp)
    tkp = detector.detect(img2)
    tkp, td = descriptor.compute(img2, tkp)

    out = img1.copy()
    cv2.drawKeypoints(img1, kp1, outImage=out)
    pt.imshow(out)

    vecs1 = dummy.testdata_dummy_sift(10)
    vecs2 = dummy.testdata_dummy_sift(10)  # NOQA
    # BUGFIX: FLANN_INDEX_KDTREE is 1 in the FLANN enum (0 selects linear
    # search); the enums are not exposed by the cv2 python bindings so the
    # constant must be hardcoded here.
    FLANN_INDEX_KDTREE = 1
    index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
    search_params = dict(checks=50)   # or pass empty dictionary
    flann = cv2.FlannBasedMatcher(index_params, search_params)  # NOQA
    # BUGFIX: previously referenced the undefined name ``flann_params``
    # (only present in a commented-out line), raising NameError.
    cv2.flann.Index(vecs1, index_params)