def get_points(c_img1, c_img2):
    """Match SURF keypoints between two colour images.

    Args:
        c_img1: first colour image (BGR, as loaded by cv2.imread).
        c_img2: second colour image (BGR).

    Returns:
        Tuple ``(pt1, pt2, color)`` where ``pt1``/``pt2`` are 3xN arrays of
        matched points in homogeneous coordinates and ``color`` is an
        (N, 1, 3) int array holding the average BGR colour of each match.
    """
    # Convert to gray for feature detection.
    img1 = cvtColor(c_img1, COLOR_BGR2GRAY)
    img2 = cvtColor(c_img2, COLOR_BGR2GRAY)

    surf = SURF()  # Initiate SURF detector
    # Find the key points and descriptors with SURF.
    kp1, des1 = surf.detectAndCompute(img1, None)
    kp2, des2 = surf.detectAndCompute(img2, None)

    # FLANN matcher with a KD-tree index (algorithm id 0).
    my_flan_index_tree = 0
    index_params = dict(algorithm=my_flan_index_tree, trees=6)
    search_params = dict(checks=50)
    my_flan = FlannBasedMatcher(index_params, search_params)
    matches = my_flan.knnMatch(des1, des2, k=2)

    # Store all the good matches as per Lowe's ratio test.
    good = [m for m, n in matches if m.distance < 0.9 * n.distance]
    pts1 = float32([kp1[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
    pts2 = float32([kp2[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)

    # Get colour of key points.
    # BUGFIX: the original iterated range(1, len(pts1) - 1), silently
    # leaving the first and last matches with an all-zero colour;
    # iterate over every matched point instead.
    color1 = zeros((len(pts1), 1, 3))
    color2 = zeros((len(pts1), 1, 3))
    color = zeros((len(pts1), 1, 3), dtype=int)
    for i in range(len(pts1)):
        # pts[i][0] is (x, y); image indexing is [row=y, col=x].
        color1[i] = c_img1[int(pts1[i][0][1]), int(pts1[i][0][0])]
        color2[i] = c_img2[int(pts2[i][0][1]), int(pts2[i][0][0])]
        color[i] = (color1[i] + color2[i]) / 2  # avg of colors

    # Convert the 2D features into homogeneous coordinates (3xN arrays).
    pt1 = pts1.reshape((pts1.shape[0], 2)).T
    pt1 = vstack((pt1, ones(pt1.shape[1])))
    pt2 = pts2.reshape((pts2.shape[0], 2)).T
    pt2 = vstack((pt2, ones(pt2.shape[1])))
    return pt1, pt2, color
def fit_cv2(data, algorithm):
    """Build and train a cv2 FLANN matcher over *data*.

    Args:
        data: array-like of descriptors; converted to float32 because
            cv2's FLANN index only accepts CV_32F descriptors.
        algorithm: FLANN index algorithm id (0 == KD-tree).
            BUGFIX: this parameter was previously accepted but ignored —
            the index was always hard-coded to KDTREE (0).

    Returns:
        A trained ``cv2.FlannBasedMatcher``.
    """
    logger.info('Fitting cv2 FLANN...')
    from cv2 import FlannBasedMatcher
    index_params = {
        'algorithm': algorithm,  # was hard-coded to KDTREE = 0
        'trees': 5,
        #'target_precision': 0.9,
        #'build_weight': 0.01,
        #'memory_weight': 0,
        #'sample_fraction': 0.1,
    }
    search_params = {'checks': 5}
    flann = FlannBasedMatcher(index_params, search_params)
    flann.add(np.float32(data))
    flann.train()
    return flann
def match_descriptors(descriptors1, descriptors2, matcher='flann', max_ratio=0.8):
    """Match two descriptor sets and return index pairs of good matches.

    Args:
        descriptors1: query descriptors (float32 array).
        descriptors2: train descriptors (float32 array).
        matcher: matching backend; only 'flann' is supported.
        max_ratio: Lowe's ratio-test threshold.

    Returns:
        (N, 2) array of ``[queryIdx, trainIdx]`` pairs, one per good match.

    Raises:
        ValueError: if *matcher* is not 'flann'.  (Previously an unknown
            matcher fell through and raised NameError on the undefined
            local ``matches``.)

    [1] https://stackoverflow.com/questions/30716610/how-to-get-pixel-coordinates-from-feature-matching-in-opencv-python
    """
    # BUGFIX: the original used `matcher is 'flann'`, which compares object
    # identity and only works by accident of string interning; use equality.
    if matcher != 'flann':
        raise ValueError("unsupported matcher: {!r}".format(matcher))

    # FLANN parameters
    FLANN_INDEX_KDTREE = 0
    index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
    search_params = dict(checks=50)  # or pass empty dictionary
    flann = FlannBasedMatcher(index_params, search_params)
    matches = flann.knnMatch(descriptors1, descriptors2, k=2)

    # Keep only good matches, using the ratio test as per Lowe's paper.
    good_matches = []
    for m, n in matches:
        if m.distance < max_ratio * n.distance:
            # BUGFIX: record the best match's indexes (m), not the
            # second-best's queryIdx (n) as the original did.
            good_matches.append([m.queryIdx, m.trainIdx])
    return array(good_matches)
def algorithm_SURF(self, photo, screen, screen_colored, hessianThreshold=3500, descMatcher=1):
    """Locate *photo* inside *screen* via SURF features + homography.

    Detects SURF keypoints in both images, matches them with FLANN,
    filters with Lowe's ratio test, fits a homography and crops the
    matched region out of *screen_colored* to <match_dir>/result.png.

    Args:
        photo: grayscale template image (2D array; ``photo.shape`` is (h, w)).
        screen: grayscale image to search in.
        screen_colored: colour version of *screen*; source of the crop.
        hessianThreshold: SURF Hessian threshold (higher -> fewer keypoints).
        descMatcher: FLANN index algorithm id passed to the matcher.

    Returns:
        True when a plausible match was found and the crop was written,
        False on any failure along the pipeline.
    """
    t1 = time.perf_counter()
    # Init algorithm
    surf = SURF_create(hessianThreshold)
    surf.setUpright(True)  # skip orientation computation (faster)
    t2 = time.perf_counter()
    self.writeLog('Created SURF object - {}ms'.format(
        self.formatTimeDiff(t1, t2)))

    # Detect and compute
    kp_photo, des_photo = surf.detectAndCompute(photo, None)
    kp_screen, des_screen = surf.detectAndCompute(screen, None)
    t3 = time.perf_counter()
    self.writeLog('Detected keypoints - {}ms'.format(
        self.formatTimeDiff(t2, t3)))

    # Descriptor Matcher
    # BUGFIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrowed to Exception (same best-effort semantics otherwise).
    try:
        index_params = dict(algorithm=descMatcher, trees=5)
        search_params = dict(checks=50)
        flann = FlannBasedMatcher(index_params, search_params)
    except Exception:
        return False
    t4 = time.perf_counter()
    self.writeLog('Initialized Flann Matcher - {}ms'.format(
        self.formatTimeDiff(t3, t4)))

    # Calc knn Matches
    try:
        matches = flann.knnMatch(des_photo, des_screen, k=2)
    except Exception:  # BUGFIX: was a bare except
        return False
    logging.info('knn {}'.format(len(matches)))
    t5 = time.perf_counter()
    self.writeLog('Calced knn matches - {}ms'.format(
        self.formatTimeDiff(t4, t5)))
    if not matches:
        return False

    # Store all the good matches as per Lowe's ratio test.
    good = [m for m, n in matches if m.distance < 0.75 * n.distance]
    logging.info('good {}'.format(len(good)))
    t6 = time.perf_counter()
    self.writeLog('Filtered good matches - {}ms'.format(
        self.formatTimeDiff(t5, t6)))
    # findHomography needs at least 4 points; require 10 for robustness.
    if len(good) < 10:
        return False

    photo_pts = np.float32([kp_photo[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)  # pylint: disable=too-many-function-args
    screen_pts = np.float32([kp_screen[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)  # pylint: disable=too-many-function-args
    M, _ = findHomography(photo_pts, screen_pts, RANSAC, 5.0)
    t7 = time.perf_counter()
    self.writeLog('Found Homography - {}ms'.format(
        self.formatTimeDiff(t6, t7)))
    if M is None or not M.any() or len(M) == 0:
        return False

    # Project the template's corners into screen coordinates.
    h, w = photo.shape
    pts = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]).reshape(-1, 1, 2)  # pylint: disable=too-many-function-args
    dst = perspectiveTransform(pts, M)
    t8 = time.perf_counter()
    self.writeLog('Perspective Transform - {}ms'.format(
        self.formatTimeDiff(t7, t8)))

    # Bounding box of the projected corners (replaces the manual
    # four-way min/max scan), clamped to the image on the low side.
    xs = [dst[i][0][0] for i in range(4)]
    ys = [dst[i][0][1] for i in range(4)]
    minX = max(int(min(xs)), 0)
    maxX = int(max(xs))
    minY = max(int(min(ys)), 0)
    maxY = int(max(ys))
    logging.info('minY {}'.format(int(minY)))
    logging.info('minX {}'.format(int(minX)))
    logging.info('maxY {}'.format(int(maxY)))
    logging.info('maxX {}'.format(int(maxX)))
    # Degenerate (empty) crop -> no usable match.
    if maxX - minX <= 0:
        return False
    if maxY - minY <= 0:
        return False

    imwrite(self.match_dir + '/result.png',
            screen_colored[minY:maxY, minX:maxX])
    t9 = time.perf_counter()
    self.writeLog('Wrote Image - {}ms'.format(self.formatTimeDiff(t8, t9)))
    return True
def getmarker(self, image: object):
    """Identify an image (or a folder of images) against stored BRISK descriptors.

    Loads a pickled descriptor database from ``self.descriptions``
    (a sequence of (title, descriptors, descriptor_count) entries —
    presumably; verify against the code that writes the file), builds a
    FLANN matcher, then compares each input image's BRISK descriptors
    against every entry and reports the best-similarity title.

    Returns the best-matching title string for a single image, or None
    when the database/parameters are missing, no match is found, or a
    whole folder was processed (folder mode only prints results).
    """
    def findmacth(image_file):
        # Compare one image file against every entry in `root`;
        # in single-image mode returns the best title (or None).
        print('\nprocessing', str(image_file))
        start = timer()
        # NOTE(review): get_points here appears to return descriptors,
        # not points — signature differs from other helpers; confirm.
        desc_1 = get_points(image_file, "", size, brisk)
        if desc_1 is not None:
            titles = []
            similarity = []
            # Bind methods locally — micro-optimisation for the hot loop.
            titles_append = titles.append
            similarity_append = similarity.append
            for title, desc_2, len_desc_2 in root:
                good_points = 0
                matches = flann.knnMatch(desc_1, desc_2, k=2)
                for m_n in matches:
                    # knnMatch can return fewer than 2 neighbours; skip those.
                    if len(m_n) != 2:
                        continue
                    elif m_n[0].distance < self.threshold * m_n[1].distance:
                        good_points += 1
                # Per-entry similarity as % of the entry's descriptor count.
                percentage_similarity = good_points / len_desc_2 * 100
                if percentage_similarity > 2:
                    titles_append(title)
                    similarity_append(percentage_similarity)
            if similarity:
                # Pick the entry with the highest similarity score.
                idx = argmax(similarity)
                # for idx1, t in enumerate(titles):
                print("Info: " + str(titles[idx]))
                # print("percentage_similarity: {0}".format(str(similarity[idx])))
                end = timer()
                print('find_match_time: ', (end - start))
                if not self.isfolder:
                    return str(titles[idx])
            else:
                end = timer()
                print("{0} no similarity".format(str(self.image)))
                print('find_match_time: ', (end - start))
        else:
            print("\n{0} has not points".format(str(image)))
            if not self.isfolder:
                return None

    # Bail out early when the descriptor database file is absent.
    if not os.path.exists(self.descriptions):
        print("\nDid not find file {}".format(self.descriptions))
        return None
    # NOTE(review): `load` is presumably pickle.load — unsafe on
    # untrusted files; confirm the database source is trusted.
    with open(self.descriptions, 'rb') as handle:
        root = load(handle)
    flann = FlannBasedMatcher(self.index_params, {})
    # Select detector parameters for covers vs. the current book.
    if self.iscover:
        thresh, octaves, size, ext_of_files = get_parameters("covers")
    else:
        thresh, octaves, size, ext_of_files = get_parameters(self.current_book)
    if not thresh:
        return None
    brisk = BRISK_create(thresh, octaves)  # norm = cv.NORM_HAMMING (70,2) 30days
    if self.isfolder:
        # Folder mode: match every file with the expected extension;
        # results are printed, not returned.
        files = os.listdir(self.image)
        for f in files:
            if f.endswith(ext_of_files):
                findmacth(str(image) + "/" + f)
                # cv_file.release()
    else:
        cur_book = findmacth(image)
        # cv_file.release()
        return cur_book
    return None
return error if __name__ == "__main__": img1 = imread('rect_left.jpeg') img2 = imread('rect_right.jpeg') # find the keypoints and descriptors with SIFT sift = xfeatures2d.SIFT_create() kp1, des1 = sift.detectAndCompute(img1,None) kp2, des2 = sift.detectAndCompute(img2,None) # FLANN parameters for points match FLANN_INDEX_KDTREE = 0 index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5) search_params = dict(checks=50) flann = FlannBasedMatcher(index_params,search_params) matches = flann.knnMatch(des1,des2,k=2) good = [] pts1 = [] pts2 = [] dis_ratio = [] for i,(m,n) in enumerate(matches): if m.distance < 0.3*n.distance: good.append(m) dis_ratio.append(m.distance/n.distance) pts2.append(kp2[m.trainIdx].pt) pts1.append(kp1[m.queryIdx].pt) min_idx = np.argmin(dis_ratio) # calculate fundamental matrix and check error fundMat = rectify(pts1, pts2)