import cv2
import numpy as np
from skimage.measure import ransac
from skimage.transform import EssentialMatrixTransform, FundamentalMatrixTransform


def match_frames(f1, f2):
    # Instantiate OpenCV's brute-force matcher for binary (ORB) descriptors
    bf = cv2.BFMatcher(cv2.NORM_HAMMING)
    # k-nearest-neighbour matching (k=2) between the two frames' descriptors
    matches = bf.knnMatch(f1.des, f2.des, k=2)

    # Lowe's ratio test
    ret = []
    idx1, idx2 = [], []
    idx1s, idx2s = set(), set()
    for m, n in matches:
        # Keep a match only if it is clearly better than the second-best candidate
        if m.distance < 0.75 * n.distance:
            p1 = f1.kps[m.queryIdx]
            p2 = f2.kps[m.trainIdx]
            # Also require the ORB (Hamming) distance to be below 32
            if m.distance < 32:
                # Record each keypoint index at most once
                if m.queryIdx not in idx1s and m.trainIdx not in idx2s:
                    idx1.append(m.queryIdx)
                    idx2.append(m.trainIdx)
                    idx1s.add(m.queryIdx)
                    idx2s.add(m.trainIdx)
                    ret.append((p1, p2))

    # No duplicate indices, and at least 8 correspondences for the 8-point algorithm
    assert len(set(idx1)) == len(idx1)
    assert len(set(idx2)) == len(idx2)
    assert len(ret) >= 8

    # Convert lists to numpy arrays
    ret = np.array(ret)
    idx1 = np.array(idx1)
    idx2 = np.array(idx2)

    # Estimate the transformation between the two frames with RANSAC
    model, inliers = ransac((ret[:, 0], ret[:, 1]),
                            EssentialMatrixTransform,
                            min_samples=8,
                            residual_threshold=RANSAC_RESIDUAL_THRES,
                            max_trials=RANSAC_MAX_TRIALS)

    # Report how many matches survive each stage, plus the fitted model
    print("Matches: %d -> %d -> %d -> %d" % (len(f1.des), len(matches), len(inliers), sum(inliers)))
    print("Model params", model.params)
    print("...fundamental to R-t", fundamentalToRt(model.params))

    # Return the inlier indices in each frame and the recovered pose
    return idx1[inliers], idx2[inliers], fundamentalToRt(model.params)
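# --- Sketch (assumption): RANSAC_RESIDUAL_THRES and RANSAC_MAX_TRIALS are configuration
# constants defined elsewhere in the project. The values below are only representative
# defaults for keypoints in normalized image coordinates, not the project's actual settings.
RANSAC_RESIDUAL_THRES = 0.02
RANSAC_MAX_TRIALS = 100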
def match_frames(frame_1: Frame, frame_2: Frame):
    bf = cv2.BFMatcher(cv2.NORM_HAMMING)
    matches = bf.knnMatch(frame_1.descriptors, frame_2.descriptors, k=2)

    # Lowe's ratio test
    ret = []
    idx1, idx2 = [], []
    idx1s, idx2s = set(), set()
    for m, n in matches:
        if m.distance < 0.75 * n.distance:
            p1 = frame_1.kps[m.queryIdx]
            p2 = frame_2.kps[m.trainIdx]
            # be within orb distance 32
            if m.distance < 32:
                # keep around indices
                # TODO: refactor this to not be O(N^2)
                if m.queryIdx not in idx1s and m.trainIdx not in idx2s:
                    idx1.append(m.queryIdx)
                    idx2.append(m.trainIdx)
                    idx1s.add(m.queryIdx)
                    idx2s.add(m.trainIdx)
                    ret.append((p1, p2))

    # no duplicates
    assert len(set(idx1)) == len(idx1)
    assert len(set(idx2)) == len(idx2)

    if len(ret) < 8:
        logger.warning("Skipping match of frame {} to frame {}".format(frame_1.id, frame_2.id))
        raise NoFrameMatchError

    ret = np.array(ret)
    idx1 = np.array(idx1)
    idx2 = np.array(idx2)

    # fit matrix
    model, inliers = ransac((ret[:, 0], ret[:, 1]),
                            EssentialMatrixTransform,
                            min_samples=8,
                            residual_threshold=RANSAC_RESIDUAL_THRES,
                            max_trials=RANSAC_MAX_TRIALS)

    logger.info("Quality: {}".format(np.mean(ret[:, 0] - ret[:, 1])))
    logger.info("Matches: %d -> %d -> %d -> %d" % (len(frame_1.descriptors), len(matches), len(inliers), sum(inliers)))

    return idx1[inliers], idx2[inliers], fundamentalToRt(model.params)
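# --- Sketch (assumption): Frame, logger and NoFrameMatchError are referenced above but not
# defined in this listing. A minimal exception plus a caller that skips unmatched frame
# pairs could look like this; the names and logging setup are illustrative only.
import logging

logger = logging.getLogger(__name__)


class NoFrameMatchError(Exception):
    """Raised when two frames share fewer than the 8 correspondences RANSAC needs."""


def try_match(frame_1, frame_2):
    # Hypothetical helper: skip frame pairs that cannot be matched instead of aborting.
    try:
        return match_frames(frame_1, frame_2)
    except NoFrameMatchError:
        return None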
def match_frames(f1, f2):
    bf = cv2.BFMatcher(cv2.NORM_HAMMING)
    matches = bf.knnMatch(f1.des, f2.des, k=2)

    # Lowe's ratio test
    ret = []
    idx1, idx2 = [], []
    idx1s, idx2s = set(), set()
    for m, n in matches:
        if m.distance < 0.75 * n.distance:
            p1 = f1.kps[m.queryIdx]
            p2 = f2.kps[m.trainIdx]
            # be within orb distance 32
            if m.distance < 32:
                # keep around indices
                # TODO: refactor this to not be O(N^2)
                if m.queryIdx not in idx1s and m.trainIdx not in idx2s:
                    idx1.append(m.queryIdx)
                    idx2.append(m.trainIdx)
                    idx1s.add(m.queryIdx)
                    idx2s.add(m.trainIdx)
                    ret.append((p1, p2))

    # no duplicates
    assert len(set(idx1)) == len(idx1)
    assert len(set(idx2)) == len(idx2)
    assert len(ret) >= 8

    ret = np.array(ret)
    idx1 = np.array(idx1)
    idx2 = np.array(idx2)

    # fit matrix
    model, inliers = ransac((ret[:, 0], ret[:, 1]),
                            FundamentalMatrixTransform,
                            min_samples=8,
                            residual_threshold=0.001,
                            max_trials=100)

    print("Matches: %d -> %d -> %d -> %d" % (len(f1.des), len(matches), len(inliers), sum(inliers)))
    return idx1[inliers], idx2[inliers], fundamentalToRt(model.params)
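# --- Sketch (assumption): fundamentalToRt is used above but not defined in this listing.
# With keypoints already normalized by the camera intrinsics, the fitted matrix behaves as
# an essential matrix, and the standard SVD-based recovery of a rotation R and a unit
# translation t looks roughly like this. The project's real helper may differ in details
# (sign conventions, picking among the four (R, t) candidates with a cheirality check).
def fundamental_to_Rt_sketch(E):
    W = np.array([[0.0, -1.0, 0.0],
                  [1.0, 0.0, 0.0],
                  [0.0, 0.0, 1.0]])
    U, d, Vt = np.linalg.svd(E)
    # Force proper rotations (determinant +1) before composing R.
    if np.linalg.det(U) < 0:
        U *= -1.0
    if np.linalg.det(Vt) < 0:
        Vt *= -1.0
    R = U.dot(W).dot(Vt)
    # If this twist is the wrong one of the pair, use W transposed instead.
    if np.trace(R) < 0:
        R = U.dot(W.T).dot(Vt)
    t = U[:, 2]
    # Pack into a 4x4 pose matrix; the scale of t is unrecoverable from E alone.
    Rt = np.eye(4)
    Rt[:3, :3] = R
    Rt[:3, 3] = t
    return Rt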