import os

import cv2

MIN_MATCH_COUNT = 10  # minimum number of ratio-test matches; value assumed, the original defines it elsewhere


def getViaSift():
    # grab the current camera frame (getImageFromCam is defined elsewhere) and convert to grayscale
    img2 = cv2.cvtColor(getImageFromCam(), cv2.COLOR_BGR2GRAY)  # trainImage

    # Initiate SIFT detector
    sift = cv2.xfeatures2d.SIFT_create()

    # find the keypoints and descriptors with SIFT
    kp2, des2 = sift.detectAndCompute(img2, None)

    # FLANN matcher with a KD-tree index
    FLANN_INDEX_KDTREE = 0
    index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
    search_params = dict(checks=50)
    flann = cv2.FlannBasedMatcher(index_params, search_params)

    # match every stored image against the camera frame
    mypath = 'imdata'
    fileList = [f for f in os.listdir(mypath)
                if os.path.isfile(os.path.join(mypath, f))]

    for k in fileList:
        img1 = cv2.imread(os.path.join(mypath, k), 0)  # queryImage
        kp1, des1 = sift.detectAndCompute(img1, None)
        matches = flann.knnMatch(des1, des2, k=2)

        # store all the good matches as per Lowe's ratio test
        good = []
        for m, n in matches:
            if m.distance < 0.7 * n.distance:
                good.append(m)

        # the file name encodes the label before the first underscore
        if len(good) > MIN_MATCH_COUNT:
            return k.split('_')[0]

    return "Cannot Detect"
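# Optional refinement (not part of the original function): after Lowe's ratio
# test, a geometric check can reject accidental matches by fitting a homography
# with RANSAC and counting inliers. This is a sketch; `kp1`, `kp2` and `good`
# are the per-image keypoints and ratio-test matches from inside the loop above,
# and min_inliers is an assumed threshold.
import numpy as np


def passes_homography_check(kp1, kp2, good, min_inliers=10):
    if len(good) < 4:  # at least 4 correspondences are needed to fit a homography
        return False
    src_pts = np.float32([kp1[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
    dst_pts = np.float32([kp2[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
    H, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
    if mask is None:
        return False
    return int(mask.sum()) >= min_inliers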
# Fragment of the database-indexing script: `args` (the argparse result) and
# `sift` (the SIFT detector) are defined earlier in the full script.
import os
import pickle

import cv2
from tqdm import tqdm

database_images = args.d
descriptor_file = args.l
load_descriptor = args.load

database_descriptors = {}
name_list = []

if load_descriptor == 'Y' or load_descriptor == 'y':
    print("Loading pre-computed descriptors")
    assert os.path.exists(descriptor_file)
    with open(descriptor_file, 'rb') as file:
        database_descriptors = pickle.load(file)
else:
    # compute SIFT descriptors for every database image and cache them to disk
    for image_name in tqdm(os.listdir(database_images)):
        img = cv2.imread(os.path.join(database_images, image_name), 0)
        keypoint, descriptor = sift.detectAndCompute(img, None)
        img_name, img_extension = os.path.splitext(image_name)
        database_descriptors[img_name] = descriptor
        name_list.append(img_name)
    with open(descriptor_file, 'wb') as file:
        pickle.dump(database_descriptors, file)

print("--Database descriptors loaded--")

correct_count = 0
for query_name in tqdm(os.listdir(query_images)):  # query_images is set from the script's arguments
    max_count_name = None
    max_count = 0
    query = cv2.imread(os.path.join(query_images, query_name), 0)
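# The evaluation loop above is cut off. For reference, a self-contained sketch of
# the per-query step it presumably performs: FLANN knnMatch against every cached
# descriptor set plus Lowe's 0.7 ratio test, keeping the database image with the
# most good matches. The function name and return convention are illustrative,
# not from the original script.
def best_database_match(query_descriptors, database_descriptors):
    """Return (best_name, good_match_count) for one query against the database."""
    index_params = dict(algorithm=0, trees=5)   # FLANN_INDEX_KDTREE
    search_params = dict(checks=50)
    flann = cv2.FlannBasedMatcher(index_params, search_params)
    best_name, best_count = None, 0
    for name, db_desc in database_descriptors.items():
        matches = flann.knnMatch(query_descriptors, db_desc, k=2)
        good = [m for m, n in matches if m.distance < 0.7 * n.distance]
        if len(good) > best_count:
            best_name, best_count = name, len(good)
    return best_name, best_count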
# Fragment of the query-matching script: `args` (the argparse result) and
# `sift` (the SIFT detector) are defined earlier in the full script.
import pickle

import cv2

GOOD_MATCH_THRESH = 10

descriptor_file = args.l
query_image = args.q

matches_track = None
database_image = None
distances_track = list()

with open(descriptor_file, 'rb') as file:
    database_descriptors = pickle.load(file)

img_q = cv2.imread(query_image)
img_q_gray = cv2.imread(query_image, 0)

# using the SIFT algorithm to generate keypoints and descriptors
keypoints_1, descriptors_1 = sift.detectAndCompute(img_q_gray, None)

# FLANN matcher with a KD-tree index, built once and reused for every database entry
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
search_params = dict(checks=100)
flann = cv2.FlannBasedMatcher(index_params, search_params)

max_count = 0
for database_name in database_descriptors.keys():
    d_desc = database_descriptors[database_name]
    matches = flann.knnMatch(descriptors_1, d_desc, k=2)

    # keep the matches that pass Lowe's ratio test
    distances = list()
    counts = list()
    for i, (m, n) in enumerate(matches):
        if m.distance < 0.7 * n.distance:
            distances.append(m)
            counts.append(len(distances))

    if len(distances) != 0:
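        # The original script is cut off at the `if` above. The block below is a
        # plausible sketch of its body, based on the bookkeeping variables set up
        # earlier (max_count, distances_track, matches_track, database_image); it
        # is an assumption, not recovered code.
        if len(distances) > max_count:
            max_count = len(distances)
            distances_track = distances
            matches_track = matches
            database_image = database_name

# A sketch of the final decision step, assuming GOOD_MATCH_THRESH is the cut-off
# on the number of ratio-test matches needed to trust the best candidate:
if max_count > GOOD_MATCH_THRESH:
    print("Best match:", database_image, "with", max_count, "good matches")
else:
    print("No confident match (best candidate had", max_count, "good matches)")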
import cv2

img = cv2.imread('E:/Plots/myplot4.png')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

# SIFT keypoints
sift = cv2.xfeatures2d.SIFT_create()
(kps, descs) = sift.detectAndCompute(gray, None)
# pass None as the output image so each detector draws on a fresh canvas
img1 = cv2.drawKeypoints(gray, kps, None,
                         flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow('SIFT_Algorithm', img1)
cv2.waitKey(0)  # waitKey services the HighGUI window; pylab.show() would not display it

# SURF keypoints
surf = cv2.xfeatures2d.SURF_create()
(kps2, descs2) = surf.detectAndCompute(gray, None)
img2 = cv2.drawKeypoints(gray, kps2, None,
                         flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow('SURF_Algorithm', img2)
cv2.waitKey(0)

# FAST keypoints (detector only, no descriptors)
fast = cv2.FastFeatureDetector_create()
kps3 = fast.detect(gray, None)
img3 = cv2.drawKeypoints(gray, kps3, None,
                         flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
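# The original script is cut off after computing img3; presumably the FAST result
# is displayed the same way as the others:
cv2.imshow('FAST_Algorithm', img3)
cv2.waitKey(0)

# Note: SURF only ships in the opencv-contrib xfeatures2d module (and needs a build
# with the non-free algorithms enabled), while SIFT has been part of the main module
# as cv2.SIFT_create() since OpenCV 4.4. If xfeatures2d is unavailable, ORB is a
# patent-free alternative included in every OpenCV build; a minimal sketch reusing
# the `gray` image loaded above:
orb = cv2.ORB_create()
(kps4, descs4) = orb.detectAndCompute(gray, None)
img4 = cv2.drawKeypoints(gray, kps4, None,
                         flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow('ORB_Algorithm', img4)
cv2.waitKey(0)
cv2.destroyAllWindows()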