print(type(Images))
ref_idx, idx, max_img = 1, 0, len(Images)
print("Image {} chosen as the reference".format(ref_idx))

pano = np.asarray(Images[ref_idx])
pt_ref, descri_ref = sift_test(Images[ref_idx])

while True:
    if len(Images) == idx:
        break
    if idx == ref_idx:
        idx += 1
        continue
    img = Images[idx]
    pts, descr = sift_test(img)
    M = SIFTSimpleMatcher(descr, descri_ref, Thre)
    if len(M) < 3:
        # Too few matches against the current panorama: defer this image to the
        # end of the queue, then advance (otherwise the loop never moves on).
        Images.update({max_img: img})
        max_img += 1
        idx += 1
        continue
    H = RANSACFit(pts, pt_ref, M)
    pano = PairStitch(Image.fromarray(np.asarray(img)), Image.fromarray(pano), H,
                      save=False, get_array=True)
    pt_ref, descri_ref = sift_test(pano)
    pano = np.asarray(pano)
    idx += 1

result = Image.fromarray(pano)
result.save(saveFileName)
print('The completed file has been saved as ' + saveFileName)
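# A hedged sketch of the sift_test helper used above, assuming it wraps
# cyvlfeat's sift the same way the later scripts do (grayscale conversion,
# descriptor extraction, coordinate-column swap); the project's actual helper
# may differ. It accepts either a PIL image or a numpy array, since the loop
# above passes both the original images and the stitched panorama.
import numpy as np
from PIL import Image
from cyvlfeat.sift import sift

def sift_test_sketch(img):
    if isinstance(img, np.ndarray):
        img = Image.fromarray(img)
    I = np.asarray(img.convert('L')).astype('single')            # rgb2gray
    f, d = sift(I, compute_descriptor=True, float_descriptors=True)
    points = f[:, 0:2][:, ::-1]                                   # keypoint centers, columns swapped
    return points, d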
# (Inside the image-loading loop: downscale large inputs before storing them.)
if max(img.size) > 1000 or len(imgList) > 10:
    img.thumbnail((np.asarray(img.size) * RESIZE).astype('int'), Image.ANTIALIAS)
Images.update({idx: img})

print('Images loaded. Beginning feature detection...')

#%% Feature detection
Descriptor = {}
PointInImg = {}
for idx, (key, img) in enumerate(sorted(Images.items())):
    I = np.asarray(img.convert('L')).astype('single')   # rgb2gray
    [f, d] = sift(I, compute_descriptor=True, float_descriptors=True)
    pointsInImage = swapcolumn(f[:, 0:2])                # keypoint centers, coordinate columns swapped
    PointInImg.update({idx: pointsInImage})
    Descriptor.update({idx: d})

#%% Compute Transformation
Transform = {}
for idx in range(len(imgList) - 1):
    print('fitting transformation from ' + str(idx) + ' to ' + str(idx + 1) + '\t')
    M = SIFTSimpleMatcher(Descriptor[idx], Descriptor[idx + 1], Thre)
    print('matching points:', len(M), '\n')
    Transform.update({idx: RANSACFit(PointInImg[idx], PointInImg[idx + 1], M)})

#%% Make Panoramic image
print('Stitching images...')
MultipleStitch(Images, Transform, saveFileName)
print('The completed file has been saved as ' + saveFileName)
plt.imshow(Image.open(saveFileName))
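# A minimal sketch of the nearest-neighbour ratio test that SIFTSimpleMatcher
# presumably performs with threshold Thre; this is an assumption about its
# behaviour, not the project's actual implementation.
import numpy as np

def simple_matcher_sketch(descr1, descr2, thre=0.7):
    """Return an N x 2 array of index pairs (i into descr1, j into descr2)."""
    matches = []
    for i, d in enumerate(descr1):
        dist = np.linalg.norm(descr2 - d, axis=1)   # distance to every descriptor in image 2
        order = np.argsort(dist)
        best, second = dist[order[0]], dist[order[1]]
        if best < thre * second:                    # accept only clearly unambiguous matches
            matches.append([i, order[0]])
    return np.asarray(matches)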
    - descriptors : F x 128 array
        F is the number of keypoints (frames) used. One 128-dimensional vector per
        keypoint, uint8 by default. Only returned if compute_descriptor=True.
    Ref: https://github.com/menpo/cyvlfeat/blob/master/cyvlfeat/sift/sift.py
    '''
I = np.asarray(img1.convert('L')).astype('single')   # rgb2gray
[f, desc1] = sift(I, compute_descriptor=True, float_descriptors=True)
pointsInImage1 = swapcolumn(f[:, 0:2])

I = np.asarray(img2.convert('L')).astype('single')   # rgb2gray
[f, desc2] = sift(I, compute_descriptor=True, float_descriptors=True)
pointsInImage2 = swapcolumn(f[:, 0:2])

#%% Matching
M = SIFTSimpleMatcher(desc1, desc2)

#%% Transformation
maxIter = 500
maxInlierErrorPixels = 0.01 * len(np.asarray(img1))    # inlier error budget: 1% of the image height
seedSetSize = np.max([3, np.ceil(0.5 * len(M))])
minInliersForAcceptance = np.ceil(0.2 * len(M))
H = RANSACFit(pointsInImage1, pointsInImage2, M, maxIter,
              seedSetSize, maxInlierErrorPixels, minInliersForAcceptance)

#%% Make Panoramic image
Pano = PairStitch(img1, img2, H, saveFileName)
print('Panorama was saved as', saveFileName)
plt.imshow(Image.open(saveFileName))
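# A hedged sketch of the RANSAC fit driven by the parameters above, assuming
# RANSACFit estimates an affine transform by repeatedly fitting a random seed
# set and keeping the model with the most inliers; the project's actual
# routine (error metric, seeding, refitting) may differ.
import numpy as np

def ransac_fit_sketch(pts1, pts2, matches, max_iter=500, seed_size=3,
                      max_error=10.0, min_inliers=10):
    """pts1/pts2: K x 2 keypoints, matches: N x 2 index pairs; returns a 3x3 matrix."""
    src = np.hstack([pts1[matches[:, 0]], np.ones((len(matches), 1))])   # N x 3 homogeneous points
    dst = pts2[matches[:, 1]]                                            # N x 2 target points
    rng = np.random.default_rng(0)
    best_inliers, best_count = None, -1
    for _ in range(max_iter):
        sample = rng.choice(len(matches), size=int(seed_size), replace=False)
        A, *_ = np.linalg.lstsq(src[sample], dst[sample], rcond=None)    # 3 x 2 affine parameters
        err = np.linalg.norm(src @ A - dst, axis=1)                      # per-match reprojection error
        inliers = err < max_error
        if inliers.sum() > best_count:
            best_count, best_inliers = inliers.sum(), inliers
    if best_count < min_inliers:
        raise RuntimeError('too few inliers for a reliable transform')
    # Refit on all inliers of the best model and return it as a 3x3 matrix.
    A, *_ = np.linalg.lstsq(src[best_inliers], dst[best_inliers], rcond=None)
    H = np.eye(3)
    H[:2, :] = A.T
    return H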