def generatePanorama(im1, im2):
    '''
    Returns a panorama of im1 and im2 without clipping.
    '''
    ######################################
    # TO DO ...

    locs1, desc1 = briefLite(im1)
    print(f'No of desc1 is {desc1.shape[0]}')

    locs2, desc2 = briefLite(im2)
    print(f'No of desc2 is {desc2.shape[0]}')

    matches = briefMatch(desc1, desc2)
    print(f'No of matches is {matches.shape[0]}')

    H2to1 = ransacH(matches, locs1, locs2, num_iter=10000, tol=1)

    #Save result
    np.save('../results/q6_1.npy', H2to1)

    #pano_im = imageStitching_mask(im1, im2, H2to1)

    pano_im = imageStitching(im1, im2, H2to1)
    pano_im = imageStitching_noClip(im1, im2, H2to1)

    cv2.imwrite('../results/q6_3.jpg', pano_im)

    return pano_im
Example #2
def generatePanorama(im1, im2):
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)
    H2to1 = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    pano_im_no_clip = imageStitching_noClip(im1, im2, H2to1)
    return pano_im_no_clip
Example #3
def generatePanaroma(im1, im2):
    '''
    Generate and save panorama of im1 and im2.

    INPUT
        im1 and im2 - two images for stitching
    OUTPUT
        Blends img1 and warped img2 (with no clipping) 
        and saves the panorama image.
    '''

    ######################################
    # TO DO ...
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)

    # Swap the first two columns of the keypoint locations so their coordinate
    # order matches what ransacH expects (converting between (row, col) and (x, y)).
    locs1[:, [0, 1]] = locs1[:, [1, 0]]
    locs2[:, [0, 1]] = locs2[:, [1, 0]]

    bestH = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    #print(bestH)
    output = imageStitching(im1, im2, bestH)
    #output=imageStitching_noClip(im1,im2,bestH)

    return output
Example #4
def generatePanorama(im1, im2):
    '''
    Accepts two images as input, computes keypoints and descriptors for 
    both the images, finds putative feature correspondences by matching keypoint
    descriptors, estimates a homography using RANSAC and then warps one of the
    images with the homography so that they are aligned and then overlays them

    [input]
    * im1 - Input image 1
    * im2 - Input image 2

    [output]
    * pano_im - Output panorama image
    '''

    # Compute keypoints and descriptors
    print('Computing feature descriptors for im1...')
    locs1, desc1 = briefLite(im1)

    print('Computing feature descriptors for im2...')
    locs2, desc2 = briefLite(im2)
    
    # Match keypoint descriptors
    matches = briefMatch(desc1, desc2)

    # Estimate homography
    H2to1 = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)

    # Align and blend the images to form the panorama
    pano_im = imageStitching_noClip(im1, im2, H2to1)
    return pano_im
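A minimal driver sketch (not part of the original example) showing how the generatePanorama above might be called; the file paths are borrowed from other snippets in this collection.

import cv2

left = cv2.imread('../data/incline_L.png')
right = cv2.imread('../data/incline_R.png')
pano = generatePanorama(left, right)
cv2.imwrite('../results/panoImg.jpg', pano)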
Example #5
def generatePanaroma(im1, im2):
    '''
    Generate and save panorama of im1 and im2.

    INPUT
        im1 and im2 - two images for stitching
    OUTPUT
        Blends img1 and warped img2 (with no clipping) 
        and saves the panorama image.
    '''

    ######################################
    # TO DO ...
    t = time.time()
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    print('Time to compute Brieflite: ' + str(time.time() - t))
    t = time.time()
    matches = briefMatch(desc1, desc2)
    print('Time to compute BriefMatch: ' + str(time.time() - t))
    t = time.time()
    H = ransacH(matches, locs1, locs2, num_iter=2000, tol=2)
    print('Time to compute Ransac: ' + str(time.time() - t))
    np.save('../results/q6_1.npy', H)
    return imageStitching_noClip(im1, im2, H)
Example #6
def generatePanaroma(img1, img2):
    im1 = cv2.imread(img1)
    im2 = cv2.imread(img2)
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)
    H2to1 = ransacH(matches, locs1, locs2, num_iter=10000, tol=2)
    return imageStitching_noClip(im1, im2, H2to1)
def generatePanorama(im1, im2):
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)
    H2to1 = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    pano_im = imageStitching_noClip(im1, im2, H2to1)

    cv2.imwrite('../results/q6_3.jpg', pano_im)
    return pano_im
Example #8
def generatePanorama(im1, im2):

    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches, m = briefMatch(desc1, desc2)

    m, n = matches.shape
    H2to1 = ransacH(matches, m, locs1, locs2, num_iter=5000, tol=2)

    # Compute M, the translation and scaling matrix
    first_1 = np.array([[1064], [576], [1]])
    first_2 = np.array([[0], [0], [1]])
    first_3 = np.array([[0], [576], [1]])
    first_4 = np.array([[1064], [0], [1]])

    second_1 = np.matmul(H2to1, first_1)
    n4 = second_1[2, 0]
    second_1 = second_1 / n4
    print(second_1)

    second_2 = np.matmul(H2to1, first_2)
    n4 = second_2[2, 0]
    second_2 = second_2 / n4
    print(second_2)

    second_3 = np.matmul(H2to1, first_3)
    n4 = second_3[2, 0]
    second_3 = second_3 / n4
    print(second_3)

    second_4 = np.matmul(H2to1, first_4)
    n4 = second_4[2, 0]
    second_4 = second_4 / n4
    print(second_4)

    # Through geometry
    image_width = second_1[1] - second_4[1]
    image_height = second_4[0] - second_3[0]

    # 600 and 1500 are hand-picked canvas distances (arbitrary values)
    translation_height = image_height - 600
    translation_width = 1500 - image_width

    # im1 = cv2.warpPerspective(im1,np.eye(3),(3000,600))

    M = np.eye(3) + np.array([[.85, 0, translation_height], [0, .85, 400],
                              [0, 0, 1]])

    warp_im_1 = cv2.warpPerspective(im1, M, (1714, 815))
    warp_im_2 = cv2.warpPerspective(im2, np.matmul(M, H2to1), (1714, 815))
    im3 = np.maximum(warp_im_1, warp_im_2)

    return im3
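Example #8 hard-codes the source image size (1064x576) and the output canvas; below is a hedged, more general sketch of the same corner-projection idea. The helper name compute_noclip_transform is hypothetical, and it assumes H2to1 maps im2 pixel coordinates, in (x, y) order, into im1's frame.

import numpy as np
import cv2

def compute_noclip_transform(im1, im2, H2to1):
    # Hypothetical helper: warp the four corners of im2 into im1's frame,
    # then build a translation M and an output size so nothing is clipped.
    h2, w2 = im2.shape[:2]
    corners = np.array([[0, w2 - 1, w2 - 1, 0],
                        [0, 0, h2 - 1, h2 - 1],
                        [1, 1, 1, 1]], dtype=np.float64)
    warped = H2to1 @ corners
    warped = warped[:2] / warped[2]                 # back to inhomogeneous coords
    h1, w1 = im1.shape[:2]
    xs = np.concatenate([warped[0], [0, w1 - 1]])   # include im1's own corners
    ys = np.concatenate([warped[1], [0, h1 - 1]])
    tx, ty = max(0, -xs.min()), max(0, -ys.min())
    out_size = (int(np.ceil(xs.max() + tx)), int(np.ceil(ys.max() + ty)))
    M = np.array([[1, 0, tx], [0, 1, ty], [0, 0, 1]], dtype=np.float64)
    return M, out_size

# Possible use, mirroring the np.maximum blend above:
# M, out_size = compute_noclip_transform(im1, im2, H2to1)
# pano = np.maximum(cv2.warpPerspective(im1, M, out_size),
#                   cv2.warpPerspective(im2, M @ H2to1, out_size))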
def generatePanorama(im1, im2):
    '''
    Returns a panorama of im1 and im2 without clipping.
    '''
    ######################################

    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)
    bestH = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    blended_image = imageStitching(im1, im2, bestH)
    panorama = imageStitching_noClip(im1, im2, bestH)
    cv2.imwrite('../results/q6_3.jpg', panorama)
    cv2.imshow('6.3p', panorama)
    cv2.waitKey(0)
    return panorama
def generatePanorama(im1, im2):
    '''
    Returns a panorama of im1 and im2 without clipping.
    '''
    ######################################
    # TO DO ...

    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)
    H2to1 = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    pano_im = imageStitching_noClip(im1, im2, H2to1)

    return pano_im
Example #11
def perform_match(original_image, rotation_image, display=False):
    locs1, desc1 = briefLite(original_image)
    locs2, desc2 = briefLite(rotation_image)
    matches = briefMatch(desc1, desc2)
    if display:
        plotMatches(original_image, rotation_image, matches, locs1, locs2)
    original_points = []
    rotation_points = []
    for item in matches:
        point1 = locs1[item[0]][:-1]
        point2 = locs2[item[1]][:-1]
        original_points.append(point1)
        rotation_points.append(point2)
    return np.array(original_points), np.array(rotation_points)
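As a hedged usage sketch (not part of the original example), the point arrays returned by perform_match could be fed to OpenCV's built-in RANSAC homography estimator; whether the coordinates come out in (x, y) order depends on how briefLite stores locs.

import numpy as np
import cv2

original_image = cv2.imread('../data/model_chickenbroth.jpg')
rotation_image = cv2.imread('../data/model_chickenbroth.jpg')  # stand-in second view
pts1, pts2 = perform_match(original_image, rotation_image)
# cv2.findHomography runs its own RANSAC; here it maps pts2 into pts1's frame.
H, inlier_mask = cv2.findHomography(pts2.astype(np.float64),
                                    pts1.astype(np.float64),
                                    cv2.RANSAC, 2.0)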
Example #12
def generatePanorama(im1, im2):
    """ This is to generate the panorama given im1 and im2 by detecting and
        matching keypoints, calculating homography with RANSAC.

    Args:
      im1: input image1 in numpy.array with size [H, W, 3]
      im2: input image2 in numpy.array with size [H, W, 3]
    Returns:
      im3: stitched panorama in numpy.array.
    """
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)
    H2to1 = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    im3 = imageStitching_noClip(im1, im2, H2to1)

    return im3
Example #13
def generatePanorama(im1, im2):
    # computes keypoints and descriptors for both the images
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)

    # finds putative feature correspondences by matching keypoint descriptors
    matches = briefMatch(desc1, desc2)

    # estimates a homography using RANSAC
    H2to1 = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)

    # warps one of the images with the homography so that they are aligned and then overlays them.
    pano_im = imageStitching_noClip(im1, im2, H2to1)

    # save and display panorama
    cv2.imwrite('../results/panoImg.jpg', pano_im)
    cv2.imshow('panoramas', pano_im)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
Example #14
def main(args):
    im1 = cv2.imread(args.im1)
    im2 = cv2.imread(args.im2)

    # Compute BRIEF descriptors
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)

    # Match descriptors
    matches = briefMatch(desc1, desc2)

    # Estimate the best homography H
    H2to1, _ = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)

    # Stitch images together
    pano_im = imageStitching_noClip(im1, im2, H2to1)

    # Save and show
    cv2.imwrite('../results/panoImg.jpg', pano_im)
def generatePanaroma(im1, im2):
    '''
    Generate and save panorama of im1 and im2.

    INPUT
        im1 and im2 - two images for stitching
    OUTPUT
        Blends img1 and warped img2 (with no clipping) 
        and saves the panorama image.
    '''

    ######################################
    # TO DO ...
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc2, desc1)
    H = np.load('../results/q6_1.npy')
    #H = ransacH(matches, locs2, locs1, num_iter=5000, tol=2)
    im3 = imageStitching_noClip(im1, im2, H)
    cv2.imwrite('../results/panorama.png', im3)
def generatePanaroma(im1, im2):
    '''
    Generate and save panorama of im1 and im2.

    INPUT
        im1 and im2 - two images for stitching
    OUTPUT
        Blends img1 and warped img2 (with no clipping) 
        and saves the panorama image.
    '''

    ######################################
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc1, desc2)
    H2to1 = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    pano_im = imageStitching_noClip(im1, im2, H2to1)
    cv2.imwrite('../results/6_3_stitched.jpg', pano_im)

    return pano_im
Example #17
def get_brief_rot_accuracy(im, degree):
    im_rot, M = get_rotated_image(im, degree)

    locs1, desc1 = briefLite(im)
    locs2, desc2 = briefLite(im_rot)
    matches = briefMatch(desc1, desc2)

    im_rot_matched_locs = locs2[matches[:, 1], :]
    # homogeneous coord
    im_rot_matched_locs[:, 2] = 1
    # (2*N)
    im_rot_reprojected_locs = cv2.invertAffineTransform(M) @ np.transpose(
        im_rot_matched_locs, (1, 0))
    # (N*2)
    im_rot_reprojected_locs = im_rot_reprojected_locs.transpose((1, 0))
    # (N*2)
    im_matched_locs = locs1[matches[:, 0], :][:, 0:2]

    dists = np.sum((im_matched_locs - im_rot_reprojected_locs)**2, axis=1)
    n_correct = np.sum(dists < 25)
    n_matches = matches.shape[0]

    accuracy = n_correct * 1.0 / n_matches
    return accuracy
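get_rotated_image is not defined in this snippet; below is a minimal sketch of what it presumably does, given that the returned M is a 2x3 affine matrix later inverted with cv2.invertAffineTransform (the exact implementation is an assumption).

import cv2

def get_rotated_image(im, degree):
    # Assumed implementation: rotate about the image center and return both the
    # rotated image and the 2x3 affine matrix M, which the accuracy check above
    # uses to reproject matched keypoints back into the original image.
    h, w = im.shape[:2]
    M = cv2.getRotationMatrix2D((w / 2, h / 2), degree, 1.0)
    im_rot = cv2.warpAffine(im, M, (w, h))
    return im_rot, M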

if __name__ == '__main__':

    im1 = cv2.imread('../data/incline_L.png')
    im2 = cv2.imread('../data/incline_R.png')
    locs1, desc1 = briefLite(im1)
    locs2, desc2 = briefLite(im2)
    matches = briefMatch(desc2, desc1)

    #H = ransacH(matches, locs2, locs1, num_iter=5000, tol=2)
    #np.save('../results/q6_1.npy', H)
    #H = np.load('H2to1.npy')
    #cv2.warpPerspective(color_warp, Minv, (image.shape[1], image.shape[0]))
    #im3 = imageStitching_noClip(im1, im2, H)
    #im3 = cv2.warpPerspective(im2, H, (im2.shape[1], im2.shape[0]))
    #cv2.imshow('Pyramid of image', im3)
    #cv2.waitKey(0)
    generatePanaroma(im1, im2)
def plot_bar_chart(matches_num, label):
    # this is for plotting purpose
    index = np.arange(len(label))
    plt.bar(index, matches_num)
    plt.xlabel('Angle')
    plt.ylabel('No. of Matches')
    plt.xticks(index, label, fontsize=7, rotation=30)
    plt.title('Number of matches at each angle')
    plt.show()


if __name__ == '__main__':

    print('-------------------------   IMAGE 1  -----------------------------')
    im1 = cv2.imread('../data/model_chickenbroth.jpg')
    locs1, desc1 = briefLite(im1)

    print('-------------------------   IMAGE 2  -----------------------------')
    im2 = cv2.imread('../data/model_chickenbroth.jpg')

    angles = []
    matches_num = []

    for angle in range(0, 361, 10):
        print('angle', angle)
        im_rotated = rotateImage(im2, angle)
        locs2, desc2 = briefLite(im_rotated)

        matches = briefMatch(desc1, desc2)
        print('matches shape: ', matches.shape)
        # plotMatches(im1, im_rotated, matches, locs1, locs2)

        angles.append(angle)
        matches_num.append(matches.shape[0])

    plot_bar_chart(matches_num, angles)
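rotateImage is not defined in this snippet either; here is a minimal sketch under the assumption that it rotates the image about its center by the given angle in degrees.

import cv2

def rotateImage(im, angle):
    # Assumed helper: rotate im about its center by `angle` degrees,
    # keeping the original width and height.
    h, w = im.shape[:2]
    R = cv2.getRotationMatrix2D((w / 2, h / 2), angle, 1.0)
    return cv2.warpAffine(im, R, (w, h))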
import numpy as np
import cv2
import os
from scipy.spatial.distance import cdist
from keypointDetect import DoGdetector
from BRIEF import briefLite
from BRIEF import briefMatch
import matplotlib.pyplot as plt

if __name__ == '__main__':

    im3 = cv2.imread(
        '/home/geekerlink/Desktop/Computer Vision/Homeworks/hw3/data/model_chickenbroth.jpg'
    )
    locs3, desc3 = briefLite(im3)
    (h, w) = im3.shape[:2]
    centre = (w / 2, h / 2)
    scale = 1
    num_of_matches = []
    rotation_angles = []
    for angle in range(0, 360, 10):
        rotation = cv2.getRotationMatrix2D(centre, angle, scale)
        im3_rotated = cv2.warpAffine(im3, rotation, (w, h))
        locs4, desc4 = briefLite(im3_rotated)
        matches = briefMatch(desc3, desc4)
        num_of_matches.append(matches.shape[0])
        rotation_angles.append(angle)

    plt.bar(rotation_angles, num_of_matches, width=3.5, align='center')
    plt.xlabel('angle')
    plt.ylabel('number of matches')
    plt.show()
Example #21
import numpy as np
import cv2
import matplotlib.pyplot as plt

from BRIEF import briefLite, briefMatch

im = cv2.imread('../data/model_chickenbroth.jpg')

inc = 10
#cv2.getRotationMatrix2D()
#cv2.warpAffine()

h, w = im.shape[:2]
# cv2.getRotationMatrix2D expects the center as (x, y), i.e. (w/2, h/2)
centerX, centerY = (w // 2, h // 2)
center = (centerX, centerY)

angle = np.arange(0, 360, 10, dtype=int)
numAngles = len(angle)
numMatches = np.zeros(numAngles, dtype=int)
locs1, desc1 = briefLite(im)
#locs2, desc2 = briefLite(im2)
#matches = briefMatch(desc1, desc2)

for i in range(numAngles):
    currAngle = angle[i]

    a = cv2.getRotationMatrix2D(center, currAngle, 1.0)
    rotIm = cv2.warpAffine(im, a, (w, h))  # dsize is (width, height)
    locs2, desc2 = briefLite(rotIm)
    matches = briefMatch(desc1, desc2)

    numMatches[i] = len(matches)

plt.bar(angle, numMatches, align='center', alpha=0.5)
plt.show()
        # Normalize the x,y coordinate according to z
        normal = np.matlib.repmat(im1_pred_kps[2, :], 2, 1)
        im1_pred_kps = np.divide(im1_pred_kps[0:2, :], normal)

        inline = 0

        # for every predicted point, compute the distance between it and the original points
        for j in range(4):
            dist = np.linalg.norm(im1_pred_kps[:, j] - im1_orig_kps[0:2, j])
            if dist < tol:
                inline += 1

        inline_all_H[i, 0] = inline

    bestH = all_H[np.argmax(inline_all_H), :, :]

    return bestH
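The fragment above is only the tail of a ransacH implementation and scores just the 4 sampled correspondences. For context, here is a hedged sketch of the standard loop implied by the ransacH(matches, locs1, locs2, num_iter, tol) signature used throughout these examples; computeH is an assumed helper that fits a homography to point correspondences, and the (x, y) ordering of locs is also an assumption.

import numpy as np

def ransacH(matches, locs1, locs2, num_iter=5000, tol=2):
    # Sketch only: matches[:, 0]/matches[:, 1] index into locs1/locs2, and the
    # first two columns of locs hold the keypoint coordinates (convention assumed).
    p1 = locs1[matches[:, 0], :2].astype(np.float64)   # N x 2 points in im1
    p2 = locs2[matches[:, 1], :2].astype(np.float64)   # N x 2 points in im2
    p2_h = np.vstack([p2.T, np.ones(p2.shape[0])])     # 3 x N homogeneous
    bestH, best_count = np.eye(3), -1
    for _ in range(num_iter):
        idx = np.random.choice(p1.shape[0], 4, replace=False)
        H = computeH(p1[idx].T, p2[idx].T)              # assumed helper, 2 x 4 inputs
        proj = H @ p2_h
        proj = proj[:2] / proj[2]                       # back to inhomogeneous coords
        dists = np.linalg.norm(proj.T - p1, axis=1)
        count = np.sum(dists < tol)
        if count > best_count:                          # keep the H with the most inliers
            bestH, best_count = H, count
    return bestH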


if __name__ == '__main__':
    compareX, compareY = makeTestPattern()
    im1 = cv2.imread('../data/incline_L.png')
    im2 = cv2.imread('../data/incline_R.png')
    locs1, desc1 = briefLite(im1, compareX, compareY)
    locs2, desc2 = briefLite(im2, compareX, compareY)
    matches = briefMatch(desc1, desc2)
    plotMatches(im1, im2, matches, locs1, locs2)
    bestH = ransacH(matches, locs1, locs2, num_iter=5000, tol=2)
    np.save('../results/q6_1', bestH)
    print(1)