Example #1
File: tp1.py Project: NelleV/ROVAR
def oxford():
    mem = Memory(cachedir='.')
    image1 = mean(imread('keble_a.jpg'), 2)[::-1].astype(float)
    image2 = mean(imread('keble_b.jpg'), 2)[::-1].astype(float)
    image3 = mean(imread('keble_c.jpg'), 2)[::-1].astype(float)

    # FIXME: we assume that image1.shape == image2.shape

    # Detect Harris corners in image1 (cached) and build keypoint frames
    coords1 = mem.cache(detect_harris_detector)(image1, threshold=.995)
    key_points1 = utils.create_frames_from_harris_points(coords1)

    # Detect Harris corners in image2 (cached) and build keypoint frames
    coords2 = mem.cache(detect_harris_detector)(image2, threshold=.995)
    key_points2 = utils.create_frames_from_harris_points(coords2)

    # Harris corners for image3, with a stricter threshold
    coords3 = mem.cache(detect_harris_detector)(image3, threshold=.999)
    key_points3 = utils.create_frames_from_harris_points(coords3)


    # Optionally rearrange key_points2 so its columns pair up with the closest
    # keypoints in key_points1 (Hungarian assignment); disabled here
    if 0:
        import hungarian
        dist = euclidean_distances(key_points1.T, key_points2.T)
        ordering = mem.cache(hungarian.hungarian)(dist)[:, 1]
        key_points2 = key_points2[:, ordering]

    # Get SIFT descriptors for image1 (the Harris frames are not passed here)
    f1, d1 = mem.cache(vl_sift)(np.array(image1, 'f', order='F'))
    f1, d1 = f1.transpose(), d1.transpose()

    # Get SIFT descriptors for image2 (the Harris frames are not passed here)
    f2, d2 = mem.cache(vl_sift)(np.array(image2, 'f', order='F'))
    f2, d2 = f2.transpose(), d2.transpose()

#    f3, d3 = mem.cache(vl_sift)(np.array(image3, 'f', order='F'),
#                     frames=key_points3,
#                     orientations=False)
#    f3, d3 = f3.transpose(), d3.transpose()
#

    matched_desc, matches_d = match_descriptors(d1, d2, f1, f2)
    matched_desc = np.array(matched_desc)
    matches_d = np.array(matches_d)
    match_image = show_matched_desc(image1.copy(), image2.copy(), matched_desc.copy())
    return match_image
    # NOTE: everything below is unreachable because of the early return above,
    # and it uses f3, whose extraction is commented out.

    image1 = show_sift_desc(image1, f1)
    image2 = show_sift_desc(image2, f2)
    image3 = show_sift_desc(image3, f3)

#    points12 = np.array([[340, 38, 51, 23],
#                         [359, 340, 62, 337],
#                         [691, 286, 395, 286],
#                         [655, 116, 367, 128]])
    # Hard-coded point correspondences between image1 and image2 (one row per pair)
    points12 = np.array([[38, 340, 23, 51],
                         [340, 359, 337, 62],
                         [286, 691, 286, 395],
                         [116, 655, 128, 367]])

    em12 = stitchLR(image1, image2, points12)
    #return em12

    points32 = np.array([[621, 18, 323, 35],
                         [323, 44, 11, 22],
                         [435, 398, 125, 400],
                         [349, 360, 34, 363],
                         [653, 336, 344, 340]])

    points23 = np.array([[35, 323, 18, 621],
                         [22, 11, 44, 323],
                         [400, 125, 398, 435],
                         [363, 34, 360, 349],
                         [340, 344, 336, 653]])


    em23 = stitchRL(image2, image3, points23)
    # Paste the two partial panoramas (each 500 px wider than image2)
    # onto a common canvas 1000 px wider than image2, offset by 500 px
    em = np.zeros((image2.shape[0], image2.shape[1] + 1000))
    em[:, :image2.shape[1] + 500] = em12
    em[:, 500:] = em23

    imsave("oxford.eps", em)
    return em
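Note: detect_harris_detector, match_descriptors, show_matched_desc and the other helpers above are defined elsewhere in the NelleV/ROVAR project and are not shown here. As a rough illustration only, a nearest-neighbour matcher with Lowe's ratio test along the lines of what match_descriptors(d1, d2, f1, f2) could do might look like the sketch below; the function name, the ratio threshold and the returned layout are assumptions, not the project's code.

import numpy as np

def match_descriptors_sketch(d1, d2, f1, f2, ratio=0.8):
    # For every descriptor in d1, find its two nearest neighbours in d2 and
    # keep the match only if it passes Lowe's ratio test.
    matched_coords, matches = [], []
    d2 = np.asarray(d2, dtype=float)
    for i, desc in enumerate(np.asarray(d1, dtype=float)):
        dists = np.linalg.norm(d2 - desc, axis=1)
        best, second = np.argsort(dists)[:2]
        if dists[best] < ratio * dists[second]:
            # Store the (x, y) positions of the matched frames and the index pair
            matched_coords.append(np.hstack([f1[i, :2], f2[best, :2]]))
            matches.append((i, best))
    return np.array(matched_coords), np.array(matches)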
Example #2
File: tp1.py Project: NelleV/ROVAR
def breteuil():
#if __name__ == "__main__":

    mem = Memory(cachedir='.')
    # Use the real photographs; the else branch builds a small synthetic test image
    if 1:
        image1 = mean(imread('breteuil_1.jpg'), 2)[::-1].astype(float)
        image2 = mean(imread('breteuil_2.jpg'), 2)[::-1].astype(float)
        image3 = mean(imread('breteuil_3.jpg'), 2)[::-1].astype(float)

    else:
        image = np.zeros((300, 400))
        image += 30
        np.random.seed(0)
        image += np.random.random(size=image.shape)
        image[125:175, 125:175] = 125
        image[100:150, 150:200] = 200
        #image = ndimage.gaussian_filter(image, 3)
        image1 = image.copy()[50:250, 70:270]
        image2 = image.copy()[50:250, 50:250]


    # FIXME: we assume that image1.shape == image2.shape

    # Detect Harris corners in image1 (cached) and build keypoint frames
    coords1 = mem.cache(detect_harris_detector)(image1, threshold=.995)
    key_points1 = utils.create_frames_from_harris_points(coords1)

    # Detect Harris corners in image2 (cached) and build keypoint frames
    coords2 = mem.cache(detect_harris_detector)(image2, threshold=.995)
    key_points2 = utils.create_frames_from_harris_points(coords2)

    coords3 = mem.cache(detect_harris_detector)(image3, threshold=.995)
    key_points3 = utils.create_frames_from_harris_points(coords3)


    # Rearrange the keypoints to be close
    if 0:
        import hungarian
        dist = euclidean_distances(key_points1.T, key_points2.T)
        ordering = mem.cache(hungarian.hungarian)(dist)[:, 1]
        key_points2 = key_points2[:, ordering]

    # Get sift descriptors
    f1, d1 = mem.cache(vl_sift)(np.array(image1, 'f', order='F'),
                     frames=key_points1,
                     orientations=False)
    f1, d1 = f1.transpose(), d1.transpose()

    # Get sift descriptors
    f2, d2 = mem.cache(vl_sift)(np.array(image2, 'f', order='F'),
                     frames=key_points2,
                     orientations=False)
    f2, d2 = f2.transpose(), d2.transpose()

    f3, d3 = mem.cache(vl_sift)(np.array(image3, 'f', order='F'),
                     frames=key_points3,
                     orientations=False)
    f3, d3 = f3.transpose(), d3.transpose()


    matched_desc, matches_d = match_descriptors(d1, d2, f1, f2)
    matched_desc = np.array(matched_desc)
    matches_d = np.array(matches_d)

    image1 = show_sift_desc(image1, f1)
    image2 = show_sift_desc(image2, f2)
    image3 = show_sift_desc(image3, f3)
    match_image = show_matched_desc(image1, image2, matched_desc)

    points12 = np.array([[386, 143, 157, 136],
                         [327, 261, 87, 255],
                         [738, 196, 489, 209],
                         [584, 246, 364, 251]])
    em12 = stitchLR(image1, image2, points12)

    points32 = np.array([[431, 178, 65, 180],
                         [545, 175, 190, 185],
                         [445, 307, 77, 318],
                         [720, 258, 347, 268],
                         [543, 206, 188, 213],
                         [740, 248, 367, 256]])

    # Reverse each correspondence in points32 by swapping the two halves of every row
    points23 = points32.copy()
    points23[:, :2] = points32[:, 2:]
    points23[:, 2:] = points32[:, :2]
    em23 = stitchRL(image2, image3, points23)
    em = np.zeros((image2.shape[0], image2.shape[1] + 1000))
    em[:, :image2.shape[1] + 500] = em12
    em[:, 500:] = em23
    
    imsave("breteuil.png", em)
    return em
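The block guarded by `if 0:` in both functions relies on a project-local hungarian module. For reference, the same keypoint reordering can be written with SciPy's linear_sum_assignment, which solves the same assignment problem; this is only a sketch of the disabled step (and assumes euclidean_distances is scikit-learn's), not the project's implementation.

import numpy as np
from scipy.optimize import linear_sum_assignment
from sklearn.metrics.pairwise import euclidean_distances

def reorder_keypoints(key_points1, key_points2):
    # Keypoints are stored one per column; compute all pairwise distances
    dist = euclidean_distances(key_points1.T, key_points2.T)
    # Minimum-cost one-to-one assignment (Hungarian algorithm)
    row_ind, col_ind = linear_sum_assignment(dist)
    # Reorder key_points2 so column i is the point assigned to column i of key_points1
    return key_points2[:, col_ind]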
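stitchLR and stitchRL are likewise project helpers that take a pair of images plus hand-picked correspondences such as points12. Stitching from correspondences typically starts by estimating a homography between the two views; a minimal DLT (direct linear transform) sketch follows, under the assumption that each row of the points array holds (x1, y1, x2, y2), which the snippets above do not confirm.

import numpy as np

def estimate_homography(points):
    # Build the DLT system: each correspondence (x1, y1) -> (x2, y2)
    # contributes two rows to A, and H is the null vector of A.
    A = []
    for x1, y1, x2, y2 in np.asarray(points, dtype=float):
        A.append([-x1, -y1, -1, 0, 0, 0, x2 * x1, x2 * y1, x2])
        A.append([0, 0, 0, -x1, -y1, -1, y2 * x1, y2 * y1, y2])
    _, _, vt = np.linalg.svd(np.asarray(A))
    H = vt[-1].reshape(3, 3)
    # Normalise so the bottom-right entry is 1
    return H / H[2, 2]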