Example 1
def test_match_keypoints_brief_lena_rotation():
    """Verify matched keypoints result between lena image and its rotated
    version with the expected keypoint pairs."""
    img = data.lena()
    img = rgb2gray(img)
    tform = tf.SimilarityTransform(scale=1, rotation=0.10, translation=(0, 0))
    rotated_img = tf.warp(img, tform)

    keypoints1 = corner_peaks(corner_harris(img), min_distance=5)
    descriptors1, keypoints1 = brief(img, keypoints1, descriptor_size=512)

    keypoints2 = corner_peaks(corner_harris(rotated_img), min_distance=5)
    descriptors2, keypoints2 = brief(rotated_img,
                                     keypoints2,
                                     descriptor_size=512)

    matched_keypoints = match_keypoints_brief(keypoints1,
                                              descriptors1,
                                              keypoints2,
                                              descriptors2,
                                              threshold=0.07)

    expected = np.array([[[263, 272], [234, 298]], [[271, 120], [258, 146]],
                         [[323, 164], [305, 195]], [[414, 70], [405, 111]],
                         [[435, 181], [415, 223]], [[454, 176], [435, 221]]])

    assert_array_equal(matched_keypoints, expected)
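
The threshold of 0.07 above is the maximum allowed normalized Hamming distance, i.e. the fraction of bits allowed to differ between two 512-bit BRIEF descriptors. Below is a minimal sketch of this matching strategy, assuming boolean descriptor arrays of shape (n, 512); the helper is illustrative, not the library implementation:

import numpy as np

def match_brief_sketch(keypoints1, descriptors1, keypoints2, descriptors2,
                       threshold=0.07):
    # Normalized Hamming distance: fraction of differing bits for every
    # pair of descriptors (result shape: n1 x n2).
    distances = (descriptors1[:, None, :] != descriptors2[None, :, :]).mean(axis=2)
    matches = []
    for i in range(distances.shape[0]):
        j = np.argmin(distances[i])          # nearest neighbour in image 2
        if distances[i, j] < threshold:      # keep only sufficiently close pairs
            matches.append([keypoints1[i], keypoints2[j]])
    return np.array(matches)                 # shape: (n_matches, 2, 2)
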
Example 2
def test_match_keypoints_brief_lena_translation():
    """Test matched keypoints between lena image and its translated version."""
    img = data.lena()
    img = rgb2gray(img)
    tform = tf.SimilarityTransform(scale=1, rotation=0, translation=(15, 20))
    translated_img = tf.warp(img, tform)

    keypoints1 = corner_peaks(corner_harris(img), min_distance=5)
    descriptors1, keypoints1 = brief(img, keypoints1, descriptor_size=512)

    keypoints2 = corner_peaks(corner_harris(translated_img), min_distance=5)
    descriptors2, keypoints2 = brief(translated_img, keypoints2,
                                     descriptor_size=512)

    matched_keypoints = match_keypoints_brief(keypoints1, descriptors1,
                                              keypoints2, descriptors2,
                                              threshold=0.10)

    assert_array_equal(matched_keypoints[:, 0, :], matched_keypoints[:, 1, :] +
                       [20, 15])
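
The offset of [20, 15] in the assertion follows from the coordinate conventions: the translation (15, 20) is given in (x, y) order, keypoints are in (row, col) order, and tf.warp treats the given transform as the map from output to input coordinates. A quick, illustrative check of that convention:

import skimage.transform as tf

tform = tf.SimilarityTransform(scale=1, rotation=0, translation=(15, 20))
# A pixel at (x, y) = (100, 50) in the warped image samples the original at
# (115, 70), so a keypoint at original (row, col) = (70, 115) reappears at
# (50, 100) in the warped image: original = warped + [20, 15].
print(tform([[100, 50]]))   # expected: [[115.  70.]]
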
# Setup reconstructed for completeness (the source snippet starts mid-script).
# The import paths assume the scikit-image development branch that provided
# brief / match_keypoints_brief / pairwise_hamming_distance; the transform
# parameters below are arbitrary choices, not taken from the source.
import numpy as np
import matplotlib.pyplot as plt
from skimage import data, img_as_float
from skimage.color import rgb2gray
import skimage.transform as tf
from skimage.feature import (corner_harris, corner_peaks, brief,
                             match_keypoints_brief, pairwise_hamming_distance)

img_color = data.lena()
tform = tf.SimilarityTransform(scale=1, rotation=0.2, translation=(0, -100))

transformed_img_color = tf.warp(img_color, tform)
img = rgb2gray(img_color)
transformed_img = rgb2gray(transformed_img_color)

# Extracting keypoints using Harris corner response and describing them
# using BRIEF for both the images
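# (Note: brief returns the descriptors together with the subset of keypoints
# it actually described; keypoints too close to the image border for the
# sampling pattern are dropped, which is why the keypoint arrays are
# reassigned alongside the descriptors.)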
keypoints1 = corner_peaks(corner_harris(img), min_distance=5)
descriptors1, keypoints1 = brief(img, keypoints1, descriptor_size=512)

keypoints2 = corner_peaks(corner_harris(transformed_img), min_distance=5)
descriptors2, keypoints2 = brief(transformed_img, keypoints2, descriptor_size=512)

# Matching the BRIEF-described keypoints between the two images, using the
# normalized Hamming distance as the dissimilarity measure
distances = pairwise_hamming_distance(descriptors1, descriptors2)  # full distance matrix, for reference
matched_keypoints = match_keypoints_brief(keypoints1, descriptors1,
                                          keypoints2, descriptors2,
                                          threshold=0.15)

print "Pairs of matched keypoints :\n"
print matched_keypoints

# Plotting the matched correspondences in both the images using matplotlib
src = matched_keypoints[:, 0, :]
dst = matched_keypoints[:, 1, :]

img_combined = np.concatenate((img_as_float(img_color), transformed_img_color), axis=1)
offset = img.shape

fig, ax = plt.subplots(nrows=1, ncols=1)
plt.gray()

ax.imshow(img_combined, interpolation='nearest')
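
# The source snippet ends here; what follows is a plausible completion (an
# assumption, not from the source) that draws a line for each matched pair,
# shifting the second image's keypoints right by the width of the first
# image (offset[1] columns).
for (r1, c1), (r2, c2) in zip(src, dst):
    ax.plot((c1, c2 + offset[1]), (r1, r2), '-r')
ax.axis('off')
plt.show()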