forked from ALavault/Sequence-Test
/
featurepoint_matching.py
137 lines (113 loc) · 3.85 KB
/
featurepoint_matching.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 20 10:19:31 2017
@author: viper
Description : Implementation of an estimation of homogeneous matrix between 2 images
"""
from __future__ import print_function

# Select the non-interactive 'pgf' backend BEFORE pyplot is imported:
# matplotlib.use() only reliably takes effect when called before the
# first pyplot import (especially on the older matplotlib this
# python2 script targets).
import matplotlib
matplotlib.use('pgf')

import numpy as np
from matplotlib import pyplot as plt

from skimage import color
from skimage import data
from skimage import feature
from skimage import filters
from skimage import io
from skimage import segmentation
from skimage import transform
from skimage import util
from skimage.feature import (corner_harris, corner_subpix, corner_peaks,
                             plot_matches)
from skimage.feature import ORB, match_descriptors
from skimage.measure import ransac

import time as t

import regiongrowing as rg  # local project module

plt.close('all')  # start from a clean figure state
def featurePointMatching(image0, image1, decimation = 1, n_keypoints=750):
    """Estimate a rigid (Euclidean) transform between two images.

    ORB keypoints are detected in both images, matched by descriptor,
    and the match set is filtered with RANSAC against a Euclidean
    transform model.

    Parameters
    ----------
    image0, image1 : 2-D grayscale images (reference and moving image).
    decimation : downscale factor applied to both images before
        detection (1 = full resolution).
    n_keypoints : maximum number of ORB keypoints to extract per image.

    Returns
    -------
    model_robust : skimage EuclideanTransform fitted on the inliers.
    inliers : boolean array, True for matches kept by RANSAC.
    outliers : boolean array, logical negation of ``inliers``.
    src, dst : (N, 2) arrays of matched (x, y) coordinates in
        image0 and image1 respectively.
    """
    # Downscale both images by the same factor (float division: the
    # file imports print_function but not division, so be explicit).
    image0 = transform.rescale(image0, 1 / float(decimation))
    image1 = transform.rescale(image1, 1 / float(decimation))

    # One detector instance is reused for both images; keypoints and
    # descriptors are copied out after each detect_and_extract call.
    orb = ORB(n_keypoints=n_keypoints, fast_threshold=0.05)

    orb.detect_and_extract(image0)
    keypoints1 = orb.keypoints
    descriptors1 = orb.descriptors

    orb.detect_and_extract(image1)
    keypoints2 = orb.keypoints
    descriptors2 = orb.descriptors

    # Mutual-nearest-neighbour matching of the binary descriptors.
    matches12 = match_descriptors(descriptors1, descriptors2, cross_check=True)

    # skimage keypoints are (row, col); reverse to (x, y) for the
    # geometric transform estimation.
    src = keypoints1[matches12[:, 0]][:, ::-1]
    dst = keypoints2[matches12[:, 1]][:, ::-1]

    model_robust, inliers = \
        ransac((src, dst), transform.EuclideanTransform,
               min_samples=6, residual_threshold=2)

    # Boolean negation (was `inliers == False`, flagged by PEP 8 E712).
    outliers = ~inliers
    return model_robust, inliers, outliers, src, dst
def getNewMarkers(model_robust, markers):
    """Project marker coordinates through a fitted transform.

    Parameters
    ----------
    model_robust : fitted skimage transform (exposes ``.params``,
        the 3x3 homogeneous matrix).
    markers : (N, 2) array-like of point coordinates.

    Returns
    -------
    (N, 2) array of the transformed coordinates.
    """
    return transform.matrix_transform(markers, model_robust.params)
def getRandomMarkersFromLabels(label):
    """Pick one random pixel from region 1 and one from region 2.

    Parameters
    ----------
    label : 2-D integer array of region labels.

    Returns
    -------
    (2, 2) ndarray ``[[i1, j1], [i2, j2]]`` where ``label[i1, j1] == 1``
    and ``label[i2, j2] == 2``.

    Raises
    ------
    ValueError
        If either region is absent from ``label``.  (The original
        rejection-sampling loop would spin forever in that case; it
        also compared tuples to None with ``==`` and had a dead
        ``else: ()`` branch.)
    """
    markers = []
    for region in (1, 2):
        # All (i, j) coordinates belonging to this region; sampling an
        # index from this list is uniform over the region and cannot
        # loop forever, unlike the original rejection sampling.
        coords = np.argwhere(label == region)
        if len(coords) == 0:
            raise ValueError("no pixel with label %d in the image" % region)
        pick = np.random.randint(len(coords))
        markers.append(tuple(coords[pick]))
    return np.asarray(markers)
"""
Test ....
image0 = io.imread('move1-stokes/S1_0.tiff')
image1 = io.imread('move1-stokes/S1_1.tiff')
plt.imshow(image0, cmap='gray')
markers = plt.ginput(n=2)
markers=np.asarray(markers) # Python list to Numpy array conversion
x, y = markers.T
plt.imshow(image0, cmap='gray')
for i in range(len(markers)):
x_,y_ = markers[i]
markers[i]=[y_,x_]
markers.astype(int)
plt.close('all')
pixT = 2000
regT = 1500
labels = rg.regionGrowing(image0, markers, pixT, regT)
model_robust, inliers, outliers, src, dst = featurePointMatching(image0, image1)
# visualize correspondence
print(model_robust.params)
plt.gray()
fig, ax = plt.subplots(nrows=2, ncols=1)
image1 = transform.warp(image1, model_robust)
labels1 = util.img_as_uint(2**(63-16)*transform.warp(labels, model_robust))
ax[0].imshow(labels)
ax[1].imshow(labels1)
f, axarr = plt.subplots(1, 2)
axarr[0].imshow(segmentation.mark_boundaries(color.label2rgb(labels, image0), labels))
axarr[0].plot(y, x, 'or', ms=3)
axarr[0].set_title('')
axarr[0].axis('off')
markers1 = transform.matrix_transform(markers, model_robust.params)
x, y = markers1.T
axarr[1].imshow(segmentation.mark_boundaries(color.label2rgb(labels1, image1), labels1))
axarr[1].plot(y, x, 'or', ms=3)
axarr[1].set_title('')
axarr[1].axis('off')
"""