def test_corner_orientations_lena():
    """Regression-check corner orientations on the classic Lena image."""
    image = rgb2gray(data.lena())
    detected = corner_peaks(corner_fast(image, 11, 0.35))
    # Reference angles (radians) recorded from a known-good run.
    reference = np.array([-1.9195897, -3.03159624, -1.05991162,
                          -2.89573739, -2.61607644, 2.98660159])
    result = corner_orientations(image, detected, octagon(3, 2))
    assert_almost_equal(result, reference)
def test_corner_orientations_square():
    """The four corners of an axis-aligned square point along the diagonals."""
    img = np.zeros((12, 12))
    img[3:9, 3:9] = 1
    detected = corner_peaks(corner_fast(img, 9), min_distance=1)
    angles = corner_orientations(img, detected, octagon(3, 2))
    degrees = np.rad2deg(angles)
    assert_array_equal(degrees, np.array([45.0, 135.0, -45.0, -135.0]))
def get_hog_features(samples):
    """Compute a per-sample feature array via ``corner_orientations``.

    Parameters
    ----------
    samples : iterable of array-like
        Input samples; each one is converted to an ``np.ndarray`` before
        being passed to ``corner_orientations``.

    Returns
    -------
    np.ndarray
        The stacked per-sample features.

    Notes
    -----
    Despite the name, this collects ``corner_orientations`` output rather
    than true HOG descriptors — NOTE(review): confirm the intended feature.
    """
    # Original code used Python 2 `print` statements, a syntax error on
    # Python 3; converted to the print() function with identical output.
    print('getting hog features...')
    hog_features = np.array([corner_orientations(np.array(sample))
                             for sample in samples])
    print('hog features = ', hog_features.shape)
    return hog_features
def test_corner_orientations_square():
    """Orientations at a square's corners must match the four diagonals."""
    block = np.zeros((12, 12))
    block[3:9, 3:9] = 1
    peaks = corner_peaks(corner_fast(block, 9), min_distance=1)
    orientations = corner_orientations(block, peaks, octagon(3, 2))
    expected = np.array([45.0, 135.0, -45.0, -135.0])
    assert_array_equal(np.rad2deg(orientations), expected)
def test_corner_orientations_square(dtype):
    """Square-corner orientations, parametrised over the input dtype."""
    img = np.zeros((12, 12), dtype=dtype)
    img[3:9, 3:9] = 1
    peaks = corner_peaks(corner_fast(img, 9), min_distance=1,
                         threshold_rel=0)
    angles = corner_orientations(img, peaks, octagon(3, 2))
    # The output float precision must follow the promoted input dtype.
    assert angles.dtype == _supported_float_type(dtype)
    expected = np.array([45, 135, -45, -135])
    assert_array_equal(np.rad2deg(angles), expected)
def test_corner_orientations_astronaut():
    """Regression-check corner orientations on the astronaut image."""
    image = rgb2gray(data.astronaut())
    detected = corner_peaks(corner_fast(image, 11, 0.35))
    # Reference angles (radians) recorded from a known-good run.
    reference = np.array([
        -1.75220190e+00, 2.01197383e+00, -2.01162417e+00, -1.88247204e-01,
        1.19134149e+00, -6.61151410e-01, -2.99143370e+00, 2.17103132e+00,
        -7.52950306e-04, 1.25854853e+00, 2.43573659e+00, -1.69230287e+00,
        -9.88548213e-01, 1.47154532e+00, -1.65449964e+00, 1.09650167e+00,
        1.07812134e+00, -1.68885773e+00, -1.64397304e+00, 3.09780364e+00,
        -3.49561988e-01, -1.46554357e+00, -2.81524886e+00, 8.12701702e-01,
        2.47305654e+00, -1.63869275e+00, 5.46905279e-02, -4.40598471e-01,
        3.14918803e-01, -1.76069982e+00, 3.05330950e+00, 2.39291733e+00,
        -1.22091334e-01, -3.09279990e-01, 1.45931342e+00,
    ])
    result = corner_orientations(image, detected, octagon(3, 2))
    assert_almost_equal(result, reference)
def test_corner_orientations_astronaut():
    """Regression-check orientations on astronaut with explicit peak params."""
    image = rgb2gray(data.astronaut())
    detected = corner_peaks(corner_fast(image, 11, 0.35), min_distance=10,
                            threshold_abs=0, threshold_rel=0.1)
    # Reference angles (radians) recorded from a known-good run.
    reference = np.array([
        -1.75220190e+00, 2.01197383e+00, -2.01162417e+00, -1.88247204e-01,
        1.19134149e+00, -6.61151410e-01, -2.99143370e+00, 2.17103132e+00,
        -7.52950306e-04, 1.25854853e+00, 2.43573659e+00, -1.69230287e+00,
        -9.88548213e-01, 1.47154532e+00, -1.65449964e+00, 1.09650167e+00,
        1.07812134e+00, -1.68885773e+00, -1.64397304e+00, 3.09780364e+00,
        -3.49561988e-01, -1.46554357e+00, -2.81524886e+00, 8.12701702e-01,
        2.47305654e+00, -1.63869275e+00, 5.46905279e-02, -4.40598471e-01,
        3.14918803e-01, -1.76069982e+00, 3.05330950e+00, 2.39291733e+00,
        -1.22091334e-01, -3.09279990e-01, 1.45931342e+00,
    ])
    result = corner_orientations(image, detected, octagon(3, 2))
    assert_almost_equal(result, reference)
def _detect_octave(self, octave_image):
    """Detect FAST keypoints for one octave and score them.

    Returns ``(keypoints, orientations, responses)``; all three are empty
    arrays when no corner is found at this scale.
    """
    response = corner_fast(octave_image, self.fast_n, self.fast_threshold)
    keypoints = corner_peaks(response, min_distance=1)
    # Nothing detected at this scale: return empty results.
    if len(keypoints) == 0:
        return (np.zeros((0, 2), dtype=np.double),
                np.zeros((0, ), dtype=np.double),
                np.zeros((0, ), dtype=np.double))
    # Discard keypoints too close to the border for orientation/descriptor
    # windows to fit.
    border_mask = _mask_border_keypoints(octave_image.shape, keypoints,
                                         distance=16)
    keypoints = keypoints[border_mask]
    orientations = corner_orientations(octave_image, keypoints, OFAST_MASK)
    harris = corner_harris(octave_image, method='k', k=self.harris_k)
    # Harris response sampled at each surviving keypoint location.
    responses = harris[keypoints[:, 0], keypoints[:, 1]]
    return keypoints, orientations, responses
def _detect_octave(self, octave_image):
    """Detect FAST keypoints in ``octave_image`` and score them.

    Returns a tuple ``(keypoints, orientations, responses)``; all three
    are empty arrays when no corner is detected at this scale.
    """
    # Extract keypoints for current octave
    fast_response = corner_fast(octave_image, self.fast_n,
                                self.fast_threshold)
    keypoints = corner_peaks(fast_response, min_distance=1)
    # No corners at this scale: return empty keypoint/orientation/response
    # arrays so callers can concatenate across octaves uniformly.
    if len(keypoints) == 0:
        return (np.zeros(
            (0, 2), dtype=np.double), np.zeros(
            (0, ), dtype=np.double), np.zeros((0, ), dtype=np.double))
    # Drop keypoints within 16 px of the border — presumably so the
    # orientation/descriptor sampling window fits; TODO confirm.
    mask = _mask_border_keypoints(octave_image.shape, keypoints, distance=16)
    keypoints = keypoints[mask]
    orientations = corner_orientations(octave_image, keypoints, OFAST_MASK)
    # Harris corner measure, sampled at each surviving keypoint.
    harris_response = corner_harris(octave_image, method='k', k=self.harris_k)
    responses = harris_response[keypoints[:, 0], keypoints[:, 1]]
    return keypoints, orientations, responses
def test_corner_orientations_even_shape_error():
    """A mask with even dimensions is invalid and must raise ValueError."""
    image = np.zeros((20, 20))
    even_mask = np.ones((4, 4))
    with testing.raises(ValueError):
        corner_orientations(image, np.asarray([[7, 7]]), even_mask)
def test_corner_orientations_image_unsupported_error():
    """A 3-channel image is unsupported and must raise ValueError."""
    color_image = np.zeros((20, 20, 3))
    mask = np.ones((3, 3))
    with testing.raises(ValueError):
        corner_orientations(color_image, np.asarray([[7, 7]]), mask)
def test_corner_orientations_even_shape_error():
    """Even-sized masks have no centre pixel, so a ValueError is expected."""
    blank = np.zeros((20, 20))
    corners = np.asarray([[7, 7]])
    with testing.raises(ValueError):
        corner_orientations(blank, corners, np.ones((4, 4)))
def test_corner_orientations_image_unsupported_error():
    """Multichannel input must be rejected with a ValueError."""
    rgb = np.zeros((20, 20, 3))
    corners = np.asarray([[7, 7]])
    with testing.raises(ValueError):
        corner_orientations(rgb, corners, np.ones((3, 3)))
# cval=0, order=None)#6个三通道的原图大小矩阵 hmd=feature.hessian_matrix_det(imgrey, sigma=1)#原图大小矩阵 # hme=feature.hessian_matrix_eigvals(hmf, Hxy=None, Hyy=None) si=feature.shape_index(imgrey, sigma=1, mode='constant', cval=0)#原图大小矩阵 # ckr=feature.corner_kitchen_rosenfeld(image, mode='constant', cval=0) ##原图大小矩阵 三通道 # ch=feature.corner_harris(imgrey, method='k', k=0.05, eps=1e-06, sigma=1)#原图大小矩阵 # cht=feature.corner_shi_tomasi(imgrey, sigma=1)#原图大小矩阵 # cfs=feature.corner_foerstner(imgrey, sigma=1)#2个 #原图大小矩阵 # csb=feature.corner_subpix(image, ch, window_size=11, alpha=0.99) cps=feature.corner_peaks(imgrey, min_distance=1, threshold_abs=None, threshold_rel=0.1, exclude_border=True, indices=True, footprint=None, labels=None)#一堆坐标值 # cmr=feature.corner_moravec(imgrey, window_size=1)#原图大小矩阵 # cft=feature.corner_fast(imgrey, n=12, threshold=0.15)#原图大小矩阵 corners = feature.corner_peaks(feature.corner_fast(imgrey, 9), min_distance=1)#一堆坐标 corts=feature.corner_orientations(imgrey, corners, octagon(3, 2))#一维矩阵长度不定 # mtem=feature.match_template(image, template, pad_input=False, # mode='constant', constant_values=0) # bldg=feature.blob_dog(imgrey, min_sigma=1, max_sigma=50, # sigma_ratio=1.6, threshold=2.0, overlap=0.5)#不懂 # bldoh=feature.blob_doh(imgrey, min_sigma=1, max_sigma=30, num_sigma=10, # threshold=0.01, overlap=0.5, log_scale=False)#不懂 # bllog=feature.blob_log(imgrey, min_sigma=1, max_sigma=50, num_sigma=10, # threshold=0.2, overlap=0.5, log_scale=False)#不懂 zong.append([imname, greycghg[0,0],greycghg[0,1],greycghg[0,2],greycghg[0,3],greycghg[0,4], greycgcl[0,0],greycgcl[0,1],greycgcl[0,2],greycgcl[0,3],greycgcl[0,4], greycgeg[0,0],greycgeg[0,1],greycgeg[0,2],greycgeg[0,3],greycgeg[0,4], greycgasm[0,0],greycgasm[0,1],greycgasm[0,2],greycgasm[0,3],greycgasm[0,4], greycgctt[0,0],greycgctt[0,1],greycgctt[0,2],greycgctt[0,3],greycgctt[0,4], np.mean(lbp),np.std(lbp),len(plm)/(len(image[:,0,0])*len(image[0,:,0])),