def feature_stats_test():
    """Print per-feature mean and standard deviation for the test dataset.

    Builds the feature matrix from the scatter-test data plus the
    coordinate feature of slice 1_1 (t1), then prints the column-wise
    mean and standard deviation arrays.
    """
    scatter_features, _labels = scatter_data_test(showFigs=False)
    brain_slice = plt.imread('../data/dataset_brains/1_1_t1.tif')
    coord_column, _ = seg.extract_coordinate_feature(brain_slice)
    dataset = np.concatenate((scatter_features, coord_column), axis=1)
    column_means = np.mean(dataset, axis=0)
    column_stds = np.std(dataset, axis=0)
    print(column_means)
    print(column_stds)
def extract_coordinate_feature_test():
    """Display slice 1_1 (t1) next to its coordinate-feature image."""
    image = plt.imread('../data/dataset_brains/1_1_t1.tif')
    _, coordinate_image = seg.extract_coordinate_feature(image)
    figure = plt.figure(figsize=(10, 10))
    left_axis = figure.add_subplot(121)
    left_axis.imshow(image)
    right_axis = figure.add_subplot(122)
    right_axis.imshow(coordinate_image)
def extract_features(image_number, slice_number):
    """Extract per-pixel features from a T1/T2 slice pair.

    Reads [image_number]_[slice_number]_t1.tif and the matching _t2.tif
    and derives, per pixel: raw intensities, Gaussian-smoothed intensities
    (sigma=1), Sobel gradient magnitudes, the absolute T1-T2 intensity
    difference, and the coordinate (distance-to-center) feature.

    Input:
        image_number - Which subject (scalar)
        slice_number - Which slice (scalar)
    Output:
        X        - N x k dataset, where N is the number of pixels and k
                   is the total number of features
        features - tuple of k short descriptions, aligned with X's columns
    """
    base_dir = '../data/dataset_brains/'
    prefix = base_dir + str(image_number) + '_' + str(slice_number)

    t1 = plt.imread(prefix + '_t1.tif')
    t2 = plt.imread(prefix + '_t2.tif')

    def _gradient_magnitude(img):
        # Sobel responses along both axes, combined into one magnitude.
        gx = scipy.ndimage.sobel(img, axis=0, mode='constant')
        gy = scipy.ndimage.sobel(img, axis=1, mode='constant')
        return np.hypot(gx, gy)

    def _as_column(img):
        # One feature column: flattened pixels as an N x 1 float array.
        return img.flatten().astype(float).reshape(-1, 1)

    t1_col = _as_column(t1)
    t2_col = _as_column(t2)
    t1_smooth = _as_column(scipy.ndimage.gaussian_filter(t1, sigma=1))
    t2_smooth = _as_column(scipy.ndimage.gaussian_filter(t2, sigma=1))
    t1_edges = _as_column(_gradient_magnitude(t1))
    t2_edges = _as_column(_gradient_magnitude(t2))
    intensity_diff = np.abs(t1_col - t2_col)
    center_dist, _ = seg.extract_coordinate_feature(t1)

    X = np.concatenate(
        (t1_col, t2_col, t1_smooth, t2_smooth, t1_edges, t2_edges,
         intensity_diff, center_dist),
        axis=1)
    features = ('T1 intensity',
                'T2 intensity',
                'T1 intensity gaussian filter',
                'T2 intensity gaussian filter',
                'T1 intensity sobel filter',
                'T2 intensity sobel filter',
                'abs(T1 - T2)',
                'distance to center')
    return X, features
def feature_stats_test():
    """Print a formatted mean/std report for every feature column."""
    X, Y = scatter_data_test(showFigs=False)
    slice_image = plt.imread('../data/dataset_brains/1_1_t1.tif')
    coord_feature, _ = seg.extract_coordinate_feature(slice_image)
    dataset = np.concatenate((X, coord_feature), axis=1)
    print("=========== Feature Stats Test ===========\n")
    # One report entry per feature column.
    for index, column in enumerate(dataset.T):
        print("Feature ", index + 1,
              "\nMean: \t", round(np.mean(column), 3),
              "\nStd: \t", round(np.std(column), 3), "\n")
    print("=================== END ==================")
def kmeans_clustering_test():
    """Cluster the normalized feature set with k-means and display the
    predicted labels reshaped to the slice geometry.
    """
    # TODO: Store errors for training data
    sample_features, _ = scatter_data_test(showFigs=False)
    image = plt.imread('../data/dataset_brains/1_1_t1.tif')
    distance_column, _ = seg.extract_coordinate_feature(image)
    combined = np.concatenate((sample_features, distance_column), axis=1)
    normalized, _ = seg.normalize_data(combined)
    labels = seg.kmeans_clustering(normalized).reshape(image.shape)
    plt.imshow(labels)
def feature_stats_test():
    """Report the column-wise mean and standard deviation of the dataset."""
    scatter_X, _ = scatter_data_test(showFigs=False)
    t1_slice = plt.imread('../data/dataset_brains/1_1_t1.tif')
    coord_column, _ = seg.extract_coordinate_feature(t1_slice)
    full_data = np.concatenate((scatter_X, coord_column), axis=1)
    # Column-wise statistics: one value per feature.
    print("Mean is" + str(np.mean(full_data, axis=0)))
    print("Std is" + str(np.std(full_data, axis=0)))
def kmeans_clustering_test():
    """Run k-means on the normalized feature set and return its result."""
    # TODO: Store errors for training data
    sample_X, _ = scatter_data_test(showFigs=False)
    t1_image = plt.imread('../data/dataset_brains/1_1_t1.tif')
    center_distance, _ = seg.extract_coordinate_feature(t1_image)
    stacked = np.concatenate((sample_X, center_distance), axis=1)
    scaled, _ = seg.normalize_data(stacked)
    return seg.kmeans_clustering(scaled)
def feature_stats_test():
    """Print the mean and standard deviation of every feature column.

    Builds the dataset from the scatter-test features plus the coordinate
    feature of slice 1_1 (t1), then reports per-column statistics.
    """
    X, Y = scatter_data_test(showFigs=False)
    I = plt.imread('../data/dataset_brains/1_1_t1.tif')
    c, coord_im = seg.extract_coordinate_feature(I)
    X_data = np.concatenate((X, c), axis=1)
    feature_mean = np.mean(X_data, 0)
    feature_std = np.std(X_data, 0)
    # BUG FIX: iterate over the actual number of columns instead of a
    # hard-coded range(6) — the original raised IndexError when the dataset
    # had fewer than 6 features and silently skipped columns when it had more.
    for i in range(X_data.shape[1]):
        print("Feature {} has the following properties. The mean is: {:.2f} and the standard deviation is: {:.2f}".format(i+1,feature_mean.item(i), feature_std.item(i)))
def feature_stats_test():
    """Scatter-plot the first two features and print per-column mean/std."""
    X, Y = scatter_data_test(showFigs=False)
    t1_img = plt.imread('../data/dataset_brains/1_1_t1.tif')
    coord, _ = seg.extract_coordinate_feature(t1_img)
    data = np.concatenate((X, coord), axis=1)
    # Column-wise statistics over all samples (one entry per feature).
    means = np.mean(data, axis=0)
    stds = np.std(data, axis=0)
    util.scatter_data(data, Y, 0, 1)
    print(means)
    print(stds)
def normalized_stats_test():
    """Normalize the feature dataset and print its mean and std."""
    raw_X, _ = scatter_data_test(showFigs=False)
    t1_slice = plt.imread('../data/dataset_brains/1_1_t1.tif')
    coord_column, _ = seg.extract_coordinate_feature(t1_slice)
    combined = np.concatenate((raw_X, coord_column), axis=1)
    # normalize_data returns (normalized_data, parameters); only the
    # normalized data is needed here.
    normalized, _ = seg.normalize_data(combined)
    print("Mean is" + str(np.mean(normalized, axis=0)))
    print("Std is" + str(np.std(normalized, axis=0)))
def normalized_stats_test():
    # NOTE(review): this variant only assembles the combined dataset and then
    # stops — it never normalizes the data or prints any statistics.
    # Presumably an unfinished duplicate of the fuller normalized_stats_test
    # elsewhere in this file; confirm intent before relying on it.
    X, Y = scatter_data_test(showFigs=False)
    I = plt.imread('../data/dataset_brains/1_1_t1.tif')
    c, coord_im = seg.extract_coordinate_feature(I)
    X_data = np.concatenate((X, c), axis=1)
def extract_features(image_number, slice_number):
    """Extract per-pixel features for [image_number]_[slice_number]_t1.tif
    and [image_number]_[slice_number]_t2.tif.

    Input:
        image_number - Which subject (scalar)
        slice_number - Which slice (scalar)
    Output:
        X        - N x k dataset, where N is the number of pixels and k is
                   the total number of features
        features - tuple of k short descriptions, aligned with X's columns
    """
    base_dir = '../data/dataset_brains/'
    t1 = plt.imread(base_dir + str(image_number) + '_' + str(slice_number) + '_t1.tif')
    t2 = plt.imread(base_dir + str(image_number) + '_' + str(slice_number) + '_t2.tif')

    features = ()

    # Features 1-2: raw intensities.
    t1f = t1.flatten().astype(float).reshape(-1, 1)
    t2f = t2.flatten().astype(float).reshape(-1, 1)
    features += ('T1 intensity', )
    features += ('T2 intensity', )

    # Features 3-4: heavily smoothed intensities (Gaussian, sigma=10).
    t1_blur_10 = ndimage.gaussian_filter(t1, sigma=10).flatten().reshape(-1, 1)
    features += ('T1 Intensity (Gaussian)', )
    t2_blur_10 = ndimage.gaussian_filter(t2, sigma=10).flatten().reshape(-1, 1)
    features += ('T2 Intensity (Gaussian)', )

    # Features 5-6: median-filtered intensities (project helper).
    t1f_med = pro.create_my_feature(t1).flatten().reshape(-1, 1)
    features += ('T1 Intensity (Median)', )
    t2f_med = pro.create_my_feature(t2).flatten().reshape(-1, 1)
    features += ('T2 Intensity (Median)', )

    # Feature 7: absolute T1-T2 intensity difference.
    # BUG FIX: the original computed np.sqrt((t1 - t2) ^ 2). '^' is bitwise
    # XOR in Python (not exponentiation), and subtracting unsigned integer
    # images wraps around — both made the feature meaningless. Cast to float
    # first and take the absolute difference.
    t_diff = np.abs(t1.astype(float) - t2.astype(float))
    tf_diff = t_diff.flatten().reshape(-1, 1)
    features += ('T1-T2 difference', )

    # Feature 8: coordinate (distance-to-center) feature.
    c, c_im = seg.extract_coordinate_feature(t1)
    features += ('Distance to Center', )

    X = np.concatenate(
        (t1f, t2f, t1_blur_10, t2_blur_10, t1f_med, t2f_med, tf_diff, c),
        axis=1)
    return X, features
# NOTE(review): orphaned statement fragment — it has no enclosing `def` in
# view and references `t1` defined elsewhere. It duplicates part of the
# extract_myfeatures body below; confirm whether it should be deleted or
# belongs inside a function.
# Gaussian-smoothed intensities at two scales, reshaped to feature columns.
t1_blurred_1 = ndimage.gaussian_filter(t1, sigma=3)
t1_1 = t1_blurred_1.flatten().T
t1_1 = t1_1.reshape(-1, 1)
t1_blurred_2 = ndimage.gaussian_filter(t1, sigma=8)
t1_2 = t1_blurred_2.flatten().T
t1_2 = t1_2.reshape(-1, 1)
# Edge response: Laplacian of the raw image.
t1_LaPlacian = cv2.Laplacian(t1, cv2.CV_64F)
t1_lapl = t1_LaPlacian.flatten().T
t1_lapl = t1_lapl.reshape(-1, 1)
# Coordinate feature (distance to image center).
t1_coord, _ = seg.extract_coordinate_feature(t1)
# Median filter: removes noise while preserving edges.
median = cv2.medianBlur(t1, 5)
t1_median = median.flatten().flatten().T
t1_median = t1_median.reshape(-1, 1)
# Morphological opening with a cross-shaped structuring element.
kernel1 = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]], np.uint8)  #np.ones((3,3), np.uint8)
opening = cv2.morphologyEx(t1, cv2.MORPH_OPEN, kernel1)
t1_opening = opening.flatten().flatten().T
t1_opening = t1_opening.reshape(-1, 1)
# Structuring element for a closing (fragment ends before it is used).
kernel2 = np.ones((4, 4), np.uint8)
def extract_myfeatures(image_number, slice_number):
    """Extract per-pixel features from [image_number]_[slice_number]_t1.tif.

    Input:
        image_number - Which subject (scalar)
        slice_number - Which slice (scalar)
    Output:
        X        - N x k dataset, where N is the number of pixels and k is
                   the total number of features
        features - tuple of k short descriptions, ordered to match the
                   columns of X
    """
    base_dir = '../data/dataset_brains/'
    t1 = plt.imread(base_dir + str(image_number) + '_' + str(slice_number) + '_t1.tif')

    # Raw intensity.
    t1f = t1.flatten().astype(float).reshape(-1, 1)

    # Gaussian-smoothed intensities at two scales.
    t1_1 = ndimage.gaussian_filter(t1, sigma=3).flatten().reshape(-1, 1)
    t1_2 = ndimage.gaussian_filter(t1, sigma=8).flatten().reshape(-1, 1)

    # Edge response: Laplacian of the raw image.
    t1_lapl = cv2.Laplacian(t1, cv2.CV_64F).flatten().reshape(-1, 1)

    # Coordinate feature (distance to image center).
    t1_coord, _ = seg.extract_coordinate_feature(t1)

    # Median filter: removes noise while preserving edges.
    t1_median = cv2.medianBlur(t1, 5).flatten().reshape(-1, 1)

    # Morphological opening with a cross-shaped structuring element.
    kernel1 = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]], np.uint8)
    t1_opening = cv2.morphologyEx(t1, cv2.MORPH_OPEN, kernel1).flatten().reshape(-1, 1)

    # Morphological closing.
    kernel2 = np.ones((4, 4), np.uint8)
    t1_closing = cv2.morphologyEx(t1, cv2.MORPH_CLOSE, kernel2).flatten().reshape(-1, 1)

    # Morphological gradient (difference of dilation and erosion).
    kernel3 = np.ones((3, 3), np.uint8)
    t1_grad = cv2.morphologyEx(t1, cv2.MORPH_GRADIENT, kernel3).flatten().reshape(-1, 1)

    # Histogram-equalized intensity.
    # NOTE(review): the PNG round-trip is kept for backward compatibility; it
    # writes 'equ.png' to disk and, because plt.imread rescales 8-bit PNGs,
    # it also changes this feature's value range — confirm this is intended
    # before simplifying to equ.flatten().
    t1_cv = cv2.imread(base_dir + str(image_number) + '_' + str(slice_number) + '_t1.tif', 0)
    equ = cv2.equalizeHist(t1_cv)
    cv2.imwrite('equ.png', equ)
    t1_equ = plt.imread('equ.png').flatten().reshape(-1, 1)

    # BUG FIX 1: the original concatenated t1_1 twice and never used t1_2,
    # so the sigma=8 Gaussian feature was silently missing.
    # BUG FIX 2: the feature labels were appended in a different order than
    # the columns were concatenated; they are now aligned column-for-column.
    # (The 'gauss 5'/'gauss 15' label texts are kept verbatim even though
    # the filters use sigma=3 and sigma=8.)
    X = np.concatenate((t1f, t1_equ, t1_1, t1_2, t1_coord, t1_median,
                        t1_opening, t1_closing, t1_grad, t1_lapl), axis=1)
    features = ('T1 intensity', )
    features += ("T1 equalized histogram", )
    features += ('T1 gauss 5', )
    features += ('T1 gauss 15', )
    features += ("T1 Coordinate feature", )
    features += ("T1 median", )
    features += ("T1 opening", )
    features += ("T1 closing", )
    features += ("T1 morphological gradient", )
    features += ("T1 Laplacian", )
    return X, features