# Append the remainder to the last batch so every patch is covered.
Num_Each_File[times - 1] = Num_Each_File[times - 1] + res_num

# Run the network batch by batch and stack the softmax probabilities.
start = 0
prob_map = np.zeros((1, n_classes))
for i in range(times):
    feed_x = np.reshape(
        np.asarray(All_data['patch'][start:start + Num_Each_File[i]]),
        (-1, n_input))
    temp = sess.run(softmax_output, feed_dict={x: feed_x})
    prob_map = np.concatenate((prob_map, temp), axis=0)
    start += Num_Each_File[i]
# Drop the all-zero placeholder row used to start the concatenation.
prob_map = np.delete(prob_map, 0, axis=0)

# MRF regularization of the probability map.
prob_map = compute_prob_map()

# Post-processing using Graph-Cut
Seg_Label, seg_Label, seg_accuracy = Post_Processing(
    prob_map, Height, Width, n_classes, y_test_scalar, TestIndex)
print('The shape of prob_map is (%d,%d)' % (prob_map.shape[0], prob_map.shape[1]))

# Save the probability map and the train/test indexes as .mat files.
DATA_PATH = os.getcwd()
file_name = 'prob_map.mat'
prob = {}
prob['prob_map'] = prob_map
scipy.io.savemat(os.path.join(DATA_PATH, file_name), prob)

train_ind = {}
train_ind['TrainIndex'] = TrainIndex
scipy.io.savemat(os.path.join(DATA_PATH, 'TrainIndex.mat'), train_ind)

test_ind = {}
test_ind['TestIndex'] = TestIndex
scipy.io.savemat(os.path.join(DATA_PATH, 'TestIndex.mat'), test_ind)
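# Not part of the original pipeline: a quick, optional sanity check that the
# .mat files written above round-trip through scipy.io. The key names mirror
# the dicts saved above; loadmat returns 2-D arrays, hence the ravel() calls.
# (Imports are repeated here only so the snippet stands on its own.)
import os
import numpy as np
import scipy.io

saved = scipy.io.loadmat(os.path.join(DATA_PATH, 'prob_map.mat'))
assert np.allclose(saved['prob_map'], prob_map)
assert np.array_equal(
    scipy.io.loadmat(os.path.join(DATA_PATH, 'TrainIndex.mat'))['TrainIndex'].ravel(),
    np.asarray(TrainIndex).ravel())
assert np.array_equal(
    scipy.io.loadmat(os.path.join(DATA_PATH, 'TestIndex.mat'))['TestIndex'].ravel(),
    np.asarray(TestIndex).ravel())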
# Reshape the flat test map back to the spatial layout of the ground truth.
test_map = test_map.reshape(
    GT_Label.shape[1], GT_Label.shape[0]).transpose(1, 0).astype(int)

# Save the train/test indexes as .mat files.
DATA_PATH = os.getcwd()
train_ind = {}
train_ind['train_indexes'] = train_indexes
scipy.io.savemat(os.path.join(DATA_PATH, 'train_indexes.mat'), train_ind)

test_ind = {}
test_ind['test_indexes'] = test_indexes
scipy.io.savemat(os.path.join(DATA_PATH, 'test_indexes.mat'), test_ind)

## Data Summary
df = data_summary(y_train, y, num_classes)
print('----------------------------------')
print('Data Summary:')
print(df)
print('----------------------------------')
print("Training samples: %d" % len(y_train))
print("Test samples: %d" % len(y_test))
print('----------------------------------')

# Load the saved probability map (samples x classes after the transpose).
DATA_PATH = os.path.join(os.getcwd(), "datasets")
prob_map = scipy.io.loadmat(os.path.join(DATA_PATH, 'p.mat'))['p']
prob_map = np.transpose(prob_map)

# Post-processing using Graph-Cut
Seg_Label, seg_accuracy = Post_Processing(prob_map, height, width,
                                          num_classes, y_test, test_indexes)
print(seg_accuracy)
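# data_summary() is defined elsewhere in the repo. As an illustration only,
# here is a minimal sketch of the kind of per-class table it might build,
# assuming it returns a pandas DataFrame (the original prints it as df) and
# that labels in y_train / y are integers 1..num_classes. data_summary_sketch
# is a hypothetical name, not the author's implementation.
import numpy as np
import pandas as pd

def data_summary_sketch(y_train, y, num_classes):
    rows = []
    for c in range(1, num_classes + 1):
        rows.append({'Class': c,
                     'Train': int(np.sum(np.asarray(y_train) == c)),
                     'Total': int(np.sum(np.asarray(y) == c))})
    return pd.DataFrame(rows, columns=['Class', 'Train', 'Total'])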
# Scale features to [0, 1]; the scaler is fit on the training set only.
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)
data_all_scaled = scaler.transform(data_all)

## Classifiers
# KNN
from sklearn.neighbors import KNeighborsClassifier
start_time = time.time()
KNN = KNeighborsClassifier(n_neighbors=7).fit(X_train_scaled, y_train)
KNN_Label = KNN.predict(data_all_scaled).reshape(
    width, height).astype(int).transpose(1, 0)
KNN_predict_prob = KNN.predict_proba(data_all_scaled)

# Post-processing using Graph-Cut
Seg_Label, seg_accuracy = Post_Processing(KNN_predict_prob, height, width,
                                          num_classes, y_test, test_indexes)
print('(KNN) Train_Acc=%.3f, Cla_Acc=%.3f, Seg_Acc=%.3f(Time_cost=%.3f)'
      % (KNN.score(X_train_scaled, y_train), KNN.score(X_test_scaled, y_test),
         seg_accuracy, (time.time() - start_time)))

# draw classification map
draw(GT_Label, KNN_Label, Seg_Label, train_map, test_map)
print('--------------------------------------------------------------------')

# Naive Bayes: GaussianNB
from sklearn.naive_bayes import GaussianNB
start_time = time.time()
GaussNB = GaussianNB().fit(X_train, y_train)
GaussNB_Label = GaussNB.predict(data_all).reshape(
    width, height).astype(int).transpose(1, 0)
GaussNB_predict_prob = GaussNB.predict_proba(data_all)

# Post-processing using Graph-Cut