    plt.plot(train_sizes, test_scores_mean, 'o-', color="g",
             label="Cross-validation score")
    plt.legend(loc="best")
    if savefig is None:
        savefig = title
    plt.savefig('./images resul/' + savefig + '.png', dpi=600)
    plt.show()


if __name__ == '__main__':
    # get the data
    data = HandleData(oneHotFlag=False)
    X, y = data.get_synthatic_data()
    print('data_sizes:', X.shape)
    print('label_sizes:', len(y))

    ###################################### SVC ######################################
    estimator = SVC()
    # Set the parameters by cross-validation
    tuned_parameters = [{
        'kernel': ['rbf', 'poly', 'sigmoid'],
        'gamma': np.logspace(-6, 10, 5),
        'C': [1, 10, 100, 1000, 10000]
    }, {
        'kernel': ['linear'],
        'C': [1, 10, 100, 1000, 10000]
    }]
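    # --- Hedged sketch (assumption): the grid-search call that followed is
    # truncated above. With scikit-learn, `tuned_parameters` would typically
    # be consumed like this; cv and scoring are illustrative choices, not the
    # repository's actual settings.
    from sklearn.model_selection import GridSearchCV
    clf = GridSearchCV(estimator, tuned_parameters, cv=5, scoring='accuracy')
    clf.fit(X, y)
    print('best params:', clf.best_params_)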
def train_DOA():
    from get_csv_data import HandleData
    import csv

    ################ Training data ############
    data = HandleData(total_data=880, data_per_angle=110, num_angles=8)
    antenna_data, label_data = data.get_synthatic_data(test_data=False)
    antenna_data_mean = np.mean(antenna_data, axis=0)
    ###########################################

    ################ Learning parameters ######
    learning_rate = 0.001
    batch_size = 20
    n_epochs = 1000
    ###########################################

    ################ AutoEncoder ##############
    ae = autoencoder(dimensions=[4, 200])
    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(ae['cost'])
    ###########################################

    ################ Training #################
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver()

    ########### restore ###########
    # saver_restore = tf.train.import_meta_graph('./DAE_save/DenoisingAE_save_noise_add.meta')
    # saver_restore = tf.train.import_meta_graph('DenoisingAE_save_noise_multiply.meta')
    # saver_restore.restore(sess, tf.train.latest_checkpoint('./DAE_save/'))
    ###############################

    train = 0
    for epoch_i in range(n_epochs):
        for batch_i in range(data.total_data // batch_size):
            batch_xs, _ = data.next_batch(batch_size)
            # mean-centre each batch before feeding it to the autoencoder
            train = np.array([img - antenna_data_mean for img in batch_xs])
            # print(train.shape)
            sess.run(optimizer,
                     feed_dict={ae['x']: train, ae['corrupt_prob']: [1.0]})
        print(epoch_i,
              sess.run([ae['cost'], ae['kl']],
                       feed_dict={ae['x']: train, ae['corrupt_prob']: [1.0]}))
        ##### debug kl ######
        # tmp = sess.run(ae['encoder_out'], feed_dict={ae['x']: train, ae['corrupt_prob']: [1.0]})
        # p_hat = tf.reduce_mean(tmp, 0)
        # p = np.repeat([-0.05], 200).astype(np.float32)
        # dummy = np.repeat([1], 200).astype(np.float32)
        # p_hat = p_hat + dummy
        # p = p + dummy
        # kl_tmp = p * tf.log(tf.abs(p)) - p * tf.log(tf.abs(p_hat)) + (1 - p) * tf.log(p - 1) - (1 - p) * tf.log(p_hat - 1)
        # print(sess.run(p_hat))
        #####################
    ###########################################
    saver.save(sess, './DAE_save/DenoisingAE_save_noise_add')

    ################ Test data ################
    data_test = HandleData(total_data=80, data_per_angle=10, num_angles=8)
    antenna_data_test, label_data_test = data_test.get_synthatic_data(
        test_data=True)
    antenna_data_test_mean = np.mean(antenna_data_test, axis=0)
    ###########################################

    ################ Testing trained data #####
    # reconstruct the training data, centred with the training-set mean
    test_xs_norm = np.array([img - antenna_data_mean for img in antenna_data])
    a, b, output_y = sess.run(
        [ae['cost'], ae['noise_input'], ae['reconstruction']],
        feed_dict={ae['x']: test_xs_norm, ae['corrupt_prob']: [1.0]})
    print("Testing trained data average cost : ", a)
    ###########################################

    ################ Testing ##################
    test_xs, _ = data_test.next_batch(80)
    test_xs_norm = np.array([img - antenna_data_test_mean for img in test_xs])
    a, b, output_y = sess.run(
        [ae['cost'], ae['noise_input'], ae['reconstruction']],
        feed_dict={ae['x']: test_xs_norm, ae['corrupt_prob']: [1.0]})
    print("average cost : ", a)
    for i in range(len(output_y)):
        comp = output_y[i]
        orgi = test_xs[i]
        noise = b[i]
        comp += antenna_data_test_mean
        noise += antenna_data_test_mean
        plt.subplot(8, 10, i + 1)
        plt.plot(comp, color='blue', label='rcon')
        plt.plot(orgi, color='green', label='orgi')
        plt.plot(noise, color='red', label='noise')
        plt.xticks(())
        plt.yticks(())
    plt.subplots_adjust(0.08, 0.02, 0.92, 0.85, 0.08, 0.23)
    plt.legend(loc='upper left')
    plt.show()
    print("difference between noise and original :")
    print(b - test_xs_norm)
    #############################################

    ################ Noisy test data ##########
    data_test_noise = HandleData(total_data=120, data_per_angle=120,
                                 num_angles=8)
    antenna_data_test, label_data_test = data_test_noise.get_synthatic_data(
        test_data=-1)
    antenna_data_test_mean = np.mean(antenna_data_test, axis=0)
    ###########################################

    ################ Testing ##################
    test_xs, _ = data_test_noise.next_batch(120)
    test_xs_norm = np.array([img - antenna_data_test_mean for img in test_xs])
    a, b, output_y = sess.run(
        [ae['cost'], ae['noise_input'], ae['reconstruction']],
        feed_dict={ae['x']: test_xs_norm, ae['corrupt_prob']: [1.0]})
    print("average cost : ", a)
    for i in range(len(output_y)):
        comp = output_y[i]
        orgi = test_xs[i]
        noise = b[i]
        comp += antenna_data_test_mean
        noise += antenna_data_test_mean
        plt.subplot(10, 12, i + 1)
        plt.plot(comp, color='blue', label='rcon')
        plt.plot(orgi, color='green', label='orgi')
        plt.plot(noise, color='red', label='noise')
        plt.xticks(())
        plt.yticks(())
    plt.subplots_adjust(0.08, 0.02, 0.92, 0.85, 0.08, 0.23)
    plt.show()
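
# --- Hedged sketch (assumption): the `autoencoder` factory used by
# train_DOA() is defined elsewhere in the repository. A minimal TF1-style
# version returning the dict keys the code above relies on ('x',
# 'corrupt_prob', 'noise_input', 'reconstruction', 'cost', 'kl') could look
# like this; the activation, noise model and KL weight are illustrative,
# not the repository's actual implementation.
def autoencoder_sketch(dimensions=[4, 200], sparsity_target=0.05,
                       kl_weight=0.01):
    x = tf.placeholder(tf.float32, [None, dimensions[0]], name='x')
    corrupt_prob = tf.placeholder(tf.float32, [1])
    # additive uniform noise, blended in by corrupt_prob (1.0 = fully noisy)
    noise = tf.random_uniform(tf.shape(x), minval=0, maxval=0.1)
    noise_input = x + noise * corrupt_prob
    # single hidden layer encoder with a tied-weight decoder
    W = tf.Variable(tf.random_uniform(dimensions, -0.1, 0.1))
    b_enc = tf.Variable(tf.zeros([dimensions[1]]))
    b_dec = tf.Variable(tf.zeros([dimensions[0]]))
    encoder_out = tf.nn.sigmoid(tf.matmul(noise_input, W) + b_enc)
    reconstruction = tf.matmul(encoder_out, tf.transpose(W)) + b_dec
    # KL(p || p_hat) sparsity penalty on the mean hidden activation
    p = sparsity_target
    p_hat = tf.reduce_mean(encoder_out, 0)
    kl = tf.reduce_sum(p * tf.log(p / p_hat) +
                       (1 - p) * tf.log((1 - p) / (1 - p_hat)))
    cost = tf.reduce_mean(tf.square(reconstruction - x)) + kl_weight * kl
    return {'x': x, 'corrupt_prob': corrupt_prob, 'noise_input': noise_input,
            'encoder_out': encoder_out, 'reconstruction': reconstruction,
            'cost': cost, 'kl': kl}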
from __future__ import print_function

from get_csv_data import HandleData
import numpy as np
import tensorflow as tf

data = HandleData(total_data=880, data_per_angle=110, num_angles=8)
antenna_data, label_data = data.get_synthatic_data(test_data=False)


def get_predicted_angle(pred_class):
    # classes are spaced 45 degrees apart (8 angles over 360 degrees)
    return "angle = " + str(pred_class * 45)


def corrupt(x):
    # add element-wise uniform noise in [0, 0.1) to the input
    r = tf.add(x,
               tf.cast(tf.random_uniform(shape=tf.shape(x),
                                         minval=0,
                                         maxval=0.1,
                                         dtype=tf.float32), tf.float32))
    # multiplicative variant:
    # r = tf.multiply(x, tf.cast(tf.random_uniform(shape=tf.shape(x), minval=0, maxval=0.1, dtype=tf.float32), tf.float32))
    return r


# Parameters
learning_rate = 0.0001
training_epochs = 1000
batch_size = 5
display_step = 1

# Network Parameters
                ae['corrupt_prob']: [1.0]
            })
        print("DAE average cost : ", a)
        return_list.append(output_y)
    tf.reset_default_graph()
    return return_list
    ###########################################


if __name__ == '__main__':
    test_percentage = 0.2
    seed = 1234

    # instance of the HandleData class
    data = HandleData(oneHotFlag=False)
    # get the data
    antenna_data, label_data = data.get_synthatic_data()
    antenna_data, antenna_data_test, label_data, label_test = train_test_split(
        antenna_data, label_data, test_size=test_percentage, random_state=42)

    # get denoising autoencoder outputs for the train and test data
    DAE_out = getDAE([antenna_data, antenna_data_test], seed)
    antenna_data = DAE_out[0]
    antenna_data_test = DAE_out[1]

    # instantiate a bagging classifier over decision trees
    clf = BaggingClassifier(DecisionTreeClassifier(random_state=42),
                            n_estimators=300, max_samples=250,
                            bootstrap=False, n_jobs=-1, random_state=42)

    # train the model
    tic = time()
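    # --- Hedged continuation (assumption): the fit/evaluation code that
    # followed is truncated above. A typical completion of this block:
    clf.fit(antenna_data, label_data)
    print('training time:', time() - tic)
    print('test accuracy:', clf.score(antenna_data_test, label_test))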
    ###############################

    ################ Testing trained data #####
    return_list = []
    for data in antenna_data:
        antenna_data_mean = np.mean(data, axis=0)
        test_xs_norm = np.array([img - antenna_data_mean for img in data])
        a, b, output_y = sess.run(
            [ae['cost'], ae['noise_input'], ae['y']],
            feed_dict={ae['x']: test_xs_norm, ae['corrupt_prob']: [1.0]})
        print("DAE average cost : ", a)
        return_list.append(output_y)
    return return_list
    ###########################################


from get_csv_data import HandleData

data = HandleData(total_data=880, data_per_angle=110, num_angles=8)
antenna_data, label_data = data.get_synthatic_data(test_data=False)

data_test = HandleData(total_data=80, data_per_angle=10, num_angles=8)
antenna_data_test, label_data_test = data_test.get_synthatic_data(
    test_data=True)

data_test_noise = HandleData(total_data=120, data_per_angle=120, num_angles=8)
antenna_data_test_noise, label_data_test_noise = data_test_noise.get_synthatic_data(
    test_data=-1)

# DAE_out = getDAE([antenna_data, antenna_data_test, antenna_data_test_noise])
DAE_out = [genfromtxt('TrainDOA_Data.csv', delimiter=','),
           genfromtxt('TestDOA_Data.csv', delimiter=','),
           genfromtxt('TestDOA_Noise_Data.csv', delimiter=',')]
data.data_set = DAE_out[0]
antenna_data = DAE_out[0]
antenna_data_test = DAE_out[1]
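
# --- Hedged note (assumption): the three CSVs loaded above would have been
# written out from an earlier getDAE() run, e.g. with numpy:
# DAE_out = getDAE([antenna_data, antenna_data_test, antenna_data_test_noise])
# np.savetxt('TrainDOA_Data.csv', DAE_out[0], delimiter=',')
# np.savetxt('TestDOA_Data.csv', DAE_out[1], delimiter=',')
# np.savetxt('TestDOA_Noise_Data.csv', DAE_out[2], delimiter=',')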
def multilayer_perceptron(x, weights, biases):
    # Hidden layer with ReLU activation
    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'], name="DNN1")
    layer_1 = tf.nn.relu(layer_1, name="DNN2")
    # Hidden layer with ReLU activation
    layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'],
                     name="DNN3")
    layer_2 = tf.nn.relu(layer_2, name="DNN4")
    # Output layer with linear activation
    out_layer = tf.matmul(layer_2, weights['out'], name="DNN5") + biases['out']
    return out_layer


if __name__ == "__main__":
    data = HandleData(total_data=880, data_per_angle=110, num_angles=8)
    antenna_data, label_data = data.get_synthatic_data(test_data=False)
    data_test = HandleData(total_data=80, data_per_angle=10, num_angles=8)
    antenna_data_test, label_data_test = data_test.get_synthatic_data(
        test_data=True)

    # get denoising autoencoder outputs for the train and test data
    DAE_out = getDAE([antenna_data, antenna_data_test])
    data.data_set = DAE_out[0]
    antenna_data = DAE_out[0]
    antenna_data_test = DAE_out[1]
    data_test.data_set = DAE_out[1]
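    # --- Hedged continuation sketch (assumption): the graph construction and
    # training loop that followed are not shown. Wiring multilayer_perceptron
    # into a softmax classifier over the 8 angle classes would typically look
    # like this; the hidden-layer sizes are illustrative.
    n_input, n_hidden_1, n_hidden_2, n_classes = 4, 256, 256, 8
    x = tf.placeholder(tf.float32, [None, n_input])
    y = tf.placeholder(tf.float32, [None, n_classes])
    weights = {
        'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
        'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
        'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes])),
    }
    biases = {
        'b1': tf.Variable(tf.random_normal([n_hidden_1])),
        'b2': tf.Variable(tf.random_normal([n_hidden_2])),
        'out': tf.Variable(tf.random_normal([n_classes])),
    }
    pred = multilayer_perceptron(x, weights, biases)
    cost = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost)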