Example No. 1
0
    # --- Evaluation tail of a DOA classifier training run --------------------
    # NOTE(review): fragment — `accuracy`, `x`, `y`, `sess`, `pred`, `data`,
    # `HandleData` and the training arrays are defined in the enclosing scope
    # that is not visible here.
    # Accuracy on the training data itself.
    print("Accuracy:", accuracy.eval({x: antenna_data, y: label_data}))

    # Accuracy on a clean hold-out set: 10 samples per angle, 8 angles.
    data_test = HandleData(total_data=80, data_per_angle=10, num_angles=8)
    antenna_data_test, label_data_test = data_test.get_synthatic_data(
        test_data=True)
    print("Accuracy:", accuracy.eval({
        x: antenna_data_test,
        y: label_data_test
    }))

    # Accuracy on a third set; test_data=-1 presumably selects a noisy
    # variant — TODO confirm against HandleData.get_synthatic_data.
    # NOTE(review): total_data == data_per_angle (120) while num_angles is 8,
    # which looks inconsistent with the loaders above — verify intended.
    data_test_noise = HandleData(total_data=120,
                                 data_per_angle=120,
                                 num_angles=8)
    antenna_data_test, label_data_test = data_test_noise.get_synthatic_data(
        test_data=-1)
    print("Accuracy:", accuracy.eval({
        x: antenna_data_test,
        y: label_data_test
    }))

    # pred_result = sess.run(tf.argmax(pred, 1), feed_dict={x: np.array([[24, 38, 20, 9]])})
    # print(get_predicted_angle(pred_result[0]))
    # Per-angle breakdown: for each of the 8 angles, run a 110-sample batch
    # through the classifier and print the distribution of predicted angles.
    for i in range(0, 8):
        x_i, y_i = data.next_batch(110)
        pred_result = sess.run(tf.argmax(pred, 1), feed_dict={x: x_i, y: y_i})
        # print('angle = ',i*45 ,' ', collections.Counter(pred_result))
        unique, counts = np.unique(pred_result, return_counts=True)
        unique_angles = unique * 45  # class index -> angle in degrees (45° steps)
        percentage = (counts / 110) * 100  # per-class share of the 110 samples
        print('angle = ', i * 45, ' ', dict(zip(unique_angles, percentage)))
Example No. 2
0
def train_DOA():
    """Train a denoising autoencoder (DAE) on synthetic DOA antenna data.

    Trains on 880 mean-centered samples (110 per angle, 8 angles), saves a
    checkpoint under ./DAE_save/, then reports reconstruction cost on the
    training data, a clean hold-out test set, and a noisy test set, plotting
    original vs. noisy vs. reconstructed curves for the two test sets.

    Relies on module-level ``autoencoder``, ``tf``, ``np`` and ``plt``.

    Side effects: writes a TensorFlow checkpoint and opens matplotlib windows.
    Returns: None.
    """
    from get_csv_data import HandleData

    def _show_reconstructions(originals, reconstructions, noisy, mean,
                              rows, cols, with_legend=False):
        """Plot original/noisy/reconstructed curves in a rows x cols grid."""
        for i in range(len(reconstructions)):
            plt.subplot(rows, cols, i + 1)
            # Add the data mean back so curves are on the original scale.
            plt.plot(reconstructions[i] + mean, color='blue', label='rcon')
            plt.plot(originals[i], color='green', label='orgi')
            plt.plot(noisy[i] + mean, color='red', label='noise')
            plt.xticks(())
            plt.yticks(())
        plt.subplots_adjust(0.08, 0.02, 0.92, 0.85, 0.08, 0.23)
        if with_legend:
            plt.legend(loc='upper left')
        plt.show()

    ################ Training data ############
    data = HandleData(total_data=880, data_per_angle=110, num_angles=8)
    antenna_data, label_data = data.get_synthatic_data(test_data=False)
    antenna_data_mean = np.mean(antenna_data, axis=0)
    ###########################################

    ################ Learning parameters ######
    learning_rate = 0.001
    batch_size = 20
    n_epochs = 1000
    ###########################################

    ################ AutoEncoder ##############
    # 4 input dimensions -> 200 hidden units.
    ae = autoencoder(dimensions=[4, 200])
    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(ae['cost'])
    ###########################################

    ################ Training #################
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    # To resume from a checkpoint instead of training from scratch, use
    # tf.train.import_meta_graph(...) followed by
    # saver.restore(sess, tf.train.latest_checkpoint('./DAE_save/')).
    train = 0  # holds the last mean-centered batch for the epoch cost report
    for epoch_i in range(n_epochs):
        for batch_i in range(data.total_data // batch_size):
            batch_xs, _ = data.next_batch(batch_size)
            # Mean-center each sample before feeding the autoencoder.
            train = np.asarray(batch_xs) - antenna_data_mean
            sess.run(optimizer,
                     feed_dict={
                         ae['x']: train,
                         ae['corrupt_prob']: [1.0]
                     })
        # Report reconstruction cost and KL term on the epoch's last batch.
        print(
            epoch_i,
            sess.run([ae['cost'], ae['kl']],
                     feed_dict={
                         ae['x']: train,
                         ae['corrupt_prob']: [1.0]
                     }))
    ###########################################
    saver.save(sess, './DAE_save/DenoisingAE_save_noise_add')

    ############### Test data #################
    data_test = HandleData(total_data=80, data_per_angle=10, num_angles=8)
    antenna_data_test, label_data_test = data_test.get_synthatic_data(
        test_data=True)
    antenna_data_test_mean = np.mean(antenna_data_test, axis=0)
    ###########################################

    ################ Cost on the training data #####
    # BUGFIX: the training data must be centered with the *training* mean —
    # previously it was (inconsistently) centered with the test-set mean.
    test_xs_norm = np.asarray(antenna_data) - antenna_data_mean
    a, b, output_y = sess.run(
        [ae['cost'], ae['noise_input'], ae['reconstruction']],
        feed_dict={
            ae['x']: test_xs_norm,
            ae['corrupt_prob']: [1.0]
        })
    print("Testing trained data average cost : ", a)
    ###########################################

    ################ Clean test set ###########
    test_xs, _ = data_test.next_batch(80)
    test_xs_norm = np.asarray(test_xs) - antenna_data_test_mean
    a, b, output_y = sess.run(
        [ae['cost'], ae['noise_input'], ae['reconstruction']],
        feed_dict={
            ae['x']: test_xs_norm,
            ae['corrupt_prob']: [1.0]
        })
    print("average cost : ", a)
    # 80 samples -> 8 x 10 grid (one row per angle).
    _show_reconstructions(test_xs, output_y, b, antenna_data_test_mean,
                          rows=8, cols=10, with_legend=True)
    print("difference between noise and original :")
    #############################################

    ################ Noisy test set ###########
    # NOTE(review): total_data == data_per_angle (120) while num_angles is 8,
    # which looks inconsistent with the other loaders — verify intended.
    data_test_noise = HandleData(total_data=120,
                                 data_per_angle=120,
                                 num_angles=8)
    antenna_data_test, label_data_test = data_test_noise.get_synthatic_data(
        test_data=-1)
    antenna_data_test_mean = np.mean(antenna_data_test, axis=0)

    test_xs, _ = data_test_noise.next_batch(120)
    test_xs_norm = np.asarray(test_xs) - antenna_data_test_mean
    a, b, output_y = sess.run(
        [ae['cost'], ae['noise_input'], ae['reconstruction']],
        feed_dict={
            ae['x']: test_xs_norm,
            ae['corrupt_prob']: [1.0]
        })
    print("average cost : ", a)
    # 120 samples -> 10 x 12 grid.
    _show_reconstructions(test_xs, output_y, b, antenna_data_test_mean,
                          rows=10, cols=12)
Example No. 3
0
    # NOTE(review): fragment — `TRAIN`, `training_epochs`, `batch_size`,
    # `display_step`, `optimizer`, `cost`, `x`, `y` and `data` come from the
    # enclosing (not visible) scope; the `with` block continues past this view.
    # Initializing the Graph
    init = tf.global_variables_initializer()
    saver = tf.train.Saver()

    # Launch the graph
    with tf.Session() as sess:
        sess.run(init)

        if TRAIN:
            ############### Training #################
            # Mini-batch training: each epoch sweeps the whole data set once.
            for epoch in range(training_epochs):
                avg_cost = 0.
                total_batch = int(data.total_data / batch_size)
                for i in range(total_batch):
                    batch_x, batch_y = data.next_batch(batch_size)
                    # One optimization step; also fetch the batch cost.
                    _, c = sess.run([optimizer, cost],
                                    feed_dict={
                                        x: batch_x,
                                        y: batch_y
                                    })
                    # Accumulate the running mean of the batch costs.
                    avg_cost += c / total_batch
                # Display logs per epoch step
                if epoch % display_step == 0:
                    print("Epoch:", '%04d' % (epoch + 1), "cost=",
                          "{:.9f}".format(avg_cost))
            print("Optimization Finished!")
            ##########################################

            ########## save ###########
            # Persist the trained weights for later restore/inference.
            saver.save(sess, './DAEandDNN_save/DAEandDNN_save')