# --- Example #1 (scraped-snippet separator; commented out so the file parses) ---
def get_data_all(path_train, path_test, array_max):
    """Load the full training and test sets, each as a single batch.

    Parameters
    ----------
    path_train : path(s) to the training CSV data (forwarded to Dataset_csv).
    path_test  : path(s) to the test CSV data.
    array_max  : per-feature maxima handed to Dataset_csv for normalization
                 (assumed — confirm against Dataset_csv's `max_value` contract).

    Returns
    -------
    tuple : (X_train, X_test, y_train, y_test, n_train, n_test)
    """
    def _load_full(path):
        # Size the minibatch to the whole dataset so one generate_batch()
        # call yields every sample. (Original duplicated this for both
        # paths and reused one variable for two datasets.)
        dataset = Dataset_csv(path_data=path, max_value=array_max)
        dataset.set_minibatch(dataset.total_inputs)
        return dataset.generate_batch()

    X_train, y_train = _load_full(path_train)
    X_test, y_test = _load_full(path_test)

    return X_train, X_test, y_train, y_test, len(y_train), len(y_test)
def get_data_all(path_data, array_max):
    """Read every sample from *path_data* in one shot.

    Returns (X_data, y_data, n_samples), where n_samples == len(y_data).
    """
    dataset = Dataset_csv(path_data=path_data, max_value=array_max)
    # A minibatch as large as the dataset makes a single generate_batch()
    # call return the complete data and label arrays.
    dataset.set_minibatch(dataset.total_inputs)
    X_data, y_data = dataset.generate_batch()
    sample_count = len(y_data)
    return X_data, y_data, sample_count
def get_data_split(path_data, array_max, test_size=0.3, random_state=42):
    """Load the whole dataset and split it into train/test partitions.

    Parameters
    ----------
    path_data    : path(s) to the CSV data (forwarded to Dataset_csv).
    array_max    : per-feature maxima handed to Dataset_csv for normalization.
    test_size    : fraction of samples reserved for the test partition.
    random_state : seed forwarded to train_test_split. Defaults to 42 to
                   preserve the historical deterministic split; now a
                   parameter so callers can vary or disable it.

    Returns
    -------
    tuple : (X_train, X_test, y_train, y_test, n_train, n_test)
    """
    data_all = Dataset_csv(path_data=path_data, max_value=array_max)
    # One minibatch covering the whole file -> a single call returns all rows.
    data_all.set_minibatch(data_all.total_inputs)
    data, label = data_all.generate_batch()

    X_train, X_test, y_train, y_test = model_selection.train_test_split(
        data, label, test_size=test_size, random_state=random_state)
    return X_train, X_test, y_train, y_test, len(y_train), len(y_test)
    data_test = Dataset_csv(path_data=path_data_test,
                            minibatch=mini_batch_test,
                            max_value=Damax,
                            random=False)
    # data_test = Dataset_csv(path_data=path_data_train, minibatch=mini_batch_train, max_value=Damax, random=False)

    with tf.Session() as sess:

        x_batch = tf.placeholder(tf.float32, [None, 4096])
        mask = tf.placeholder(tf.float32, [None, 4096])
        noise_mode = tf.placeholder(tf.bool)

        AEncode = AE.AEncoder(path_load_weight,
                              learning_rate=learning_rate,
                              noise=noise_level)
        AEncode.build(x_batch, mask, noise_mode, [2048, 1024])
        sess.run(tf.global_variables_initializer())

        print('Original Cost: ', test_model(AEncode, sess, data_test))
        train_model(AEncode,
                    sess,
                    data_train,
                    objDatatest=data_test,
                    epoch=epoch)

        # SAVE WEIGHTs
        AEncode.save_npy(sess, path_save_weight)

        # Plot example reconstructions
        plot_result(AEncode, sess, data_train.generate_batch()[0])
# --- Example #5 (scraped-snippet separator; commented out so the file parses) ---
    # Damax = data_normal.amax
    # del data_normal

    # utils.generate_max_csvData([path_data_train_all[0], path_data_test_all[0]], path+'maximo.csv', has_label=True)
    Damax = utils.load_max_csvData(path + 'maximo.csv')

    c = tf.ConfigProto()
    c.gpu_options.visible_device_list = "1,2"

    print('SEARCH SAMPLES')
    print('--------------')

    data = Dataset_csv(path_data=path_data_test_all,
                       minibatch=1,
                       max_value=Damax,
                       restrict=False,
                       random=False)

    with tf.device('/cpu:0'):
        with tf.Session(config=c) as sess:
            aencoder = AE.ae_multiClass(session=sess,
                                        npy_weight_paths=path_weight_ae,
                                        num_class=num_class)
            aencoder.build(dim_input=dim_input, layers=layers)
            for i in range(data.total_batchs_complete):
                x, label = data.generate_batch()

                res = aencoder.search_sample(sample=x)
                data.next_batch_test()
                print(res, label)
                            minibatch=mini_batch_train,
                            max_value=Damax,
                            random=False)

    with tf.Session() as sess:

        x_batch = tf.placeholder(tf.float32, [None, 4096])

        CAEncode = CAE.ConvAEncoder(path_load_weight,
                                    learning_rate=learning_rate)
        CAEncode.build(input_batch=x_batch,
                       n_filters=[1, 10, 10],
                       corruption=False)
        sess.run(tf.global_variables_initializer())

        print('Original Cost: ', test_model(CAEncode, sess, data_test))
        train_model(CAEncode,
                    sess,
                    data_train,
                    objDatatest=data_test,
                    epoch=epoch)

        # SAVE WEIGHTs
        CAEncode.save_npy(sess, path_save_weight)

        # Plot example reconstructions
        plot_result(CAEncode,
                    sess,
                    data_train.generate_batch()[0],
                    dim=(64, 64))