Exemplo n.º 1
0
def main():
    """Pretrain and fine-tune a stacked denoising autoencoder (SDA) on MNIST,
    dump the encoded test set to CSV, then evaluate with a softmax classifier.

    Side effects: writes three CSV files under ../data/ and prints whatever
    the underlying training/eval helpers print.
    """
    sess = tf.Session()
    try:
        sda = SDAutoencoder(dims=[784, 500],
                            activations=["sigmoid"],
                            sess=sess,
                            noise=0.40,
                            loss="cross-entropy")

        # Factory (not a generator): pretraining may need a fresh batch
        # generator per invocation, so pass a callable.
        mnist_train_gen_f = lambda: get_mnist_batch_xs_generator(
            True, batch_size=100, batch_limit=12000)

        sda.pretrain_network_gen(mnist_train_gen_f)
        trained_parameters = sda.finetune_parameters_gen(
            get_mnist_batch_generator(True, batch_size=100, batch_limit=18000),
            output_dim=10)
        transformed_filepath = "../data/mnist_test_transformed.csv"
        test_ys_filepath = "../data/mnist_test_ys.csv"
        output_filepath = "../data/mnist_pred_ys.csv"

        sda.write_encoded_input_with_ys(
            transformed_filepath, test_ys_filepath,
            get_mnist_batch_generator(False, batch_size=100, batch_limit=100))
    finally:
        # Always release the session, even if training/encoding raises,
        # so graph/GPU resources are not leaked.
        sess.close()

    # Evaluation reads the CSVs written above; it does not need the session.
    test_model(parameters_dict=trained_parameters,
               input_dim=sda.output_dim,
               output_dim=10,
               x_test_filepath=transformed_filepath,
               y_test_filepath=test_ys_filepath,
               output_filepath=output_filepath)
Exemplo n.º 2
0
def sca():
    """Pretrain and fine-tune a stacked convolutional denoising autoencoder
    on MNIST, dump the encoded test set to CSV, then evaluate with a softmax
    classifier.

    Side effects: creates `data_storage_path` if missing and writes three CSV
    files inside it.
    """
    sess = tf.Session()
    try:
        # BUG FIX: the original bound the model to a local named `sca`
        # (shadowing this function) but later referenced an undefined
        # name `sda`, which raised NameError at runtime. Use one clear
        # local name throughout.
        model = SCDAutoencoder(dims=[784, 400, 200, 80],
                               activations=["relu", "relu", "relu"],
                               sess=sess,
                               noise=0.20,
                               loss="cross-entropy",
                               pretrain_lr=0.0001,
                               finetune_lr=0.0001)

        # Factory (not a generator): pretraining may need a fresh batch
        # generator per layer, so pass a callable.
        mnist_train_gen_f = lambda: get_mnist_batch_xs_generator(
            True, batch_size=100, batch_limit=12000)

        # pretrain locally, layer by layer
        model.pretrain_network_gen(mnist_train_gen_f)

        # fine-tune the model by training all the weights
        trained_parameters = model.finetune_parameters_gen(
            get_mnist_batch_generator(True, batch_size=100, batch_limit=18000),
            output_dim=10)

        main_dir = data_storage_path
        if not os.path.exists(main_dir):
            os.makedirs(main_dir)
        transformed_filepath = main_dir + "/mnist_test_transformed.csv"
        test_ys_filepath = main_dir + "/mnist_test_ys.csv"
        output_filepath = main_dir + "/mnist_pred_ys.csv"

        # for testing: write the encoded x values along with y values to csv
        model.write_encoded_input_with_ys(
            transformed_filepath, test_ys_filepath,
            get_mnist_batch_generator(False, batch_size=100, batch_limit=100))
    finally:
        # Always release the session, even on failure, so graph/GPU
        # resources are not leaked.
        sess.close()

    # Evaluation reads the CSVs written above; it does not need the session.
    test_model(parameters_dict=trained_parameters,
               input_dim=model.output_dim,
               output_dim=10,
               x_test_filepath=transformed_filepath,
               y_test_filepath=test_ys_filepath,
               output_filepath=output_filepath)
Exemplo n.º 3
0
from softmax import test_model, openData

OUTPUT_PATH = "../data/deephand/pred.csv"
TRANSFORMED_PATH = "../data/deephand/transformed.csv"

X_TRAIN_PATH = "../data/trainX.csv"
Y_TRAIN_PATH = "../data/trainY.csv"
X_TEST_PATH = "../data/testX.csv"
Y_TEST_PATH = "../data/testY.csv"
BIASES_PATH = "../data/biases.csv"
WEIGHT_PATH = "../data/weights.csv"

# Cells come back from openData as quoted strings (e.g. '"0.12"'); strip the
# surrounding quotes and convert to float before building the parameter dict.
# (Removed leftover debug prints of cell types.)
biases = openData(BIASES_PATH)
biases = [[float(cell.strip('"')) for cell in row] for row in biases]
weights = openData(WEIGHT_PATH)
weights = [[float(cell.strip('"')) for cell in row] for row in weights]

trained_parameters = {'biases': biases, 'weights': weights}

# NOTE(review): input_dim=len(weights) assumes the weight matrix is stored
# with one row per input feature — confirm against softmax.test_model.
test_model(parameters_dict=trained_parameters,
           input_dim=len(weights),
           output_dim=6,
           x_test_filepath=TRANSFORMED_PATH,
           y_test_filepath=Y_TEST_PATH,
           output_filepath=OUTPUT_PATH)