Example #1
import numpy as np
import tensorflow as tf


def get_random_block_from_data(data, batch_size):
    # Draw a contiguous random slice of length batch_size from the data.
    start_index = np.random.randint(0, len(data) - batch_size)
    return data[start_index:(start_index + batch_size)]

X_train, X_test = standard_scale(mnist.train.images, mnist.test.images)


n_samples = int(mnist.train.num_examples)
training_epochs = 100
batch_size = 128
display_step = 1

autoencoder = MaskingNoiseAutoencoder(n_input = 784,
                                      n_hidden = 200,
                                      transfer_function = tf.nn.softplus,
                                      optimizer = tf.train.AdamOptimizer(learning_rate = 0.001),
                                      dropout_probability = 0.95)

for epoch in range(training_epochs):
    avg_cost = 0.
    total_batch = int(n_samples / batch_size)
    for i in range(total_batch):
        batch_xs = get_random_block_from_data(X_train, batch_size)

        cost = autoencoder.partial_fit(batch_xs)

        avg_cost += cost / n_samples * batch_size

    if epoch % display_step == 0:
        print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
Example #2

def get_random_block_from_data(data, batch_size):
    start_index = np.random.randint(0, len(data) - batch_size)
    return data[start_index:(start_index + batch_size)]


X_train, X_test = standard_scale(mnist.train.images, mnist.test.images)

n_samples = int(mnist.train.num_examples)
training_epochs = 100
batch_size = 128
display_step = 1

autoencoder = MaskingNoiseAutoencoder(
    n_input=784,
    n_hidden=200,
    transfer_function=tf.nn.softplus,
    optimizer=tf.train.AdamOptimizer(learning_rate=0.001),
    dropout_probability=0.95)

for epoch in range(training_epochs):
    avg_cost = 0.
    total_batch = int(n_samples / batch_size)
    for i in range(total_batch):
        batch_xs = get_random_block_from_data(X_train, batch_size)

        cost = autoencoder.partial_fit(batch_xs)

        avg_cost += cost / n_samples * batch_size

    if epoch % display_step == 0:
        print("Epoch:", '%d,' % (epoch + 1), "Cost:",
Example #3
def get_random_block_from_data(data, batch_size):
    # Draw a contiguous random slice of length batch_size from the data.
    start_index = np.random.randint(0, len(data) - batch_size)
    return data[start_index:(start_index + batch_size)]


X_train, X_test = standard_scale(mnist.train.images, mnist.test.images)

n_samples = int(mnist.train.num_examples)

train_epochs = 20
batch_size = 128
display_step = 10

gae = MaskingNoiseAutoencoder(
    n_input=784,
    n_hidden=200,
    transfer_function=tf.nn.softplus,
    optimizer=tf.train.AdamOptimizer(learning_rate=0.001),
)

for epoch in range(train_epochs):
    avg_cost = 0.
    total_batch = int(n_samples / batch_size)

    for i in range(total_batch):
        batch_xs = get_random_block_from_data(X_train, batch_size)

        cost = gae.partial_fit(batch_xs)

        avg_cost += cost / n_samples * batch_size

    if epoch % display_step == 0:
        print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
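
X_test is produced by standard_scale but never used in the snippets shown here. Assuming the autoencoder class also exposes a calc_total_cost method (common in these TensorFlow autoencoder tutorials, but an assumption here), a typical follow-up is to report the reconstruction cost on the held-out images once training finishes:

# Hypothetical evaluation step; calc_total_cost is assumed, not shown in the snippets above.
print("Total cost on the test set:", str(gae.calc_total_cost(X_test)))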