Example #1
import numpy as np
import theano
import theano.tensor as T

# symbolic variables: a minibatch of flattened images, integer labels,
# a minibatch index, an epoch index, and the corruption level for denoising
X = T.matrix("data")
y = T.ivector("label")
idx = T.lscalar()
ep_idx = T.lscalar()
corruption_level = T.fscalar()

# 784 -> 500 sigmoid encoder and 500 -> 784 sigmoid decoder
encode_layer = SigmoidLayer(in_dim=784, out_dim=500)

decode_layer = SigmoidLayer(in_dim=500, out_dim=784)

model = AutoEncoder(layers=[encode_layer, decode_layer])

#out=model.fprop(X, corruption_level=corruption_level, noise_type="gaussian");
out = model.fprop(X, corruption_level=corruption_level)
# reconstruction cost: cross-entropy between the decoder output and the clean input
cost = binary_cross_entropy_cost(out[-1], X)

# plain SGD updates for every parameter of the autoencoder
updates = gd_updates(cost=cost,
                     params=model.params,
                     method="sgd",
                     learning_rate=0.1)

# compile: given a minibatch index and a corruption level, apply one SGD step
# and return the reconstruction cost on that minibatch
train = theano.function(
    inputs=[idx, corruption_level],
    outputs=[cost],
    updates=updates,
    givens={X: train_set_x[idx * batch_size:(idx + 1) * batch_size]})

print "[MESSAGE] The model is built"

epoch = 0
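# The excerpt ends here; a minimal sketch of the epoch loop that might follow.
# n_epochs, n_train_batches and the fixed corruption value 0.2 are assumptions,
# not part of the original example.
while epoch < n_epochs:
    epoch = epoch + 1
    c = []
    for batch_index in xrange(n_train_batches):
        # train() returns [cost]; it takes the minibatch index and a corruption level
        c.append(train(batch_index, np.float32(0.2)))
    print "Epoch %d, mean training cost %f" % (epoch, np.mean(c))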
Example #2
import numpy as np
import theano
import theano.tensor as T

X = T.matrix("data")
y = T.ivector("label")
idx = T.lscalar()
ep_idx = T.lscalar()
corruption_level = T.fscalar()

encode_layer = SigmoidLayer(in_dim=784, out_dim=500)

decode_layer = SigmoidLayer(in_dim=500, out_dim=784)

model = AutoEncoder(layers=[encode_layer, decode_layer])

#out=model.fprop(X, corruption_level=corruption_level, noise_type="gaussian");
out = model.fprop(X, corruption_level=corruption_level)
cost = binary_cross_entropy_cost(out[-1], X)

updates = gd_updates(cost=cost, params=model.params, method="sgd", learning_rate=0.1)

train = theano.function(inputs=[idx, corruption_level],
                        outputs=[cost],
                        updates=updates,
                        givens={X: train_set_x[idx * batch_size:(idx + 1) * batch_size]})

print "[MESSAGE] The model is built"

epoch = 0
min_cost = None
# start training from a randomly drawn corruption level in [0.2, 0.3)
corr = np.random.uniform(low=0.2, high=0.3, size=1).astype("float32")
corr_best = corr[0]
while epoch < n_epochs:
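# gd_updates is a helper from the surrounding project, not shown in these
# excerpts. A hedged sketch of what it presumably builds for method="sgd":
# one (param, param - learning_rate * gradient) pair per parameter.
def sgd_updates(cost, params, learning_rate=0.1):
    grads = T.grad(cost, wrt=params)
    return [(p, p - learning_rate * g) for p, g in zip(params, grads)]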
Example #3
import numpy as np
import theano
import theano.tensor as T

n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] // batch_size
n_test_batches = test_set_x.get_value(borrow=True).shape[0] // batch_size

print "[MESSAGE] The data is loaded"

X = T.matrix("data")
idx = T.lscalar()
# the noise itself is a shared variable, so gradient descent can update it
noise = theano.shared(np.asarray(np.random.normal(scale=0.1,
                                                  size=(batch_size, 784)),
                                 dtype="float32"),
                      borrow=True)

#corrupted=corrupt_input(X, corruption_level=noise, noise_type="gaussian");
corrupted = X + noise

cost = binary_cross_entropy_cost(corrupted, X)

updates = gd_updates(cost, [noise], method="sgd", learning_rate=0.001)

train = theano.function(
    inputs=[idx],
    outputs=[cost],
    updates=updates,
    givens={X: train_set_x[idx * batch_size:(idx + 1) * batch_size]})

epoch = 0
while epoch < n_epochs:
    epoch = epoch + 1
    c = []

    for batch_index in xrange(n_train_batches):
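# The excerpt is cut off inside the training loop above. corrupt_input (see
# the commented-out call earlier) is another project helper; a hedged sketch
# of gaussian corruption with Theano's random streams, where the corruption
# level plays the role of the noise standard deviation:
from theano.tensor.shared_randomstreams import RandomStreams

srng = RandomStreams(seed=1234)

def gaussian_corrupt(x, corruption_level):
    # add zero-mean gaussian noise scaled by the corruption level
    return x + srng.normal(size=x.shape, avg=0.0, std=corruption_level,
                           dtype="float32")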
Example #4
import numpy as np
import theano
import theano.tensor as T

n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] // batch_size
n_test_batches = test_set_x.get_value(borrow=True).shape[0] // batch_size

print "[MESSAGE] The data is loaded"

X = T.matrix("data")
idx = T.lscalar()
noise = theano.shared(np.asarray(np.random.normal(scale=0.1, size=(batch_size, 784)),
                                 dtype="float32"),
                      borrow=True)

#corrupted=corrupt_input(X, corruption_level=noise, noise_type="gaussian");
corrupted = X + noise

cost = binary_cross_entropy_cost(corrupted, X)

updates = gd_updates(cost, [noise], method="sgd", learning_rate=0.001)

train = theano.function(inputs=[idx],
                        outputs=[cost],
                        updates=updates,
                        givens={X: train_set_x[idx * batch_size:(idx + 1) * batch_size]})

epoch = 0
while epoch < n_epochs:
    epoch = epoch + 1
    c = []

    for batch_index in xrange(n_train_batches):
        train_cost = train(batch_index)
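# binary_cross_entropy_cost is also project-specific. A hedged sketch of the
# standard binary cross-entropy it presumably computes: the per-example
# cross-entropy summed over the 784 pixels, averaged over the minibatch.
def binary_cross_entropy(output, target):
    return T.mean(T.sum(T.nnet.binary_crossentropy(output, target), axis=1))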