Example #1
n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size
n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size

print "[MESSAGE] The data is loaded"

X = T.matrix("data")
y = T.ivector("label")
idx = T.lscalar()
dropout_rate = T.fscalar()

layer_0 = ReLULayer(in_dim=784, out_dim=500)
layer_1 = ReLULayer(in_dim=500, out_dim=200)
layer_2 = SoftmaxLayer(in_dim=200, out_dim=10)

dropout = multi_dropout([(batch_size, 784), (batch_size, 500), (batch_size, 200)], dropout_rate)

model = FeedForward(layers=[layer_0, layer_1, layer_2], dropout=dropout)
model_test = FeedForward(layers=[layer_0, layer_1, layer_2])
# model=FeedForward(layers=[layer_0, layer_1, layer_2]);

out = model.fprop(X)
out_test = model_test.fprop(X)
cost = categorical_cross_entropy_cost(out[-1], y)

# Configurations that work
updates = gd_updates(cost=cost, params=model.params, method="sgd", learning_rate=0.01, momentum=0.9)

train = theano.function(
    inputs=[idx, dropout_rate],
    outputs=cost,
    updates=updates,
    givens={X: train_set_x[idx * batch_size: (idx + 1) * batch_size],
            y: train_set_y[idx * batch_size: (idx + 1) * batch_size]})
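
Example #1 stops after compiling the `train` function. The short sketch below shows one way the resulting function could be driven over minibatches; the epoch count, the dropout rate of 0.5 fed at every call, and the `numpy` alias are assumptions added for illustration and are not part of the original excerpt.

import numpy as np

n_epochs = 50  # assumed value; the excerpt does not show an epoch count

for epoch in xrange(n_epochs):
    epoch_cost = 0.
    for batch_index in xrange(n_train_batches):
        # each call slices one minibatch via the `givens` mapping and
        # applies a single SGD-with-momentum update; the second argument
        # is the dropout rate declared as a symbolic input above
        epoch_cost += train(batch_index, np.float32(0.5))
    print("[MESSAGE] epoch %d, mean training cost %f"
          % (epoch, epoch_cost / n_train_batches))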
Example #2
n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size

print "[MESSAGE] The data is loaded"

X = T.matrix("data")
y = T.ivector("label")
idx = T.lscalar()
dropout_rate = T.fscalar()

layer_0 = ReLULayer(in_dim=1024, out_dim=500)
layer_1 = ReLULayer(in_dim=500, out_dim=200)
layer_2 = SoftmaxLayer(in_dim=200, out_dim=10)

dropout = multi_dropout([(batch_size, 1024), (batch_size, 500),
                         (batch_size, 200)], dropout_rate)

model = FeedForward(layers=[layer_0, layer_1, layer_2], dropout=dropout)
model_test = FeedForward(layers=[layer_0, layer_1, layer_2])
#model=FeedForward(layers=[layer_0, layer_1, layer_2]);

out = model.fprop(X)
out_test = model_test.fprop(X)
cost = categorical_cross_entropy_cost(out[-1], y)

# Configurations that work
updates = gd_updates(cost=cost,
                     params=model.params,
                     method="sgd",
                     learning_rate=0.01,
                     momentum=0.9)
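
Example #2 builds a second, dropout-free `FeedForward` (`model_test`) for evaluation but ends after constructing the SGD updates. The sketch below is one way `out_test` might be turned into a per-batch error function; the misclassification-rate expression and the compiled `test` function are assumptions for illustration, not part of the original excerpt.

# predicted class = argmax over the softmax output of the dropout-free model
pred = T.argmax(out_test[-1], axis=1)
# assumed metric: mean misclassification rate against the integer labels
error = T.mean(T.neq(pred, y))

test = theano.function(
    inputs=[idx],
    outputs=error,
    givens={X: test_set_x[idx * batch_size: (idx + 1) * batch_size],
            y: test_set_y[idx * batch_size: (idx + 1) * batch_size]})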
Example #3
                        num_filters=20,
                        num_channels=50,
                        fm_size=(11, 11),
                        batch_size=batch_size)

pool_1 = MaxPooling(pool_size=(2, 2))

flattener = Flattener()

layer_2 = ReLULayer(in_dim=320, out_dim=200)

layer_3 = SoftmaxLayer(in_dim=200, out_dim=10)

#dropout=multi_dropout([(batch_size, 1, 28, 28), None, (batch_size, 50, 11, 11), None, None, None, None], prob=0.5);
dropout = multi_dropout([(batch_size, 1, 28, 28), None,
                         (batch_size, 50, 11, 11), None, None, None, None],
                        prob=0.5)

model = FeedForward(
    layers=[layer_0, pool_0, layer_1, pool_1, flattener, layer_2, layer_3],
    dropout=dropout)

out = model.fprop(images)
cost = categorical_cross_entropy_cost(out[-1], y) + L2_regularization(
    model.params, 0.01)
updates = gd_updates(cost=cost,
                     params=model.params,
                     method="sgd",
                     learning_rate=0.1)

train = theano.function(
    inputs=[idx],
    outputs=cost,
    updates=updates,
    givens={X: train_set_x[idx * batch_size: (idx + 1) * batch_size],
            y: train_set_y[idx * batch_size: (idx + 1) * batch_size]})

test = theano.function(inputs=[idx],
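
The excerpt ends before the `test` function is fully defined. The sketch below shows an epoch loop that could drive the two compiled functions, assuming `test(i)` returns the mean misclassification error on the i-th test batch and that the usual `n_train_batches` / `n_test_batches` counters were computed before the part of the example shown here; the epoch count and the `numpy` alias are likewise assumptions.

import numpy as np

n_epochs = 50  # assumed value; the excerpt does not show an epoch count

for epoch in xrange(n_epochs):
    # one pass of SGD updates over all training minibatches
    train_cost = np.mean([train(i) for i in xrange(n_train_batches)])
    # assumed: test(i) returns the mean misclassification error on batch i
    test_error = np.mean([test(i) for i in xrange(n_test_batches)])
    print("[MESSAGE] epoch %d, training cost %f, test error %f"
          % (epoch, train_cost, test_error))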