Example #1
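All of these snippets reference symbolic variables and shared datasets (X, y, idx, images, train_set_x, batch_size, ...) that are defined earlier in their source files. A minimal sketch of the kind of setup they assume; the batch size and image shape below are illustrative guesses, not values taken from the excerpts:

import theano
import theano.tensor as T

batch_size = 100                      # illustrative value
idx = T.lscalar("idx")                # mini-batch index
X = T.matrix("X")                     # flattened input images, one row per sample
y = T.ivector("y")                    # integer class labels
# train_set_x/train_set_y and test_set_x/test_set_y are assumed to be Theano
# shared variables holding the dataset, so slices of them can appear in givens.
# The convolutional layers consume a 4D tensor (batch, channels, height, width);
# single-channel 28x28 inputs are an assumption for illustration only.
images = X.reshape((batch_size, 1, 28, 28))
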
pool_0 = MaxPooling(pool_size=(4, 4))
pool_1 = MaxPooling(pool_size=(2, 2))
pool_2 = MaxPooling(pool_size=(2, 2))
pool_3 = MaxPooling(pool_size=(2, 2))

flattener = Flattener()
layer_5 = ReLULayer(in_dim=128 * 1 * 1, out_dim=64)
layer_6 = SoftmaxLayer(in_dim=64, out_dim=10)

model_sup = FeedForward(layers=[
    layer_0_en, pool_0, layer_1_en, pool_1, layer_2_en, pool_2, layer_3_en,
    pool_3, layer_4_en, flattener, layer_5, layer_6
])

out_sup = model_sup.fprop(images)
cost_sup = categorical_cross_entropy_cost(out_sup[-1], y)
updates = gd_updates(cost=cost_sup,
                     params=model_sup.params,
                     method="sgd",
                     learning_rate=0.1)

train_sup = theano.function(
    inputs=[idx],
    outputs=cost_sup,
    updates=updates,
    givens={
        X: train_set_x[idx * batch_size:(idx + 1) * batch_size],
        y: train_set_y[idx * batch_size:(idx + 1) * batch_size]
    })

test_sup = theano.function(
    inputs=[idx],
    # the excerpt cuts off here; the remainder is assumed to mirror Example #3
    outputs=model_sup.layers[-1].error(out_sup[-1], y),
    givens={
        X: test_set_x[idx * batch_size:(idx + 1) * batch_size],
        y: test_set_y[idx * batch_size:(idx + 1) * batch_size]
    })
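
gd_updates itself is not shown in these excerpts; with method="sgd" it presumably returns an ordinary list of (parameter, new_value) pairs for theano.function's updates argument. A minimal sketch of what that amounts to, assuming plain gradient descent over T.grad (not the library's actual code):

import theano.tensor as T

def sgd_updates(cost, params, learning_rate):
    # one (shared_variable, new_value) pair per parameter: p <- p - lr * d(cost)/dp
    grads = T.grad(cost, params)
    return [(p, p - learning_rate * g) for p, g in zip(params, grads)]
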
Example #2
                      batch_size=batch_size,
                      border_mode="full")

pool_1 = MaxPooling(pool_size=(2, 2))

flattener = Flattener()

layer_2 = ReLULayer(in_dim=32 * 64, out_dim=800)

layer_3 = SoftmaxLayer(in_dim=800, out_dim=10)

model = FeedForward(
    layers=[layer_0, pool_0, layer_1, pool_1, flattener, layer_2, layer_3])

out = model.fprop(images)
cost = categorical_cross_entropy_cost(out[-1], y)
updates = gd_updates(cost=cost,
                     params=model.params,
                     method="sgd",
                     learning_rate=0.01,
                     momentum=0.9)

extract = theano.function(
    inputs=[idx],
    outputs=layer_0.apply(images),
    givens={X: train_set_x[idx * batch_size:(idx + 1) * batch_size]})
print extract(1).shape

train = theano.function(
    inputs=[idx],
    outputs=cost,
    updates=updates,
    givens={
        X: train_set_x[idx * batch_size:(idx + 1) * batch_size],
        y: train_set_y[idx * batch_size:(idx + 1) * batch_size]
    })
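
This example also passes momentum=0.9 to gd_updates; with a momentum term the helper presumably keeps a per-parameter velocity, along the lines of the classical-momentum sketch below (an assumption about gd_updates, not its actual code):

import numpy
import theano
import theano.tensor as T

def sgd_momentum_updates(cost, params, learning_rate, momentum):
    updates = []
    for p, g in zip(params, T.grad(cost, params)):
        # per-parameter velocity, initialised to zero
        v = theano.shared(numpy.zeros_like(p.get_value()),
                          broadcastable=p.broadcastable)
        v_new = momentum * v - learning_rate * g
        updates.append((v, v_new))
        updates.append((p, p + v_new))
    return updates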
Example #3
pool_2 = MaxPooling(pool_size=(2, 2))
pool_3 = MaxPooling(pool_size=(2, 2))
pool_4 = MaxPooling(pool_size=(2, 2))

flattener = Flattener()
layer_6 = ReLULayer(in_dim=64 * 1 * 1, out_dim=32)
layer_7 = SoftmaxLayer(in_dim=32, out_dim=6)

model_sup = FeedForward(layers=[
    layer_0_en, pool_0, layer_1_en, pool_1, layer_2_en, pool_2, layer_3_en,
    pool_3, layer_4_en, pool_4, layer_5_en, flattener, layer_6, layer_7
])

out_sup = model_sup.fprop(images)
cost_sup = categorical_cross_entropy_cost(out_sup[-1], y)
updates = gd_updates(cost=cost_sup,
                     params=model_sup.params,
                     method="sgd",
                     learning_rate=0.1)

train_sup = theano.function(
    inputs=[idx],
    outputs=cost_sup,
    updates=updates,
    givens={
        X: train_set_x[idx * batch_size:(idx + 1) * batch_size],
        y: train_set_y[idx * batch_size:(idx + 1) * batch_size]
    })

test_sup = theano.function(
    inputs=[idx],
    outputs=model_sup.layers[-1].error(out_sup[-1], y),
    givens={
        X: test_set_x[idx * batch_size:(idx + 1) * batch_size],
        y: test_set_y[idx * batch_size:(idx + 1) * batch_size]
    })

print "[MESSAGE] The supervised model is built"
Example #4
                      border_mode="full");

pool_1=MaxPooling(pool_size=(2,2));

flattener=Flattener();

layer_2=ReLULayer(in_dim=32*64,
                  out_dim=800);
                  
layer_3=SoftmaxLayer(in_dim=800,
                     out_dim=10);
                     
model=FeedForward(layers=[layer_0, pool_0, layer_1, pool_1, flattener, layer_2, layer_3]);

out=model.fprop(images);
cost=categorical_cross_entropy_cost(out[-1], y);
updates=gd_updates(cost=cost, params=model.params, method="sgd", learning_rate=0.01, momentum=0.9);

extract=theano.function(inputs=[idx],
                        outputs=layer_0.apply(images),
                        givens={X: train_set_x[idx * batch_size: (idx + 1) * batch_size]});
print extract(1).shape


train=theano.function(inputs=[idx],
                      outputs=cost,
                      updates=updates,
                      givens={X: train_set_x[idx * batch_size: (idx + 1) * batch_size],
                              y: train_set_y[idx * batch_size: (idx + 1) * batch_size]});

test = theano.function(inputs=[idx],