def double_fc_dropout(p0, p1, p2, repetitions):
    expanded_training_data, _, _ = network3_nbb.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    nets = []
    for j in range(repetitions):
        print "\n\nTraining using a dropout network with parameters ",p0,p1,p2
        print "Training with expanded data, run num %s" % j
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28), 
                          filter_shape=(20, 1, 5, 5), 
                          poolsize=(2, 2), 
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12), 
                          filter_shape=(40, 20, 5, 5), 
                          poolsize=(2, 2), 
                          activation_fn=ReLU),
            FullyConnectedLayer(
                n_in=40*4*4, n_out=1000, activation_fn=ReLU, p_dropout=p0),
            FullyConnectedLayer(
                n_in=1000, n_out=1000, activation_fn=ReLU, p_dropout=p1),
            SoftmaxLayer(n_in=1000, n_out=10, p_dropout=p2)], mini_batch_size)
        net.SGD(expanded_training_data, 40, mini_batch_size, 0.03, 
                validation_data, test_data)
        nets.append(net)
    return nets
def load():
    import numpy as np
    import time

    import network3_nbb

    print "...in load, using " + network3_nbb.theano.config.device

    training_data, validation_data, test_data = network3_nbb.load_data_shared()
    mini_batch_size = 10
def expanded_data_double_fc(n=100):
    """n is the number of neurons in both fully-connected layers.  We'll
    try n=100, 300, and 1000.

    """
    expanded_training_data, _, _ = network3_nbb.load_data_shared(
        "../data/mnist_expanded.pkl.gz")
    for j in range(3):
        print "Training with expanded data, %s neurons in two FC layers, run num %s" % (n, j)
        net = Network([
            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28), 
                          filter_shape=(20, 1, 5, 5), 
                          poolsize=(2, 2), 
                          activation_fn=ReLU),
            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12), 
                          filter_shape=(40, 20, 5, 5), 
                          poolsize=(2, 2), 
                          activation_fn=ReLU),
            FullyConnectedLayer(n_in=40*4*4, n_out=n, activation_fn=ReLU),
            FullyConnectedLayer(n_in=n, n_out=n, activation_fn=ReLU),
            SoftmaxLayer(n_in=n, n_out=10)], mini_batch_size)
        net.SGD(expanded_training_data, 60, mini_batch_size, 0.03, 
                validation_data, test_data, lmbda=0.1)
def dbl_conv_relu(lmbda_epochs):
    import numpy as np
    import time

    import network3_nbb

    print "...in dbl_conv_relu, using " + network3_nbb.theano.config.device

    training_data, validation_data, test_data = network3_nbb.load_data_shared()
    mini_batch_size = 10

    lmbda = lmbda_epochs[0]
    epoch = lmbda_epochs[1]
    print "Conv + Conv + FC num %s, relu, with regularization. lambda: %s, epochs: %s" % (
        0, lmbda, epoch)
    net = network3_nbb.Network([
        network3_nbb.ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                                   filter_shape=(20, 1, 5, 5),
                                   poolsize=(2, 2),
                                   activation_fn=network3_nbb.ReLU),
        network3_nbb.ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                                   filter_shape=(40, 20, 5, 5),
                                   poolsize=(2, 2),
                                   activation_fn=network3_nbb.ReLU),
        network3_nbb.FullyConnectedLayer(
            n_in=40 * 4 * 4, n_out=100, activation_fn=network3_nbb.ReLU),
        network3_nbb.SoftmaxLayer(n_in=100, n_out=10)
    ], mini_batch_size)
    acc_array = net.SGD(training_data,
                        int(epoch),
                        mini_batch_size,
                        0.03,
                        validation_data,
                        test_data,
                        lmbda=lmbda)
    return [acc_array, lmbda, epoch]
import numpy as np
import time
import network3_nbb

# Module-level script: runs a short training job at import time and also
# establishes the globals (training_data, validation_data, test_data,
# mini_batch_size) that the functions defined earlier in this file read.
print "Starting, using " + network3_nbb.theano.config.device

training_data, validation_data, test_data = network3_nbb.load_data_shared()
mini_batch_size = 10

# Hard-coded hyperparameters for this run; the commented-out expressions
# show where they came from in dbl_conv_relu's parameter list.
lmbda = 0.1  #lmbda_epochs[0]
epoch = 2  #lmbda_epochs[1]

print "Conv + Conv + FC num %s, relu, with regularization. lambda: %s, epochs: %s" % (
    0, lmbda, epoch)
# Same conv + conv + FC + softmax architecture as dbl_conv_relu above.
net = network3_nbb.Network([
    network3_nbb.ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                               filter_shape=(20, 1, 5, 5),
                               poolsize=(2, 2),
                               activation_fn=network3_nbb.ReLU),
    network3_nbb.ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                               filter_shape=(40, 20, 5, 5),
                               poolsize=(2, 2),
                               activation_fn=network3_nbb.ReLU),
    network3_nbb.FullyConnectedLayer(
        n_in=40 * 4 * 4, n_out=100, activation_fn=network3_nbb.ReLU),
    network3_nbb.SoftmaxLayer(n_in=100, n_out=10)
], mini_batch_size)
# Train with learning rate 0.03 (the SGD call continues past the end of
# this view).
acc_array = net.SGD(training_data,
                    int(epoch),
                    mini_batch_size,
                    0.03,