Example #1
def contractive_denoising_autoencoder(input_var=None):
    """Contractive Denoising Autoencoder"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 20)
    hp('n_epochs', 1000)
    hp('learning_rate', 0.01)
    hp('patience', 10000)

    # Unsupervised hyperparameters
    hp_ae = Hyperparameters()
    hp_ae('batch_size', hp.batch_size)
    hp_ae('n_epochs', 15)
    hp_ae('learning_rate', 0.01)

    # Create connected layers
    # Input layer
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    # Auto Encoder Layer
    l_ae1 = AutoEncoder(incoming=l_in, nb_units=500, hyperparameters=hp_ae, contraction_level=0.3,
                        activation=relu, name='Contractive Denoising AutoEncoder')
    # Logistic regression Layer
    l_out = LogisticRegression(incoming=l_ae1, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('contractive_denoising_autoencoder')
    net.add(l_in)
    net.add(l_ae1)
    net.add(l_out)

    return net, hp
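
Each of these factory functions returns a (network, hyperparameters) pair; training goes through a Model object, following the pattern shown in Example #6 below. A minimal usage sketch (load_mnist is a hypothetical placeholder for whatever data loading you use):

import yadll

data = load_mnist()                  # hypothetical helper: MNIST data in the form the Model expects
net, hp = contractive_denoising_autoencoder()

# Same Model wiring as in Example #6
model = Model(name='cdae mnist', data=data)
model.hp = hp                        # attach the returned hyperparameters
model.network = net                  # attach the network
model.updates = yadll.updates.sgd    # stochastic gradient descent updates
report = model.train()               # Example #6 keeps the return value as a report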
Example #2
def mlp(input_var=None):
    """Multi Layer Perceptron"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 20)
    hp('n_epochs', 1000)
    hp('learning_rate', 0.01)
    hp('l1_reg', 0.00)
    hp('l2_reg', 0.0001)
    hp('patience', 5000)

    # Create connected layers
    # Input layer
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    # Dense Layer
    l_hid1 = DenseLayer(incoming=l_in, nb_units=500, W=glorot_uniform, l1=hp.l1_reg,
                        l2=hp.l2_reg, activation=tanh, name='Hidden layer 1')
    # Dense Layer
    l_hid2 = DenseLayer(incoming=l_hid1, nb_units=500, W=glorot_uniform, l1=hp.l1_reg,
                        l2=hp.l2_reg, activation=tanh, name='Hidden layer 2')
    # Logistic regression Layer
    l_out = LogisticRegression(incoming=l_hid2, nb_class=10, l1=hp.l1_reg,
                               l2=hp.l2_reg, name='Logistic regression')

    # Create network and add layers
    net = Network('mlp')
    net.add(l_in)
    net.add(l_hid1)
    net.add(l_hid2)
    net.add(l_out)

    return net, hp
Example #3
def dbn(input_var=None):
    """Deep Belief Network"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 10)
    hp('n_epochs', 1000)
    hp('learning_rate', 0.1)
    hp('patience', 100)

    # Unsupervised hyperparameters
    hp_ae = Hyperparameters()
    hp_ae('batch_size', hp.batch_size)
    hp_ae('n_epochs', 15)
    hp_ae('learning_rate', 0.01)

    # Create connected layers
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    l_rbm1 = RBM(incoming=l_in, nb_units=500, hyperparameters=hp_ae,
                 name='Restricted Boltzmann Machine 1')
    l_rbm2 = RBM(incoming=l_rbm1, nb_units=500, hyperparameters=hp_ae,
                 name='Restricted Boltzmann Machine 2')
    l_out = LogisticRegression(incoming=l_rbm2, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('dbn')
    net.add(l_in)
    net.add(l_rbm1)
    net.add(l_rbm2)
    net.add(l_out)

    return net, hp
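
The two hyperparameter sets cover two training phases: hp_ae is handed to each RBM and presumably drives the unsupervised, layer-wise pretraining (fewer epochs, smaller learning rate), while the returned hp governs the supervised fine-tuning of the full stack.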
Example #4
def convpool(input_var=None):
    """Convolution and MaxPooling"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 500)
    hp('n_epochs', 200)
    hp('learning_rate', 0.01)
    hp('patience', 10000)

    # Create connected layers
    image_shape = (None, 1, 28, 28)    # (batch size, nb input feature maps, image height, image width)
    filter_shape = (20, 1, 5, 5)       # (number of filters, nb input feature maps, filter height, filter width)
    poolsize = (2, 2)                  # downsampling factor per (row, col)
    # Input layer
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    # ConvLayer needs 4D Tensor
    l_rs = ReshapeLayer(incoming=l_in, output_shape=image_shape)
    # ConvPool Layer
    l_cp = ConvPoolLayer(incoming=l_rs, poolsize=poolsize, image_shape=image_shape,
                         filter_shape=filter_shape, name='ConvPool layer')
    # flatten convpool output
    l_fl = FlattenLayer(incoming=l_cp, ndim=2)
    # Logistic regression Layer
    l_out = LogisticRegression(incoming=l_fl, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('convpool')
    net.add(l_in)
    net.add(l_rs)
    net.add(l_cp)
    net.add(l_fl)
    net.add(l_out)

    return net, hp
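
A quick sanity check of the shapes annotated above (not part of the original code; assumes valid convolution and non-overlapping max pooling):

conv_out = 28 - 5 + 1            # 24: a valid 5x5 convolution shrinks each spatial dim by 4
pool_out = conv_out // 2         # 12: non-overlapping 2x2 pooling halves each dim
flat_units = 20 * pool_out ** 2  # 2880 values reach the logistic regression layer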
Example #5
def dropconnect(input_var=None):
    """DropConnect MLP"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 20)
    hp('n_epochs', 1000)
    hp('learning_rate', 0.01)
    hp('patience', 10000)

    # Create connected layers
    # Input layer
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    # DropConnect Layer
    l_dc1 = Dropconnect(incoming=l_in, nb_units=500, corruption_level=0.4,
                        W=glorot_uniform, activation=relu, name='Dropconnect layer 1')
    # DropConnect Layer
    l_dc2 = Dropconnect(incoming=l_dc1, nb_units=500, corruption_level=0.2,
                        W=glorot_uniform, activation=relu, name='Dropconnect layer 2')
    # Logistic regression Layer
    l_out = LogisticRegression(incoming=l_dc2, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('dropconnect')
    net.add(l_in)
    net.add(l_dc1)
    net.add(l_dc2)
    net.add(l_out)

    return net, hp
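
DropConnect drops individual weights rather than whole activations (as dropout does); corruption_level is presumably the per-weight drop probability, set here to 0.4 for the first layer and 0.2 for the second.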
Example #6
import cPickle

import yadll


def grid_search():
    """Grid search over the hyperparameter combinations in hps (hps and data are defined elsewhere)"""
    reports = []
    for hp in hps:
        # create the model
        model = Model(name='mlp grid search', data=data)
        # add the hyperparameters to the model
        model.hp = hp
        # Create connected layers
        # Input layer
        l_in = InputLayer(shape=(None, 28 * 28), name='Input')
        # Dense Layer 1
        l_hid1 = DenseLayer(incoming=l_in, nb_units=5, W=hp.initialisation, l1=hp.l1_reg,
                            l2=hp.l2_reg, activation=hp.activation, name='Hidden layer 1')
        # Dense Layer 2
        l_hid2 = DenseLayer(incoming=l_hid1, nb_units=5, W=hp.initialisation, l1=hp.l1_reg,
                            l2=hp.l2_reg, activation=hp.activation, name='Hidden layer 2')
        # Logistic regression Layer
        l_out = LogisticRegression(incoming=l_hid2, nb_class=10, l1=hp.l1_reg,
                                   l2=hp.l2_reg, name='Logistic regression')

        # Create network and add layers
        net = Network('mlp')
        net.add(l_in)
        net.add(l_hid1)
        net.add(l_hid2)
        net.add(l_out)
        # add the network to the model
        model.network = net

        # updates method
        model.updates = yadll.updates.sgd
        reports.append((hp, model.train()))

        with open('reports.pkl', 'wb') as report_file:
            cPickle.dump(reports, report_file)
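
The hps grid consumed by the loop is defined outside this snippet. A hypothetical construction, consistent with the attributes the loop reads (initialisation, l1_reg, l2_reg, activation); the candidate values are illustrative only:

import itertools

hps = []
for lr, l2, activation, init in itertools.product(
        [0.01, 0.1],        # learning_rate candidates
        [0.0, 0.0001],      # l2_reg candidates
        [tanh, relu],       # activation candidates
        [glorot_uniform]):  # initialisation candidates
    hp = Hyperparameters()
    hp('batch_size', 20)
    hp('n_epochs', 1000)
    hp('learning_rate', lr)
    hp('l1_reg', 0.00)
    hp('l2_reg', l2)
    hp('activation', activation)
    hp('initialisation', init)
    hp('patience', 5000)
    hps.append(hp)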
Example #7
def rnn(input_var=None):
    """Recurrent Neural Network"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 500)
    hp('n_epochs', 1000)
    hp('learning_rate', 0.1)
    hp('patience', 500)

    # Create connected layers
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    l_rnn = RNN(incoming=l_in, n_hidden=100, n_out=28 * 28, name='Recurrent Neural Network')
    l_out = LogisticRegression(incoming=l_rnn, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('rnn')
    net.add(l_in)
    net.add(l_rnn)
    net.add(l_out)

    return net, hp
Example #8
def logistic_regression(input_var=None):
    """Logistic Regression"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 600)
    hp('n_epochs', 1000)
    hp('learning_rate', 0.1)
    hp('patience', 5000)

    # Create connected layers
    # Input layer
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    # Logistic regression Layer
    l_out = LogisticRegression(incoming=l_in, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('logistic_regression')
    net.add(l_in)
    net.add(l_out)

    return net, hp
Example #9
def lstm(input_var=None):
    """Long Short Term Memory"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 500)
    hp('n_epochs', 1000)
    hp('learning_rate', 0.1)
    hp('patience', 500)

    # Create connected layers
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    l_lstm = LSTM(incoming=l_in, n_hidden=100, n_out=28 * 28, name='Long Short Term Memory')
    l_out = LogisticRegression(incoming=l_lstm, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('lstm')
    net.add(l_in)
    net.add(l_lstm)
    net.add(l_out)

    return net, hp
Example #10
def lenet5(input_var=None):
    """LeNet-5"""

    # Hyperparameters
    hp = Hyperparameters()
    hp('batch_size', 500)
    hp('n_epochs', 200)
    hp('learning_rate', 0.1)
    hp('patience', 10000)

    # Create connected layers
    # Input layer
    l_in = InputLayer(shape=(None, 28 * 28), input_var=input_var, name='Input')
    # ConvLayer needs 4D Tensor
    image_shape = (None, 1, 28, 28)
    l_rs = ReshapeLayer(incoming=l_in, output_shape=image_shape)
    # first convpool
    filter_shape = (20, 1, 5, 5)
    poolsize = (2, 2)
    l_cp1 = ConvPoolLayer(incoming=l_rs, poolsize=poolsize, image_shape=image_shape,
                          filter_shape=filter_shape, name='ConvPool layer 1')
    # second convpool
    image_shape = (None, 20, 12, 12)   # (batch size, nb filters, (28-5+1)/2, (28-5+1)/2)
    filter_shape = (50, 20, 5, 5)
    poolsize = (2, 2)
    l_cp2 = ConvPoolLayer(incoming=l_cp1, poolsize=poolsize, image_shape=image_shape,
                          filter_shape=filter_shape, name='ConvPool layer 2')
    # flatten convpool output
    l_fl = FlattenLayer(incoming=l_cp2, ndim=2)
    # Dense Layer
    l_hid1 = DenseLayer(incoming=l_fl, nb_units=500, W=glorot_uniform, activation=tanh, name='Hidden layer 1')
    # Logistic regression Layer
    l_out = LogisticRegression(incoming=l_hid1, nb_class=10, name='Logistic regression')

    # Create network and add layers
    net = Network('lenet5')
    net.add(l_in)
    net.add(l_rs)
    net.add(l_cp1)
    net.add(l_cp2)
    net.add(l_fl)
    net.add(l_hid1)
    net.add(l_out)

    return net, hp
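
As a check on the image_shape comments above, the spatial sizes work out as follows (same valid-convolution, 2x2-pooling assumptions as in Example #4):

s1 = (28 - 5 + 1) // 2     # 12: first ConvPool, 28x28 -> 24x24 -> 12x12
s2 = (s1 - 5 + 1) // 2     # 4:  second ConvPool, 12x12 -> 8x8 -> 4x4
flat_units = 50 * s2 ** 2  # 800 values feed the 500-unit dense layer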