Example #1
import os

from deepy.dataset import MnistDataset, MiniBatches
from deepy.networks import NeuralClassifier
from deepy.layers import Convolution, Dense, Flatten, DimShuffle, Reshape, RevealDimension, Softmax, Dropout
from deepy.trainers import MomentumTrainer, LearningRateAnnealer

default_model = os.path.join(os.path.dirname(__file__), "models",
                             "deep_conv.gz")

if __name__ == '__main__':
    model = NeuralClassifier(input_dim=28 * 28)
    model.stack(  # Reshape to 3D tensor
        Reshape((-1, 28, 28)),
        # Add a new dimension for convolution
        DimShuffle((0, 'x', 1, 2)),
        Convolution((4, 1, 5, 5), activation="relu"),
        Dropout(0.15),
        Convolution((8, 4, 5, 5), activation="relu"),
        Dropout(0.1),
        Convolution((16, 8, 3, 3), activation="relu"),
        Flatten(),
        Dropout(0.1),
        # As dimension information was lost, reveal it to the pipeline
        RevealDimension(16),
        Dense(10, 'linear'),
        Softmax())

    trainer = MomentumTrainer(model)

    annealer = LearningRateAnnealer()

    mnist = MiniBatches(MnistDataset(), batch_size=20)
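    # The example is truncated here; mirroring the other examples, training and
    # saving would be launched as below (the keyword argument is `controllers`
    # or `epoch_controllers` depending on the deepy version):
    trainer.run(mnist, controllers=[annealer])

    model.save_params(default_model)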
Example #2
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
For reference, this model should achieve a 1.50% error rate in 10 minutes on an i7 CPU (8 threads).
"""

import logging, os
logging.basicConfig(level=logging.INFO)

from deepy.dataset import MnistDataset, MiniBatches
from deepy.networks import NeuralClassifier
from deepy.layers import Dense, Softmax, Dropout
from deepy.trainers import MomentumTrainer, LearningRateAnnealer

default_model = os.path.join(os.path.dirname(__file__), "models",
                             "mlp_dropout1.gz")

if __name__ == '__main__':
    model = NeuralClassifier(input_dim=28 * 28)
    model.stack(Dense(256, 'relu'), Dropout(0.5), Dense(256, 'relu'),
                Dropout(0.5), Dense(10, 'linear'), Softmax())

    trainer = MomentumTrainer(model)

    annealer = LearningRateAnnealer()

    mnist = MiniBatches(MnistDataset(), batch_size=20)

    trainer.run(mnist, epoch_controllers=[annealer])

    model.save_params(default_model)
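    # To reuse the trained weights later, an identically stacked model can load
    # them back (assuming deepy's load_params counterpart to save_params above):
    #
    #     model.load_params(default_model)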
Example #3
# NOTE: this snippet omits its imports; the ones below are reconstructed to
# match the other examples and may need adjusting for your deepy version.
import numpy as np

from deepy.dataset import MnistDataset, MiniBatches
from deepy.networks import NeuralClassifier
from deepy.layers import Dense, Softmax, Dropout, Maxout
from deepy.trainers import MomentumTrainer, ExponentialLearningRateAnnealer
from deepy.utils import UniformInitializer, shared_scalar

L2NORM_LIMIT = 1.9365
EPSILON = 1e-7

# Max-norm constraint: rescale any weight column whose L2 norm exceeds
# L2NORM_LIMIT back down to that limit (registered as a training callback).
def clip_param_norm():
    for param in model.parameters:
        if param.name.startswith("W"):
            l2_norms = np.sqrt(np.sum(param.get_value() ** 2, axis=0, keepdims=True))
            desired_norms = np.clip(l2_norms, 0, L2NORM_LIMIT)
            scale = (desired_norms + EPSILON) / (l2_norms + EPSILON)
            param.set_value(param.get_value() * scale)

if __name__ == '__main__':
    model = NeuralClassifier(input_dim=28 * 28)
    model.training_callbacks.append(clip_param_norm)
    model.stack(Dropout(0.2),
                Maxout(240, num_pieces=5, init=UniformInitializer(.005)),
                Maxout(240, num_pieces=5, init=UniformInitializer(.005)),
                Dense(10, 'linear', init=UniformInitializer(.005)),
                Softmax())


    trainer = MomentumTrainer(model, {"learning_rate": shared_scalar(0.01),
                                      "momentum": 0.5})

    annealer = ExponentialLearningRateAnnealer(trainer, debug=True)

    mnist = MiniBatches(MnistDataset(), batch_size=100)

    trainer.run(mnist, controllers=[annealer])
Example #4
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import logging, os
logging.basicConfig(level=logging.INFO)

from deepy.dataset import MnistDataset, MiniBatches
from deepy.networks import NeuralClassifier
from deepy.layers import Dense, Softmax, Dropout, PRelu
from deepy.trainers import MomentumTrainer, LearningRateAnnealer

default_model = os.path.join(os.path.dirname(__file__), "models",
                             "mlp_prelu_dropout1.gz")

if __name__ == '__main__':
    model = NeuralClassifier(input_dim=28 * 28)
    # Linear Dense layers followed by PRelu() apply the parametric ReLU
    # (learnable negative slope) as a separate activation layer.
    model.stack(Dense(256, 'linear'), PRelu(), Dropout(0.2),
                Dense(256, 'linear'), PRelu(), Dropout(0.2),
                Dense(10, 'linear'), Softmax())

    trainer = MomentumTrainer(model)

    annealer = LearningRateAnnealer()

    mnist = MiniBatches(MnistDataset(), batch_size=20)

    trainer.run(mnist, controllers=[annealer])

    model.save_params(default_model)
Example #5
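# NOTE: this snippet omits its imports. Besides the Dense and Dropout layers
# used in the other examples, it assumes XavierGlorotInitializer,
# L2HingeNeuralClassifier, BatchNormalization, Activation and
# HighwayLayerLRDiagDropoutBatchNorm are importable from deepy and its
# highway-network experiment code.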
if __name__ == '__main__':
    dropout_p_0 = 0.2
    dropout_p_h_0 = 0.3
    dropout_p_h_1 = 0.3
    dropout_p_2 = 0.5
    T = 5
    n = 1024
    d = 256
    gate_bias = -1.0
    activation = 'relu'
    #l2_reg = 0.001
    l2_reg = 1e-5
    init = XavierGlorotInitializer()
    model = L2HingeNeuralClassifier(input_dim=28 * 28,
                                    last_layer_l2_regularization=l2_reg)
    model.stack(Dropout(p=dropout_p_0), Dense(n, init=init, disable_bias=True),
                BatchNormalization(), Activation(activation))
    #model.stack(Dropout(p=dropout_p_0), BatchNormalization())

    for _ in range(T):
        #model.stack(HighwayLayerLRDropoutBatchNorm(activation=activation, gate_bias=gate_bias, projection_dim=d, d_p_0 = dropout_p_h_0, d_p_1 = dropout_p_h_1, init=init))
        model.stack(
            HighwayLayerLRDiagDropoutBatchNorm(activation=activation,
                                               gate_bias=gate_bias,
                                               projection_dim=d,
                                               d_p_0=dropout_p_h_0,
                                               d_p_1=dropout_p_h_1,
                                               init=init,
                                               quasi_ortho_init=True))
    #model.stack(BatchNormalization(),Dropout(p=dropout_p_2), Dense(10, init=init))
    model.stack(Dropout(p=dropout_p_2), Dense(10, init=init))
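    # The example is truncated here; as in the other examples, training would
    # continue by batching MnistDataset with MiniBatches, constructing a
    # trainer (e.g. MomentumTrainer) and calling trainer.run(...).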
Example #6
import logging, os
logging.basicConfig(level=logging.INFO)

# MNIST Multi-layer model with dropout.
from deepy.dataset import MnistDataset, MiniBatches
from deepy.networks import NeuralClassifier
from deepy.layers import Dense, Softmax, Dropout
from deepy.trainers import MomentumTrainer, LearningRateAnnealer

model_path = os.path.join(os.path.dirname(__file__), "models", "tutorial1.gz")

if __name__ == '__main__':
    model = NeuralClassifier(input_dim=28*28)
    model.stack(Dense(256, 'relu'),
                Dropout(0.2),
                Dense(256, 'relu'),
                Dropout(0.2),
                Dense(10, 'linear'),
                Softmax())

    mnist = MiniBatches(MnistDataset(), batch_size=20)

    trainer = MomentumTrainer(model, {"learning_rate": LearningRateAnnealer.learning_rate(0.01)})

    annealer = LearningRateAnnealer(trainer)

    trainer.run(mnist, controllers=[annealer])

    model.save_params(model_path)