def main(num_epochs=1000):
    """Train a softmax regressor on MNIST with momentum SGD.

    Builds the Theano cost graph, wires up Fuel data streams and Blocks
    monitoring extensions, then runs the Blocks main loop.

    Parameters
    ----------
    num_epochs : int, optional
        Number of epochs after which training stops.
    """
    # MODEL
    # defining the data: flattened images and integer class labels
    x = tensor.matrix('features')
    y = tensor.lmatrix('targets')

    # defining the model (784 = 28x28 pixels, 10 digit classes)
    softmax_regressor = SoftmaxRegressor(input_dim=784, n_classes=10)

    # defining the cost to learn on
    probs = softmax_regressor.get_probs(features=x)
    cost = softmax_regressor.get_cost(probs=probs, targets=y).mean()
    cost.name = 'cost'

    # defining the cost to monitor
    misclassification = softmax_regressor.get_misclassification(
        probs=probs, targets=y
    ).mean()
    misclassification.name = 'misclassification'

    # DATASETS
    # defining the datasets
    train_dataset = MNIST('train')
    test_dataset = MNIST('test')

    # TRAINING ALGORITHM
    # defining the algorithm
    params = softmax_regressor.get_params()
    algorithm = GradientDescent(
        cost=cost,
        params=params,
        step_rule=Momentum(learning_rate=0.1,
                           momentum=0.1))

    # defining the data stream
    # how the dataset is read
    train_data_stream = ForceFloatX(
        data_stream=DataStream(
            dataset=train_dataset,
            iteration_scheme=ShuffledScheme(
                examples=train_dataset.num_examples,
                batch_size=100,
            )
        )
    )
    test_data_stream = ForceFloatX(
        data_stream=DataStream(
            dataset=test_dataset,
            iteration_scheme=SequentialScheme(
                examples=test_dataset.num_examples,
                batch_size=1000,
            )
        )
    )

    # MONITORING
    # defining the extensions
    extensions = []
    # timing the training and each epoch
    extensions.append(Timing())
    # ending the training after a certain number of epochs
    extensions.append(FinishAfter(after_n_epochs=num_epochs))
    # monitoring the test set
    extensions.append(DataStreamMonitoring(
        [cost, misclassification],
        test_data_stream,
        prefix='test'))
    # monitoring the training set while training
    # NOTE: `after_epoch` replaces the deprecated `after_every_epoch`
    # spelling used elsewhere; this file's other examples use `after_epoch`.
    extensions.append(TrainingDataMonitoring(
        [cost, misclassification],
        prefix='train',
        after_epoch=True))
    # printing quantities
    extensions.append(Printing())

    # MERGING IT TOGETHER
    # defining the model
    model = Model(cost)

    # defining the training main loop
    main_loop = MainLoop(model=model,
                         data_stream=train_data_stream,
                         algorithm=algorithm,
                         extensions=extensions)

    main_loop.run()
# Example 2 (original separator in the scraped source: "Esempio n. 2", score 0)
def main(num_epochs=1000):
    """Train a softmax regressor on MNIST with momentum SGD, plotting the
    monitored costs, input examples and the learned weights during training.

    Parameters
    ----------
    num_epochs : int, optional
        Number of epochs after which training stops.
    """
    # Symbolic inputs: flattened images and integer class labels.
    features = tensor.matrix('features')
    targets = tensor.lmatrix('targets')

    # Model and the quantities derived from it.
    regressor = SoftmaxRegressor(input_dim=784, n_classes=10)
    predictions = regressor.get_probs(features=features)
    parameters = regressor.get_params()
    weights = regressor.get_weights()

    cost = regressor.get_cost(probs=predictions, targets=targets).mean()
    cost.name = 'cost'
    misclassification = regressor.get_misclassification(
        probs=predictions, targets=targets).mean()
    misclassification.name = 'misclassification'

    # Datasets.
    train_dataset = MNIST('train')
    test_dataset = MNIST('test')

    # Momentum SGD on the training cost.
    algorithm = GradientDescent(
        cost=cost,
        params=parameters,
        step_rule=Momentum(learning_rate=0.1, momentum=0.1))

    # Shuffled minibatches for training, sequential ones for evaluation.
    train_data_stream = ForceFloatX(data_stream=DataStream(
        dataset=train_dataset,
        iteration_scheme=ShuffledScheme(
            examples=train_dataset.num_examples, batch_size=100)))
    test_data_stream = ForceFloatX(data_stream=DataStream(
        dataset=test_dataset,
        iteration_scheme=SequentialScheme(
            examples=test_dataset.num_examples, batch_size=1000)))

    model = Model(cost)

    # Timing, stopping criterion and monitoring of both splits.
    extensions = [
        Timing(),
        FinishAfter(after_n_epochs=num_epochs),
        DataStreamMonitoring([cost, misclassification],
                             test_data_stream,
                             prefix='test'),
        TrainingDataMonitoring([cost, misclassification],
                               prefix='train',
                               after_epoch=True),
    ]

    # Live plot of the monitored channels.
    cost_plotter = Plotter(
        channels=[['test_cost', 'test_misclassification',
                   'train_cost', 'train_misclassification']],
        titles=['Costs'])
    # Display of training inputs (on a copy of the stream so the training
    # iterator is left untouched) and of the learned weight images.
    example_display = ImageDataStreamDisplay(
        data_stream=copy.deepcopy(train_data_stream),
        image_shape=(28, 28, 1),
        axes=(0, 1, 'c'),
        shift=-0.5,
        rescale=2.,
    )
    weight_display = WeightDisplay(
        weights=weights,
        transpose=(1, 0),
        image_shape=(28, 28, 1),
        axes=(0, 1, 'c'),
        shift=-0.5,
        rescale=2.,
        grid_shape=(1, 10))
    images_displayer = DisplayImage(
        image_getters=[example_display, weight_display],
        titles=['Training examples', 'Softmax weights'])

    extensions.append(PlotManager(
        'MNIST softmax examples',
        plotters=[cost_plotter, images_displayer],
        after_epoch=False,
        every_n_epochs=10,
        after_training=True))
    extensions.append(Printing())

    MainLoop(model=model,
             data_stream=train_data_stream,
             algorithm=algorithm,
             extensions=extensions).run()
# Example 3 (original separator in the scraped source: "Esempio n. 3", score 0)
def main(num_epochs=1000):
    """Train a softmax regressor on MNIST with momentum SGD and plot the
    monitored quantities, example images and weight images while training.

    Parameters
    ----------
    num_epochs : int, optional
        Number of epochs after which training stops.
    """
    # Symbolic batch of flattened images and their integer labels.
    x = tensor.matrix('features')
    y = tensor.lmatrix('targets')

    # The model and everything we derive from it.
    softmax_regressor = SoftmaxRegressor(input_dim=784, n_classes=10)
    probs = softmax_regressor.get_probs(features=x)
    params = softmax_regressor.get_params()
    weights = softmax_regressor.get_weights()

    # Mean cross-entropy to train on; mean error rate to monitor.
    cost = softmax_regressor.get_cost(probs=probs, targets=y).mean()
    cost.name = 'cost'
    misclassification = softmax_regressor.get_misclassification(
        probs=probs, targets=y).mean()
    misclassification.name = 'misclassification'

    train_dataset = MNIST('train')
    test_dataset = MNIST('test')

    algorithm = GradientDescent(cost=cost,
                                params=params,
                                step_rule=Momentum(learning_rate=0.1,
                                                   momentum=0.1))

    # Training is read in shuffled batches of 100, testing sequentially
    # in batches of 1000; both are cast to floatX.
    train_scheme = ShuffledScheme(examples=train_dataset.num_examples,
                                  batch_size=100)
    train_data_stream = ForceFloatX(
        data_stream=DataStream(dataset=train_dataset,
                               iteration_scheme=train_scheme))
    test_scheme = SequentialScheme(examples=test_dataset.num_examples,
                                   batch_size=1000)
    test_data_stream = ForceFloatX(
        data_stream=DataStream(dataset=test_dataset,
                               iteration_scheme=test_scheme))

    model = Model(cost)

    extensions = []
    extensions.append(Timing())
    extensions.append(FinishAfter(after_n_epochs=num_epochs))
    extensions.append(
        DataStreamMonitoring([cost, misclassification],
                             test_data_stream,
                             prefix='test'))
    extensions.append(
        TrainingDataMonitoring([cost, misclassification],
                               prefix='train',
                               after_epoch=True))

    # One plotter for the scalar channels, one for the image displays.
    plotters = []
    plotters.append(
        Plotter(channels=[[
            'test_cost', 'test_misclassification', 'train_cost',
            'train_misclassification'
        ]],
                titles=['Costs']))
    # A deep copy keeps the display iterator independent of training.
    display_train = ImageDataStreamDisplay(
        data_stream=copy.deepcopy(train_data_stream),
        image_shape=(28, 28, 1),
        axes=(0, 1, 'c'),
        shift=-0.5,
        rescale=2.,
    )
    weight_display = WeightDisplay(weights=weights,
                                   transpose=(1, 0),
                                   image_shape=(28, 28, 1),
                                   axes=(0, 1, 'c'),
                                   shift=-0.5,
                                   rescale=2.,
                                   grid_shape=(1, 10))
    images_displayer = DisplayImage(
        image_getters=[display_train, weight_display],
        titles=['Training examples', 'Softmax weights'])
    plotters.append(images_displayer)

    extensions.append(
        PlotManager('MNIST softmax examples',
                    plotters=plotters,
                    after_epoch=False,
                    every_n_epochs=10,
                    after_training=True))
    extensions.append(Printing())

    main_loop = MainLoop(model=model,
                         data_stream=train_data_stream,
                         algorithm=algorithm,
                         extensions=extensions)
    main_loop.run()
# Example 4 (original separator in the scraped source: "Esempio n. 4", score 0)
def main(num_epochs=1000):
    """Train a softmax regressor on MNIST with momentum SGD.

    Builds the Theano cost graph, wires up Fuel data streams and Blocks
    monitoring extensions, then runs the Blocks main loop.

    Parameters
    ----------
    num_epochs : int, optional
        Number of epochs after which training stops.
    """
    # MODEL
    # defining the data: flattened images and integer class labels
    x = tensor.matrix('features')
    y = tensor.lmatrix('targets')

    # defining the model (784 = 28x28 pixels, 10 digit classes)
    softmax_regressor = SoftmaxRegressor(input_dim=784, n_classes=10)

    # defining the cost to learn on
    probs = softmax_regressor.get_probs(features=x)
    cost = softmax_regressor.get_cost(probs=probs, targets=y).mean()
    cost.name = 'cost'

    # defining the cost to monitor
    misclassification = softmax_regressor.get_misclassification(
        probs=probs, targets=y).mean()
    misclassification.name = 'misclassification'

    # DATASETS
    # defining the datasets
    train_dataset = MNIST('train')
    test_dataset = MNIST('test')

    # TRAINING ALGORITHM
    # defining the algorithm
    params = softmax_regressor.get_params()
    algorithm = GradientDescent(cost=cost,
                                params=params,
                                step_rule=Momentum(learning_rate=0.1,
                                                   momentum=0.1))

    # defining the data stream
    # how the dataset is read
    train_data_stream = ForceFloatX(
        data_stream=DataStream(dataset=train_dataset,
                               iteration_scheme=ShuffledScheme(
                                   examples=train_dataset.num_examples,
                                   batch_size=100,
                               )))
    test_data_stream = ForceFloatX(
        data_stream=DataStream(dataset=test_dataset,
                               iteration_scheme=SequentialScheme(
                                   examples=test_dataset.num_examples,
                                   batch_size=1000,
                               )))

    # MONITORING
    # defining the extensions
    extensions = []
    # timing the training and each epoch
    extensions.append(Timing())
    # ending the training after a certain number of epochs
    extensions.append(FinishAfter(after_n_epochs=num_epochs))
    # monitoring the test set
    extensions.append(
        DataStreamMonitoring([cost, misclassification],
                             test_data_stream,
                             prefix='test'))
    # monitoring the training set while training
    # NOTE: `after_epoch` replaces the deprecated `after_every_epoch`
    # spelling used elsewhere; this file's other examples use `after_epoch`.
    extensions.append(
        TrainingDataMonitoring([cost, misclassification],
                               prefix='train',
                               after_epoch=True))
    # printing quantities
    extensions.append(Printing())

    # MERGING IT TOGETHER
    # defining the model
    model = Model(cost)

    # defining the training main loop
    main_loop = MainLoop(model=model,
                         data_stream=train_data_stream,
                         algorithm=algorithm,
                         extensions=extensions)

    main_loop.run()