Example #1
import logging

import opendeep.log.logger as logger
from opendeep.data.standard_datasets.image.mnist import MNIST
from opendeep.optimization.adadelta import AdaDelta
# NOTE: the DenoisingAutoencoder import path below is assumed; adjust it to your OpenDeep version
from opendeep.models.single_layer.autoencoder import DenoisingAutoencoder

log = logging.getLogger(__name__)


def main():
    ########################################
    # Initialization things with arguments #
    ########################################
    logger.config_root_logger()
    log.info("Creating a new DAE")

    mnist = MNIST()
    config = {"output_path": '../../../outputs/dae/mnist/'}
    dae = DenoisingAutoencoder(config=config, dataset=mnist)

    # # Load initial weights and biases from file
    # params_to_load = 'dae_params.pkl'
    # dae.load_params(params_to_load)

    optimizer = AdaDelta(dae, mnist)
    optimizer.train()

    # Save some reconstruction output images
    import opendeep.data.dataset as datasets
    n_examples = 100
    test_xs = mnist.getDataByIndices(indices=range(n_examples), subset=datasets.TEST)
    dae.create_reconstruction_image(test_xs)
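The commented-out load_params block above restores weights and biases from a pickle file; the matching save step after training might look like the sketch below (the save_params method name is an assumption and may differ in your OpenDeep version):

    # Hypothetical counterpart to dae.load_params(...): persist the trained
    # parameters so a later run can restore them from 'dae_params.pkl'.
    # NOTE: save_params() is assumed to exist; check your OpenDeep version.
    dae.save_params('dae_params.pkl')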
Example #2
import logging

from opendeep.log.logger import config_root_logger
from opendeep.data.standard_datasets.image.mnist import MNIST
from opendeep.optimization.adadelta import AdaDelta
# NOTE: the DenoisingAutoencoder import path below is assumed; adjust it to your OpenDeep version
from opendeep.models.single_layer.autoencoder import DenoisingAutoencoder

log = logging.getLogger(__name__)


def run_dae():
    ########################################
    # Initialization things with arguments #
    ########################################
    config_root_logger()
    log.info("Creating a new DAE")

    mnist = MNIST()
    config = {"output_path": '../../../../outputs/dae/mnist/'}
    dae = DenoisingAutoencoder(config=config, dataset=mnist)

    # # Load initial weights and biases from file
    # params_to_load = 'dae_params.pkl'
    # dae.load_params(params_to_load)

    optimizer = AdaDelta(dae, mnist)
    optimizer.train()

    # Save some reconstruction output images
    import opendeep.data.dataset as datasets
    n_examples = 100
    test_xs = mnist.getDataByIndices(indices=range(n_examples), subset=datasets.TEST)
    dae.create_reconstruction_image(test_xs)
Example #3
    import logging
    import opendeep.log.logger as logger
    from opendeep.data.standard_datasets.image.mnist import MNIST
    from opendeep.data.dataset import TEST
    from opendeep.optimization.adadelta import AdaDelta
    # NOTE: the SoftmaxLayer import path below is assumed; adjust it to your OpenDeep version
    from opendeep.models.single_layer.basic import SoftmaxLayer

    logger.config_root_logger()
    log = logging.getLogger(__name__)
    log.info("Creating softmax!")

    # grab the MNIST dataset
    mnist = MNIST()
    # create the softmax classifier
    s = SoftmaxLayer(input_size=28 * 28, output_size=10, out_as_probs=False)
    # make an optimizer to train it (AdaDelta is a good default)
    optimizer = AdaDelta(model=s, dataset=mnist, n_epoch=20)
    # perform training!
    optimizer.train()
    # test it on some images!
    test_data = mnist.getDataByIndices(indices=range(25), subset=TEST)
    # use the predict function!
    preds = s.predict(test_data)
    print '-------'
    print preds
    print mnist.getLabelsByIndices(indices=range(25), subset=TEST)
    print
    print
    del mnist
    del s
    del optimizer


    log.info("Creating softmax with categorical cross-entropy!")
    # grab the MNIST dataset
    mnist = MNIST(one_hot=True)
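The snippet is truncated at this point. A minimal sketch of how the cross-entropy variant could continue, reusing only the calls already shown above (the out_as_probs=True value and the rest of the flow are assumptions, not the original continuation):

    # Sketch only -- assumes the one-hot / cross-entropy variant follows the same
    # build/train/predict pattern as the first softmax example above.
    s = SoftmaxLayer(input_size=28 * 28, output_size=10, out_as_probs=True)
    optimizer = AdaDelta(model=s, dataset=mnist, n_epoch=20)
    optimizer.train()
    preds = s.predict(mnist.getDataByIndices(indices=range(25), subset=TEST))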
Example #4
    from opendeep.data.standard_datasets.image.mnist import MNIST
    from opendeep.data.dataset import TEST
    from opendeep.optimization.adadelta import AdaDelta
    # NOTE: the next two import paths are assumed; adjust them to your OpenDeep version
    from opendeep.models.single_layer.autoencoder import DenoisingAutoencoder
    from opendeep.utils.noise import salt_and_pepper

    # grab the MNIST dataset
    mnist = MNIST()

    # create your shiny new DAE
    dae = DenoisingAutoencoder()

    # make an optimizer to train it (AdaDelta is a good default)
    optimizer = AdaDelta(model=dae, dataset=mnist)
    # perform training!
    optimizer.train()

    # test it on some images!
    test_data = mnist.getDataByIndices(indices=range(25), subset=TEST)
    corrupted_test = salt_and_pepper(test_data, 0.4)
    # use the predict function!
    reconstructed_images = dae.predict(test_data)

    # create an image from this reconstruction!
    # imports for working with tiling outputs into one image
    from opendeep.utils.image import tile_raster_images
    import numpy
    import PIL.Image
    # stack the image matrices together in three 5x5 grids next to each other using numpy
    stacked = numpy.vstack([
        numpy.vstack([
            test_data[i * 5:(i + 1) * 5],
            corrupted_test.eval()[i * 5:(i + 1) * 5],
            reconstructed_images[i * 5:(i + 1) * 5]