Example #1
#!/usr/bin/env python

import climate
import matplotlib.pyplot as plt
import numpy as np
import theanets

from utils import load_cifar, plot_layers, plot_images

logging = climate.get_logger('cifar')

g = climate.add_group('CIFAR Example')
g.add_argument('--features',
               type=int,
               default=0,
               metavar='N',
               help='train a model using N^2 hidden-layer features')

K = 655  # retains 99% of the variance in the CIFAR images.


def pca(dataset):
    mean = dataset[:3000].mean(axis=0)

    logging.info('computing whitening transform')
    x = dataset[:3000] - mean
    vals, vecs = np.linalg.eigh(np.dot(x.T, x) / len(x))
    vals = vals[::-1]
    vecs = vecs[:, ::-1]

    vals = np.sqrt(vals[:K])
    vecs = vecs[:, :K]

    # The listing truncates here; the usual PCA whiten/color closures
    # complete the function (Example #7 below carries the same code further):
    def whiten(x):
        return np.dot(x - mean, vecs) / vals

    def color(z):
        return np.dot(z * vals, vecs.T) + mean

    return whiten, color
Example #2
#!/usr/bin/env python

import climate
import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images

g = climate.add_group('MNIST Example')
g.add_argument('--features',
               type=int,
               default=8,
               metavar='N',
               help='train a model using N^2 hidden-layer features')


def main(args):
    train, valid, _ = load_mnist()

    e = theanets.Experiment(theanets.Autoencoder,
                            layers=(784, args.features**2, 784))

    e.train(train, valid)

    plot_layers([e.network.find(1, 0), e.network.find(2, 0)])
    plt.tight_layout()
    plt.show()

    v = valid[:100]
    plot_images(v, 121, 'Sample data')
    plot_images(e.network.predict(v), 122, 'Reconstructed data')
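    # As in the variant of this example below, finish by rendering the plots:
    plt.tight_layout()
    plt.show()


if __name__ == '__main__':
    # Not in the truncated listing: the standard climate entry point, which
    # parses the flags declared above and calls main() with the result.
    climate.call(main)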
Example #3
#!/usr/bin/env python

import climate
import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images

g = climate.add_group('MNIST Example')
g.add_argument('--features', type=int, default=8, metavar='N',
               help='train a model using N^2 hidden-layer features')


def main(args):
    train, valid, _ = load_mnist()

    e = theanets.Experiment(
        theanets.Autoencoder,
        layers=(784, args.features ** 2, 784))

    e.train(train, valid, min_improvement=0.1)

    plot_layers([e.network.find('hid1', 'w'), e.network.find('out', 'w')])
    plt.tight_layout()
    plt.show()

    v = valid[:100]
    plot_images(v, 121, 'Sample data')
    plot_images(e.network.predict(v), 122, 'Reconstructed data')
    plt.tight_layout()
    plt.show()
Example #4
import climate
import numpy as np
import pandas as pd

from .database import Experiment

logging = climate.get_logger(__name__)

g = climate.add_group('dropout-filling options')
g.add_argument('--root',
               metavar='DIR',
               help='load data files from tree at DIR')
g.add_argument('--output',
               metavar='DIR',
               help='save smoothed data files to tree at DIR')
g.add_argument('--pattern',
               default='*',
               metavar='SHPAT',
               help='process only trials matching this pattern')
g.add_argument('--autoencoder-rank',
               type=float,
               metavar='K',
               help='reconstruction rank')
g.add_argument('--svt-threshold',
               type=float,
               metavar='S',
               help='truncate singular values at threshold S')
g.add_argument('--window',
               type=int,
               metavar='T',
               help='process windows of T frames')
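
# Sketch of how these flags are consumed (hypothetical; the real consumer
# lives elsewhere in this package, so the main() below is illustrative only):
#
#     def main(args):
#         logging.info('processing trials matching %r under %s',
#                      args.pattern, args.root)
#
#     if __name__ == '__main__':
#         climate.call(main)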
Example #5
'''This module contains command line flags.'''

import climate

climate.add_arg('--help-activation', action='store_true',
                help='show available activation functions')
climate.add_arg('--help-optimize', action='store_true',
                help='show available optimization algorithms')

g = climate.add_group('Architecture')
g.add_argument('-n', '--layers', nargs='+', type=int, metavar='N',
               help='construct a network with layers of size N1, N2, ...')
g.add_argument('-g', '--hidden-activation', default='logistic', metavar='FUNC',
               help='function for hidden unit activations')
g.add_argument('--output-activation', default='linear', metavar='FUNC',
               help='function for output unit activations')
g.add_argument('-t', '--tied-weights', action='store_true',
               help='tie encoding and decoding weights')
g.add_argument('--decode-from', type=int, default=1, metavar='N',
               help='decode from the final N layers of the net')

g = climate.add_group('Training')
g.add_argument('-O', '--optimize', default=(), nargs='+', metavar='ALGO',
               help='train with the given optimization algorithm(s)')
g.add_argument('-p', '--patience', type=int, default=4, metavar='N',
               help='stop training if less than --min-improvement for N validations')
g.add_argument('-v', '--validate-every', type=int, default=10, metavar='N',
               help='validate the model every N updates')
g.add_argument('-b', '--batch-size', type=int, default=64, metavar='N',
               help='train with mini-batches of size N')
# the listing truncates inside the next call; a plausible help string closes it:
g.add_argument('-B', '--train-batches', type=int, metavar='N',
               help='train using at most N batches per pass')
Example #6
"""Single-layer autoencoder example using MNIST digit data.

This example shows one way to train a single-layer autoencoder model using the
handwritten MNIST digits.

This example also shows the use of climate command-line arguments.
"""

import climate
import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images

g = climate.add_group("MNIST Example")
g.add_argument("--features", type=int, default=8, metavar="N", help="train a model using N^2 hidden-layer features")


def main(args):
    # load up the MNIST digit dataset.
    train, valid, _ = load_mnist()

    net = theanets.Autoencoder([784, args.features ** 2, 784], rng=42)
    net.train(
        train,
        valid,
        input_noise=0.1,
        weight_l2=0.0001,
        algo="rmsprop",
        momentum=0.9,
    )  # the listing truncates inside this call; closed here so it runs
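

if __name__ == '__main__':
    # Not in the truncated listing: the standard climate entry point, so the
    # --features flag declared above actually reaches main().
    climate.call(main)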
Example #7
#!/usr/bin/env python

import climate
import matplotlib.pyplot as plt
import numpy as np
import theanets

from utils import load_cifar, plot_layers, plot_images

logging = climate.get_logger('cifar')

g = climate.add_group('CIFAR Example')
g.add_argument('--features', type=int, default=0, metavar='N',
               help='train a model using N^2 hidden-layer features')


K = 655  # retains 99% of the variance in the CIFAR images.


def pca(dataset):
    mean = dataset[:3000].mean(axis=0)

    logging.info('computing whitening transform')
    x = dataset[:3000] - mean
    vals, vecs = np.linalg.eigh(np.dot(x.T, x) / len(x))
    vals = vals[::-1]
    vecs = vecs[:, ::-1]

    vals = np.sqrt(vals[:K])
    vecs = vecs[:, :K]

    def whiten(x):
        return np.dot(x - mean, vecs) / vals

    # The listing truncates inside whiten(); the matching inverse transform
    # and return value complete the function:
    def color(z):
        return np.dot(z * vals, vecs.T) + mean

    return whiten, color
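

# Hypothetical driver (not in the original, which is truncated): assumes
# load_cifar() returns (train, valid, test) arrays like load_mnist() above.
if __name__ == '__main__':
    train, valid, _ = load_cifar()
    whiten, color = pca(train)
    v = valid[:100]
    recon = color(whiten(v))  # round-trip through the top-K principal subspace
    plot_images(v, 121, 'Sample data')
    plot_images(recon, 122, 'PCA reconstruction')
    plt.tight_layout()
    plt.show()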
Example #8
'''This module contains command line flags.'''

import climate

climate.add_arg('--help-activation',
                action='store_true',
                help='show available activation functions')
climate.add_arg('--help-optimize',
                action='store_true',
                help='show available optimization algorithms')

g = climate.add_group('Architecture')
g.add_argument('-n',
               '--layers',
               nargs='+',
               type=int,
               metavar='N',
               help='construct a network with layers of size N1, N2, ...')
g.add_argument('-g',
               '--hidden-activation',
               default='logistic',
               metavar='FUNC',
               help='function for hidden unit activations')
g.add_argument('--output-activation',
               default='linear',
               metavar='FUNC',
               help='function for output unit activations')
g.add_argument('-t',
               '--tied-weights',
               action='store_true',
               help='tie encoding and decoding weights')
Example #9
import climate
import numpy as np
import pandas as pd

from .database import Experiment

logging = climate.get_logger(__name__)

g = climate.add_group('dropout-filling options')
g.add_argument('--root', metavar='DIR',
               help='load data files from tree at DIR')
g.add_argument('--output', metavar='DIR',
               help='save smoothed data files to tree at DIR')
g.add_argument('--pattern', default='*', metavar='SHPAT',
               help='process only trials matching this pattern')
g.add_argument('--autoencoder-rank', type=float, metavar='K',
               help='reconstruction rank')
g.add_argument('--svt-threshold', type=float, metavar='S',
               help='truncate singular values at threshold S')
g.add_argument('--window', type=int, metavar='T',
               help='process windows of T frames')

CENTERS = [
    'marker34-l-ilium',
    'marker35-r-ilium',
    'marker36-r-hip',
    'marker43-l-hip',
]

PHASESPACE_TOLERANCE = 0.001  # error tolerance of phasespace system