Example #1
def main(args):
    train, valid, _ = load_cifar()

    whiten, color = pca(train)

    feat = args.features or int(np.sqrt(4 * K))
    e = theanets.Experiment(
        theanets.Autoencoder,
        layers=(K, feat**2, K),
    )

    e.train(whiten(train), whiten(valid), input_noise=1)

    plot_layers([
        color(e.network.find(1, 0).get_value().T).T,
        color(e.network.find('out', 0).get_value())], channels=3)
    plt.tight_layout()
    plt.show()

    valid = whiten(valid[:100])
    plot_images(color(valid), 121, 'Sample data', channels=3)
    plot_images(color(e.network.predict(valid)),
                122,
                'Reconstructed data',
                channels=3)
    plt.tight_layout()
    plt.show()
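The `pca` helper used here is not shown in these snippets; Examples #16 and #21 further down define `whiten` and `color` in terms of precomputed `vals` and `vecs`. The following is a minimal sketch of what such a factory could look like, assuming it is built from an eigendecomposition of the training covariance (the project's actual `pca` helper may differ in detail):

import numpy as np

def pca(data, eps=1e-8):
    # Hypothetical stand-in for the `pca` helper imported by these examples.
    # It returns whiten/color closures shaped like the ones defined in the
    # later examples (np.dot against `vecs` and `vals`).
    data = np.asarray(data, dtype=float)
    centered = data - data.mean(axis=0)
    cov = np.cov(centered, rowvar=False)
    vals, vecs = np.linalg.eigh(cov)
    vals = np.sqrt(vals + eps)  # per-component standard deviations

    def whiten(x):
        # project onto the principal components and rescale to unit variance
        return np.dot(x, np.dot(vecs, np.diag(1. / vals)))

    def color(z):
        # map whitened codes back into the original pixel space
        return np.dot(z, np.dot(np.diag(vals), vecs.T))

    return whiten, color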
Example #2
def main(args):
    train, valid, _ = load_cifar()

    whiten, color = pca(train)

    feat = args.features or int(np.sqrt(4 * K))
    e = theanets.Experiment(
        theanets.Autoencoder,
        layers=(K, feat ** 2, K),
    )

    e.train(whiten(train), whiten(valid), input_noise=1)

    plot_layers([
        color(e.network.find(1, 0).get_value().T).T,
        color(e.network.find('out', 0).get_value())], channels=3)
    plt.tight_layout()
    plt.show()

    valid = whiten(valid[:100])
    plot_images(color(valid), 121, 'Sample data', channels=3)
    plot_images(color(e.network.predict(valid)), 122,
                'Reconstructed data', channels=3)
    plt.tight_layout()
    plt.show()
Example #3
def main(args):
    train, valid, _ = load_mnist()

    e = theanets.Experiment(theanets.Autoencoder,
                            layers=(784, args.features**2, 784))

    e.train(train, valid)

    plot_layers([e.network.find(1, 0), e.network.find(2, 0)])
    plt.tight_layout()
    plt.show()

    v = valid[:100]
    plot_images(v, 121, 'Sample data')
    plot_images(e.network.predict(v), 122, 'Reconstructed data')
    plt.tight_layout()
    plt.show()
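These `main(args)` entry points all expect an `args` object with an integer `features` attribute (the hidden layer has `features ** 2` units). The command-line plumbing is not included in the snippets; a minimal, hypothetical argparse driver for the MNIST example above could look like this:

import argparse

if __name__ == '__main__':
    # Hypothetical CLI wrapper; the original scripts ship their own argument parsing.
    parser = argparse.ArgumentParser()
    parser.add_argument('--features', type=int, default=8,
                        help='hidden layer will contain features**2 units')
    main(parser.parse_args())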
Example #4
def main(args):
    train, valid, _ = load_cifar()

    e = theanets.Experiment(
        theanets.Autoencoder,
        layers=(3072, args.features ** 2, 3072))

    e.train(train, valid)

    plot_layers(e.network.weights, channels=3)
    plt.tight_layout()
    plt.show()

    valid = valid[:100]
    plot_images(valid, 121, 'Sample data', channels=3)
    plot_images(e.network.predict(valid), 122, 'Reconstructed data', channels=3)
    plt.tight_layout()
    plt.show()
Example #5
def main(args):
    train, valid, _ = load_mnist()

    e = theanets.Experiment(
        theanets.Autoencoder,
        layers=(784, args.features ** 2, 784))

    e.train(train, valid, min_improvement=0.1)

    plot_layers([e.network.find('hid1', 'w'), e.network.find('out', 'w')])
    plt.tight_layout()
    plt.show()

    v = valid[:100]
    plot_images(v, 121, 'Sample data')
    plot_images(e.network.predict(v), 122, 'Reconstructed data')
    plt.tight_layout()
    plt.show()
Example #6
def main(args):
    train, valid, _ = load_mnist()

    e = theanets.Experiment(
        theanets.Autoencoder,
        layers=(784, args.features ** 2, 784))

    e.train(train, valid)

    plot_layers(e.network.weights)
    plt.tight_layout()
    plt.show()

    v = valid[:100]
    plot_images(v, 121, 'Sample data')
    plot_images(e.network.predict(v), 122, 'Reconstructed data')
    plt.tight_layout()
    plt.show()
Example #7
def main(args):
    # load up the MNIST digit dataset.
    train, valid, _ = load_mnist()

    net = theanets.Autoencoder([784, args.features ** 2, 784], rng=42)
    net.train(
        train,
        valid,
        input_noise=0.1,
        weight_l2=0.0001,
        algo="rmsprop",
        momentum=0.9,
        max_updates=1,
        min_improvement=0.1,
    )

    plot_layers([net.find("hid1", "w"), net.find("out", "w")])
    plt.tight_layout()
    plt.show()
Example #8
def main(args):
    train, valid, _ = load_cifar()

    e = theanets.Experiment(theanets.Autoencoder,
                            layers=(3072, args.features**2, 3072))

    e.train(train, valid)

    plot_layers(e.network.weights, channels=3)
    plt.tight_layout()
    plt.show()

    valid = valid[:100]
    plot_images(valid, 121, 'Sample data', channels=3)
    plot_images(e.network.predict(valid),
                122,
                'Reconstructed data',
                channels=3)
    plt.tight_layout()
    plt.show()
Example #9
def main(features):
    train, valid, _ = load_cifar()

    whiten, color = pca(train[0])

    feat = features or int(np.sqrt(2 * K))
    n = theanets.Autoencoder([K, feat ** 2, K])
    n.train(whiten(train), whiten(valid), input_noise=1, train_batches=313)

    plot_layers([
        color(n.find('hid1', 'w').get_value().T).T,
        color(n.find('out', 'w').get_value())], channels=3)
    plt.tight_layout()
    plt.show()

    valid = whiten(valid[:100])
    plot_images(color(valid), 121, 'Sample data', channels=3)
    plot_images(color(n.predict(valid)), 122,
                'Reconstructed data', channels=3)
    plt.tight_layout()
    plt.show()
Example #10
def main(args):
    # load up the MNIST digit dataset.
    train, valid, _ = load_mnist()

    net = theanets.Autoencoder([784, args.features ** 2, 784])
    net.train(train, valid,
              input_noise=0.1,
              weight_l2=0.0001,
              algo='rmsprop',
              momentum=0.9,
              min_improvement=0.1)

    plot_layers([net.find('hid1', 'w'), net.find('out', 'w')])
    plt.tight_layout()
    plt.show()

    v = valid[:100]
    plot_images(v, 121, 'Sample data')
    plot_images(net.predict(v), 122, 'Reconstructed data')
    plt.tight_layout()
    plt.show()
Example #11
def main(features):
    train, valid, _ = load_cifar()

    whiten, color = pca(train[0])

    feat = features or int(np.sqrt(2 * K))
    n = theanets.Autoencoder([K, feat**2, K])
    n.train(whiten(train), whiten(valid), input_noise=1, train_batches=313)

    plot_layers([
        color(n.find('hid1', 'w').get_value().T).T,
        color(n.find('out', 'w').get_value())], channels=3)
    plt.tight_layout()
    plt.show()

    valid = whiten(valid[:100])
    plot_images(color(valid), 121, 'Sample data', channels=3)
    plot_images(color(n.predict(valid)), 122, 'Reconstructed data', channels=3)
    plt.tight_layout()
    plt.show()
Example #12
def main(args):
    # load up the MNIST digit dataset.
    train, valid, _ = load_mnist()

    net = theanets.Autoencoder([784, args.features**2, 784])
    net.train(train,
              valid,
              input_noise=0.1,
              weight_l2=0.0001,
              algo='rmsprop',
              momentum=0.9,
              min_improvement=0.1)

    plot_layers([net.find('hid1', 'w'), net.find('out', 'w')])
    plt.tight_layout()
    plt.show()

    v = valid[:100]
    plot_images(v, 121, 'Sample data')
    plot_images(net.predict(v), 122, 'Reconstructed data')
    plt.tight_layout()
    plt.show()
Example #13
# now train our model on the whitened dataset.

N = 16

e = theanets.Experiment(
    RICA,
    layers=(K, N * N, K),
    activation='linear',
    hidden_l1=0.2,
    no_learn_biases=True,
    tied_weights=True,
    train_batches=100,
    weight_inverse=0.01,
)
e.run(whiten(train), whiten(valid))

# color the network weights so they are viewable as digits.
plot_layers(
    [color(e.network.weights[0].get_value().T).T],
    tied_weights=True)
plt.tight_layout()
plt.show()

plot_images(valid[:N*N], 121, 'Sample data')
plot_images(
    color(e.network.predict(whiten(valid[:N*N]))),
    122, 'Reconstructed data')
plt.tight_layout()
plt.show()
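The `RICA` class trained above is defined elsewhere in the original example file; these snippets only show how it is configured and trained. Conceptually, reconstruction ICA combines a linear reconstruction error under tied weights with an L1 sparsity penalty on the hidden code, which is what the `hidden_l1` and `tied_weights` settings control. A small numpy sketch of that objective, as an illustration rather than the theanets implementation:

import numpy as np

def rica_loss(W, x, l1_weight=0.2):
    # W: (n_inputs, n_features) feature matrix; the weights are tied, so the
    #    decoder is simply W.T.
    # x: (n_samples, n_inputs) batch of whitened inputs.
    z = np.dot(x, W)                    # hidden code
    recon = np.dot(z, W.T)              # linear reconstruction with tied weights
    recon_err = np.mean(np.sum((recon - x) ** 2, axis=1))
    sparsity = np.mean(np.sum(np.abs(z), axis=1))
    return recon_err + l1_weight * sparsity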
Example #14
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers


train, valid, _ = load_mnist(labels=True)

N = 10

net = theanets.Classifier([784, N * N, ('softmax', 10)])

net.train(train, valid, min_improvement=0.001)


plot_layers([net.find('hid1', 'w'), net.find('out', 'w')])
plt.tight_layout()
plt.show()
Example #15
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images


e = theanets.Experiment(
    theanets.Classifier,
    layers=(784, 1024, 256, 64, ('softmax', 10)),
)

# first, run an unsupervised layerwise pretrainer.
train, valid, _ = load_mnist()
e.train(train, valid,
        algorithm='pretrain',
        patience=1,
        min_improvement=0.1,
        train_batches=100)

# second, run a supervised trainer on the classifier model.
train, valid, _ = load_mnist(labels=True)
e.train(train, valid, min_improvement=0.01, train_batches=100)

plot_layers([e.network.find(i, 'w') for i in (1, 2, 3)])
plt.tight_layout()
plt.show()
Example #16
def whiten(x):
    return np.dot(x, np.dot(vecs, np.diag(1. / vals)))


def color(z):
    return np.dot(z, np.dot(np.diag(vals), vecs.T))

# now train our model on the whitened dataset.

N = 20

net = RICA([K, (N * N, 'linear'), (K, 'tied')])

net.train(whiten(train),
          whiten(valid),
          hidden_l1=0.5,
          weight_inverse=1e-6,
          train_batches=300,
          monitors={'hid1:out': (-0.9, -0.1, 0.1, 0.9)})

# color the network weights so they are viewable as digits.
plot_layers([color(net.find('hid1', 'w').get_value().T).T], tied_weights=True)
plt.tight_layout()
plt.show()

plot_images(valid[:N*N], 121, 'Sample data')
plot_images(color(net.predict(whiten(valid[:N*N]))), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Example #17
N = 16

e = theanets.Experiment(
    RICA,
    layers=(K, N * N, K),
    activation='linear',
    tied_weights=True,
    train_batches=100,
)
e.train(
    whiten(train),
    whiten(valid),
    hidden_l1=0.2,
    weight_inverse=0.01,
)

# color the network weights so they are viewable as digits.
plot_layers(
    [color(e.network.find('hid1', 0).get_value().T).T],
    tied_weights=True)
plt.tight_layout()
plt.show()

plot_images(valid[:N*N], 121, 'Sample data')
plot_images(
    color(e.network.predict(whiten(valid[:N*N]))),
    122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Example #18
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers

train, valid, _ = load_mnist(labels=True)

N = 10

net = theanets.Classifier([784, N * N, ('softmax', 10)])
net.train(train, valid, min_improvement=0.001)

plot_layers([net.find('hid1', 'w'), net.find('out', 'w')])
plt.tight_layout()
plt.show()
Example #19
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers


train, valid, _ = load_mnist(labels=True)

N = 10

e = theanets.Experiment(
    theanets.Classifier,
    layers=(784, N * N, 10),
    train_batches=100,
)
e.train(train, valid)

plot_layers([e.network.find(1, 0), e.network.find(2, 0)])
plt.tight_layout()
plt.show()
Example #20
#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers


train, valid, _ = load_mnist(labels=True)

N = 16

e = theanets.Experiment(
    theanets.Classifier,
    layers=(784, N * N, 10),
    train_batches=100,
)
e.run(train, valid)

plot_layers(e.network.weights)
plt.tight_layout()
plt.show()
Example #21
def whiten(x):
    return np.dot(x, np.dot(vecs, np.diag(1. / vals)))


def color(z):
    return np.dot(z, np.dot(np.diag(vals), vecs.T))

# now train our model on the whitened dataset.

N = 20

net = RICA([K, (N * N, 'linear'), (K, 'tied')])

net.train(whiten(train),
          whiten(valid),
          hidden_l1=0.001,
          weight_inverse=1e-6,
          train_batches=300,
          monitors={'hid1:out': (-0.9, -0.1, 0.1, 0.9)})

# color the network weights so they are viewable as digits.
plot_layers([color(net.find('hid1', 'w').get_value().T).T], tied_weights=True)
plt.tight_layout()
plt.show()

plot_images(valid[:N*N], 121, 'Sample data')
plot_images(color(net.predict(whiten(valid[:N*N]))), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Example #22
# now train our model on the whitened dataset.

N = 16

e = theanets.Experiment(
    RICA,
    layers=(K, N * N, K),
    activation='linear',
    tied_weights=True,
    train_batches=100,
)
e.train(
    whiten(train),
    whiten(valid),
    hidden_l1=0.2,
    weight_inverse=0.01,
)

# color the network weights so they are viewable as digits.
plot_layers([color(e.network.find('hid1', 0).get_value().T).T],
            tied_weights=True)
plt.tight_layout()
plt.show()

plot_images(valid[:N * N], 121, 'Sample data')
plot_images(color(e.network.predict(whiten(valid[:N * N]))), 122,
            'Reconstructed data')
plt.tight_layout()
plt.show()
Example #23
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images


train, valid, _ = load_mnist()

e = theanets.Experiment(
    theanets.Autoencoder,
    layers=(784, 256, 64, 16, 64, 256, 784),
    train_batches=100,
    tied_weights=True,
)
e.run(train, valid)

plot_layers(e.network.weights, tied_weights=True)
plt.tight_layout()
plt.show()

valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Example #24
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images

train, valid, _ = load_mnist()

e = theanets.Experiment(
    theanets.Autoencoder,
    layers=(784, 256, 64, 36, 64, 256, 784),
    train_batches=100,
    tied_weights=True,
)
e.train(train, valid)

plot_layers(e.network.weights, tied_weights=True)
plt.tight_layout()
plt.show()

valid = valid[:16 * 16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Example #25
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images


net = theanets.Classifier(
    layers=(784, 1024, 256, 64, ('softmax', 10)),
)

# first, run an unsupervised layerwise pretrainer.
train, valid, _ = load_mnist()
net.train(train, valid,
          algo='pretrain',
          patience=1,
          min_improvement=0.1,
          train_batches=100)

# second, run a supervised trainer on the classifier model.
train, valid, _ = load_mnist(labels=True)
net.train(train, valid, min_improvement=0.01, train_batches=100)

plot_layers([net.find(i, 'w') for i in (1, 2, 3)])
plt.tight_layout()
plt.show()
Example #26
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images


train, valid, _ = load_mnist()

e = theanets.Experiment(
    theanets.Autoencoder,
    layers=(784, 256, 100, 64, ('tied', 100), ('tied', 256), ('tied', 784)),
)
e.train(train, valid,
        algorithm='layerwise',
        patience=1,
        min_improvement=0.05,
        train_batches=100)
e.train(train, valid, min_improvement=0.01, train_batches=100)

plot_layers([e.network.find(i, 'w') for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()

valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Example #27
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers, plot_images

train, valid, _ = load_mnist()

N = 8

e = theanets.Experiment(
    theanets.Autoencoder,
    layers=(784, N * N, 784),
    train_batches=100,
)
e.run(train, valid)

plot_layers(e.network.weights)
plt.tight_layout()
plt.show()

valid = valid[:N * N]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Example #28
def color(z):
    return np.dot(z, np.dot(np.diag(vals), vecs.T))


# now train our model on the whitened dataset.

N = 16

e = theanets.Experiment(
    RICA,
    layers=(K, N * N, K),
    activation='linear',
    hidden_l1=0.2,
    no_learn_biases=True,
    tied_weights=True,
    train_batches=100,
    weight_inverse=0.01,
)
e.run(whiten(train), whiten(valid))

# color the network weights so they are viewable as digits.
plot_layers([color(e.network.weights[0].get_value().T).T], tied_weights=True)
plt.tight_layout()
plt.show()

plot_images(valid[:N * N], 121, 'Sample data')
plot_images(color(e.network.predict(whiten(valid[:N * N]))), 122,
            'Reconstructed data')
plt.tight_layout()
plt.show()
Example #29
#!/usr/bin/env python

import matplotlib.pyplot as plt
import theanets

from utils import load_mnist, plot_layers


train, valid, _ = load_mnist(labels=True)

N = 10

e = theanets.Experiment(
    theanets.Classifier,
    layers=(784, N * N, ('softmax', 10)),
)
e.train(train, valid, min_improvement=0.001)

plot_layers([e.network.find('hid1', 'w'), e.network.find('out', 'w')])
plt.tight_layout()
plt.show()
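The plotting helpers imported from `utils` are likewise not reproduced here, but the call sites pin down their interface: `plot_images(data, subplot, title, channels=1)` tiles a batch of flattened images into one matplotlib subplot, and `plot_layers` does the same for a list of weight matrices. A rough sketch of `plot_images` under those assumptions (the real `utils.py` may lay the grid out differently):

import numpy as np
import matplotlib.pyplot as plt

def plot_images(images, subplot, title, channels=1):
    # Tile flattened images into a square grid inside the given subplot.
    images = np.asarray(images)
    count = images.shape[0]
    side = int(np.sqrt(images.shape[1] // channels))  # 28 for MNIST, 32 for CIFAR
    cols = int(np.ceil(np.sqrt(count)))
    rows = int(np.ceil(count / float(cols)))
    grid = np.zeros((rows * side, cols * side, channels))
    for i, img in enumerate(images):
        r, c = divmod(i, cols)
        # assumes row-major (height, width, channels) flattening of each image
        grid[r * side:(r + 1) * side, c * side:(c + 1) * side] = \
            img.reshape(side, side, channels)
    ax = plt.subplot(subplot)
    ax.imshow(grid.squeeze(), cmap='gray' if channels == 1 else None)
    ax.set_title(title)
    ax.set_axis_off()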