Example #1
def test_toy(finetune_lr=0.1,
             pretraining_epochs=10,
             pretrain_lr=0.01,
             k=1,
             training_epochs=100,
             dataset='../datasets/mnist.pkl.gz',
             batch_size=10):
   
    print 'Creating dataset...'
    train_set_x = toy_dataset(p=0.001, size=10000, seed=238904)
    valid_set_x = toy_dataset(p=0.001, size=10000, seed=238905)
    test_set_x = toy_dataset(p=0.001, size=10000, seed=238906)
    train_set_x = numpy.asarray(train_set_x, dtype=theano.config.floatX)
    valid_set_x = numpy.asarray(valid_set_x, dtype=theano.config.floatX)
    test_set_x = numpy.asarray(test_set_x, dtype=theano.config.floatX)
    numpy.random.shuffle(train_set_x)
    numpy.random.shuffle(valid_set_x)
    numpy.random.shuffle(test_set_x)
    train_set_x = theano.shared(train_set_x)
    valid_set_x = theano.shared(valid_set_x)   
    test_set_x = theano.shared(test_set_x)    
                
    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2 ** 30))
    
    print '... building the model'
    dbn = DBN(numpy_rng=rng,
              theano_rng=theano_rng,
              n_ins=4 * 4,
              hidden_layers_sizes=[100, 50, 50],
              n_outs=10)

    print '... getting the pretraining functions'
    pretraining_fns = dbn.pretraining_functions(train_set_x=train_set_x,
                                                batch_size=batch_size,
                                                k=k)

    print '... pre-training the model'
    start_time = timeit.default_timer()
    for i in xrange(dbn.n_layers):
        for epoch in xrange(pretraining_epochs):
            c = []
            for batch_index in xrange(n_train_batches):
                c.append(pretraining_fns[i](index=batch_index,
                                            lr=pretrain_lr))
            print 'Pre-training layer %i, epoch %d, cost ' % (i, epoch),
            print numpy.mean(c)

    end_time = timeit.default_timer()
    print >> sys.stderr, ('The pretraining code for file ' +
                          os.path.split(__file__)[1] +
                          ' ran for %.2fm' % ((end_time - start_time) / 60.))
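
Examples #1 through #5 call a project-local toy_dataset(p, size, seed) helper whose source is not shown on this page; the models treat each sample as a flat 4 * 4 = 16-dimensional vector (n_ins=4 * 4, img_shape=(4, 4)). The stand-in below is a hypothetical sketch, not the original generator: it simply draws sparse binary 4x4 patterns with per-pixel probability p, which is enough to run those snippets end to end. (Example #6 imports a different, sequence-oriented toy_dataset and is unrelated.)

import numpy

def toy_dataset(p=0.001, size=10000, seed=0):
    """Hypothetical stand-in for the project's toy_dataset helper.

    Returns a (size, 16) array of flattened 4x4 binary patterns in which
    each pixel is switched on independently with probability p.
    """
    rng = numpy.random.RandomState(seed)
    return rng.binomial(n=1, p=p, size=(size, 4 * 4)).astype('float32')
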
Example #2
def test_toy(
    learning_rate=0.01,
    training_epochs=5,
    n_chains=20,
    n_samples=10,
    batch_size=20,
    output_folder="toy_rbm_CD_plots",
    n_hidden=30,
):

    print "Creating dataset..."
    train_set_x = toy_dataset(p=0.001, size=10000, seed=238904)
    test_set_x = toy_dataset(p=0.001, size=10000, seed=238905)
    train_set_x = numpy.asarray(train_set_x, dtype=theano.config.floatX)
    test_set_x = numpy.asarray(test_set_x, dtype=theano.config.floatX)
    numpy.random.shuffle(train_set_x)
    numpy.random.shuffle(test_set_x)
    train_set_x = theano.shared(train_set_x)
    test_set_x = theano.shared(test_set_x)

    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size

    index = T.lscalar()
    x = T.matrix("x")

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2 ** 30))

    print "Creating RBM..."
    rbm = RBM(input=x, n_visible=4 * 4, n_hidden=n_hidden, numpy_rng=rng, theano_rng=theano_rng)

    cost, updates = rbm.get_cost_updates(lr=learning_rate, k=15)

    if not os.path.isdir(output_folder):
        os.makedirs(output_folder)
    os.chdir(output_folder)

    train_rbm = theano.function(
        [index],
        cost,
        updates=updates,
        givens={x: train_set_x[index * batch_size : (index + 1) * batch_size]},
        name="train_rbm",
    )
    print "Starting training with %d epochs" % training_epochs
    plotting_time = 0.0
    start_time = timeit.default_timer()

    for epoch in xrange(training_epochs):
        mean_cost = []
        for batch_index in xrange(n_train_batches):
            mean_cost += [train_rbm(batch_index)]

        print "Training epoch %d, cost is " % epoch, numpy.mean(mean_cost)

        plotting_start = timeit.default_timer()
        image = Image.fromarray(
            tile_raster_images(
                X=rbm.W.get_value(borrow=True).T, img_shape=(4, 4), tile_shape=(10, 10), tile_spacing=(1, 1)
            )
        )
        image.save("filters_at_epoch_%i.png" % epoch)

        # Hidden-unit probabilities for the first training minibatch.
        foo = train_set_x.get_value(borrow=True)[0:batch_size]
        _, prob = rbm.propup(foo)
        image = Image.fromarray(
            tile_raster_images(
                X=(prob.eval()).reshape((1, batch_size * n_hidden)),
                img_shape=(batch_size, n_hidden),
                tile_shape=(1, 1),
                tile_spacing=(0, 0),
            )
        )
        image.save("hid_prob_at_epoch_%i.png" % epoch)

        plt.clf()
        plt.hist(rbm.W.get_value(borrow=True))
        plt.title("Histogram")
        plt.xlabel("Value")
        plt.ylabel("Frequency")
        plt.savefig("hist_at_epoch_%i.png" % epoch)

        plotting_stop = timeit.default_timer()
        plotting_time += plotting_stop - plotting_start

    end_time = timeit.default_timer()
    print "Training took %.2f minutes" % ((end_time - start_time) / 60.0)

    number_of_test_samples = test_set_x.get_value(borrow=True).shape[0]

    test_idx = rng.randint(number_of_test_samples - n_chains)
    persistent_vis_chain = theano.shared(
        numpy.asarray(test_set_x.get_value(borrow=True)[test_idx : test_idx + n_chains], dtype=theano.config.floatX)
    )

    plot_every = 1000
    ([presig_hids, hid_mfs, hid_samples, presig_vis, vis_mfs, vis_samples], updates) = theano.scan(
        rbm.gibbs_vhv, outputs_info=[None, None, None, None, None, persistent_vis_chain], n_steps=plot_every
    )

    updates.update({persistent_vis_chain: vis_samples[-1]})
    sample_fn = theano.function([], [vis_mfs[-1], vis_samples[-1]], updates=updates, name="sample_fn")

    image_data = numpy.zeros((5 * n_samples + 1, 5 * n_chains - 1), dtype="uint8")
    for idx in xrange(n_samples):

        vis_mf, vis_sample = sample_fn()
        print " ... plotting sample ", idx
        image_data[5 * idx : 5 * idx + 4, :] = tile_raster_images(
            X=vis_mf, img_shape=(4, 4), tile_shape=(1, n_chains), tile_spacing=(1, 1)
        )

    image = Image.fromarray(image_data)
    image.save("samples.png")
    os.chdir("../")
Example #3
def test_toy(learning_rate=0.1,
             training_epochs=4, 
             n_chains=200,
             n_samples=3,
             batch_size=10, 
             output_folder='toy_rbm_CD_plots',
             n_hidden=25):
    
    print 'Creating dataset...'
    train_set_x = toy_dataset(p=0.001, size=5000, seed=238904)
    test_set_x = toy_dataset(p=0.001, size=1000, seed=238905)
    train_set_x = numpy.asarray(train_set_x, dtype=theano.config.floatX)
    test_set_x = numpy.asarray(test_set_x, dtype=theano.config.floatX)
    numpy.random.shuffle(train_set_x)
    numpy.random.shuffle(test_set_x)
    train_set_x = theano.shared(train_set_x)
    test_set_x = theano.shared(test_set_x)
    
    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
    
    index = T.lscalar()
    x = T.matrix('x')
    
    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2 ** 30))
    
    print 'Creating RBM...'
    rbm = RBM(
            input=x, 
            n_visible=4 * 4, 
            n_hidden=n_hidden, 
            numpy_rng=rng, 
            theano_rng=theano_rng)    
    
    cost, updates = rbm.get_cost_updates(lr=learning_rate, k=15)

    if not os.path.isdir(output_folder):
        os.makedirs(output_folder)
    os.chdir(output_folder)    
    
    train_rbm = theano.function(
        [index],
        cost,
        updates=updates,
        givens={
            x: train_set_x[index * batch_size: (index + 1) * batch_size]
        },
        name='train_rbm'
    )
    print 'Starting training with %d epochs' % training_epochs
    plotting_time = 0.
    start_time = timeit.default_timer()
    
    for epoch in xrange(training_epochs):
        mean_cost = []
        for batch_index in xrange(n_train_batches):
            mean_cost += [train_rbm(batch_index)]
        
        print 'Training epoch %d, cost is ' % epoch, numpy.mean(mean_cost)

        plotting_start = timeit.default_timer()
        image = Image.fromarray(
                    tile_raster_images(
                        X=rbm.W.get_value(borrow=True).T,
                        img_shape=(4, 4),
                        tile_shape=(10, 10),
                        tile_spacing=(1, 1)
                    )
                )
        image.save('filters_at_epoch_%i.png' % epoch)
        plotting_stop = timeit.default_timer()
        plotting_time += (plotting_stop - plotting_start) 
                
    end_time = timeit.default_timer()
    print 'Training took %.2f minutes' % ((end_time - start_time)/ 60.)

    number_of_test_samples = test_set_x.get_value(borrow=True).shape[0]
    
    test_idx = rng.randint(number_of_test_samples - n_chains)
    persistent_vis_chain = theano.shared(
        numpy.asarray(
            test_set_x.get_value(borrow=True)[test_idx:test_idx + n_chains],
            dtype=theano.config.floatX
        )
    )
    
    plot_every = 1000
    (
        [
            presig_hids,
            hid_mfs,
            hid_samples,
            presig_vis,
            vis_mfs,
            vis_samples
        ],
        updates
    ) = theano.scan(
        rbm.gibbs_vhv,
        outputs_info=[None, None, None, None, None, persistent_vis_chain],
        n_steps=plot_every
    )

    updates.update({persistent_vis_chain: vis_samples[-1]})
    sample_fn = theano.function(
        [],
        [
            vis_mfs[-1],
            vis_samples[-1]
        ],
        updates=updates,
        name='sample_fn'
    )

    image_data = numpy.zeros(
        (5 * n_samples + 1, 5 * n_chains - 1),
        dtype='uint8'
    )
    for idx in xrange(n_samples):

        vis_mf, vis_sample = sample_fn()
        print ' ... plotting sample ', idx
        image_data[5 * idx:5 * idx + 4, :] = tile_raster_images(
            X=vis_mf,
            img_shape=(4, 4),
            tile_shape=(1, n_chains),
            tile_spacing=(1, 1)
        )

    image = Image.fromarray(image_data)
    image.save('samples.png')
 
    
    # ------------------------------------------------------------------
    # t-SNE comparison: embed the raw test samples and the hidden-unit
    # mean-field representation reached after a short Gibbs chain, side by side.
    persistent_vis_chain_tse = theano.shared(
        numpy.asarray(
            test_set_x.get_value(borrow=True)[test_idx:test_idx + n_chains],
            dtype=theano.config.floatX
        )
    )    
    plot_every = 10
    (
        [
            presig_hids,
            hid_mfs,
            hid_samples,
            presig_vis,
            vis_mfs,
            vis_samples
        ],
        updates
    ) = theano.scan(
        rbm.gibbs_vhv,
        outputs_info=[None, None, None, None, None, persistent_vis_chain_tse],
        n_steps=plot_every
    )

    updates.update({persistent_vis_chain_tse: vis_samples[-1]})


    sample_hid_fn = theano.function(
        [],
        [
            hid_mfs[-1],
            hid_samples[-1]
        ],
        updates=updates,
        name='sample_hid_fn'
    )
    
    hid_mf, hid_sample = sample_hid_fn()
    
    model_recon = manifold.TSNE(n_components=2, init='pca', random_state=0)
    X_tsne = model_recon.fit_transform(hid_mf)

    model_original = manifold.TSNE(n_components=2, init='pca', random_state=0)
    X_tsne_original = model_original.fit_transform(
        test_set_x.get_value(borrow=True)[test_idx:test_idx + n_chains])

    f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
    ax1.scatter(X_tsne_original[:, 0], X_tsne_original[:, 1])
    ax2.scatter(X_tsne[:, 0], X_tsne[:, 1])
    plt.savefig('foo.png')
    plt.show()
    os.chdir('../')   
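
The final block of Example #3 embeds the raw test samples and the RBM's hidden mean-field representation of those same samples with t-SNE and plots them side by side. The standalone sketch below reproduces just that comparison, with synthetic data standing in for test_set_x and hid_mf, so the plotting scaffold can be checked without training an RBM.

import numpy
import matplotlib.pyplot as plt
from sklearn import manifold

rng = numpy.random.RandomState(0)
# Stand-ins for the test subset and for hid_mf (the RBM's hidden mean-field values).
raw = rng.binomial(n=1, p=0.3, size=(200, 4 * 4)).astype('float64')
features = 1.0 / (1.0 + numpy.exp(-raw.dot(rng.randn(4 * 4, 25))))

emb_raw = manifold.TSNE(n_components=2, init='pca', random_state=0).fit_transform(raw)
emb_feat = manifold.TSNE(n_components=2, init='pca', random_state=0).fit_transform(features)

f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
ax1.scatter(emb_raw[:, 0], emb_raw[:, 1])
ax1.set_title('original samples')
ax2.scatter(emb_feat[:, 0], emb_feat[:, 1])
ax2.set_title('hidden representation')
plt.savefig('tsne_comparison.png')
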
Example #4
def test_toy(
    finetune_lr=0.1,
    pretraining_epochs=10,
    pretrain_lr=0.01,
    k=1,
    training_epochs=100,
    dataset="../datasets/mnist.pkl.gz",
    batch_size=10,
    output_folder="toy_dbn_plots",  # assumed default: used below but missing from the original signature
):

    print "Creating dataset..."
    train_set_x = toy_dataset(p=0.001, size=10000, seed=238904)
    valid_set_x = toy_dataset(p=0.001, size=10000, seed=238905)
    test_set_x = toy_dataset(p=0.001, size=10000, seed=238906)
    train_set_x = numpy.asarray(train_set_x, dtype=theano.config.floatX)
    valid_set_x = numpy.asarray(valid_set_x, dtype=theano.config.floatX)
    test_set_x = numpy.asarray(test_set_x, dtype=theano.config.floatX)
    numpy.random.shuffle(train_set_x)
    numpy.random.shuffle(valid_set_x)
    numpy.random.shuffle(test_set_x)
    train_set_x = theano.shared(train_set_x)
    valid_set_x = theano.shared(valid_set_x)
    test_set_x = theano.shared(test_set_x)

    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2 ** 30))

    print "... building the model"
    dbn = DBN(numpy_rng=rng, theano_rng=theano_rng, n_ins=4 * 4, hidden_layers_sizes=[100, 50, 50], n_outs=10)

    print "... getting the pretraining functions"
    pretraining_fns = dbn.pretraining_functions(train_set_x=train_set_x, batch_size=batch_size, k=k)

    print "... pre-training the model"
    start_time = timeit.default_timer()
    for i in xrange(dbn.n_layers):
        for epoch in xrange(pretraining_epochs):
            c = []
            for batch_index in xrange(n_train_batches):
                c.append(pretraining_fns[i](index=batch_index, lr=pretrain_lr))
            print "Pre-training layer %i, epoch %d, cost " % (i, epoch),
            print numpy.mean(c)

    end_time = timeit.default_timer()
    print >> sys.stderr, (
        "The pretraining code for file "
        + os.path.split(__file__)[1]
        + " ran for %.2fm" % ((end_time - start_time) / 60.0)
    )

    if not os.path.isdir(output_folder):
        os.makedirs(output_folder)
    os.chdir(output_folder)

    for i in xrange(dbn.n_layers):
        W = dbn.rbm_layers[i].W.get_value(borrow=True).T
        # Each row of W must be a flattened image for tile_raster_images;
        # with n_ins=4 * 4 that holds only for the first layer's 16-pixel
        # filters (the original (28, 28) shape belongs to the MNIST version).
        if W.shape[1] != 4 * 4:
            continue
        image = Image.fromarray(
            tile_raster_images(
                X=W,
                img_shape=(4, 4),
                tile_shape=(10, 10),
                tile_spacing=(1, 1),
            )
        )
        # One file per layer instead of the original constant filename.
        image.save("filters_layer_%i.png" % i)
Example #5
def test_toy(learning_rate=0.1,
             training_epochs=15,
             n_hidden=30,
             batch_size=20,
             output_folder='toy_dA_plots'):
 
    print 'Creating dataset...'
    train_set_x = toy_dataset(p=0.001, size=10000, seed=238904)
    test_set_x = toy_dataset(p=0.001, size=10000, seed=238905)
    train_set_x = numpy.asarray(train_set_x, dtype=theano.config.floatX)
    test_set_x = numpy.asarray(test_set_x, dtype=theano.config.floatX)
    numpy.random.shuffle(train_set_x)
    numpy.random.shuffle(test_set_x)
    train_set_x = theano.shared(train_set_x)
    test_set_x = theano.shared(test_set_x)

    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size

    index = T.lscalar()  
    x = T.matrix('x') 

    if not os.path.isdir(output_folder):
        os.makedirs(output_folder)
    os.chdir(output_folder)

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2 ** 30))

    da = dA(
        numpy_rng=rng,
        theano_rng=theano_rng,
        input=x,
        n_visible=4 * 4,
        n_hidden=n_hidden
    )

    cost, updates = da.get_cost_updates(
        corruption_level=0.3,
        learning_rate=learning_rate
    )

    train_da = theano.function(
        inputs=[index],
        outputs=cost,
        updates=updates,
        givens={
            x: train_set_x[index * batch_size: (index + 1) * batch_size]
        }
    )

    print 'Starting training with %d epochs' % training_epochs
    plotting_time = 0.
    start_time = timeit.default_timer()
 
    for epoch in xrange(training_epochs):
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_da(batch_index))

        print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

        plotting_start = timeit.default_timer()
        image = Image.fromarray(
                    tile_raster_images(
                        X=da.W.get_value(borrow=True).T,
                        img_shape=(4, 4),
                        tile_shape=(10, 10),
                        tile_spacing=(1, 1)
                    )
                )
        image.save('filters_at_epoch_%i.png' % epoch)
        plotting_stop = timeit.default_timer()
        plotting_time += (plotting_stop - plotting_start) 

    end_time = timeit.default_timer()
    print 'Training took %.2f minutes' % ((end_time - start_time)/ 60.)        

    image = Image.fromarray(tile_raster_images(
        X=da.W.get_value(borrow=True).T,
        img_shape=(4, 4), tile_shape=(10, 10),
        tile_spacing=(1, 1)))
    image.save('filters_corruption_30.png')

    os.chdir('../')
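
Example #5 trains the denoising autoencoder with corruption_level=0.3. In the standard Theano dA tutorial this corruption is a binomial mask that zeroes each input component with probability 0.3 before reconstruction; assuming this dA class follows the same convention, the effect looks like the NumPy sketch below.

import numpy

rng = numpy.random.RandomState(123)
x = rng.binomial(n=1, p=0.3, size=(5, 4 * 4)).astype('float32')  # five toy 4x4 samples

corruption_level = 0.3
# Keep each component with probability 1 - corruption_level, zero it otherwise.
mask = rng.binomial(n=1, p=1 - corruption_level, size=x.shape)
x_corrupted = mask * x

print(x[0].reshape(4, 4))
print(x_corrupted[0].reshape(4, 4))
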
Example #6
from tf_hmm import HiddenMarkovModel
from toy_dataset import toy_dataset
import tensorflow as tf
import time

hmm = HiddenMarkovModel(2, 2, time_steps=64, reports=True, code_number=1)

# training using the 1st, 2nd, 3rd, 4th, 8th and 9th members of the dataset
codes = [1, 1, 1, 1, 0, 0, 0, 1, 1, 0]
dataset = toy_dataset(10, 64)

print(hmm.posterior(dataset))
hmm.expectation_maximization(dataset, max_steps=100, codes=codes)
hmm.plot()

print('### p0 ###')
print(hmm.p0)
print('### tp ###')
print(hmm.tp)
print('### mu ###')
print(hmm.mu)
print('### cov ###')
print(hmm.cov)
print()

hmm.save('example')

print(hmm.posterior(dataset))
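
The codes list together with code_number=1 selects which of the ten dataset members take part in expectation_maximization (the comment above names the 1st, 2nd, 3rd, 4th, 8th and 9th). A quick check of the selected indices, assuming that masking semantics:

codes = [1, 1, 1, 1, 0, 0, 0, 1, 1, 0]
code_number = 1

selected = [i for i, c in enumerate(codes) if c == code_number]
print(selected)  # [0, 1, 2, 3, 7, 8] -> the 1st, 2nd, 3rd, 4th, 8th and 9th members
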