Code Example #1
def test_vae():
    ds = odin.dataset.load_mnist()

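    # Tied weights: the decoder below reuses the transpose of the encoder weight matrix W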
    W = T.variable(T.np_glorot_uniform(shape=(784, 512)), name='W')
    WT = T.transpose(W)
    encoder = odin.nnet.Dense((None, 28, 28), num_units=512, W=W, name='encoder')
    # decoder = odin.nnet.Dense((None, 256), num_units=512, name='decoder1')
    decoder = odin.nnet.Dense((None, 512), num_units=784, W=WT, name='decoder2')

    vae = odin.nnet.VariationalEncoderDecoder(encoder=encoder, decoder=decoder,
        prior_logsigma=1.7, batch_size=64)

    # ====== prediction ====== #
    x = ds['X_train'][:16]

    f = T.function(inputs=vae.input_var, outputs=vae(training=False))
    print("Predictions:", f(x)[0].shape)

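    # In reconstruction mode the function returns flattened images that can be reshaped to 28x28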
    f = T.function(
        inputs=vae.input_var,
        outputs=vae.set_reconstruction_mode(True)(training=False))
    y = f(x)[0].reshape(-1, 28, 28)
    print("Predictions:", y.shape)

    odin.visual.plot_images(x)
    odin.visual.plot_images(y)
    odin.visual.plot_show()

    print('Params:', [p.name for p in vae.get_params(False)])
    print('Params(globals):', [p.name for p in vae.get_params(True)])
    # ====== Optimizer ====== #
    cost, updates = vae.get_optimization(
        objective=odin.objectives.categorical_crossentropy,
        optimizer=lambda x, y: odin.optimizers.sgd(x, y, learning_rate=0.01),
        globals=True, training=True)

    f = T.function(inputs=vae.input_var, outputs=cost, updates=updates)
    cost = []
    niter = ds['X_train'].iter_len() // 64
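    # Two epochs over mini-batches of 64; incomplete batches are skipped to match the fixed batch_size=64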
    for j in xrange(2):
        for i, x in enumerate(ds['X_train'].iter(64)):
            if x.shape[0] != 64: continue
            cost.append(f(x))
            odin.logger.progress(i, niter, str(cost[-1]))
    odin.visual.print_bar(cost)

    # ====== reconstruction ====== #
    f = T.function(
        inputs=vae.input_var,
        outputs=vae.set_reconstruction_mode(True)(training=False))
    X_test = ds['X_test'][:16]
    X_reco = f(X_test)[0].reshape(-1, 28, 28)
    odin.visual.plot_images(X_test)
    odin.visual.plot_images(X_reco)
    odin.visual.plot_show()
Code Example #2
File: mnist_rbm.py, Project: trungnt13/odin_old
def test_rbm():
    batch_size = 32
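    # Persistent chain state (batch_size x 500 hidden units) used for persistent contrastive-divergence training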
    persistent_chain = T.variable(numpy.zeros((batch_size, 500)))
    # input_ = odin.nnet.Dense((None, 28, 28), num_units=784)  # unused: replaced by the plain shape tuple below
    input_ = (None, 28, 28)
    rbm = odin.nnet.RBM(input_, 500, persistent=persistent_chain)
    print('Input variables:', rbm.input_var)
    print('Output variables:', rbm.output_var)

    sgd = lambda x, y: odin.optimizers.sgd(x, y, learning_rate=0.01)
    cost, updates = rbm.get_optimization(
        optimizer=sgd, globals=True, objective=odin.objectives.contrastive_divergence)
    print('Building functions...')
    train_rbm = T.function(
        inputs=rbm.input_var,
        outputs=cost,
        updates=updates
    )

    cost = []
    niter = ds['X_train'].iter_len() // batch_size
    for i, x in enumerate(ds['X_train'].iter(batch_size, seed=13)):
        if x.shape[0] != batch_size: continue
        # x = x.astype(int) # this one can mess up the whole training process
        cost.append(train_rbm(x))
        odin.logger.progress(i, niter, title='%.5f' % cost[-1])
    odin.visual.print_bar(cost, bincount=20)

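    # One Gibbs step in reconstruction mode; vis_mfc is plotted below as the reconstructed digits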
    vis_mfc = rbm.set_sampling_steps(1).set_reconstruction_mode(True)()
    print('Building functions...')
    sample_rbm = T.function(
        inputs=rbm.input_var,
        outputs=vis_mfc,
        updates=updates)

    test_x = ds['X_test'].value
    for i in xrange(3):
        t = numpy.random.randint(test_x.shape[0] - 16)
        x = test_x[t:t + 16]

        x_mean = sample_rbm(x)[0]
        odin.visual.plot_images(x)
        odin.visual.plot_images(x_mean)
        plt.show(block=False)
        raw_input('<Enter>')
        plt.close('all')
Code Example #3
def test_dA(): # AutoEncoder
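    # Denoising autoencoder (denoising=0.3) with 512 sigmoid hidden units; the contractive penalty is disabled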
    dA = odin.nnet.AutoEncoder((None, 28, 28),
        num_units=512, denoising=0.3, contractive=False,
        nonlinearity=T.sigmoid)
    sgd = lambda x, y: odin.optimizers.sgd(x, y, learning_rate=0.01)
    cost, updates = dA.get_optimization(
        objective=odin.objectives.categorical_crossentropy,
        optimizer=sgd,
        globals=True)
    f_train = T.function(
        inputs=dA.input_var,
        outputs=cost,
        updates=updates)

    cost = []
    niter = ds['X_train'].iter_len() // 64
    choices = None
    for _ in xrange(3):
        for i, x in enumerate(ds['X_train'].iter(64)):
            cost.append(f_train(x))
            odin.logger.progress(i, niter, title=str(cost[-1]))
        print()
        odin.visual.print_bar(cost, bincount=20)
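        # Plot 16 randomly chosen encoder filters, reshaped to 28x28, after each epoch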
        W = T.get_value(dA.get_params(False)[0]).T.reshape(-1, 28, 28)
        if choices is None:
            choices = np.random.choice(
                np.arange(W.shape[0]), size=16, replace=False)
        W = W[choices]
        odin.visual.plot_images(W)
        plt.show(block=False)
        raw_input('<enter>')

    f_pred = T.function(
        inputs=dA.input_var,
        outputs=dA.set_reconstruction_mode(True)())
    for i in xrange(3):
        t = np.random.randint(ds['X_test'].shape[0] - 16)
        X = ds['X_test'][t:t + 16]
        X_pred = f_pred(X)[0]
        odin.visual.plot_images(X)
        odin.visual.plot_images(X_pred)
        odin.visual.plot_show()
Code Example #4
def test_aED(): #AutoEncoderDecoder
    Wa = T.variable(T.np_glorot_uniform(shape=(784, 256)), name='Wa')
    Wb = T.variable(T.np_glorot_uniform(shape=(256, 128)), name='Wb')

    d1a = odin.nnet.Dense((None, 28, 28), num_units=256, W=Wa, name='d1a',
        nonlinearity=T.sigmoid)
    d1b = odin.nnet.Dense(d1a, num_units=128, W=Wb, name='d1b',
        nonlinearity=T.sigmoid)

    # or d1b, (None, 128) as incoming
    d2a = odin.nnet.Dense((None, 128), num_units=256, W=Wb.T, name='d2a',
        nonlinearity=T.sigmoid)
    d2b = odin.nnet.Dense(d2a, num_units=784, W=Wa.T, name='d2b',
        nonlinearity=T.sigmoid)

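    # Encoder 784 -> 256 -> 128 and mirrored decoder 128 -> 256 -> 784 sharing the transposed weight matrices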
    aED = odin.nnet.AutoEncoderDecoder(d1b, d2b)

    sgd = lambda x, y: odin.optimizers.sgd(x, y, learning_rate=0.01)
    cost, updates = aED.get_optimization(
        objective=odin.objectives.categorical_crossentropy,
        optimizer=sgd,
        globals=True)
    f_train = T.function(
        inputs=aED.input_var,
        outputs=cost,
        updates=updates)

    cost = []
    niter = ds['X_train'].iter_len() // 64
    choices = None
    for _ in xrange(3):
        for i, x in enumerate(ds['X_train'].iter(64)):
            cost.append(f_train(x))
            odin.logger.progress(i, niter, title=str(cost[-1]))
        print()
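        # Keep only non-NaN costs (NaN != NaN) when plotting the training curve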
        odin.visual.print_bar([i for i in cost if i == i], bincount=20)
        W = T.get_value(aED.get_params(True)[0]).T.reshape(-1, 28, 28)
        if choices is None:
            choices = np.random.choice(
                np.arange(W.shape[0]), size=16, replace=False)
        W = W[choices]
        odin.visual.plot_images(W)
        plt.show(block=False)
        raw_input('<enter>')

    # ====== Output reconstruction ====== #
    f_pred = T.function(
        inputs=aED.input_var,
        outputs=aED.set_reconstruction_mode(True)())

    for i in xrange(3):
        t = np.random.randint(ds['X_test'].shape[0] - 16)
        X = ds['X_test'][t:t + 16]
        X_pred = f_pred(X)[0].reshape(-1, 28, 28)
        odin.visual.plot_images(X)
        odin.visual.plot_images(X_pred)
        odin.visual.plot_show()

    # ====== Output hidden activation ====== #
    f_pred = T.function(
        inputs=aED.input_var,
        outputs=aED())
    X = ds['X_test'][t:t + 16]
    print(f_pred(X)[0].shape)
Code Example #5
File: mnist_cnn.py, Project: trungnt13/odin_old
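# Classifier head: a 128-unit ReLU layer with dropout, then a softmax over nb_classes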
f = odin.nnet.Dense(f,
    num_units=128,
    nonlinearity=T.relu)
f = odin.nnet.Dropout(f, p=0.5)
f = odin.nnet.Dense(f, num_units=nb_classes, nonlinearity=T.softmax)

print('Input variables:', f.input_var)
print('Output variables:', f.output_var)
# ====== create cost and updates based on given objectives and optimizers ====== #
print('\nBuilding training functions ...')
cost, updates = f.get_optimization(
    objective=odin.objectives.categorical_crossentropy,
    optimizer=odin.optimizers.adadelta,
    training=True)
f_train = T.function(
    inputs=f.input_var + f.output_var,
    outputs=cost,
    updates=updates)

# if you don't specify the optimizer, only the objective is computed
print('\nBuilding monitoring functions ...')
monitor_cost, _ = f.get_optimization(
    objective=odin.objectives.mean_categorical_accuracy,
    optimizer=None,
    training=False)
f_monitor = T.function(
    inputs=f.input_var + f.output_var,
    outputs=monitor_cost)

# ====== Start the training process ====== #
cost = []
niter_train = ds['X_train'].iter_len() // batch_size
Code Example #6
    h1 = h[:, 0:10] # 1x10
    h2 = h[:, 10:20]
    h3 = h[:, 20:30]
    h4 = h[:, 30:40]

    test = (T.dot(x1, T.transpose(h1)) +
            T.dot(x2, T.transpose(h2)) +
            T.dot(x3, T.transpose(h3)) +
            T.dot(x4, T.transpose(h4)))
    return test + mem

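# scan applies step1 to each (input, hidden) pair and accumulates the running sum in mem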
mem, updates = T.scan(step1,
                sequences=[inputs, hidden],
                outputs_info=[T.zeros(shape=(seq_len, 1))]
            )
f1 = T.function(inputs=[], outputs=mem)
print(f1().shape)

# ====== second approach ====== #


def step2(x1, x2, x3, x4, h, mem):
    h = h.dimshuffle('x', 0) # 1x20
    h1 = T.dot(h, W1) # 1x10
    h2 = T.dot(h, W2)
    h3 = T.dot(h, W3)
    h4 = T.dot(h, W4)
    test = (T.dot(x1, T.transpose(h1)) +
            T.dot(x2, T.transpose(h2)) +
            T.dot(x3, T.transpose(h3)) +
            T.dot(x4, T.transpose(h4)))
Code Example #7
# build the model: 2 stacked LSTM
print('Build model...')
f = odin.nnet.LSTM((None, maxlen, len(chars)), num_units=256)
f = odin.nnet.Dropout(f, p=0.2)
f = odin.nnet.LSTM(f, num_units=256, only_return_final=True)
f = odin.nnet.Dense(f, num_units=len(chars), nonlinearity=T.softmax)
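# Two stacked 256-unit LSTMs with dropout; only_return_final=True keeps just the last time step for the character softmax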

print('Input variables:', f.input_var)
print('Output variables:', f.output_var)
cost, updates = f.get_optimization(
    objective=odin.objectives.mean_categorical_crossentropy,
    optimizer=odin.optimizers.rmsprop)
print('Build training function...')
f_train = T.function(
    inputs=f.input_var + f.output_var,
    outputs=cost,
    updates=updates)
f_pred = T.function(
    inputs=f.input_var,
    outputs=f(training=False)[0]
)

# train the model, output generated text after each iteration
for iteration in range(1, 60):
    print()
    print('-' * 50)
    print('Iteration', iteration)
    seed = T.get_random_magic_seed()
    cost = []
    niter = X.shape[0] // 128
    for k, (i, j) in enumerate(
Code Example #8
seq3 = T.variable(np.arange(5))

nonseq1 = T.variable(1.)
nonseq2 = T.variable(2.)

([o1, o2], updates) = theano.scan(
    fn=step,
    sequences=[seq1, seq2, seq3],
    outputs_info=[T.zeros((2, 2)), T.ones((2, 2))],
    non_sequences=[nonseq1, nonseq2],
    n_steps=None,
    truncate_gradient=-1,
    go_backwards=False)

f1 = T.function(
    inputs=[],
    outputs=[o1, o2],
    updates=updates)
a, b = f1()
print(a.shape)

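# The same recurrence driven through T.loop, for comparison with the raw theano.scan call above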
o1, o2 = T.loop(step,
    sequences=[seq1, seq2, seq3],
    outputs_info=[T.zeros((2, 2)), T.ones((2, 2))],
    non_sequences=[nonseq1, nonseq2],
    n_steps=5)
print(o1, o2)
f2 = T.function(
    inputs=[],
    outputs=[o1, o2],
    updates=updates)
a, b = f2()