Example #1
def run_lstm():
    # X, y, X_sym, y_sym and minibatch_size are assumed to be module-level
    # fixtures: 3D (time, minibatch, feature) arrays and matching symbols.
    del_shared()
    n_in = X.shape[-1]
    n_hid = 20
    n_out = y.shape[-1]

    random_state = np.random.RandomState(42)
    # LSTM state carries hidden and cell parts concatenated, hence 2 * n_hid
    h_init = np.zeros((minibatch_size, 2 * n_hid)).astype("float32")

    h0 = tensor.fmatrix()

    l1 = lstm_fork([X_sym], [n_in], n_hid, name="l1",
                    random_state=random_state)

    def step(in_t, h_tm1):
        h_t = lstm(in_t, h_tm1, n_hid, name="rec", random_state=random_state)
        return h_t

    h, _ = theano.scan(step, sequences=[l1], outputs_info=[h0])
    # keep only the hidden half of the concatenated hidden/cell state
    h_o = slice_state(h, n_hid)

    pred = linear([h_o], [n_hid], n_out, name="l2", random_state=random_state)
    cost = ((y_sym - pred) ** 2).sum()
    params = list(get_params().values())

    grads = tensor.grad(cost, params)
    learning_rate = 1e-12  # vanishingly small; parameters stay effectively frozen
    opt = sgd(params, learning_rate)
    updates = opt.updates(params, grads)

    f = theano.function([X_sym, y_sym, h0], [cost, h], updates=updates,
                        mode="FAST_COMPILE")
    f(X, y, h_init)
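
run_lstm relies on module-level fixtures that this listing does not show. A minimal sketch of what they could look like (all names, shapes and dtypes here are assumptions, not from the original source):

import numpy as np
from theano import tensor

minibatch_size = 10
fixture_rng = np.random.RandomState(0)
# (time, minibatch, feature) sequences; float32 to match the fmatrix h0
X = fixture_rng.randn(15, minibatch_size, 8).astype("float32")
y = fixture_rng.randn(15, minibatch_size, 2).astype("float32")
X_sym = tensor.tensor3(dtype="float32")
y_sym = tensor.tensor3(dtype="float32")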
Example #2
def test_feedforward_theano_mix():
    # X, y and n_classes are assumed module-level fixtures: 2D float32
    # features and one-hot targets.
    del_shared()
    minibatch_size = 100
    random_state = np.random.RandomState(1999)
    X_sym = tensor.fmatrix()
    y_sym = tensor.fmatrix()

    l1_o = linear([X_sym], [X.shape[1]],
                  proj_dim=20,
                  name='l1',
                  random_state=random_state)
    # the "theano mix": splice a plain Theano op between library layers
    l1_o = 0.999 * l1_o
    y_pred = softmax([l1_o], [20],
                     proj_dim=n_classes,
                     name='out',
                     random_state=random_state)

    cost = categorical_crossentropy(y_pred, y_sym).mean()
    params = list(get_params().values())
    grads = theano.grad(cost, params)
    learning_rate = 0.001
    opt = sgd(params, learning_rate)
    updates = opt.updates(params, grads)

    fit_function = theano.function([X_sym, y_sym], [cost],
                                   updates=updates,
                                   mode="FAST_COMPILE")

    cost_function = theano.function([X_sym, y_sym], [cost],
                                    mode="FAST_COMPILE")

    train_itr = minibatch_iterator([X, y], minibatch_size, axis=0)
    valid_itr = minibatch_iterator([X, y], minibatch_size, axis=0)
    X_train, y_train = next(train_itr)
    X_valid, y_valid = next(valid_itr)
    fit_function(X_train, y_train)
    cost_function(X_valid, y_valid)
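
Again X, y and n_classes are unseen module-level fixtures. One plausible setup, assuming one-hot float32 targets to match the fmatrix y_sym and categorical_crossentropy (sizes are guesses):

import numpy as np

n_classes = 10
fixture_rng = np.random.RandomState(2020)
X = fixture_rng.randn(500, 64).astype("float32")
labels = fixture_rng.randint(0, n_classes, 500)
y = np.zeros((500, n_classes), dtype="float32")
y[np.arange(500), labels] = 1.  # one-hot rows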
Example #3
def test_fixed_projection():
    # X and X_sym are assumed module-level fixtures; X must have 64 columns
    # to match the fixed basis below.
    random_state = np.random.RandomState(1999)
    rand_projection = random_state.randn(64, 12)
    rand_dim = rand_projection.shape[1]

    out = fixed_projection([X_sym], [X.shape[1]], rand_projection, 'proj1')
    out2 = fixed_projection([X_sym], [X.shape[1]],
                            rand_projection,
                            'proj2',
                            pre=rand_projection[:, 0])
    out3 = fixed_projection([X_sym], [X.shape[1]],
                            rand_projection,
                            'proj3',
                            post=rand_projection[0])
    final = linear([out2], [rand_dim], 5, 'linear', random_state=random_state)
    # Test that it compiles with and without bias
    f = theano.function([X_sym], [out, out2, out3, final], mode="FAST_COMPILE")

    # Test updates
    params = list(get_params().values())
    grads = tensor.grad(final.mean(), params)
    opt = sgd(params, .1)
    updates = opt.updates(params, grads)
    f2 = theano.function([X_sym], [out2, final], updates=updates)
    ret = f(np.ones_like(X))[0]
    assert ret.shape[1] != X.shape[1]
    ret2 = f(np.ones_like(X))[1]
    assert ret2.shape[1] != X.shape[1]
    out1, final1 = f2(X)
    # shadows the symbolic out2 above; harmless once f2 is compiled
    out2, final2 = f2(X)

    # Make sure fixed basis is unchanged
    assert_almost_equal(out1, out2)

    # Make sure linear layer is updated
    assert_raises(AssertionError, assert_almost_equal, final1, final2)
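
This test also assumes module-level X and X_sym; since rand_projection is 64x12, X must have 64 columns. A minimal sketch under that assumption:

import numpy as np
from theano import tensor

X = np.random.RandomState(0).randn(100, 64).astype("float32")
X_sym = tensor.fmatrix()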
Example #4

def step(in_t, h_tm1):
    # in_fork, h0, n_hid, n_classes, X_sym, y_sym and random_state are
    # assumed to be defined earlier in the source this excerpt was taken from
    h_t = lstm(in_t, h_tm1, [n_hid], n_hid, name="lstm_l1", random_state=random_state)
    return h_t


h, _ = theano.scan(step, sequences=[in_fork], outputs_info=[h0])

h_o = slice_state(h, n_hid)

y_pred = softmax([h_o], [n_hid], n_classes, name="h2", random_state=random_state)
loss = categorical_crossentropy(y_pred, y_sym)
# average over the minibatch dimension, sum over timesteps
cost = loss.mean(axis=1).sum(axis=0)

params = list(get_params().values())
grads = tensor.grad(cost, params)

learning_rate = 0.0001
opt = adam(params, learning_rate)
updates = opt.updates(params, grads)

fit_function = theano.function([X_sym, y_sym, h0], [cost, h], updates=updates)
cost_function = theano.function([X_sym, y_sym, h0], [cost, h])
predict_function = theano.function([X_sym, h0], [y_pred, h])


def train_loop(itr):
    mb = next(itr)
    # next-step prediction: inputs are all but the last timestep,
    # targets are the same sequence shifted forward by one
    X_mb, y_mb = mb[:-1], mb[1:]
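
The excerpt stops inside train_loop. One way the compiled functions above might be driven is to carry the recurrent state across minibatches; the loop below is a sketch under that assumption, not the original code:

import numpy as np

def run_epoch(itr, minibatch_size, n_hid):
    # fresh concatenated hidden/cell state at the start of each epoch
    h_init = np.zeros((minibatch_size, 2 * n_hid)).astype("float32")
    epoch_costs = []
    for mb in itr:
        X_mb, y_mb = mb[:-1], mb[1:]  # next-step prediction targets
        cost, h = fit_function(X_mb, y_mb, h_init)
        h_init = h[-1]  # final timestep's state seeds the next minibatch
        epoch_costs.append(cost)
    return np.mean(epoch_costs)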