# The helper nodes (lstm_fork, lstm, slice_state, linear, get_params, sgd,
# del_shared) come from the accompanying library; their import path is not
# shown in the original. X, y, X_sym, y_sym and minibatch_size are assumed
# to be defined earlier.
import numpy as np
import theano
from theano import tensor


def run_lstm():
    del_shared()
    n_in = X.shape[-1]
    n_hid = 20
    n_out = y.shape[-1]
    random_state = np.random.RandomState(42)
    # the recurrent state carries hidden and cell activations, hence 2 * n_hid
    h_init = np.zeros((minibatch_size, 2 * n_hid)).astype("float32")
    h0 = tensor.fmatrix()

    l1 = lstm_fork([X_sym], [n_in], n_hid, name="l1", random_state=random_state)

    def step(in_t, h_tm1):
        h_t = lstm(in_t, h_tm1, n_hid, name="rec", random_state=random_state)
        return h_t

    h, _ = theano.scan(step, sequences=[l1], outputs_info=[h0])
    h_o = slice_state(h, n_hid)
    pred = linear([h_o], [n_hid], n_out, name="l2", random_state=random_state)
    cost = ((y_sym - pred) ** 2).sum()

    params = list(get_params().values())
    grads = tensor.grad(cost, params)
    # vanishingly small step and FAST_COMPILE: this call mainly checks that the
    # graph compiles and a single update runs, not that the model trains
    learning_rate = 1e-12
    opt = sgd(params, learning_rate)
    updates = opt.updates(params, grads)

    f = theano.function([X_sym, y_sym, h0], [cost, h], updates=updates,
                        mode="FAST_COMPILE")
    f(X, y, h_init)
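The 2 * n_hid columns in h_init suggest that each scan step returns the hidden and cell states concatenated along the feature axis, and that slice_state keeps only the hidden half for the readout layer. A minimal numpy sketch of that assumed behavior; slice_state_sketch is a hypothetical stand-in, not the library function:

import numpy as np

def slice_state_sketch(h, n_hid):
    # assumption: h[..., :n_hid] holds the hidden activations,
    # h[..., n_hid:] the cell state
    return h[..., :n_hid]

h = np.zeros((10, 5, 2 * 20), dtype="float32")  # (time, minibatch, 2 * n_hid)
assert slice_state_sketch(h, 20).shape == (10, 5, 20)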
h0.tag.test_value = train_h_init
random_state = np.random.RandomState(1999)

# embedding -> fork of the LSTM inputs -> recurrence over time
l1 = embed([X_sym], n_classes, n_emb, name="emb", random_state=random_state)
in_fork = lstm_fork([l1], [n_emb], n_hid, name="h1", random_state=random_state)


def step(in_t, h_tm1):
    h_t = lstm(in_t, h_tm1, [n_hid], n_hid, name="lstm_l1",
               random_state=random_state)
    return h_t


h, _ = theano.scan(step, sequences=[in_fork], outputs_info=[h0])
h_o = slice_state(h, n_hid)
y_pred = softmax([h_o], [n_hid], n_classes, name="h2", random_state=random_state)

loss = categorical_crossentropy(y_pred, y_sym)
# average over the minibatch axis, sum over time steps
cost = loss.mean(axis=1).sum(axis=0)

params = list(get_params().values())
grads = tensor.grad(cost, params)

learning_rate = 0.0001
opt = adam(params, learning_rate)
updates = opt.updates(params, grads)

# fit_function applies the Adam updates; cost_function only evaluates the cost
fit_function = theano.function([X_sym, y_sym, h0], [cost, h], updates=updates)
cost_function = theano.function([X_sym, y_sym, h0], [cost, h])
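With fit_function and cost_function compiled, training reduces to feeding minibatches plus an initial recurrent state. A rough sketch of such a loop; train_itr, valid_itr and n_epochs are hypothetical names not present in the original, and carrying the final hidden state across minibatches is an assumption:

n_epochs = 10
for epoch in range(n_epochs):
    h_i = train_h_init  # reset the recurrent state at the start of each epoch
    train_costs = []
    for X_mb, y_mb in train_itr:  # hypothetical minibatch iterator
        mb_cost, h_state = fit_function(X_mb, y_mb, h_i)
        train_costs.append(mb_cost)
        h_i = h_state[-1]  # assumed: carry the last state into the next minibatch
    valid_costs = [cost_function(X_mb, y_mb, train_h_init)[0]
                   for X_mb, y_mb in valid_itr]
    print(epoch, np.mean(train_costs), np.mean(valid_costs))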