# Tail of the per-word loop inside the entropy function for the dropout LSTM.
# The loop header and the input-gate (i1) / candidate (z1) computations come
# earlier in the file, as does the import block (math, sys, numpy, chainer);
# they are not part of this excerpt.
    f0 = model.Wf(x_k) + model.Rf(h)       # forget gate: input + recurrent term
    f1 = F.sigmoid(f0)
    c = i1 * z1 + f1 * c                   # new cell state
    o0 = model.Wo(x_k) + model.Ro(h)       # output gate
    o1 = F.sigmoid(o0)
    y = o1 * F.tanh(c)                     # new hidden output
    h = y                                  # carry the hidden state to the next word
                                           # (assumed fix: the excerpt omits this
                                           # update, which the plain-LSTM version has)
    yv = F.softmax(model.W(y))             # distribution over the vocabulary
    pi = yv.data[0][w2]                    # probability of the actual next word
    sum -= math.log(pi, 2)                 # accumulate entropy in bits
return sum

model_filename = sys.argv[1]   # path to a model saved with serializers.save_npz

vocab = {}
train_data = helpers.load_data('.data/ptb.train.min.txt', vocab)
eos_id = vocab['<eos>']
max_id = len(vocab) - 1        # largest id from training; test ids above it are unknown
demb = 100
model = with_dropout.Lstm(len(vocab), eos_id, demb)
serializers.load_npz(model_filename, model)

test_data = helpers.load_data('.data/ptb.test.txt', vocab)
test_data = test_data[0:1000]

s = []
has_unknown = False
total_word_num = 0
sum = 0.0                      # total log2 entropy ("sum" shadows the builtin, kept as in the original)
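# A minimal sketch (assumption: the actual loop is not shown in this excerpt)
# of how these accumulators would typically be consumed. Sentences are rebuilt
# from test_data on eos_id; sentences containing a word unseen in training
# (id > max_id, tracked via has_unknown) are skipped, and the entropy function
# above adds its bits to "sum". Once every sentence is scored, the standard
# per-word perplexity follows from the average bits per word:
#
#     perplexity = 2 ** (sum / total_word_num)
#     print(perplexity)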
# Tail of the entropy function for the plain LSTM model: score each word of
# sentence s given its predecessor. Variable(..., volatile='on') is the old
# Chainer 1.x way of skipping backprop graph construction at test time. The
# function header and the import block (math, sys, etc.) precede this excerpt.
for i in range(1, len(s)):
    w1, w2 = s[i - 1], s[i]                # current word, next word to predict
    x_k = model.embed(
        Variable(np.array([w1], dtype=np.int32), volatile='on'))
    y = model.H(x_k)                       # the LSTM link keeps its own state
    h = y
    yv = F.softmax(model.W(y))             # distribution over the vocabulary
    pi = yv.data[0][w2]                    # probability of the actual next word
    sum -= math.log(pi, 2)                 # accumulate entropy in bits
return sum

model_filename = sys.argv[1]

vocab = {}
train_data = helpers.load_data('.data/ptb.train.min.txt', vocab)
eos_id = vocab['<eos>']
max_id = len(vocab) - 1
demb = 100
model = use_lstm.Lstm(len(vocab), eos_id, demb)
serializers.load_npz(model_filename, model)

test_data = helpers.load_data('.data/ptb.test.txt', vocab)
test_data = test_data[0:1000]

s = []
has_unknown = False
total_word_num = 0
sum = 0.0
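# Hypothetical reference implementation of helpers.load_data (the real module
# is not shown in this excerpt): read whitespace-separated tokens, treat each
# line break as '<eos>', grow vocab with consecutive integer ids, and return
# the corpus as a flat list of ids. The name load_data_sketch and the exact
# tokenization are assumptions, not the author's code.
def load_data_sketch(path, vocab):
    data = []
    with open(path) as f:
        for line in f:
            for word in line.split() + ['<eos>']:
                if word not in vocab:
                    vocab[word] = len(vocab)    # next free integer id
                data.append(vocab[word])
    return data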
import numpy as np
import chainer
from chainer import cuda, Function, gradient_check, Variable, optimizers, serializers, utils
from chainer import Link, Chain, ChainList
import chainer.functions as F
import chainer.links as L

from impls import use_lstm, helpers

name = helpers.get_script_name(__file__)
data_path = '.data/ptb.train.min.txt'
dest_root = '.dest'

vocab = {}
train_data = helpers.load_data(data_path, vocab)
vocab_num = len(vocab)
eos_id = vocab['<eos>']
demb = 100

model = use_lstm.Lstm(vocab_num, eos_id, demb)
optimizer = optimizers.Adam()
optimizer.setup(model)

for epoch in range(5):
    s = []
    for pos in range(len(train_data)):
        id = train_data[pos]    # "id" shadows the builtin, kept as in the original
        s.append(id)
        if id == eos_id:        # sentence boundary: the per-sentence training step follows