import theano
import theano.tensor as T

# Assumed imports (module paths are not shown in the original script):
# build_rnnrbm comes from the deeplearning.net RNN-RBM tutorial's rnnrbm.py;
# XOR is a local toy sequence dataset with a pylearn2-style interface.
from rnnrbm import build_rnnrbm
from dataset import XOR


class RnnRbm(object):
    """RNN-RBM over 2 visible units, wrapping the symbolic graph built by
    build_rnnrbm into compiled training and generation functions."""

    def __init__(self, n_hidden=150, n_hidden_recurrent=100, lr=0.001,
                 dt=0.3):
        (v, v_sample, cost, monitor, params, updates_train, v_t,
         updates_generate) = build_rnnrbm(2, n_hidden, n_hidden_recurrent)
        self.dt = dt
        # SGD on the contrastive-divergence cost; the negative sample is
        # held constant so gradients do not flow through the Gibbs chain.
        gradient = T.grad(cost, params, consider_constant=[v_sample])
        updates_train.update(
            ((p, p - lr * g) for p, g in zip(params, gradient)))
        self.train_function = theano.function([v], monitor,
                                              updates=updates_train)
        self.generate_function = theano.function([], v_t,
                                                 updates=updates_generate)

    def train(self, files, batch_size=100, num_epochs=200):
        """Train the RNN-RBM via SGD. (Body not shown in this excerpt.)"""


###############################
####  Setting for dataset  ####
###############################
dataset = XOR(size=30, type='seq')
train_data = dataset.get_batch_design(10)
print(len(train_data))

########################
####  Pre-training  ####
########################
### First layer ###
# Keyword arguments must match RnnRbm.__init__ above (the pylearn2-style
# nvis/nhid names do not apply; n_visible is fixed to 2 in build_rnnrbm).
rbm = RnnRbm(n_hidden=4, n_hidden_recurrent=3)
print('')
# for param in rbm.get_params():
#     print('------ ' + str(param) + ' -----')
#     print(param.get_value())
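###############################
####    Usage sketch       ####
###############################
# A minimal training/sampling sketch, not from the original script. It
# assumes train_data is a list of (timesteps, 2) arrays as returned by
# get_batch_design above, drives the compiled train_function directly
# (since the body of RnnRbm.train is not shown), and uses an arbitrary
# epoch count for this toy XOR setup.
for epoch in range(50):
    # monitor is the tutorial's pseudo-likelihood proxy; average it per epoch
    costs = [rbm.train_function(sequence) for sequence in train_data]
    print('Epoch %i, monitor %f' % (epoch + 1, sum(costs) / len(costs)))

# One call to generate_function samples a fresh visible sequence.
sample = rbm.generate_function()
print(sample.shape)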