def get_random_binary_tree(min_depth, max_depth, num_vals, child_prob=0.7, _cur_depth=0):
    """Generate a random binary tree of ``tree_rnn.BinaryNode`` objects.

    Args:
        min_depth: while the current depth is below this bound, both
            children are always added (subject to ``max_depth``).
        max_depth: hard ceiling on depth; recursion stops once the
            remaining depth budget reaches 1.
        num_vals: node values are drawn uniformly from ``range(num_vals)``.
        child_prob: probability of adding each child once ``min_depth``
            has been satisfied.
        _cur_depth: internal recursion counter; callers should not pass it.

    Returns:
        The root ``tree_rnn.BinaryNode`` of the generated tree.
    """
    # randrange(num_vals) is the idiomatic, exact way to draw a uniform
    # integer in [0, num_vals), replacing int(random.random() * num_vals).
    root = tree_rnn.BinaryNode(random.randrange(num_vals))

    # NOTE(review): the depth ceiling takes precedence over min_depth —
    # a leaf is returned here even if _cur_depth < min_depth.
    if max_depth <= 1:
        return root

    # left child
    if _cur_depth < min_depth or random.random() < child_prob:
        left_child = get_random_binary_tree(
            min_depth, max_depth - 1, num_vals,
            child_prob=child_prob, _cur_depth=_cur_depth + 1)
        root.add_left(left_child)

    # right child
    if _cur_depth < min_depth or random.random() < child_prob:
        right_child = get_random_binary_tree(
            min_depth, max_depth - 1, num_vals,
            child_prob=child_prob, _cur_depth=_cur_depth + 1)
        root.add_right(right_child)

    return root
def test_tree_rnn_var_degree():
    """Evaluate a tree whose nodes have varying numbers of children."""
    model = DummyBinaryRNN(10, 2, 2, 1, degree=2)
    emb = model.embeddings.get_value()

    # Build a chain root(0) -> child(1) -> grandchild(2) -> deepest(3),
    # alternating left/right attachment.
    root = tree_rnn.BinaryNode(0)
    child = tree_rnn.BinaryNode(1)
    grandchild = tree_rnn.BinaryNode(2)
    deepest = tree_rnn.BinaryNode(3)
    grandchild.add_left(deepest)
    child.add_right(grandchild)
    root.add_left(child)

    actual = model.evaluate(root)
    expected = emb[0] + (emb[1] + (emb[2] + emb[3])**2)
    assert_array_almost_equal(expected, actual)

    # Attach two leaves to the deepest node and evaluate again.
    leaf_left = tree_rnn.BinaryNode(5)
    leaf_right = tree_rnn.BinaryNode(6)
    deepest.add_left(leaf_left)
    deepest.add_right(leaf_right)

    actual = model.evaluate(root)
    expected = emb[0] + (emb[1] + (emb[2] + (emb[3] + emb[5] + emb[6]**2))**2)
    assert_array_almost_equal(expected, actual)

    # A single training step should run without raising.
    model.train_step(root, np.array([0]).astype(theano.config.floatX))
def test():
    """Smoke test: both GRU variants compile and complete a training step."""
    # TODO: better tests — this only checks that nothing raises.
    # Assemble a small fixed tree bottom-up:
    # 0 -L> 1 -R> 2 -L> 3 -> leaves (5, 6).
    leaf_a = tree_rnn.BinaryNode(5)
    leaf_b = tree_rnn.BinaryNode(6)
    level3 = tree_rnn.BinaryNode(3)
    level3.add_left(leaf_a)
    level3.add_right(leaf_b)
    level2 = tree_rnn.BinaryNode(2)
    level2.add_left(level3)
    level1 = tree_rnn.BinaryNode(1)
    level1.add_right(level2)
    root = tree_rnn.BinaryNode(0)
    root.add_left(level1)

    label = np.array([0]).astype(theano.config.floatX)

    # check child sum
    model = tree_gru.ChildSumTreeGRU(10, 20, 30, 1)
    model.train_step(root, label)

    # check n-ary
    model = tree_gru.NaryTreeGRU(10, 20, 30, 1)
    model.train_step(root, label)