def test_executor_dependent(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        A = ad.Variable(name="A", shape=[3, 3])
        B = ad.Variable(name="B", shape=[3, 3])

        AA = ad.einsum('ab,ab->', A, A)
        BB = ad.einsum('ab,ab->', B, B)
        AB = ad.einsum('ab,ab->', A, B)
        out_A = AA + AB
        out_B = AB + AA

        executor = ad.Executor({out_A, out_B})
        data = gen_dict([A, B])
        A_val, = executor.run(feed_dict=data,
                              reset_graph=False,
                              out_nodes=[out_A])

        data2 = gen_dict([A])
        data2.update({B: data[B]})
        B_val, = executor.run(feed_dict=data2, out_nodes=[out_B])

        # This checks that A's value is not reused in the B_val computation.
        assert A_val != B_val

def test_einsum_multitier(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        input_nodes1, zs1 = get_tree("set1")
        input_nodes2, zs2 = get_tree("set2")
        out1 = zs1 + zs2

        input_nodes3, zs3 = get_tree("set3")
        input_nodes4, zs4 = get_tree("set4")
        out2 = zs3 + zs4

        out = ad.einsum("ij, jk->ik", out1, out2)
        input_nodes = input_nodes1 + input_nodes2 + input_nodes3 + input_nodes4
        generated_feed_dict = gen_dict(input_nodes)

        executor = ad.Executor([out])
        z_val, = executor.run(feed_dict=generated_feed_dict)

        with OutputInjectedModeP(find_topo_sort_p([PseudoNode(out)])):
            trees = find_sub_einsumtree(PseudoNode(out))
            for tree in trees:
                out_node, in_nodes = tree
                new_z = fuse_einsums(out_node.node, in_nodes)
                replace_node(out_node, new_z)

        executor = ad.Executor([out])
        z_new_val, = executor.run(feed_dict=generated_feed_dict)
        assert float_eq(z_val, z_new_val)

def test_einsum_subtree_clone(backendopt):
    """
    [Subtree clone]
    This case is rather subtle.
    We want to auto fuse

        A   B   C   D
        |    \ /    |
        |     es    |
        |    / \    |
        |   /   \   |
        es        es
          \      /
             +

    Here es is einsum.
    """
    for datatype in backendopt:
        T.set_backend(datatype)

        a = ad.Variable(name="a", shape=[3, 3])
        b = ad.Variable(name="b", shape=[3, 2])
        c = ad.Variable(name="c", shape=[2, 3])
        d = ad.Variable(name="d", shape=[3, 3])

        BC = ad.einsum('ik, kj->ij', b, c)  # 3x3
        ABC = ad.einsum('ik, kj->ij', a, BC)  # 3x3
        BCD = ad.einsum('jk, ki->ji', BC, d)  # 3x3
        out = ABC + BCD

        input_nodes = [a, b, c, d]
        generated_feed_dict = gen_dict(input_nodes)

        executor = ad.Executor([out])
        out_val, = executor.run(feed_dict=generated_feed_dict)

        with OutputInjectedModeP(find_topo_sort_p([PseudoNode(out)])):
            trees = find_sub_einsumtree(PseudoNode(out))
            assert len(trees) == 2
            for tree in trees:
                out_node, in_nodes = tree
                new_z = fuse_einsums(out_node.node, in_nodes)
                replace_node(out_node, new_z)

        new_out_val, = executor.run(feed_dict=generated_feed_dict)
        assert float_eq(out_val, new_out_val)
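
# Illustrative sketch only: the property the two fusion tests above exercise,
# shown with plain numpy rather than this repo's fuse_einsums. It assumes
# nothing about the ad API; it just demonstrates that collapsing a chain of
# pairwise einsums into a single contraction over the leaf tensors preserves
# the value. The helper name is hypothetical and not used by the tests.
def _sketch_fused_einsum_equivalence():
    import numpy as np

    rng = np.random.default_rng(0)
    a, d = rng.random((3, 3)), rng.random((3, 3))
    b, c = rng.random((3, 2)), rng.random((2, 3))

    # Unfused: the intermediate BC is materialized, then contracted twice.
    BC = np.einsum('ik,kj->ij', b, c)
    unfused = np.einsum('ik,kj->ij', a, BC) + np.einsum('jk,ki->ji', BC, d)

    # Fused: each einsum subtree becomes one contraction over its leaves.
    fused = (np.einsum('ik,kl,lj->ij', a, b, c)
             + np.einsum('jk,kl,li->ji', b, c, d))

    assert np.allclose(unfused, fused)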

def test_einsum_find_subtree_after_linearization(backendopt):
    """
    An einsum graph like

        A    B    inputs
        |\   |
        | \  |
        |  \ |
        |   C
        |  /
        | /
        output

    will produce

    An einsum graph like

        A    B    inputs
        |\   |
        | A1 |
        |  \ |
        A2   C
        |   /
        |  /
        output

    The subtree inputs must then be [A1, A2, B] rather than A, B.
    """
    for datatype in backendopt:
        T.set_backend(datatype)

        a = ad.Variable(name="a1", shape=[3, 2])
        b = ad.Variable(name="b", shape=[2, 3])
        c = ad.einsum('ik,kj->ij', a, b)
        output = ad.einsum('ik,ij->kj', a, c)

        feed_dict = gen_dict([a, b])

        executor = ad.Executor([output])
        out_val, = executor.run(feed_dict=feed_dict)

        # New graph
        linearize(output)
        tree, = find_sub_einsumtree(PseudoNode(output))
        assert len(tree[1]) == 3
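
# Illustrative sketch only (plain numpy, not this repo's linearize): when an
# input feeds the same einsum tree twice, cloning it into A1 and A2 lets the
# whole tree be expressed as a single contraction over [A1, A2, B], matching
# the subtree-input count asserted above; the numpy assert confirms the
# rewrite preserves the value. The helper name is hypothetical.
def _sketch_linearized_multiuse_input():
    import numpy as np

    rng = np.random.default_rng(0)
    a = rng.random((3, 2))
    b = rng.random((2, 3))

    c = np.einsum('ik,kj->ij', a, b)        # intermediate C = A @ B
    output = np.einsum('ik,ij->kj', a, c)   # A used a second time: A^T @ C

    a1, a2 = a, a.copy()                    # clones of the multi-use input
    fused = np.einsum('ik,il,lj->kj', a1, a2, b)

    assert np.allclose(output, fused)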

def test_einsum_multiuse(backendopt):
    """
    An einsum graph like

        A    B    inputs
        |\   |
        | \  |
        |  \ |
        |   C
        |  /
        | /
        output

    will produce

    An einsum graph like

        A    B    inputs
        |\   |
        | A1 |
        |  \ |
        A2   C
        |   /
        |  /
        output
    """
    for datatype in backendopt:
        T.set_backend(datatype)

        a = ad.Variable(name="a1", shape=[3, 2])
        b = ad.Variable(name="b", shape=[2, 3])
        c = ad.einsum('ik,kj->ij', a, b)
        output = ad.einsum('ik,ij->kj', a, c)

        feed_dict = gen_dict([a, b])

        executor = ad.Executor([output])
        out_val, = executor.run(feed_dict=feed_dict)

        linearize(output)
        executor = ad.Executor([output])
        out_new_val, = executor.run(feed_dict=feed_dict)

        assert T.array_equal(out_val, out_new_val)