def test_merge_with_weird_eq():
    """NumPy arrays don't compare equal the way other Python objects do.

    The non-scalar case exercises ``TensorConstantSignature``.
    """
    for make_value in (lambda: np.asarray(1), lambda: np.ones(5)):
        lhs = aet.constant(make_value(), name="x")
        rhs = aet.constant(make_value(), name="y")
        fgraph = FunctionGraph([lhs, rhs], [lhs + rhs])
        MergeOptimizer().optimize(fgraph)
        # Only the single addition node should remain...
        assert len(fgraph.apply_nodes) == 1
        (merged_node,) = fgraph.apply_nodes
        assert len(merged_node.inputs) == 2
        # ...and its two inputs must have been merged into one variable.
        assert merged_node.inputs[0] is merged_node.inputs[1]
def test_merge_outputs(self):
    """Identical output expressions are merged into a single variable."""
    x, y, z = inputs()
    first = op3(op2(x, y))
    second = op3(op2(x, y))
    fgraph = FunctionGraph([x, y, z], [first, second], clone=False)
    MergeOptimizer().optimize(fgraph)
    # Both outputs must now be the very same variable object.
    assert fgraph.outputs[0] is fgraph.outputs[1]
def test_merge_outputs(self):
    """Merged duplicate outputs print as a shared ``*1`` reference."""
    x, y, z = inputs()
    out_a = op3(op2(x, y))
    out_b = op3(op2(x, y))
    fgraph = FunctionGraph([x, y, z], [out_a, out_b])
    MergeOptimizer().optimize(fgraph)
    assert str(fgraph) == "FunctionGraph(*1 -> Op3(Op2(x, y)), *1)"
def test_both_assert_merge_2_reverse(self):
    # Test case "test_both_assert_merge_2" but in reverse order
    x1 = matrix("x1")
    x2 = matrix("x2")
    x3 = matrix("x3")
    # One addend guards `x2` with an Assert, the other guards `x1`; after the
    # merge a single `dot` should survive, carrying both `Assert`s.
    e = dot(x1, assert_op(x2, (x2 > x3).all())) + dot(
        assert_op(x1, (x1 > x3).all()), x2
    )
    g = FunctionGraph([x1, x2, x3], [e])
    MergeOptimizer().optimize(g)
    strg = aesara.printing.debugprint(g, file="str")
    # NOTE(review): the reference text below was reconstructed from a
    # whitespace-mangled source; confirm exact spacing/line breaks against
    # real `debugprint` output.
    strref = """Elemwise{add,no_inplace} [id A] '' 7
 |dot [id B] '' 6
 | |Assert{msg='Aesara Assert failed!'} [id C] '' 5
 | | |x1 [id D]
 | | |All [id E] '' 3
 | | |Elemwise{gt,no_inplace} [id F] '' 1
 | | |x1 [id D]
 | | |x3 [id G]
 | |Assert{msg='Aesara Assert failed!'} [id H] '' 4
 | |x2 [id I]
 | |All [id J] '' 2
 | |Elemwise{gt,no_inplace} [id K] '' 0
 | |x2 [id I]
 | |x3 [id G]
 |dot [id B] '' 6
"""
    print(strg)
    assert strg == strref, (strg, strref)
def __init__(
    self,
    inputs,
    outputs,
    target_accept=0.8,
    draws=1000,
    tune=1000,
    chains=4,
    seed=None,
    progress_bar=True,
):
    """Store the sampler configuration and build the merged function graph.

    Parameters
    ----------
    inputs, outputs
        Aesara variables defining the computation.  They are cloned
        (without copying the input variables) and collected into a
        ``FunctionGraph`` on which the merge optimization is run.
    target_accept, draws, tune, chains, seed, progress_bar
        Sampler settings stored as-is on the instance (presumably consumed
        by the sampling backend — confirm against the subclass/caller).
    """
    self.draws = draws
    self.tune = tune
    self.chains = chains
    self.target_accept = target_accept
    self.progress_bar = progress_bar
    self.seed = seed
    # Clone so later graph rewrites cannot mutate the caller's variables;
    # `copy_inputs=False` keeps the original input variables as graph roots.
    self.inputs, self.outputs = clone(inputs, outputs, copy_inputs=False)
    # Generator expressions instead of `tuple([...])`; also avoids
    # shadowing the `input` builtin.
    self.inputs_type = tuple(inp.type for inp in inputs)
    self.outputs_type = tuple(out.type for out in outputs)
    self.nin = len(inputs)
    self.nout = len(outputs)
    # Count shared variables directly instead of building a throwaway list.
    self.nshared = sum(isinstance(v, SharedVariable) for v in inputs)
    # Broadcast pattern for the (chains, draws) sample dimensions.
    self.samples_bcast = [self.chains == 1, self.draws == 1]
    self.fgraph = FunctionGraph(self.inputs, self.outputs, clone=False)
    MergeOptimizer().optimize(self.fgraph)
    super().__init__()
def test_deep_merge(self):
    """A sub-graph duplicated at depth is merged into one shared node."""
    x, y, z = inputs()
    expr = op1(op3(op2(x, y), z), op4(op3(op2(x, y), z)))
    fgraph = FunctionGraph([x, y, z], [expr], clone=False)
    MergeOptimizer().optimize(fgraph)
    first_in, second_in = fgraph.outputs[0].owner.inputs
    # The argument of Op4 must now be the very same Op3 result.
    assert second_in.owner.inputs[0] is first_in
def test_straightforward(self):
    """Duplicate sub-expressions merge; structurally distinct ones don't."""
    x, y, z = inputs()
    expr = op1(op2(x, y), op2(x, y), op2(x, z))
    fgraph = FunctionGraph([x, y, z], [expr], clone=False)
    MergeOptimizer().optimize(fgraph)
    in_a, in_b, in_c = fgraph.outputs[0].owner.inputs
    # The two `Op2(x, y)` copies collapse; `Op2(x, z)` stays separate.
    assert in_a is in_b
    assert in_a is not in_c
def test_identical_constant_args(self):
    """Two equal constants fed to one node collapse into a single input."""
    x = MyVariable("x")
    y = Constant(MyType(), 2, name="y")
    z = Constant(MyType(), 2, name="z")
    with config.change_flags(compute_test_value="off"):
        e1 = op1(y, z)
    g = FunctionGraph([x, y, z], [e1])
    MergeOptimizer().optimize(g)
    # Either constant may survive the merge.
    assert str(g) in ("FunctionGraph(Op1(y, y))", "FunctionGraph(Op1(z, z))")
def test_constant_merging(self):
    """Equal constants (`y` and `z`) make all three `Op2` nodes mergeable."""
    x = MyVariable("x")
    y = Constant(MyType(), 2, name="y")
    z = Constant(MyType(), 2, name="z")
    expr = op1(op2(x, y), op2(x, y), op2(x, z))
    fgraph = FunctionGraph([x, y, z], [expr])
    MergeOptimizer().optimize(fgraph)
    printed = str(fgraph)
    # Either of the two equal constants may be the one that survives.
    assert printed in (
        "FunctionGraph(Op1(*1 -> Op2(x, y), *1, *1))",
        "FunctionGraph(Op1(*1 -> Op2(x, z), *1, *1))",
    )
def test_constant_merging(self):
    """Equal constants let all three `Op2` applications merge into one."""
    x = MyVariable("x")
    y = Constant(MyType(), 2, name="y")
    z = Constant(MyType(), 2, name="z")
    expr = op1(op2(x, y), op2(x, y), op2(x, z))
    fgraph = FunctionGraph([x, y, z], [expr], clone=False)
    MergeOptimizer().optimize(fgraph)
    merged_inputs = fgraph.outputs[0].owner.inputs
    # All three inputs must collapse to one and the same variable.
    assert all(v is merged_inputs[0] for v in merged_inputs[1:])
def test_identical_constant_args(self):
    """Two equal constant arguments are merged into a single input."""
    x = MyVariable("x")
    y = Constant(MyType(), 2, name="y")
    z = Constant(MyType(), 2, name="z")
    out = op1(y, z)
    fgraph = FunctionGraph([x, y, z], [out], clone=False)
    MergeOptimizer().optimize(fgraph)
    apply_node = fgraph.outputs[0].owner
    assert apply_node.op == op1
    assert apply_node.inputs[0] is apply_node.inputs[1]
def test_merge_noinput(self):
    """Identical zero-input `Apply` nodes merge; different params don't."""
    out_a = NoInputOp(param=0)()
    out_b = NoInputOp(param=0)()
    out_c = NoInputOp(param=1)()
    fgraph = FunctionGraph([], [out_a, out_b, out_c], clone=False)
    MergeOptimizer().optimize(fgraph)
    merged = fgraph.outputs
    # Same `param` -> same node; different `param` -> kept apart.
    assert merged[0] is merged[1]
    assert merged[0] is not merged[2]
def test_merge_noinput(self):
    """After merging, only two distinct `NoInputOp` nodes remain."""
    out_a = NoInputOp(param=0)()
    out_b = NoInputOp(param=0)()
    out_c = NoInputOp(param=1)()
    fgraph = FunctionGraph([], [out_a, out_b, out_c])
    MergeOptimizer().optimize(fgraph)
    remaining = [
        node for node in fgraph.apply_nodes if isinstance(node.op, NoInputOp)
    ]
    assert len(remaining) == 2, fgraph.apply_nodes
def test_multiple_merges(self):
    """Several overlapping duplicated sub-graphs are all merged at once."""
    x, y, z = inputs()
    sub_a = op1(x, y)
    sub_b = op2(op3(x), y, z)
    expr = op1(sub_a, op4(sub_b, sub_a), op1(sub_b))
    fgraph = FunctionGraph([x, y, z], [expr])
    MergeOptimizer().optimize(fgraph)
    printed = str(fgraph)
    # note: graph.as_string can only produce the following two possibilities,
    # but if the implementation was to change there are 6 other acceptable
    # answers.
    assert printed in (
        "FunctionGraph(Op1(*1 -> Op1(x, y), Op4(*2 -> Op2(Op3(x), y, z), *1), Op1(*2)))",
        "FunctionGraph(Op1(*2 -> Op1(x, y), Op4(*1 -> Op2(Op3(x), y, z), *2), Op1(*1)))",
    )
def test_one_assert_merge(self):
    """Merge two `dot`s where only one is guarded by an `Assert`."""
    x1 = matrix("x1")
    x2 = matrix("x2")
    guarded = dot(assert_op(x1, (x1 > x2).all()), x2)
    expr = dot(x1, x2) + guarded
    fgraph = FunctionGraph([x1, x2], [expr], clone=False)
    MergeOptimizer().optimize(fgraph)
    add_node = fgraph.outputs[0].owner
    assert add_node.op == add
    lhs, rhs = add_node.inputs
    assert isinstance(lhs.owner.op, Dot)
    # The surviving `dot` must take the asserted `x1` as its first operand.
    expected_assert = assert_op(x1, (x1 > x2).all())
    assert equal_computations([lhs.owner.inputs[0]], [expected_assert])
    # Both addends collapse to the same merged node.
    assert lhs is rhs
def test_both_assert_merge_identical(self):
    """Merge two nodes with identical `Assert`s on the same input."""
    x1 = matrix("x1")
    x2 = matrix("x2")
    expr = dot(assert_op(x1, (x1 > x2).all()), x2) + dot(
        assert_op(x1, (x1 > x2).all()), x2
    )
    fgraph = FunctionGraph([x1, x2], [expr], clone=False)
    MergeOptimizer().optimize(fgraph)
    add_node = fgraph.outputs[0].owner
    assert add_node.op == add
    lhs, rhs = add_node.inputs
    assert isinstance(lhs.owner.op, Dot)
    # The surviving `Assert` must match the original condition.
    assert equal_computations(
        [lhs.owner.inputs[0]], [assert_op(x1, (x1 > x2).all())]
    )
    # Both addends collapse to the same merged node.
    assert lhs is rhs
def test_both_assert_merge_1(self):
    # Merge two nodes, both have assert on the same node
    # with different conditions.
    x1 = matrix("x1")
    x2 = matrix("x2")
    x3 = matrix("x3")
    # Both addends guard `x1`, but against `x3` in one and `x2` in the other;
    # merging should keep one `dot` whose `Assert` carries both conditions.
    e = dot(assert_op(x1, (x1 > x3).all()), x2) + dot(
        assert_op(x1, (x1 > x2).all()), x2
    )
    g = FunctionGraph([x1, x2, x3], [e])
    MergeOptimizer().optimize(g)
    strg = aesara.printing.debugprint(g, file="str")
    # NOTE(review): the reference texts below were reconstructed from a
    # whitespace-mangled source; confirm exact spacing/line breaks against
    # real `debugprint` output.
    strref1 = """Elemwise{add,no_inplace} [id A] '' 6
 |dot [id B] '' 5
 | |Assert{msg='Aesara Assert failed!'} [id C] '' 4
 | | |x1 [id D]
 | | |All [id E] '' 3
 | | | |Elemwise{gt,no_inplace} [id F] '' 1
 | | | |x1 [id D]
 | | | |x3 [id G]
 | | |All [id H] '' 2
 | | |Elemwise{gt,no_inplace} [id I] '' 0
 | | |x1 [id D]
 | | |x2 [id J]
 | |x2 [id J]
 |dot [id B] '' 5
"""
    strref2 = """Elemwise{add,no_inplace} [id A] '' 6
 |dot [id B] '' 5
 | |Assert{msg='Aesara Assert failed!'} [id C] '' 4
 | | |x1 [id D]
 | | |All [id E] '' 3
 | | | |Elemwise{gt,no_inplace} [id F] '' 1
 | | | |x1 [id D]
 | | | |x2 [id G]
 | | |All [id H] '' 2
 | | |Elemwise{gt,no_inplace} [id I] '' 0
 | | |x1 [id D]
 | | |x3 [id J]
 | |x2 [id G]
 |dot [id B] '' 5
"""
    # print(strg)
    # The two `Assert` conditions may appear in either order.
    assert strg == strref1 or strg == strref2, (strg, strref1, strref2)
def test_one_assert_merge(self):
    # Merge two nodes, one has assert, the other not.
    x1 = matrix("x1")
    x2 = matrix("x2")
    e = dot(x1, x2) + dot(assert_op(x1, (x1 > x2).all()), x2)
    g = FunctionGraph([x1, x2], [e])
    MergeOptimizer().optimize(g)
    strg = aesara.printing.debugprint(g, file="str")
    # NOTE(review): the reference text below was reconstructed from a
    # whitespace-mangled source; confirm exact spacing/line breaks against
    # real `debugprint` output.
    strref = """Elemwise{add,no_inplace} [id A] '' 4
 |dot [id B] '' 3
 | |Assert{msg='Aesara Assert failed!'} [id C] '' 2
 | | |x1 [id D]
 | | |All [id E] '' 1
 | | |Elemwise{gt,no_inplace} [id F] '' 0
 | | |x1 [id D]
 | | |x2 [id G]
 | |x2 [id G]
 |dot [id B] '' 3
"""
    assert strg == strref, (strg, strref)
def test_both_assert_merge_2_reverse(self):
    """Like ``test_both_assert_merge_2`` but with the addends swapped."""
    x1 = matrix("x1")
    x2 = matrix("x2")
    x3 = matrix("x3")
    expr = dot(x1, assert_op(x2, (x2 > x3).all())) + dot(
        assert_op(x1, (x1 > x3).all()), x2
    )
    fgraph = FunctionGraph([x1, x2, x3], [expr], clone=False)
    MergeOptimizer().optimize(fgraph)
    add_node = fgraph.outputs[0].owner
    assert add_node.op == add
    lhs, rhs = add_node.inputs
    assert isinstance(lhs.owner.op, Dot)
    # The merged `dot` keeps an `Assert` guarding each operand.
    first_arg, second_arg = lhs.owner.inputs
    assert equal_computations([first_arg], [assert_op(x2, (x2 > x3).all())])
    assert equal_computations([second_arg], [assert_op(x1, (x1 > x3).all())])
    # Both addends collapse to the same merged node.
    assert lhs is rhs
def is_same_graph_with_merge(var1, var2, givens=None):
    """
    Merge-based implementation of `aesara.graph.basic.is_same_graph`.

    See help on `aesara.graph.basic.is_same_graph` for additional
    documentation.
    """
    from aesara.graph.opt import MergeOptimizer

    givens = {} if givens is None else givens
    # Work on deep copies: the `MergeOptimizer` mutates the graphs it visits.
    var1, var2, givens = copy.deepcopy([var1, var2, givens])
    outputs = [var1, var2]
    # `clone=False`: the deepcopy above already isolated the graph, and
    # cloning again would break the identity mapping used by `givens`.
    fgraph = aesara.graph.fg.FunctionGraph(
        list(graph_inputs(outputs)), outputs, clone=False
    )
    # Apply the requested substitutions before merging.
    for old_var, new_var in givens.items():
        fgraph.replace(old_var, new_var)
    MergeOptimizer().optimize(fgraph)
    # After merging, equivalent computations share a single owner.  Outputs
    # present in `givens` are mapped to their replacements first.
    out1, out2 = (givens.get(v, v) for v in fgraph.outputs)
    if out1.owner is None and out2.owner is None:
        # Two single-Variable graphs are equal iff they are the same Variable.
        return out1 == out2
    return out1.owner is out2.owner
def test_both_assert_merge_identical(self):
    # Merge two nodes, both have assert on the same node
    # with the same conditions.
    x1 = matrix("x1")
    x2 = matrix("x2")
    e = dot(assert_op(x1, (x1 > x2).all()), x2) + dot(
        assert_op(x1, (x1 > x2).all()), x2
    )
    g = FunctionGraph([x1, x2], [e])
    MergeOptimizer().optimize(g)
    strg = aesara.printing.debugprint(g, file="str")
    # NOTE(review): the reference text below was reconstructed from a
    # whitespace-mangled source; confirm exact spacing/line breaks against
    # real `debugprint` output.
    strref = """Elemwise{add,no_inplace} [id A] '' 4
 |dot [id B] '' 3
 | |Assert{msg='Aesara Assert failed!'} [id C] '' 2
 | | |x1 [id D]
 | | |All [id E] '' 1
 | | |Elemwise{gt,no_inplace} [id F] '' 0
 | | |x1 [id D]
 | | |x2 [id G]
 | |x2 [id G]
 |dot [id B] '' 3
"""
    # print(strg)
    assert strg == strref, (strg, strref)
Toss it into the optimization pipeline to see the state of things at any
    given point.

    """

    def __init__(self, header):
        # Label printed before the graph dump, to identify the pipeline stage.
        self.header = header

    def apply(self, fgraph):
        # Local import; presumably avoids a circular import at module load
        # time — confirm against the module's import graph.
        import aesara.printing

        print("PrintCurrentFunctionGraph:", self.header)
        aesara.printing.debugprint(fgraph.outputs)


optdb = SequenceDB()

# Run the merge optimizer first (position 0) in the standard pipeline.
optdb.register("merge1", MergeOptimizer(), 0, "fast_run", "fast_compile", "merge")

# After scan1 opt at 0.5 and before ShapeOpt at 1
# This should only remove nodes.
# The opt should not do anything that need shape inference.
# New nodes that don't have infer_shape need that the original node
# also don't have infer_shape
local_useless = LocalGroupDB(apply_all_opts=True, profile=True)
optdb.register(
    "useless",
    TopoDB(local_useless, failure_callback=NavigatorOptimizer.warn_inplace),
    0.6,
    "fast_run",
    "fast_compile",
)
def test_straightforward(self):
    """The duplicated `Op2(x, y)` prints once and is referenced as `*1`."""
    x, y, z = inputs()
    expr = op1(op2(x, y), op2(x, y), op2(x, z))
    fgraph = FunctionGraph([x, y, z], [expr])
    MergeOptimizer().optimize(fgraph)
    assert str(fgraph) == "FunctionGraph(Op1(*1 -> Op2(x, y), *1, Op2(x, z)))"
def test_deep_merge(self):
    """A duplicated nested sub-graph prints once and is shared as `*1`."""
    x, y, z = inputs()
    expr = op1(op3(op2(x, y), z), op4(op3(op2(x, y), z)))
    fgraph = FunctionGraph([x, y, z], [expr])
    MergeOptimizer().optimize(fgraph)
    assert str(fgraph) == "FunctionGraph(Op1(*1 -> Op3(Op2(x, y), z), Op4(*1)))"
def test_no_merge(self):
    """Structurally different sub-graphs must be left untouched."""
    x, y, z = inputs()
    expr = op1(op3(op2(x, y)), op3(op2(y, x)))
    fgraph = FunctionGraph([x, y, z], [expr])
    MergeOptimizer().optimize(fgraph)
    # `Op2(x, y)` and `Op2(y, x)` differ, so nothing may be merged.
    assert str(fgraph) == "FunctionGraph(Op1(Op3(Op2(x, y)), Op3(Op2(y, x))))"
def test_no_merge(self):
    """The optimizer must not touch a graph with no duplicate sub-graphs."""
    x, y, z = inputs()
    expr = op1(op3(op2(x, y)), op3(op2(y, x)))
    fgraph = FunctionGraph([x, y, z], [expr])
    # NOTE(review): `AssertNoChanges` presumably fails if the graph is
    # modified — confirm against the feature's definition.
    fgraph.attach_feature(AssertNoChanges())
    MergeOptimizer().optimize(fgraph)
""" def __init__(self, header): self.header = header def apply(self, fgraph): import aesara.printing print("PrintCurrentFunctionGraph:", self.header) aesara.printing.debugprint(fgraph.outputs) optdb = SequenceDB() optdb.register( "merge1", MergeOptimizer(), "fast_run", "fast_compile", "merge", position=0 ) # After scan1 opt at 0.5 and before ShapeOpt at 1 # This should only remove nodes. # The opt should not do anything that need shape inference. # New nodes that don't have infer_shape need that the original node # also don't have infer_shape local_useless = LocalGroupDB(apply_all_opts=True, profile=True) optdb.register( "useless", TopoDB(local_useless, failure_callback=NavigatorOptimizer.warn_inplace), "fast_run", "fast_compile", position=0.6, )