def test_stochasticoptimization():
    # This optimization alternates between triggering and not triggering.
    last_time_replaced = [False]

    @local_optimizer([add])
    def insert_broken_add_sometimes(fgraph, node):
        if node.op == add:
            last_time_replaced[0] = not last_time_replaced[0]
            if last_time_replaced[0]:
                return [off_by_half(*node.inputs)]
        return False

    edb = EquilibriumDB()
    edb.register("insert_broken_add_sometimes", insert_broken_add_sometimes, "all")
    opt = edb.query("+all")

    a = dvector()
    b = dvector()

    with pytest.raises(StochasticOrder):
        aesara.function(
            [a, b],
            add(a, b),
            mode=DebugMode(
                optimizer=opt,
                check_c_code=True,
                stability_patience=max(2, config.DebugMode__patience),
            ),
        )
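# `off_by_half`, used above and again in test_badoptimization below, is
# defined elsewhere in the test module. A minimal sketch of what such a
# deliberately broken Op could look like; the class name and the +0.5
# offset are illustrative assumptions, not the suite's actual definition.
import numpy as np

from aesara.graph.basic import Apply
from aesara.graph.op import Op


class OffByHalf(Op):
    """An "add" whose result is wrong by 0.5, so DebugMode can flag it."""

    def make_node(self, *inputs):
        # The output has the same type as the first input (a dvector here).
        return Apply(self, list(inputs), [inputs[0].type()])

    def perform(self, node, inputs, output_storage):
        # Deliberately wrong: add an extra 0.5 to every element.
        output_storage[0][0] = np.asarray(sum(inputs) + 0.5)


off_by_half = OffByHalf()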
def test_badoptimization_opt_err():
    # This variant of test_badoptimization() replaces the working code
    # with a new apply node that will raise an error.
    @local_optimizer([add])
    def insert_bigger_b_add(fgraph, node):
        if node.op == add:
            inputs = list(node.inputs)
            if inputs[-1].owner is None:
                inputs[-1] = aet.concatenate((inputs[-1], inputs[-1]))
                return [node.op(*inputs)]
        return False

    @local_optimizer([add])
    def insert_bad_dtype(fgraph, node):
        if node.op == add:
            inputs = list(node.inputs)
            if inputs[-1].owner is None:
                return [node.outputs[0].astype("float32")]
        return False

    edb = EquilibriumDB()
    edb.register("insert_bigger_b_add", insert_bigger_b_add, "all")
    opt = edb.query("+all")

    edb2 = EquilibriumDB()
    edb2.register("insert_bad_dtype", insert_bad_dtype, "all")
    opt2 = edb2.query("+all")

    a = dvector()
    b = dvector()

    f = aesara.function([a, b], a + b, mode=DebugMode(optimizer=opt))
    with pytest.raises(ValueError, match=r"insert_bigger_b_add"):
        f([1.0, 2.0, 3.0], [2, 3, 4])

    # Test that an optimization that makes an illegal change still gets
    # the error reported from the graph.
    with pytest.raises(TypeError) as einfo:
        with config.change_flags(on_opt_error="raise"):
            f2 = aesara.function(
                [a, b],
                a + b,
                mode=DebugMode(optimizer=opt2, stability_patience=1),
            )
        f2([1.0, 2.0, 3.0], [2, 3, 4])

    # Test that we can re-raise the error with an extended message.
    with pytest.raises(TypeError):
        e = einfo.value
        new_e = e.__class__("TTT" + str(e))
        exc_type, exc_value, exc_trace = sys.exc_info()
        exc_value = new_e
        raise exc_value.with_traceback(exc_trace)
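# The final block above exercises a general pattern: extend an exception's
# message while keeping its original traceback. A sketch of that pattern in
# isolation; `do_work` is a hypothetical stand-in for any failing call.
def reraise_with_prefix(prefix, do_work):
    try:
        do_work()
    except Exception as e:
        new_e = e.__class__(prefix + str(e))
        raise new_e.with_traceback(e.__traceback__)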
def test_badoptimization():
    @local_optimizer([add])
    def insert_broken_add(fgraph, node):
        if node.op == add:
            return [off_by_half(*node.inputs)]
        return False

    edb = EquilibriumDB()
    edb.register("insert_broken_add", insert_broken_add, "all")
    opt = edb.query("+all")

    a = dvector()
    b = dvector()

    f = aesara.function([a, b], a + b, mode=DebugMode(optimizer=opt))
    with pytest.raises(BadOptimization) as einfo:
        f([1.0, 2.0, 3.0], [2, 3, 4])
    assert str(einfo.value.reason) == "insert_broken_add"
local_useless = LocalGroupDB(apply_all_opts=True, profile=True)

optdb.register(
    "useless",
    TopoDB(local_useless, failure_callback=NavigatorOptimizer.warn_inplace),
    0.6,
    "fast_run",
    "fast_compile",
)

optdb.register("merge1.1", MergeOptimizer(), 0.65, "fast_run", "fast_compile", "merge")

# Rearranges elemwise expressions.
optdb.register(
    "canonicalize",
    EquilibriumDB(ignore_newtrees=False),
    1,
    "fast_run",
    "fast_compile",
    "canonicalize_db",
)
# Register the merge optimizer in the canonicalize EquilibriumDB as a
# cleanup optimization. Without it, since the equilibrium has
# ignore_newtrees=False, we would not merge all nodes if it were
# registered as a global optimizer with final_opt=True.
# We need a new MergeOptimizer instance so that its name is not changed
# by other uses of it.
optdb["canonicalize"].register(
    "merge", MergeOptimizer(), "fast_run", "fast_compile", cleanup=True
)
new_repl = opt.transform(fgraph, op, context_name, inputs, outputs)
opt_finish = time.time()
if self.profile:
    # Accumulate the elapsed time (finish minus start, so it is positive).
    self.time_opts[opt] += opt_finish - opt_start
    self.process_count[opt] += 1
if not new_repl:
    continue
if self.profile:
    self.node_created[opt] += len(
        list(applys_between(fgraph.variables, new_repl))
    )
    self.applied_true[opt] += 1
return new_repl


gpu_optimizer = EquilibriumDB()
gpu_cut_copies = EquilibriumDB()

# Not used to build an EquilibriumOptimizer; it carries the "tracks" that
# GraphToGPUDB needs.
gpu_optimizer2 = EquilibriumDB()
gpu_seqopt = SequenceDB()

# Do not add "fast_run" to these two, as that would always enable gpuarray mode.
optdb.register(
    "gpuarray_opt",
    gpu_seqopt,
    optdb.__position__.get("add_destroy_handler", 49.5) - 1,
    "gpuarray",
)
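# The sub-databases above are composed into `gpu_seqopt`, which orders them
# as sequential passes. A sketch of what that composition could look like;
# the names, positions, and tags here are illustrative assumptions.
gpu_seqopt.register("gpuarray_local_opts", gpu_optimizer, 1, "gpuarray")
gpu_seqopt.register("gpuarray_cut_transfers", gpu_cut_copies, 2, "gpuarray")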
def test_EquilibriumDB(self):
    eq_db = EquilibriumDB()

    with pytest.raises(ValueError, match=r"`final_opt` and.*"):
        eq_db.register("d", TestOpt(), final_opt=True, cleanup=True)
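# Only the combination is rejected; as the error message suggests, either
# flag on its own is a valid registration. A small sketch:
eq_db.register("as_final", TestOpt(), final_opt=True)
eq_db.register("as_cleanup", TestOpt(), cleanup=True)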
optdb.register(
    "useless",
    TopoDB(local_useless, failure_callback=NavigatorOptimizer.warn_inplace),
    "fast_run",
    "fast_compile",
    position=0.6,
)

optdb.register(
    "merge1.1", MergeOptimizer(), "fast_run", "fast_compile", "merge", position=0.65
)

# Rearranges elemwise expressions.
optdb.register(
    "canonicalize",
    EquilibriumDB(ignore_newtrees=False),
    "fast_run",
    "fast_compile",
    "canonicalize_db",
    position=1,
)
# Register the merge optimizer in the canonicalize EquilibriumDB as a
# cleanup optimization. Without it, since the equilibrium has
# ignore_newtrees=False, we would not merge all nodes if it were
# registered as a global optimizer with final_opt=True.
# We need a new MergeOptimizer instance so that its name is not changed
# by other uses of it.
optdb["canonicalize"].register(
    "merge", MergeOptimizer(), "fast_run", "fast_compile", cleanup=True
)
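# For context, registrations like the ones above are consumed by querying
# the database. A minimal sketch, assuming the string-based query syntax
# ("+tag" to include, "-tag" to exclude):
opt = optdb.query("+fast_run", "-inplace")
# The resulting optimizer can be handed to a Mode, e.g. Mode(optimizer=opt),
# or applied directly to a FunctionGraph.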