def test_callgraph_construct():
    """Check that a CallGraph built from a one-function module mentions the
    function in its textual form and keeps the module intact."""
    module = tvm.IRModule({})
    lhs = relay.var("x", shape=(2, 3))
    rhs = relay.var("y", shape=(2, 3))
    module["g1"] = relay.Function([lhs, rhs], lhs + rhs)

    graph = relay.CallGraph(module)
    # The global function must appear in the printed call graph, and the
    # graph must still wrap a module alpha-equal to the one we built.
    assert "g1" in str(graph)
    assert relay.alpha_equal(module, graph.module)
def test_extern_ccompiler_default_ops():
    """Partition a graph where only white-listed ops go to the external C
    compiler, then verify both the partitioned structure and the numeric
    result (add -> external, log/exp/concat -> default compiler)."""

    def expected():
        # Hand-construct the module PartitionGraph + FuseOps should produce.
        mod = tvm.IRModule()
        x = relay.var("x", shape=(8, 8))
        y = relay.var("y", shape=(8, 8))
        x0 = relay.var("x0", shape=(8, 8))
        y0 = relay.var("y0", shape=(8, 8))
        add = x0 + y0

        # Subgraph offloaded to the external "ccompiler" backend.
        func = relay.Function([x0, y0], add)
        func = func.with_attr("Primitive", tvm.tir.IntImm("int32", 1))
        func = func.with_attr("Inline", tvm.tir.IntImm("int32", 1))
        func = func.with_attr("Compiler", tvm.tir.StringImm("ccompiler"))
        func = func.with_attr("ExternalSymbol", tvm.tir.StringImm("ccompiler_0"))
        glb_0 = relay.GlobalVar("ccompiler_0")
        mod[glb_0] = func
        add_call = relay.Call(glb_0, [x, y])

        # Remaining ops stay on the default compiler and get fused together.
        p0 = relay.var("p0", shape=(8, 8))
        log = relay.log(p0)
        exp = relay.exp(p0)
        concat = relay.concatenate([log, exp], axis=0)
        fused_func = relay.Function([p0], concat)
        fused_func = fused_func.with_attr("Primitive", tvm.tir.IntImm("int32", 1))
        fused_call = relay.Call(fused_func, [add_call])

        mod["main"] = relay.Function([x, y], fused_call)
        return mod

    # Build the input graph: (x + y) feeding log/exp/concat.
    x = relay.var("x", shape=(8, 8))
    y = relay.var("y", shape=(8, 8))
    add = x + y
    log = relay.log(add)
    exp = relay.exp(add)
    concat = relay.concatenate([log, exp], axis=0)

    mod = tvm.IRModule()
    mod["main"] = relay.Function([x, y], concat)
    mod = WhiteListAnnotator(["add", "subtract", "multiply"], "ccompiler")(mod)
    mod = transform.PartitionGraph()(mod)

    fused_mod = transform.FuseOps(2)(mod)
    expected_mod = expected()
    assert relay.alpha_equal(fused_mod, expected_mod)

    # Numeric check against a NumPy reference of the same computation.
    x_data = np.random.rand(8, 8).astype('float32')
    y_data = np.random.rand(8, 8).astype('float32')
    np_add = x_data + y_data
    res = np.concatenate([np.log(np_add), np.exp(np_add)])
    check_result(mod, {"x": x_data, "y": y_data}, (16, 8), res)
def test_call_globalvar_without_args():
    """RemoveUnusedFunctions must keep globals that are only reachable
    through an If expression (zero-argument calls), leaving the module
    unchanged."""

    def get_mod():
        mod = tvm.IRModule({})
        fn1 = relay.Function([], relay.const(1))
        fn2 = relay.Function([], relay.const(2))
        g1 = relay.GlobalVar('g1')
        g2 = relay.GlobalVar('g2')
        mod[g1] = fn1
        mod[g2] = fn2
        p = relay.var('p', 'bool')
        # main selects between g1/g2 at runtime, so both are "used".
        mod['main'] = relay.Function([p], relay.Call(relay.If(p, g1, g2), []))
        return mod

    mod = get_mod()
    ref_mod = get_mod()
    mod = relay.transform.RemoveUnusedFunctions()(mod)
    assert relay.alpha_equal(mod, ref_mod)
def test_constant_propagation():
    """Bind "x" to a constant before partitioning and check the constant is
    propagated into the external ("ccompiler") subgraph: the partitioned
    function takes only "y" and matches the hand-built expected module, and
    the numeric result equals log(ones + y).

    Fix over the previous version: `expected()` contained a dead local
    `x = relay.const(ones)` that was never referenced (the external call
    takes only `[y]`); it has been removed.
    """
    ones = np.ones(shape=(8, 8), dtype="float32")

    def expected():
        # Hand-construct the module PartitionGraph should produce after the
        # constant for "x" has been folded into the external function.
        mod = tvm.IRModule()
        y = relay.var("y", shape=(8, 8))
        x0 = relay.const(ones)
        y0 = relay.var("y0", shape=(8, 8))
        add = x0 + y0

        # Subgraph offloaded to the external "ccompiler" backend; only "y0"
        # remains a free parameter because "x" became a constant.
        func = relay.Function([y0], add)
        func = func.with_attr("Primitive", tvm.tir.IntImm("int32", 1))
        func = func.with_attr("Inline", tvm.tir.IntImm("int32", 1))
        func = func.with_attr("Compiler", tvm.tir.StringImm("ccompiler"))
        func = func.with_attr("ExternalSymbol", tvm.tir.StringImm("ccompiler_0"))
        glb_0 = relay.GlobalVar("ccompiler_0")
        mod[glb_0] = func

        add_call = relay.Call(glb_0, [y])
        log = relay.log(add_call)
        mod["main"] = relay.Function([y], log)
        return mod

    # Input graph: log(x + y) with "x" then bound to a constant of ones.
    x = relay.var("x", shape=(8, 8))
    y = relay.var("y", shape=(8, 8))
    add = x + y
    log = relay.log(add)
    f = relay.Function([x, y], log)
    f = relay.build_module.bind_params_by_name(f, {"x": tvm.nd.array(ones)})

    mod = tvm.IRModule()
    mod["main"] = f
    mod = WhiteListAnnotator(["add"], "ccompiler")(mod)
    mod = transform.PartitionGraph()(mod)

    expected_mod = expected()
    assert relay.alpha_equal(mod, expected_mod)

    # Numeric check against the NumPy reference log(ones + y).
    y_data = np.random.rand(8, 8).astype('float32')
    np_add = ones + y_data
    check_result(mod, {"y": y_data}, (8, 8), np.log(np_add))
def test_function_class_pass():
    """Exercise the class form of `function_pass`: a pass that swaps every
    function for a fixed replacement, checking its metadata and effect."""

    @relay.transform.function_pass(opt_level=1)
    class TestReplaceFunc:
        """Function pass that replaces any function with a preset one."""

        def __init__(self, new_func):
            self.new_func = new_func

        def transform_function(self, func, mod, ctx):
            # Ignore the incoming function entirely.
            return self.new_func

    x = relay.var("x", shape=(10, 20))
    identity_fn = relay.Function([x], x)
    log_fn = relay.Function([x], relay.log(x))

    fpass = TestReplaceFunc(identity_fn)
    # Decorator must expose pass metadata on the instance.
    assert fpass.info.opt_level == 1
    assert fpass.info.name == "TestReplaceFunc"

    mod = tvm.IRModule.from_expr(log_fn)
    mod = fpass(mod)
    # wrap in expr
    mod2 = tvm.IRModule.from_expr(identity_fn)
    assert relay.alpha_equal(mod["main"], mod2["main"])
def test_extern_dnnl():
    """Offload a two-conv2d-plus-add graph to the DNNL external codegen,
    comparing the partitioned module against a hand-built one and the
    execution result against the non-partitioned reference."""
    if not tvm.get_global_func("relay.ext.dnnl", True):
        print("skip because DNNL codegen is not available")
        return

    dtype = 'float32'
    ishape = (1, 32, 14, 14)
    w1shape = (32, 1, 3, 3)

    def expected():
        # The whole body becomes one external "dnnl" function; the shared
        # weight appears twice in the call since the subgraph has two
        # distinct weight parameters.
        data0 = relay.var("data", shape=ishape, dtype=dtype)
        input0 = relay.var("input0", shape=w1shape, dtype=dtype)
        input1 = relay.var("input1", shape=w1shape, dtype=dtype)
        depthwise_conv2d_1 = relay.nn.conv2d(data0, input0,
                                             kernel_size=(3, 3),
                                             padding=(1, 1),
                                             groups=32)
        depthwise_conv2d_2 = relay.nn.conv2d(depthwise_conv2d_1, input1,
                                             kernel_size=(3, 3),
                                             padding=(1, 1),
                                             groups=32)
        out = relay.add(depthwise_conv2d_1, depthwise_conv2d_2)

        func = relay.Function([data0, input0, input1], out)
        func = func.with_attr("Primitive", tvm.tir.IntImm("int32", 1))
        func = func.with_attr("Inline", tvm.tir.IntImm("int32", 1))
        func = func.with_attr("Compiler", tvm.tir.StringImm("dnnl"))
        func = func.with_attr("ExternalSymbol", tvm.tir.StringImm("dnnl_0"))
        glb_var = relay.GlobalVar("dnnl_0")

        mod = tvm.IRModule()
        mod[glb_var] = func
        data = relay.var("data", shape=ishape, dtype=dtype)
        weight = relay.var("input", shape=w1shape, dtype=dtype)
        mod["main"] = relay.Function([data, weight],
                                     glb_var(data, weight, weight))
        return mod

    def get_func():
        data = relay.var("data", shape=ishape, dtype=dtype)
        weight1 = relay.var("weight1", shape=w1shape, dtype=dtype)
        depthwise_conv2d_1 = relay.nn.conv2d(data, weight1,
                                             kernel_size=(3, 3),
                                             padding=(1, 1),
                                             groups=32)
        depthwise_conv2d_2 = relay.nn.conv2d(depthwise_conv2d_1, weight1,
                                             kernel_size=(3, 3),
                                             padding=(1, 1),
                                             groups=32)
        out = relay.add(depthwise_conv2d_1, depthwise_conv2d_2)
        return relay.Function([data, weight1], out)

    mod = tvm.IRModule()
    mod["main"] = WholeGraphAnnotator("dnnl").visit(get_func())
    mod = transform.PartitionGraph()(mod)
    assert relay.alpha_equal(mod, expected())

    # Reference result from the same graph without partitioning.
    ref_mod = tvm.IRModule()
    ref_mod["main"] = get_func()
    i_data = np.random.uniform(0, 1, ishape).astype(dtype)
    w1_data = np.random.uniform(0, 1, w1shape).astype(dtype)
    ref_ex = relay.create_executor("graph", mod=ref_mod, ctx=tvm.cpu())
    ref_res = ref_ex.evaluate()(i_data, w1_data)

    check_result(mod,
                 {"data": i_data, "weight1": w1_data},
                 (1, 32, 14, 14),
                 ref_res.asnumpy(),
                 tol=1e-5)