def test_explicit_bound():
    # The shared subexpressions are not let-bound; ToANormalForm makes the bindings explicit.
    x = relay.const(1)
    y = op.add(x, x)
    z = op.add(y, y)
    f = relay.Function([], op.add(z, z))
    assert Feature.fLet not in detect_feature(f)
    anf = transform.OptimizeOnExpr(f, transform.ToANormalForm())
    assert Feature.fLet in detect_feature(anf)
    check_eval(f(), 8.0)
    check_eval(anf(), 8.0)
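# `check_eval` is used throughout but not defined in this section; the real helper
# presumably lives with the other test utilities. A minimal sketch, assuming numpy
# and the same-era Relay interpreter API. The optional `args` parameter is an
# assumption made here so that both call styles seen in these tests work:
# check_eval(f(), 8.0) and check_eval(f, [], 8.0).
import numpy as np
import tvm
from tvm import relay
from tvm.relay import create_executor

def check_eval(expr, args, expected_result=None, mod=None, rtol=1e-07):
    # Accept both check_eval(call_expr, expected) and check_eval(fn, args, expected).
    if expected_result is None:
        expected_result, args = args, None
    if mod is None:
        mod = relay.Module()
    ctx = tvm.context("llvm", 0)
    intrp = create_executor(mod=mod, ctx=ctx, target="llvm")
    result = intrp.evaluate(expr)
    if args is not None:
        # A function was passed in; call the resulting closure on the arguments.
        result = result(*args)
    np.testing.assert_allclose(result.asnumpy(), expected_result, rtol=rtol)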
def test_implicit_share():
    # Lets make sharing explicit; ToGraphNormalForm drops them in favor of graph sharing.
    x = relay.Var('x')
    y = relay.Var('y')
    z = relay.Var('z')
    body = relay.Let(z, op.add(y, y), op.add(z, z))
    body = relay.Let(y, op.add(x, x), body)
    f = relay.Function([], relay.Let(x, relay.const(1), body))
    g = transform.OptimizeOnExpr(f, transform.ToGraphNormalForm())
    assert Feature.fLet in detect_feature(f)
    assert Feature.fLet not in detect_feature(g)
    check_eval(f, [], 8.0)
    check_eval(g, [], 8.0)
def test_round_trip():
    x = relay.Var('x')
    y = relay.Var('y')
    z = relay.Var('z')
    body = relay.Let(z, op.add(y, y), op.add(z, z))
    body = relay.Let(y, op.add(x, x), body)
    f = relay.Function([], relay.Let(x, relay.const(1), body))
    g = to_graph_normal_form(f)
    h = to_a_normal_form(g)
    assert Feature.fLet in detect_feature(f)
    assert Feature.fLet not in detect_feature(g)
    check_eval(f, [], 8.0)
    check_eval(g, [], 8.0)
    check_eval(h, [], 8.0)
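# The free-standing to_graph_normal_form / to_a_normal_form helpers used in
# test_round_trip are not defined in this section. A hedged sketch of an
# equivalent wrapper over the pass infrastructure used by the other tests;
# `run_opt_pass` is a hypothetical name, and the sketch assumes
# relay.Module.from_expr, transform.Sequential, and a "main" entry function.
def run_opt_pass(expr, opt_pass):
    # Wrap the expression in a module, run the pass, then unwrap the entry.
    mod = relay.Module.from_expr(expr)
    mod = transform.Sequential([opt_pass])(mod)
    entry = mod["main"]
    return entry if isinstance(expr, relay.Function) else entry.body

# For example, run_opt_pass(f, transform.ToGraphNormalForm()) plays the role of
# to_graph_normal_form(f) above.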
def test_prelude():
    p = Prelude()
    feats = detect_feature(p.mod)
    assert feats == set([
        Feature.fVar,
        Feature.fGlobalVar,
        Feature.fConstant,
        Feature.fTuple,
        Feature.fTupleGetItem,
        Feature.fFunction,
        Feature.fOp,
        Feature.fCall,
        Feature.fLet,
        Feature.fIf,
        Feature.fConstructor,
        Feature.fMatch,
    ])
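# A small illustration (not from the original file): detect_feature also works on
# a single expression, returning just the features that expression uses.
def example_detect_feature_on_expr():
    v = relay.Var("v")
    e = relay.Let(v, relay.const(1), op.add(v, v))
    fs = detect_feature(e)
    assert Feature.fLet in fs and Feature.fRefCreate not in fs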
def test_ad():
    # Differentiating x + x: the backward function uses lets and mutable references.
    shape = (10, 10)
    dtype = 'float32'
    t = relay.TensorType(shape, dtype)
    x = relay.var("x", t)
    func = relay.Function([x], x + x)
    back_func = relay.ir_pass.infer_type(gradient(func))
    feats = detect_feature(back_func)
    assert feats == set([
        Feature.fVar,
        Feature.fTuple,
        Feature.fTupleGetItem,
        Feature.fFunction,
        Feature.fOp,
        Feature.fCall,
        Feature.fLet,
        Feature.fRefCreate,
        Feature.fRefRead,
        Feature.fRefWrite,
    ])
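# A hedged usage sketch (not part of the original tests): evaluating the
# differentiated function from test_ad on concrete data. It assumes the same
# gradient / create_executor API and that the backward result unpacks as
# (forward value, tuple of input gradients); `example_eval_gradient` is a
# hypothetical name.
def example_eval_gradient():
    import numpy as np
    t = relay.TensorType((10, 10), 'float32')
    x = relay.var("x", t)
    back_func = relay.ir_pass.infer_type(gradient(relay.Function([x], x + x)))
    ex = create_executor()
    data = tvm.nd.array(np.ones((10, 10), dtype='float32'))
    forward, (grad,) = ex.evaluate(back_func)(data)
    # d(x + x)/dx == 2, so the gradient is a tensor of twos.
    np.testing.assert_allclose(forward.asnumpy(), 2 * data.asnumpy())
    np.testing.assert_allclose(grad.asnumpy(), 2 * np.ones((10, 10), dtype='float32'))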
def test_nat_add():
    # ANF conversion of a module with ADTs: 1 + 1 over Peano nats still evaluates to 2.
    mod = relay.Module()
    p = Prelude(mod)
    add_nat_definitions(p)
    nat = p.nat
    add = p.add
    s = p.s
    z = p.z
    ctx = tvm.context("llvm", 0)
    intrp = create_executor(mod=mod, ctx=ctx, target="llvm")
    assert mod[add].checked_type == relay.FuncType([nat(), nat()], nat())
    assert count(p, intrp.evaluate(add(s(z()), s(z())))) == 2
    expr = add(s(z()), s(z()))
    f = relay.GlobalVar("f")
    mod[f] = relay.Function([], expr)
    mod = transform.ToANormalForm()(mod)
    expr = mod["f"]
    assert count(p, intrp.evaluate(expr.body)) == 2
    assert Feature.fLet in detect_feature(mod[add])
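# `count` converts an evaluated Peano nat back to a Python int and is not defined
# in this section. A minimal sketch under the assumption that the interpreter
# returns ConstructorValues whose tags match the Prelude constructors;
# `count_nat` is a hypothetical stand-in for the real helper.
def count_nat(p, v):
    # z() carries no fields; s(m) stores its predecessor in fields[0].
    if v.tag == p.z.tag:
        return 0
    assert v.tag == p.s.tag
    return 1 + count_nat(p, v.fields[0])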