示例#1
0
def test_bound_vars():
    """Check that bound_vars reports exactly the variables bound by
    function parameters and let bindings, in order of appearance."""
    x, y, z, a = (relay.Var(name) for name in ("x", "y", "z", "a"))

    # Parameters x, y, z are bound by the function; a is bound by the let.
    f1 = relay.Function([x, y, z], relay.Let(a, x, relay.Tuple([])))
    assert_vars_match(bound_vars(f1), [x, y, z, a])

    # A bare tuple mentions variables but binds none of them.
    tup = relay.Tuple([x, y, z, a])
    assert len(bound_vars(tup)) == 0

    # z and a occur free in the body, so only the parameters are bound.
    f2 = relay.Function([x, y], relay.Tuple([x, y, z, a]))
    assert_vars_match(bound_vars(f2), [x, y])
 def expected():
     """Build the reference expression: an If on boolean input x whose
     true branch let-binds y2 = y + y before another (constant-condition)
     If, and whose false branch selects between the constants 2 and 1."""
     # Scalar boolean selector and scalar float input.
     x = relay.var("x", shape=(), dtype="bool")
     y = relay.var("y", shape=(), dtype="float32")
     cond_t = relay.const(True)
     cond_f = relay.const(False)
     one = relay.const(1, dtype="float32")
     two = relay.const(2, dtype="float32")
     three = relay.const(3, dtype="float32")
     y2 = relay.var("y2")
     # True branch: if True then y2 else 3 + y2, with y2 = y + y bound
     # by the surrounding Let.
     true_branch = relay.If(cond_t, y2, relay.add(three, y2))
     true_branch = relay.Let(y2, relay.add(y, y), true_branch)
     # False branch: if False then 2 else 1.
     false_branch = relay.If(cond_f, two, one)
     body = relay.If(x, true_branch, false_branch)
     return body
示例#3
0
def test_let():
    """Construct a Let node directly and check its accessor fields,
    printability, and JSON round-tripping."""
    lv = relay.Var('x')
    arr = tvm.nd.array(10)
    value = relay.Constant(arr)
    # I would prefer that the order of arguments
    # matches syntax let x: t = v in b
    let = relay.Let(lv, value, lv)
    assert let.var == lv
    assert let.value == value
    assert let.body == lv
    # A freshly constructed node carries no source span.
    # Fixed: compare to the None singleton with `is`, not `==` (PEP 8).
    assert let.span is None
    str(let)  # ensure the node is printable
    check_json_roundtrip(let)
def test_ref():
    """Differentiate a function that creates, writes, and reads a mutable
    reference; the net computation is x + x, so the gradient is 2."""
    shape = (10, 10)
    dtype = 'float32'
    t = relay.TensorType(shape, dtype)
    x = relay.var("x", t)
    r = relay.Var("r")
    u = relay.Var("u")
    # Final result: read back whatever was last written into the ref.
    body = relay.RefRead(r)
    # Overwrite the ref with (ref + ref); u is a throwaway binding so the
    # write executes before the final read.
    body = relay.Let(u, relay.RefWrite(r,
                                       relay.RefRead(r) + relay.RefRead(r)),
                     body)
    # The ref is initialized with the function input x.
    body = relay.Let(r, relay.RefCreate(x), body)
    func = relay.Function([x], body)
    func = run_infer_type(func)
    back_func = run_infer_type(gradient(func))
    # Gradient transform returns (forward value, tuple of input gradients).
    assert back_func.checked_type == relay.FuncType(
        [t], relay.TupleType([t, relay.TupleType([t])]))
    x_nd = rand(dtype, *shape)
    ex = create_executor()
    forward, (grad_x, ) = ex.evaluate(back_func)(x_nd)
    # forward = x + x = 2x, and d(2x)/dx = 2 everywhere.
    tvm.testing.assert_allclose(forward.asnumpy(), 2 * x_nd.asnumpy())
    tvm.testing.assert_allclose(grad_x.asnumpy(),
                                2 * np.ones_like(grad_x.asnumpy()))
示例#5
0
def test_order():
    """ToANormalForm should bind subexpressions in evaluation order:
    each operand gets its own let before the ops that consume it."""
    z = relay.const(3)
    y = relay.const(2)
    x = relay.const(1)
    val = x + y * z
    check_eval(val, 7.0)
    anf = transform.OptimizeOnExpr(
        val, [transform.ToANormalForm(),
              transform.InferType()])
    a = relay.Var('a', relay.IncompleteType())
    b = relay.Var('b', relay.IncompleteType())
    c = relay.Var('c', relay.IncompleteType())
    d = relay.Var('d', relay.IncompleteType())
    e = relay.Var('e', relay.IncompleteType())
    # Expected ANF, built inside-out:
    #   let a = 1; let b = 2; let c = 3; let d = b * c; let e = a + d; e
    expected_output = e
    expected_output = relay.Let(e, a + d, expected_output)
    expected_output = relay.Let(d, b * c, expected_output)
    expected_output = relay.Let(c, z, expected_output)
    expected_output = relay.Let(b, y, expected_output)
    expected_output = relay.Let(a, x, expected_output)
    expected_output = transform.OptimizeOnExpr(expected_output,
                                               transform.InferType())
    assert alpha_equal(anf, expected_output)
示例#6
0
def test_match():
    """Parse `match` (complete) and `match?` (incomplete) expressions and
    check them against a hand-built list-length function over the List ADT."""
    # pair each match keyword with whether it specifies a complete match or not
    match_keywords = [("match", True), ("match?", False)]
    for (match_keyword, is_complete) in match_keywords:
        mod = tvm.IRModule()

        # Define List[A] = Cons(A, List[A]) | Nil in the module.
        list_var = relay.GlobalTypeVar("List")
        typ_var = relay.TypeVar("A")
        cons_constructor = relay.Constructor(
            "Cons", [typ_var, list_var(typ_var)], list_var)
        nil_constructor = relay.Constructor("Nil", [], list_var)
        list_def = relay.TypeData(list_var, [typ_var],
                                  [cons_constructor, nil_constructor])
        mod[list_var] = list_def

        length_var = relay.GlobalVar("length")
        typ_var = relay.TypeVar("A")
        input_type = list_var(typ_var)
        input_var = relay.Var("xs", input_type)
        rest_var = relay.Var("rest")
        # Cons clause body: evaluate unit for effect (the `();` in the
        # text), then return 1 + length(rest).
        cons_case = relay.Let(
            relay.var("", type_annotation=None), UNIT,
            relay.add(relay.const(1), relay.Call(length_var, [rest_var])))
        body = relay.Match(input_var, [
            relay.Clause(
                relay.PatternConstructor(
                    cons_constructor,
                    [relay.PatternWildcard(),
                     relay.PatternVar(rest_var)]), cons_case),
            relay.Clause(relay.PatternConstructor(nil_constructor, []),
                         relay.const(0))
        ],
                           complete=is_complete)
        length_func = relay.Function([input_var], body, int32, [typ_var])
        mod[length_var] = length_func

        # The parsed text must be alpha-equal to the module built above.
        assert_parse_module_as(
            """
            %s

            def @length[A](%%xs: List[A]) -> int32 {
              %s (%%xs) {
                Cons(_, %%rest : List[A]) => {
                  ();
                  1 + @length(%%rest)
                },
                Nil => 0,
              }
            }
            """ % (LIST_DEFN, match_keyword), mod)
示例#7
0
def test_function_invalidate():
    """Partial evaluation must not assume a closure's result is stable:
    `fetch` reads a ref that is overwritten (0 -> 1) after the closure is
    bound, so both original and partially evaluated programs must yield 1."""
    shape = ()
    dtype = "bool"
    t = relay.TensorType(shape, dtype)
    d = relay.Var("d", t)
    r = relay.Var("r")
    # A closure whose result depends on the current contents of ref r.
    fetch = relay.Function([], relay.RefRead(r))
    fet = relay.Var("fetch")
    fet_obscured = relay.Var("fetch_obscured")
    u = relay.Var("u")
    # Both If branches call the (obscured) closure after the write below.
    body = relay.If(d, fet_obscured(), fet_obscured())
    body = relay.Let(u, relay.RefWrite(r, relay.const(1)), body)
    # Route fet through an If so the callee is not statically obvious.
    body = relay.Let(fet_obscured, relay.If(d, fet, fet), body)
    body = relay.Let(fet, fetch, body)
    # The ref starts at 0 and is overwritten with 1 before the calls.
    body = relay.Let(r, relay.RefCreate(relay.const(0)), body)
    f = relay.Function([d], body)
    f = infer_type(f)
    pe_f = infer_type(partial_evaluate(f))
    ex = create_executor()
    f_res = ex.evaluate(f)(relay.const(True))
    pe_f_res = ex.evaluate(pe_f)(relay.const(True))
    np.testing.assert_allclose(f_res.asnumpy(), np.ones_like(f_res.asnumpy()))
    np.testing.assert_allclose(pe_f_res.asnumpy(), np.ones_like(pe_f_res.asnumpy()))
示例#8
0
def test_ref_execution_order():
    """Ref writes interleaved with reads must execute strictly left to
    right, including across call-argument evaluation."""
    # we want to have effects execute from left to right
    x = relay.Var('x')
    y = relay.Var('y')
    f = relay.Var('f')
    r = relay.Var('r')

    # f ignores its second argument, so only evaluation order of the
    # arguments (and their writes to r) is observable.
    expr = relay.Let(
        f,
        relay.Function([x, y], x),
        # r = 1
        relay.Let(
            r,
            relay.RefCreate(relay.const(1)),
            relay.Tuple([
                # should be 1
                relay.RefRead(r),
                # set r to 2 and read back
                seq(relay.RefWrite(r, relay.const(2)), relay.RefRead(r)),
                # set r to 3 and read back
                seq(relay.RefWrite(r, relay.const(3)), relay.RefRead(r)),
                # set r to 4 and read as first arg to f
                # set r to 5 and read as second arg to f
                # f should evaluate to 4
                f(seq(relay.RefWrite(r, relay.const(4)), relay.RefRead(r)),
                  seq(relay.RefWrite(r, relay.const(5)), relay.RefRead(r))),
                # read back 5
                relay.RefRead(r)
            ])))

    # Each tuple field must have observed the writes preceding it.
    tup_val = run_as_python(expr)
    assert_adt_len(tup_val, 5)
    assert_tensor_value(tup_val[0], 1)
    assert_tensor_value(tup_val[1], 2)
    assert_tensor_value(tup_val[2], 3)
    assert_tensor_value(tup_val[3], 4)
    assert_tensor_value(tup_val[4], 5)
def test_match_pattern():
    """Matching a constructor pattern should bind the pattern variable
    to the constructor's payload (here, the boxed constant 1)."""
    mod = tvm.IRModule()
    box, box_ctor = init_box_adt(mod)
    boxed = relay.Var("v")
    contents = relay.Var("w")
    # Single clause: Box(w) => w, i.e. unwrap the box.
    unwrap_clause = relay.Clause(
        relay.PatternConstructor(box_ctor, [relay.PatternVar(contents)]),
        contents)
    match = relay.Let(
        boxed,
        box_ctor(relay.const(1)),
        relay.Match(boxed, [unwrap_clause]),
    )
    match_val = run_as_python(match, mod)
    assert_tensor_value(match_val, 1)
示例#10
0
def test_arbitrary_let_nesting():
    """Lets may appear in arbitrary expression positions — tuple fields
    and call arguments — which is tricky in Python but natural in Relay."""
    mod = tvm.IRModule()
    p = Prelude(mod)
    x_var = relay.Var("x")
    ref_var = relay.Var("r")
    y_var = relay.Var("y")
    z_var = relay.Var("z")

    # Field 0: let-bind a tuple, then project out element 1 (== 2).
    first = relay.Let(
        x_var,
        relay.Tuple([relay.const(1), relay.const(2)]),
        relay.TupleGetItem(x_var, 1))
    # Field 1: create a ref holding 1, overwrite with 3, read it back.
    second = relay.Let(
        ref_var,
        relay.RefCreate(relay.const(1)),
        seq(relay.RefWrite(ref_var, relay.const(3)), relay.RefRead(ref_var)),
    )
    # Field 2: a let nested inside a call argument: id(let z = 4 in z).
    third = relay.Let(y_var, p.id(relay.Let(z_var, relay.const(4), z_var)), y_var)

    tup_val = run_as_python(relay.Tuple([first, second, third]), mod)
    assert_adt_len(tup_val, 3)
    assert_tensor_value(tup_val[0], 2)
    assert_tensor_value(tup_val[1], 3)
    assert_tensor_value(tup_val[2], 4)
示例#11
0
def test_tuple_type():
    """Tuple type annotations of arity 0, 1, and 2 on let bindings parse
    into the corresponding TupleTypes (note the trailing comma for arity 1)."""
    # Empty tuple type: ().
    assert alpha_equal(
        relay.fromtext(
        """
        let %_: () = (); ()
        """),
        relay.Let(
            relay.Var("_", relay.TupleType([])),
            UNIT,
            UNIT
        )
    )

    # Singleton tuple type: (int32,).
    assert alpha_equal(
        relay.fromtext(
        """
        let %_: (int32,) = (0,); ()
        """),
        relay.Let(
            relay.Var("_", relay.TupleType([int32])),
            relay.Tuple([relay.const(0)]),
            UNIT
        )
    )

    # Pair type: (int32, int32).
    assert alpha_equal(
        relay.fromtext(
        """
        let %_: (int32, int32) = (0, 1); ()
        """),
        relay.Let(
            relay.Var("_", relay.TupleType([int32, int32])),
            relay.Tuple([relay.const(0), relay.const(1)]),
            UNIT
        )
    )
示例#12
0
def test_tensor_type():
    """Tensor type annotations on let bindings parse into the expected
    TensorType shapes, including an Any (`?`) dimension."""
    # (program text, expected shape) pairs, checked in order.
    cases = [
        ("let %_ : Tensor[(), float32] = (); ()", ()),
        ("let %_ : Tensor[(1), float32] = (); ()", (1, )),
        ("let %_ : Tensor[(1, 1), float32] = (); ()", (1, 1)),
        ("let %_ : Tensor[(?, 1), float32] = (); ()", (tvm.tir.Any(), 1)),
    ]
    for program, shape in cases:
        assert_parses_as(
            program,
            relay.Let(relay.Var("_", relay.TensorType(shape, "float32")),
                      UNIT, UNIT),
        )
示例#13
0
def test_tuple_alpha_equal():
    """Tuples are alpha-equal iff they have the same arity and pairwise
    alpha-equal fields; let-bound variables compare up to renaming."""
    v1 = relay.Var("v1")
    v2 = relay.Var("v2")

    # unit value is a valid tuple
    assert alpha_equal(relay.Tuple([]), relay.Tuple([]))

    tup = relay.Tuple([v1, relay.const(2), relay.const(3), relay.Tuple([relay.const(4)])])
    same = relay.Tuple([v1, relay.const(2), relay.const(3), relay.Tuple([relay.const(4)])])

    assert alpha_equal(tup, same)

    # use the eq_map
    # v1 and v2 are both let-bound, so they map onto each other.
    let_tup = relay.Let(v1, tup, v1)
    let_mapped = relay.Let(v2, relay.Tuple([v2, relay.const(2), relay.const(3),
                                            relay.Tuple([relay.const(4)])]),
                           v2)
    assert alpha_equal(let_tup, let_mapped)

    # Arity mismatches are never equal.
    more_fields = relay.Tuple([v1, relay.const(2), relay.const(3), relay.Tuple([relay.const(4)]), v2])
    assert not alpha_equal(tup, more_fields)

    fewer_fields = relay.Tuple([v1, relay.const(2), relay.const(3)])
    assert not alpha_equal(tup, fewer_fields)

    # Field-level differences (last field, first field, nested arity)
    # must each break equality.
    different_end = relay.Tuple([v1, relay.const(2), relay.const(3),
                           relay.Tuple([relay.const(5)])])
    assert not alpha_equal(tup, different_end)

    different_start = relay.Tuple([v2, relay.const(2), relay.const(3),
                                 relay.Tuple([relay.const(4)])])
    assert not alpha_equal(tup, different_start)

    longer_at_end = relay.Tuple([v1, relay.const(2), relay.const(3),
                                 relay.Tuple([relay.const(4), relay.const(5)])])
    assert not alpha_equal(tup, longer_at_end)
示例#14
0
def test_nested_match_pattern():
    """A nested constructor pattern should unwrap both layers of boxing
    in one match, binding the innermost payload."""
    mod = relay.Module()
    box, box_ctor = init_box_adt(mod)
    outer = relay.Var('v')
    payload = relay.Var('w')
    # Pattern: Box(Box(w)) => w.
    nested_pattern = relay.PatternConstructor(
        box_ctor,
        [relay.PatternConstructor(box_ctor, [relay.PatternVar(payload)])])
    match = relay.Let(
        outer, box_ctor(box_ctor(relay.const(2))),
        relay.Match(outer, [relay.Clause(nested_pattern, payload)]))
    match_val = run_as_python(match, mod)
    assert_tensor_value(match_val, 2)
示例#15
0
def test_let_if_scope():
    """Exercise ScopeBuilder scoping inside if/else branches: two
    bindings reusing the name "v" in the true branch, and a manual Let
    in the else branch; the resulting function only needs to print."""
    x = relay.var("x", "float32")
    y = relay.var("y", "float32")
    cond = relay.var("cond", "bool")
    sb = relay.ScopeBuilder()
    with sb.if_scope(cond):
        # Both lets use the source name "v"; the builder returns
        # distinct variables v1 and v2.
        v1 = sb.let("v", relay.const(1, "float32"))
        v2 = sb.let("v", x)
        sb.ret(relay.subtract(v1, v2))
    with sb.else_scope():
        # The let expression itself is used twice in the addition.
        v3 = relay.var("v")
        let2 = relay.Let(v3, y, v3)
        sb.ret(relay.add(let2, let2))
    result = sb.get()
    f = relay.Function([x, y, cond], result)
    print(f.astext())
示例#16
0
def test_all_vars():
    """all_vars should report every variable occurrence — bound or
    free — in order of first appearance."""
    x = relay.Var("x")
    y = relay.Var("y")
    z = relay.Var("z")
    everything = [x, y, z]

    # Two bound parameters plus a free variable in the body.
    assert_vars_match(all_vars(relay.Function([x, y], z)), everything)

    # A let-bound variable inside the body also counts.
    assert_vars_match(
        all_vars(relay.Function([x], relay.Let(y, relay.Tuple([]), z))),
        everything)

    # Free variables appearing only in a tuple body.
    assert_vars_match(all_vars(relay.Function([x], relay.Tuple([y, z]))),
                      everything)

    # A bare tuple with no binder at all.
    assert_vars_match(all_vars(relay.Tuple([x, y, z])), everything)
示例#17
0
def _test_tuple(mode):
    """Check gradients through tuple construction and projection for a
    given AD mode. Forward computes x + y - z (via tuple get-items), so
    the expected gradients are 1, 1, and -1.

    In "higher_order" mode the tuple is let-bound; otherwise it is
    built inline (first-order AD does not handle let).
    """
    shape = (10, 10)
    dtype = "float32"
    t = relay.TensorType(shape, dtype)
    x = relay.var("x", t)
    y = relay.var("y", t)
    z = relay.var("z", t)
    if mode == "higher_order":
        tup = relay.Var("tup")
        func = relay.Function(
            [x, y, z],
            relay.Let(
                tup,
                relay.Tuple([x, y, z]),
                relay.TupleGetItem(tup, 0) + relay.TupleGetItem(tup, 1) -
                relay.TupleGetItem(tup, 2),
            ),
        )
    else:
        # first order does not do let.
        tup = relay.Tuple([x, y, z])
        func = relay.Function(
            [x, y, z],
            relay.TupleGetItem(tup, 0) + relay.TupleGetItem(tup, 1) -
            relay.TupleGetItem(tup, 2),
        )
    func = run_infer_type(func)
    back_func = run_infer_type(gradient(func, mode=mode))
    # Gradient returns (forward value, tuple of per-input gradients).
    assert back_func.checked_type == relay.FuncType(
        [t, t, t], relay.TupleType([t, relay.TupleType([t, t, t])]))
    x_nd = rand(dtype, *shape)
    y_nd = rand(dtype, *shape)
    z_nd = rand(dtype, *shape)
    x_np = x_nd.asnumpy()
    y_np = y_nd.asnumpy()
    z_np = z_nd.asnumpy()
    expected_forward = x_np + y_np - z_np
    ex = create_executor()
    forward, (grad_x, grad_y, grad_z) = ex.evaluate(back_func)(x_nd, y_nd,
                                                               z_nd)
    tvm.testing.assert_allclose(forward.asnumpy(), expected_forward)
    # d(x + y - z)/dx = 1, /dy = 1, /dz = -1.
    tvm.testing.assert_allclose(grad_x.asnumpy(),
                                np.ones_like(grad_x.asnumpy()))
    tvm.testing.assert_allclose(grad_y.asnumpy(),
                                np.ones_like(grad_y.asnumpy()))
    tvm.testing.assert_allclose(grad_z.asnumpy(),
                                -1 * np.ones_like(grad_z.asnumpy()))
示例#18
0
def test_match_order():
    """Match clauses are tried in order: a leading wildcard clause wins
    over a later, more specific constructor clause, yielding 1."""
    mod = relay.Module()
    box, box_ctor = init_box_adt(mod)
    v = relay.Var('v')
    w = relay.Var('w')
    # wildcard pattern goes first
    match = relay.Let(
        v, box_ctor(box_ctor(relay.const(2))),
        relay.Match(v, [
            relay.Clause(relay.PatternWildcard(), relay.const(1)),
            relay.Clause(
                relay.PatternConstructor(box_ctor, [
                    relay.PatternConstructor(box_ctor, [relay.PatternVar(w)])
                ]), w)
        ]))
    match_val = run_as_python(match, mod)
    # The wildcard clause fires, so the nested-constructor clause (which
    # would produce 2) is never reached.
    assert_tensor_value(match_val, 1)
    def before():
        """Build a module containing a recursive while loop:
        while (var2 < 10) { var2 += 1; var3 += var1 }, started with
        var2 = 0 and var3 = zeros_like(var1)."""
        var1 = relay.var("var1", shape=(2,))
        var2 = relay.var("var2", shape=(), dtype="int32")
        var3 = relay.var("var3", shape=(2,))
        # Loop condition: iteration counter below 10.
        cond = relay.less(var2, relay.const(10, dtype="int32"))

        # The loop refers to itself through this let-bound variable.
        loop = relay.var("while_loop")
        ii = var2 + relay.const(1, dtype="int32")
        ss = var3 + var1
        true_branch = loop(ii, ss)
        ife = relay.If(cond, true_branch, var3)
        func_1 = relay.Function([var2, var3], ife)

        # Bind the loop body and kick it off with the initial state.
        ret = relay.Let(loop, func_1, loop(relay.const(0, dtype="int32"), relay.zeros_like(var1)))
        func_2 = relay.Function([var1], ret)
        mod = tvm.IRModule.from_expr(func_2)
        return mod
 def before(x, conv_weight, in_bias, in_scale, channels):
     """Build a function whose conv2d input is a let-bound expression:
     the pre-processed x (scale, relu, bias-add) is bound to x_var, fed
     through a 3x3 NHWC conv, and added back to the unbound value.

     NOTE(review): in_scale is deliberately absent from args, so it stays
     a free/constant value in the resulting function — confirm against
     the caller.
     """
     args = [x, conv_weight, in_bias]
     x = relay.multiply(x, in_scale)
     x = relay.nn.relu(x)
     x = relay.add(x, in_bias)
     x_var = relay.Var("x_var")
     y1 = relay.nn.conv2d(
         x_var,
         conv_weight,
         channels=channels,
         kernel_size=(3, 3),
         data_layout="NHWC",
         kernel_layout="HWIO",
         padding=(1, 1),
     )
     # The conv consumes x_var, which the let binds to the processed x.
     z = relay.add(y1, x)
     let = relay.Let(x_var, x, z)
     return relay.Function(args, let)
示例#21
0
    def after():
        """Build the expected annotated while-loop module: every operator
        input is wrapped in compiler_begin and every operator output in
        compiler_end, partitioning work between `target` and "default"."""
        var1 = relay.var("var1", shape=(2, ))
        var2 = relay.var("var2", shape=(), dtype="int32")
        var3 = relay.var("var3", shape=(2, ))
        var4 = relay.const(10, dtype="int32")

        # Loop condition (var2 < 10) runs on `target`.
        cb_1 = relay.annotation.compiler_begin(var2, target)
        cb_2 = relay.annotation.compiler_begin(var4, target)

        less_condition = relay.less(cb_1, cb_2)
        ce_1 = relay.annotation.compiler_end(less_condition, target)

        loop = relay.var("while_loop")

        # if condition
        # Counter increment (var2 + 1) on `target`, result handed back
        # to "default" for the recursive call.
        cb_3 = relay.annotation.compiler_begin(var2, target)
        cb_4 = relay.annotation.compiler_begin(relay.const(1, dtype="int32"),
                                               target)
        add_op_1 = relay.add(cb_3, cb_4)
        ce_2 = relay.annotation.compiler_end(add_op_1, target)
        cb_5 = relay.annotation.compiler_begin(ce_2, "default")
        # Accumulator update (var3 + var1) on `target`, likewise.
        cb_6 = relay.annotation.compiler_begin(var3, target)
        cb_7 = relay.annotation.compiler_begin(var1, target)
        add_op_2 = relay.add(cb_6, cb_7)
        ce_3 = relay.annotation.compiler_end(add_op_2, target)
        cb_8 = relay.annotation.compiler_begin(ce_3, "default")
        true_branch = loop(cb_5, cb_8)  # while loop
        ce_4 = relay.annotation.compiler_end(true_branch, "default")
        if_condition = relay.If(ce_1, ce_4, var3)

        # Initial call: counter 0 on "default", zeros_like on `target`.
        cb_9 = relay.annotation.compiler_begin(relay.const(0, dtype="int32"),
                                               "default")
        cb_10 = relay.annotation.compiler_begin(var1, target)
        zeros_like = relay.zeros_like(cb_10)
        ce_5 = relay.annotation.compiler_end(zeros_like, target)
        cb_11 = relay.annotation.compiler_begin(ce_5, "default")
        while_condition = loop(cb_9, cb_11)
        ce_6 = relay.annotation.compiler_end(while_condition, "default")

        # Let-bind the loop body and start the loop from the entry function.
        func_1 = relay.Function([var2, var3], if_condition)
        ret = relay.Let(loop, func_1, ce_6)
        func_2 = relay.Function([var1], ret)
        mod = tvm.IRModule.from_expr(func_2)
        return mod
 def expected_if_expr(x):
     """
     Build the expected expression: %v1 = x + 1 is let-bound once, outside
     the If, and used by both branches.

     free_var %x: float32
     let %v1: float32 = add(%x, 1f /* ty=float32 */) /* ty=float32 */;
     %0 = equal(%x, 2f /* ty=float32 */) /* ty=bool */;
     if (%0) {
       multiply(%v1, 2f /* ty=float32 */) /* ty=float32 */
     } else {
       multiply(%v1, 1f /* ty=float32 */) /* ty=float32 */
     }
     """
     one = relay.const(1, dtype="float32")
     two = relay.const(2, dtype="float32")
     v1 = relay.var("v1")
     v2 = relay.equal(x, two)
     true_branch = relay.multiply(v1, two)
     false_branch = relay.multiply(v1, one)
     body = relay.If(v2, true_branch, false_branch)
     # The Let wraps the whole If, so v1 is shared by both branches.
     body = relay.Let(v1, relay.add(x, one), body)
     return body
示例#23
0
def test_local_recursion():
    """Evaluate a locally let-bound recursive function — the identity on
    lists — applied to [1, 2, 3], and check the reconstructed structure."""
    mod = tvm.IRModule()
    p = Prelude(mod)
    _, cons, nil = p.mod.get_type("List")

    v = relay.Var("v")
    h = relay.Var("h")
    t = relay.Var("t")
    f = relay.Var("f")

    # just returns the same list
    # f recurses through the let-bound variable f itself (local recursion).
    let = relay.Let(
        f,
        relay.Function(
            [v],
            relay.Match(
                v,
                [
                    relay.Clause(
                        relay.PatternConstructor(
                            cons, [relay.PatternVar(h),
                                   relay.PatternVar(t)]),
                        cons(h, f(t)),
                    ),
                    relay.Clause(relay.PatternConstructor(nil, []), nil()),
                ],
            ),
        ),
        f(
            cons(relay.const(1),
                 cons(relay.const(2), cons(relay.const(3), nil())))),
    )

    # Walk the cons cells and verify the values 1, 2, 3 and final nil.
    val = run_as_python(let, mod)
    assert_constructor_value(val, cons, 2)
    assert_tensor_value(val.fields[0], 1)
    assert_constructor_value(val.fields[1], cons, 2)
    assert_tensor_value(val.fields[1].fields[0], 2)
    assert_constructor_value(val.fields[1].fields[1], cons, 2)
    assert_tensor_value(val.fields[1].fields[1].fields[0], 3)
    assert_constructor_value(val.fields[1].fields[1].fields[1], nil, 0)
示例#24
0
def test_head_cons():
    """Partially evaluating hd(cons(x, nil)) should reduce the whole
    function to the identity on x."""
    mod = relay.Module()
    p = Prelude(mod)
    def hd_impl():
        # head of a list: match Cons(y, z) and return y.
        a = relay.TypeVar("a")
        x = relay.Var("x", p.l(a))
        y = relay.Var("y")
        z = relay.Var("z")
        cons_case = relay.Clause(relay.PatternConstructor(p.cons,
                                                          [relay.PatternVar(y),
                                                           relay.PatternVar(z)]),
                                 y)
        return relay.Function([x], relay.Match(x, [cons_case]), a, [a])
    t = relay.TypeVar("t")
    x = relay.Var("x", t)
    hd = relay.Var("hd")
    body = relay.Let(hd, hd_impl(), hd(p.cons(x, p.nil())))
    f = relay.Function([x], body, None, [t])
    f = infer_type(f, mod=mod)
    res = dcpe(f)
    # After partial evaluation nothing of the match remains: f == \x. x.
    assert alpha_equal(res, relay.Function([x], x, t, [t]))
示例#25
0
def test_match_effect_exactly_once():
    """The match scrutinee must be evaluated exactly once: `data` appends
    one unit element to the ref-held list as a side effect, so the list
    has length 1 and the second clause (singleton list) must fire."""
    mod = relay.Module()
    p = Prelude(mod)

    # the list should be of length 1!
    # Unless we mistakenly execute the data clause more than once
    r = relay.Var('r')
    data = seq(relay.RefWrite(r, p.cons(relay.Tuple([]), relay.RefRead(r))), relay.RefRead(r))
    match = relay.Let(
        r, relay.RefCreate(p.nil()),
        relay.Match(data, [
            # 0 => empty list, 1 => singleton, 2 => anything longer.
            relay.Clause(relay.PatternConstructor(p.nil, []), relay.const(0)),
            relay.Clause(
                relay.PatternConstructor(
                    p.cons,
                    [relay.PatternWildcard(), relay.PatternConstructor(p.nil, [])]),
                relay.const(1)),
            relay.Clause(relay.PatternWildcard(), relay.const(2))
        ]))

    match_val = run_as_python(match, mod)
    assert_tensor_value(match_val, 1)
示例#26
0
    def convert_func(self, graph):
        """Convert a graph to a Relay function.

        Parameters are converted first; the remaining nodes are visited
        in topological order from the output and then bound, innermost
        last, as a chain of nested lets around the output expression.
        """
        for p in graph.parameters:
            self.node_map[p] = self.on_parameter(p)

        params = [self.ref(p) for p in graph.parameters]

        # Collect the nodes that need a let binding, assigning each a
        # fresh sequential variable (seq.<i>).
        seq = []
        for node in toposort(graph.output, NodeVisitor(), in_graph(graph)):
            if node in self.node_map:
                continue
            elif node.is_constant_graph() and node.value.parent is None:
                # Top-level constant graphs resolve via the graph map.
                self.node_map[node] = self.graph_map[node.value]
            else:
                self.node_map[node] = relay.var(f"seq.{self.i}")
                self.i += 1
                seq.append(node)

        out = self.ref(graph.output)

        # Wrap the output in lets, last-computed node innermost.
        for op in reversed(seq):
            var = self.node_map[op]
            if op.is_apply():
                val = self.on_apply(op)
            elif op.is_constant_graph():
                val = self.convert_func(op.value)
            elif op.is_constant():
                val = self.on_constant(op)
                # This forces the rebuild of constants every time they
                # are encountered since they may be shared amongst
                # multiple graphs and it causes problems otherwise.
                del self.node_map[op]
            else:
                raise AssertionError(f"Bad node for sequence: {op}")
            out = relay.Let(var, val, out)

        return relay.Function(params,
                              out,
                              ret_type=to_relay_type(graph.output.abstract))
示例#27
0
def test_valid_if():
    """
    The program below uses let binding to control the scope of %shared, which
    follows the basic block normal form.

    free_var %shared_bound: Tensor[(1), float32]
    let %shared = %shared_bound;
    free_var %cond: bool
    if (%cond) {
      %shared
    } else {
      add(%shared, %shared)
    }
    """
    # Fixed: the explanatory text above sat mid-function as a no-op bare
    # string expression; it now serves as the function's docstring.
    cond = relay.var("cond", dtype="bool", shape=())
    shared = relay.var("shared")
    true_branch = shared
    false_branch = relay.add(shared, shared)
    body = relay.If(cond, true_branch, false_branch)
    shared_bound = relay.var("shared_bound", shape=(1, ), dtype="float32")
    # Binding %shared outside the If gives both branches a common scope.
    body = relay.Let(shared, shared_bound, body)
    check_basic_block_normal_form(body)
def test_recursive():
    """LambdaLift should hoist the let-bound recursive while-loop closure
    into its own global function, leaving two functions in the module."""
    mod = tvm.IRModule()

    x = relay.var('x', shape=(2,))
    i = relay.var('i', shape=(), dtype='int32')
    s = relay.var('s', shape=(2,))
    # Loop condition: iteration counter below 10.
    cond = i < relay.const(10, dtype='int32')

    # The loop calls itself through this let-bound variable.
    loop = relay.var('while_loop')
    sb = relay.scope_builder.ScopeBuilder()
    with sb.if_scope(cond):
        ii = i + relay.const(1, dtype='int32')
        ss = s + x
        sb.ret(loop(ii, ss))
    with sb.else_scope():
        sb.ret(s)
    func = relay.Function([i, s], sb.get())

    # Bind the loop body and invoke it with i=0, s=zeros.
    ret = relay.Let(loop, func, loop(relay.const(0, dtype='int32'), relay.zeros(shape=(2,), dtype='float32')))
    mod["main"] = relay.Function([x], ret)

    new_mod = transform.LambdaLift()(mod)
    # main plus the lifted loop function.
    assert len(new_mod.functions) == 2
示例#29
0
def test_valid_if2():
    """
    A let binding scoped over the whole If keeps the program in basic
    block normal form even though %v1 is used in both branches:

    fn (%x: float32) {
      let %v1 = add(%x, 1f);
      %0 = equal(%x, 2f);
      if (%0) {
        multiply(%v1, 2f)
      } else {
        multiply(%v1, 1f)
      }
    }
    """
    x = relay.var("x", shape=(), dtype="float32")
    one = relay.const(1, dtype="float32")
    two = relay.const(2, dtype="float32")
    v1 = relay.var("v1")
    v2 = relay.equal(x, two)
    true_branch = relay.multiply(v1, two)
    false_branch = relay.multiply(v1, one)
    body = relay.If(v2, true_branch, false_branch)
    # The Let wraps the If, so both branches share a single binding of v1.
    body = relay.Let(v1, relay.add(x, one), body)
    func = relay.Function([x], body)
    check_basic_block_normal_form(func)
示例#30
0
def seq(*exprs):
    """Chain expressions so they evaluate in order for their effects.

    Each prefix result is let-bound to a throwaway '_' variable, so only
    the final expression's value is produced.
    """
    combined, remaining = exprs[0], exprs[1:]
    for nxt in remaining:
        combined = relay.Let(relay.var('_'), combined, nxt)
    return combined