Example 1

def test_match():
    # pair each match keyword with whether it specifies a complete match or not
    match_keywords = [("match", True), ("match?", False)]
    for (match_keyword, is_complete) in match_keywords:
        mod = relay.Module()

        list_var = relay.GlobalTypeVar("List")
        typ_var = relay.TypeVar("A")
        cons_constructor = relay.Constructor(
            "Cons", [typ_var, list_var(typ_var)], list_var)
        nil_constructor = relay.Constructor("Nil", [], list_var)
        list_def = relay.TypeData(list_var, [typ_var],
                                  [cons_constructor, nil_constructor])
        mod[list_var] = list_def

        length_var = relay.GlobalVar("length")
        typ_var = relay.TypeVar("A")
        input_type = list_var(typ_var)
        input_var = relay.Var("xs", input_type)
        rest_var = relay.Var("rest")
        cons_case = relay.Let(
            _, UNIT,
            relay.add(relay.const(1), relay.Call(length_var, [rest_var])))
        body = relay.Match(input_var, [
            relay.Clause(
                relay.PatternConstructor(
                    cons_constructor,
                    [relay.PatternWildcard(),
                     relay.PatternVar(rest_var)]), cons_case),
            relay.Clause(relay.PatternConstructor(nil_constructor, []),
                         relay.const(0))
        ],
                           complete=is_complete)
        length_func = relay.Function([input_var], body, int32, [typ_var])
        mod[length_var] = length_func

        assert parses_as(
            """
            %s

            def @length[A](%%xs: List[A]) -> int32 {
              %s (%%xs) {
                Cons(_, %%rest) => {
                  ();;
                  1 + @length(%%rest)
                },
                Nil => 0,
              }
            }
            """ % (LIST_DEFN, match_keyword), mod)

Example 2

def test_extern_ccompiler_default_ops():
    def expected():
        x = relay.var("x", shape=(8, 8))
        y = relay.var("y", shape=(8, 8))
        x0 = relay.var("x0", shape=(8, 8))
        y0 = relay.var("y0", shape=(8, 8))
        add = x0 + y0
        # Function that uses C compiler
        func = relay.Function([x0, y0], add)
        func = func.set_attribute("Primitive", tvm.expr.IntImm("int32", 1))
        func = func.set_attribute("Compiler",
                                  tvm.expr.StringImm("ccompiler"))
        func = func.set_attribute("ExternalSymbol",
                                  tvm.expr.StringImm("ccompiler_0"))
        add_call = relay.Call(func, [x, y])
        # Function that uses default compiler. Ops are fused in this function.
        p0 = relay.var("p0", shape=(8, 8))
        log = relay.log(p0)
        exp = relay.exp(p0)
        concat = relay.concatenate([log, exp], axis=0)
        fused_func = relay.Function([p0], concat)
        fused_func = fused_func.set_attribute("Primitive",
                                              tvm.expr.IntImm("int32", 1))
        fused_call = relay.Call(fused_func, [add_call])
        main = relay.Function([x, y], fused_call)
        mod = relay.Module()
        mod["main"] = main
        return mod

    x = relay.var("x", shape=(8, 8))
    y = relay.var("y", shape=(8, 8))
    add = x + y
    log = relay.log(add)
    exp = relay.exp(add)
    concat = relay.concatenate([log, exp], axis=0)
    f = relay.Function([x, y], concat)
    mod = relay.Module()
    mod["main"] = f
    mod = WhiteListAnnotator(["add", "subtract", "multiply"], "ccompiler")(mod)
    mod = transform.PartitionGraph()(mod)

    fused_mod = transform.FuseOps(2)(mod)
    expected_mod = expected()
    assert relay.alpha_equal(fused_mod, expected_mod)

    x_data = np.random.rand(8, 8).astype('float32')
    y_data = np.random.rand(8, 8).astype('float32')
    np_add = x_data + y_data
    res = np.concatenate([np.log(np_add), np.exp(np_add)])
    check_result(mod, {"x": x_data, "y": y_data}, (16, 8), res)

Example 3

def run(dtype):
    mod = relay.Module()
    p = Prelude(mod)
    take = p.get_var('tensor_take', dtype)
    tensor2 = p.get_var('tensor2', dtype)
    v = relay.var('v')
    lower = relay.var('lower')
    upper = relay.var('upper')
    mod["main"] = relay.Function([v, lower, upper], take(tensor2(v), lower, upper))
    v_data = np.random.uniform(size=(10, 10)).astype(dtype)
    expected = [np.take(v_data, range(2, 5), axis=0)]
    check_tensor_array(mod, expected, *(v_data, 2, 5), dtype=dtype)
    expected = [np.take(v_data, range(0, 9), axis=0)]
    check_tensor_array(mod, expected, *(v_data, 0, 9), dtype=dtype)
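
The tensor-array tests in this collection call check_tensor_array, whose definition is elided. A minimal sketch consistent with the executor/vmobj_to_list pattern used in Examples 9 and 10 (the names and the executor list are assumptions, not the original helper):

def check_tensor_array(mod, expected, *args, dtype="float32"):
    # Run the module on each executor, flatten the result, and compare.
    # `dtype` mirrors the call sites; the real helper may use it when converting results.
    for kind in ["debug", "vm"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(*args)
        got = vmobj_to_list(result)
        tvm.testing.assert_allclose(expected, got)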

Example 4

def verify_any_global_pool2d(pool_type, data_shape, layout, static_data_shape,
                             ref_out_shape):
    mod = relay.Module()
    dtype = "float32"
    pool_func = relay.nn.global_max_pool2d if pool_type == "max" else relay.nn.global_avg_pool2d
    data = relay.var('data', shape=data_shape, dtype=dtype)
    y = pool_func(data, layout)
    mod["main"] = relay.Function([data], y)
    data_np = np.random.uniform(size=static_data_shape).astype(dtype)
    for kind in ["debug", "vm"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(data_np)
        assert result.asnumpy().shape == ref_out_shape, \
            "Shape mismatch: expect %s but got %s." % (str(ref_out_shape), str(result.asnumpy().shape))

Example 5

def test_tensorrt_not_compatible():
    if should_skip():
        return
    dtype = 'float32'
    xshape = (1, 32, 14, 14)
    x = relay.var('x', shape=(xshape), dtype=dtype)
    y = relay.add(x, x)
    z = relay.erf(y)
    out = relay.nn.relu(z)
    f = relay.Function([x], out)
    mod = relay.Module()
    mod['main'] = f
    mod = relay.tensorrt.EnableTrt(mod)
    assert not mod['main'].attrs

Example 6

def test_any_reshape_like():
    mod = relay.Module()
    dtype = "float32"
    data = relay.var('data', shape=(relay.Any(), 3, 10), dtype=dtype)
    shape_like = relay.var('shape_like', shape=(relay.Any(), 5, 6), dtype=dtype)
    y = relay.reshape_like(data, shape_like)
    mod["main"] = relay.Function([data, shape_like], y)
    data_np = np.random.uniform(size=(3, 3, 10)).astype(dtype)
    shape_like_np = np.random.uniform(size=(3, 5, 6)).astype(dtype)
    for kind in ["debug", "vm"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(data_np, shape_like_np)
        assert result.asnumpy().shape == shape_like_np.shape, \
            "Shape mismatch: expect %s but got %s." % (str(shape_like_np.shape), str(result.asnumpy().shape))

Example 7

def test_ref():
    mod = relay.Module()
    three_with_ref = relay.GlobalVar('three_with_ref')
    i = relay.Var('i')
    iv = relay.Var('iv')
    u = relay.Var('u')
    uv = relay.Var('uv')
    body = relay.add(iv, uv)
    body = relay.Let(uv, relay.RefRead(i), body)
    body = relay.Let(u, relay.RefWrite(i, relay.const(2)), body)
    body = relay.Let(iv, relay.RefRead(i), body)
    body = relay.Let(i, relay.RefCreate(relay.const(1)), body)
    mod[three_with_ref] = relay.Function([], body)
    check_eval(three_with_ref, [], 3, mod=mod)

Example 8

def before():
    x = relay.var("x", shape=(1, 64, 56, 56))
    weight = relay.var('weight', shape=(64, 64, 3, 3))
    y = relay.nn.conv2d(x,
                        weight,
                        channels=64,
                        kernel_size=(3, 3),
                        padding=(1, 1))
    y = relay.nn.relu(y)
    mod = relay.Module()
    foo = relay.GlobalVar('foo')
    mod[foo] = relay.Function([x, weight], y)
    mod["main"] = relay.Function([x, weight], foo(x, weight))
    return mod

Example 9

def run(dtype):
    x = relay.var('x')
    mod = relay.Module()
    p = Prelude(mod)
    expand_dims_func = p.get_var('tensor_expand_dims', dtype)
    tensor1 = p.get_var('tensor1', dtype)
    mod["main"] = relay.Function([x], expand_dims_func(tensor1(x)))
    for kind in ["debug"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        x_np = np.random.uniform(size=(1,)).astype(dtype)
        result = ex.evaluate()(x_np)
        got = vmobj_to_list(result)
        expected = [np.expand_dims(x_np, axis=0)]
        tvm.testing.assert_allclose(expected, got)

Example 10

def run(dtype):
    mod = relay.Module()
    p = Prelude(mod)
    l = relay.var('l')
    i = relay.var('i')
    read_func = p.get_var('tensor_array_read', dtype)
    tensor_array = p.get_var('tensor_array', dtype)
    mod["main"] = relay.Function([l, i], read_func(tensor_array(l), i))
    for kind in ["debug"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(10, 5)
        got = vmobj_to_list(result)
        expected = [0]
        tvm.testing.assert_allclose(expected, got)

Example 11

def test_multiple_cons_defn():
    mod = relay.Module()

    list_var = relay.GlobalTypeVar("List")
    typ_var = relay.TypeVar("A")
    prog = relay.TypeData(
            list_var,
            [typ_var],
            [
                relay.Constructor("Cons", [typ_var, list_var(typ_var)], list_var),
                relay.Constructor("Nil", [], list_var),
            ])
    mod[list_var] = prog
    assert parses_as(LIST_DEFN, mod)

Example 12

def create_vm(f, ctx=tvm.cpu(), target="llvm"):
    if isinstance(f, relay.Expr):
        mod = relay.Module()
        mod["main"] = f
        compiler = relay.vm.VMCompiler()
        vm = compiler.compile(mod, target)
        vm.init(ctx)
        return vm
    else:
        assert isinstance(f, relay.Module), "expected mod as relay.Module"
        compiler = relay.vm.VMCompiler()
        vm = compiler.compile(f, target)
        vm.init(ctx)
        return vm
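
A usage sketch (hypothetical values), following the same invoke pattern as the veval helper in Example 14:

x = relay.var("x", shape=(2, 2))
vm = create_vm(relay.Function([x], x + x))
res = vm.invoke("main", np.ones((2, 2), dtype="float32"))
# res.asnumpy() should be a (2, 2) array of 2.0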

Example 13

def test_list_tl_empty_list():
    mod = relay.Module()
    p = Prelude(mod)

    nil = p.nil
    l = p.l
    tl = p.tl

    f = relay.Function([], tl(nil()))

    mod["main"] = f

    result = veval(mod)
    print(result)

Example 14

def veval(f, *args, ctx=tvm.cpu(), target="llvm"):
    if isinstance(f, relay.Expr):
        mod = relay.Module()
        mod["main"] = f
        vm = relay.vm.compile(mod, target)
        vm.init(ctx)
        return vm.invoke("main", *args)
    else:
        assert isinstance(f, relay.Module), "expected expression or module"
        mod = f
        vm = relay.vm.compile(mod, target)
        vm.init(ctx)
        ret = vm.invoke("main", *args)
        return ret
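
For reference, a hypothetical call: evaluating a small two-argument function through the VM:

x = relay.var("x", shape=(2, 2))
y = relay.var("y", shape=(2, 2))
res = veval(relay.Function([x, y], x * y),
            np.ones((2, 2), dtype="float32"),
            np.full((2, 2), 3.0, dtype="float32"))
tvm.testing.assert_allclose(res.asnumpy(), np.full((2, 2), 3.0, dtype="float32"))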

Example 15

def test_simple_if():
    x = relay.var('x', shape=(10, 10))
    y = relay.var('y', shape=(10, 10))
    f = relay.Function([x, y], relay.If(any(relay.op.equal(x, y)), x, y))
    x_data = np.random.rand(10, 10).astype('float32')
    y_data = np.random.rand(10, 10).astype('float32')

    mod = relay.Module()
    mod["main"] = f
    # same
    check_result([x_data, x_data], x_data, mod=mod)

    # diff
    check_result([x_data, y_data], y_data, mod=mod)
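
test_simple_if relies on a module-level helper named any (shadowing the builtin) that reduces the elementwise comparison to a scalar condition; its definition is elided here. A plausible stand-in (an assumption, not the original code):

def any(x):
    # Collapse a boolean tensor to a single scalar: the condition is True only
    # when every element of relay.op.equal(x, y) is True.
    x = relay.op.nn.batch_flatten(x)
    return relay.op.min(x, axis=[0, 1])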

Example 16

def test_match_var():
    mod = relay.Module()
    box, box_ctor = init_box_adt(mod)
    v = relay.Var('v')
    w = relay.Var('w')
    match = relay.Let(
        v, box_ctor(relay.const(1)),
        relay.Match(v, [
            relay.Clause(relay.PatternVar(w), w)
        ]))

    match_val = run_as_python(match, mod)
    assert_constructor_value(match_val, box_ctor, 1)
    assert_tensor_value(match_val.fields[0], 1)

Example 17

def test_add():
    mod = relay.Module()
    p = Prelude(mod)
    nat = p.nat
    add = p.add
    s = p.s
    z = p.z
    ctx = tvm.context("llvm", 0)
    intrp = create_executor(mod=mod, ctx=ctx, target="llvm")
    assert mod[add].checked_type == relay.FuncType([nat(), nat()], nat())
    assert count(intrp.evaluate(add(s(z()), s(z())))) == 2
    assert count(intrp.evaluate(to_a_normal_form(add(s(z()), s(z())),
                                                 mod))) == 2
    assert "let" in mod[add].astext()

Example 18

def verify_any_split(data_shape, indices_or_sections, axis, static_data_shape,
                     ref_out_shape):
    mod = relay.Module()
    dtype = "float32"
    data = relay.var('data', shape=data_shape, dtype=dtype)
    y = relay.split(data, indices_or_sections, axis)
    mod["main"] = relay.Function([data], y.astuple())
    data_np = np.random.uniform(size=static_data_shape).astype(dtype)
    for kind in ["vm"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(data_np)
        for ret, ref_ret in zip(result, ref_out_shape):
            assert ret.asnumpy().shape == ref_ret, \
                "Shape mismatch: expect %s but got %s." % (str(ref_ret), str(ret.asnumpy().shape))

Example 19

def test_extern_ccompiler():
    x = relay.var('x', shape=(2, 2))
    y = relay.var('y', shape=(2, 2))
    z = x + x
    p = y * y
    f = relay.Function([x, y], p - z)
    x_data = np.random.rand(2, 2).astype('float32')
    y_data = np.random.rand(2, 2).astype('float32')
    mod = relay.Module()
    mod["main"] = f
    mod = WhiteListAnnotator(["add", "subtract", "multiply"], "ccompiler")(mod)
    mod = transform.PartitionGraph()(mod)

    check_result(mod, {"x": x_data, "y": y_data}, (2, 2), (y_data * y_data) - (x_data + x_data))

Example 20

def test_tensorrt_simple():
    if should_skip():
        return
    dtype = 'float32'
    xshape = (1, 32, 14, 14)
    yshape = (1, 32,  1,  1)
    zshape = (1,  1,  1,  1)
    x = relay.var('x', shape=(xshape), dtype=dtype)
    y = relay.var('y', shape=(yshape), dtype=dtype)
    z = relay.var('z', shape=(zshape), dtype=dtype)
    w = z * (x + y)
    out = relay.nn.relu(w)
    f = relay.Function([x, y, z], out)

    mod = relay.Module()
    mod['main'] = f
    mod = relay.tensorrt.EnableTrt(mod)

    ref_mod = relay.Module()
    ref_mod['main'] = f

    x_data = np.random.uniform(-1, 1, xshape).astype(dtype)
    y_data = np.random.uniform(-1, 1, yshape).astype(dtype)
    z_data = np.random.uniform(-1, 1, zshape).astype(dtype)

    # Test against reference.
    for kind in ["graph"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.gpu(0), target='cuda')
        # First execution will trigger build of TRT engine(s).
        res = ex.evaluate()(x_data, y_data, z_data)
        # TRT engine is reused for second execution.
        res = ex.evaluate()(x_data, y_data, z_data)

        ref_ex = relay.create_executor(kind, mod=ref_mod, ctx=tvm.cpu(0))
        ref_res = ref_ex.evaluate()(x_data, y_data, z_data)

        tvm.testing.assert_allclose(res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)

Example 21

def test_nested_match_pattern():
    mod = relay.Module()
    box, box_ctor = init_box_adt(mod)
    v = relay.Var('v')
    w = relay.Var('w')
    match = relay.Let(
        v, box_ctor(box_ctor(relay.const(2))),
        relay.Match(v, [
            relay.Clause(
                relay.PatternConstructor(box_ctor, [
                    relay.PatternConstructor(box_ctor, [relay.PatternVar(w)])
                ]), w)
        ]))
    match_val = run_as_python(match, mod)
    assert_tensor_value(match_val, 2)

Example 22

def test_globalvar_as_call_arg():
    mod = relay.Module()
    p = Prelude(mod)
    tensor_array = p.get_var('tensor_array', 'int32')
    tensor1 = p.get_var('tensor1', 'int32')
    write = p.get_var('tensor_array_write', 'int32')
    stack = p.get_var('tensor_array_stack', 'int32')
    v = relay.var('v')
    init_tensor_array = tensor_array(relay.const(3))
    tensor_array1 = write(init_tensor_array, relay.const(0), tensor1(v))
    tensor_array2 = stack(tensor_array1)
    mod["main"] = relay.Function([v], tensor_array2)
    mod = relay.transform.RemoveUnusedFunctions()(mod)
    l = set([x[0].name_hint for x in mod.functions.items()])
    assert 'tensor_array_int32' in l

Example 23

def test_constructor():
    mod = relay.Module()
    box, box_ctor = init_box_adt(mod)

    init_box_int = box_ctor(relay.const(1))
    box_val_int = run_as_python(init_box_int, mod)

    assert_constructor_value(box_val_int, box_ctor, 1)
    assert_tensor_value(box_val_int.fields[0], 1)

    init_box_tup = box_ctor(relay.Tuple([]))
    box_val_tup = run_as_python(init_box_tup, mod)

    assert_constructor_value(box_val_tup, box_ctor, 1)
    assert_adt_len(box_val_tup.fields[0], 0)

Example 24

def test_add_op_scalar():
    """
    test_add_op_scalar:
        fn (x, y) {
            return x + y;
        }
    """
    mod = relay.Module()
    x = relay.var('x', shape=())
    y = relay.var('y', shape=())
    func = relay.Function([x, y], relay.op.add(x, y))
    x_data = np.array(10.0, dtype='float32')
    y_data = np.array(1.0, dtype='float32')
    mod["main"] = func
    check_result([x_data, y_data], x_data + y_data, mod=mod)

Example 25

def test_add_op_broadcast():
    """
    test_add_op_broadcast:
        fn (x, y) {
            return x + y;
        }
    """
    mod = relay.Module()
    x = relay.var('x', shape=(10, 5))
    y = relay.var('y', shape=(1, 5))
    func = relay.Function([x, y], relay.op.add(x, y))
    x_data = np.random.rand(10, 5).astype('float32')
    y_data = np.random.rand(1, 5).astype('float32')
    mod["main"] = func
    check_result([x_data, y_data], x_data + y_data, mod=mod)

Example 26

def test_let_scalar():
    sb = relay.ScopeBuilder()

    x = relay.var('x', 'float32')
    x1 = sb.let('x1', x)
    xplusone = x1 + relay.const(42.0, 'float32')
    sb.ret(xplusone)
    body = sb.get()

    f = relay.Function([x], body)

    x_data = np.array(np.random.rand()).astype('float32')
    mod = relay.Module()
    mod["main"] = f
    check_result([x_data], x_data + 42.0, mod=mod)

Example 27

def run(dtype):
    mod = relay.Module()
    p = Prelude(mod)
    concat = p.get_var('tensor_concatenate', dtype)
    tensor1 = p.get_var('tensor1', dtype)
    v1 = relay.var('v1')
    v2 = relay.var('v2')
    mod["main"] = relay.Function([v1, v2], concat(tensor1(v1),
                                                  tensor1(v2)))
    v1_data = np.random.uniform(low=0.0, high=8.0,
                                size=(5, )).astype(dtype)
    v2_data = np.random.uniform(low=0.0, high=8.0,
                                size=(5, )).astype(dtype)
    expected = [np.concatenate((v1_data, v2_data))]
    check_tensor_array(mod, expected, *(v1_data, v2_data), dtype=dtype)

Example 28

def run(dtype):
    mod = relay.Module()
    p = Prelude(mod)
    v1 = relay.var('v1')
    v2 = relay.var('v2')
    tensor_array = p.get_var('tensor_array', dtype)
    init_tensor_array = tensor_array(relay.const(2))
    write_func = p.get_var('tensor_array_write', dtype)
    tensor1 = p.get_var('tensor1', dtype)
    tensor_array1 = write_func(init_tensor_array, relay.const(0),
                               tensor1(v1))
    tensor_array2 = write_func(tensor_array1, relay.const(1), tensor1(v2))
    mod["main"] = relay.Function([v1, v2], tensor_array2)
    expected = [3, 7]
    check_tensor_array(mod, expected, *(3, 7), dtype=dtype)

Example 29

def veval(f, *args, ctx=tvm.cpu()):
    if isinstance(f, relay.Expr):
        ex = relay.create_executor('vm', mod=relay.Module(), ctx=ctx)
        if len(args) == 0:
            return ex.evaluate(f)
        else:
            return ex.evaluate(f)(*args)
    else:
        assert isinstance(f, relay.Module), "expected expression or module"
        mod = f
        ex = relay.create_executor('vm', mod=mod, ctx=ctx)
        if len(args) == 0:
            return ex.evaluate()
        else:
            return ex.evaluate()(*args)

Example 30

def verify_any_dense(data_shape, weight_shape, units, static_data_shape,
                     static_weight_shape, ref_out_shape):
    mod = relay.Module()
    dtype = "float32"
    data = relay.var('data', shape=data_shape, dtype=dtype)
    weight = relay.var('weight', shape=weight_shape, dtype=dtype)
    y = relay.nn.dense(data, weight, units)
    mod["main"] = relay.Function([data, weight], y)
    data_np = np.random.uniform(size=static_data_shape).astype(dtype)
    weight_np = np.random.uniform(size=static_weight_shape).astype(dtype)
    for kind in ["debug", "vm"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(data_np, weight_np)
        assert result.asnumpy().shape == ref_out_shape, \
            "Shape mismatch: expect %s but got %s." % (str(ref_out_shape), str(result.asnumpy().shape))