Example #1
from tvm import relay


def before():
    # Conv2d followed by two forward reshapes and a reverse_reshape.
    x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
    w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
    y = relay.nn.conv2d(x, w, padding=(1, 1))
    y = relay.reshape(y, newshape=(1, 16, -1))
    y = relay.reshape(y, newshape=(4, 8, -1, 16))
    y = relay.reverse_reshape(y, newshape=(32, 0, -1))
    return relay.Function([x, w], y)
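relay.reverse_reshape interprets the special values 0 and -1 in newshape from right to left, the mirror image of relay.reshape. A small sketch (not part of the original snippet) of how the inferred result type of before() can be inspected, with the intermediate shapes worked out by hand:

import tvm
from tvm import relay

mod = tvm.IRModule.from_expr(before())
mod = relay.transform.InferType()(mod)
# conv2d -> (1, 32, 16, 16); reshape -> (1, 16, 512); reshape -> (4, 8, 16, 16).
# reverse_reshape(32, 0, -1) matches newshape against the input shape from the
# right: -1 is inferred, 0 copies the second-from-right dimension (16), and 32
# is taken as given, which should yield (32, 16, 16).
print(mod["main"].ret_type)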
Example #2
import tvm
from tvm import relay


def before():
    # Same graph as Example #1, but wrapped in an IRModule.
    x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
    w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
    y = relay.nn.conv2d(x, w, padding=(1, 1))
    y = relay.reshape(y, newshape=(1, 16, -1))
    y = relay.reshape(y, newshape=(4, 8, -1, 16))
    y = relay.reverse_reshape(y, newshape=(32, 0, -1))
    return tvm.IRModule.from_expr(y)
Example #3
import tvm
from tvm import relay


def symbolic():
    # Same reshape chain, but with a symbolic batch dimension.
    b = tvm.te.size_var("b")
    x = relay.var("x", shape=(b, 16, 16, 16), dtype="float32")
    w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
    y = relay.nn.conv2d(x, w, padding=(1, 1))
    y = relay.reshape(y, newshape=(1, 16, -1))
    y = relay.reshape(y, newshape=(4, 8, -1, 16))
    y = relay.reverse_reshape(y, newshape=(32, 0, -1))
    return relay.Function([x, w], y)
Example #4
    # Variant written against an older Relay API (relay.ir_pass.infer_type,
    # ctx_list, .asnumpy()); compare with the newer variant below.
    def verify_reverse_reshape(shape, newshape, oshape):
        x = relay.var("x", relay.TensorType(shape, "float32"))
        z = relay.reverse_reshape(x, newshape=newshape)
        zz = relay.ir_pass.infer_type(z)
        assert "newshape=" in z.astext()
        assert zz.checked_type == relay.ty.TensorType(oshape, "float32")

        func = relay.Function([x], z)
        x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32")
        ref_res = np.reshape(x_data, oshape)
        for target, ctx in ctx_list():
            for kind in ["graph", "debug"]:
                intrp = relay.create_executor(kind, ctx=ctx, target=target)
                op_res = intrp.evaluate(func)(x_data)
                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)
Example #5
    def verify_reverse_reshape(executor_kind, shape, newshape, oshape):
        x = relay.var("x", relay.TensorType(shape, "float32"))
        z = relay.reverse_reshape(x, newshape=newshape)
        zz = run_infer_type(z)
        assert "newshape=" in z.astext()
        assert zz.checked_type == relay.ty.TensorType(oshape, "float32")

        func = relay.Function([x], z)
        x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32")
        ref_res = np.reshape(x_data, oshape)
        for target, dev in tvm.testing.enabled_targets():
            op_res = relay.create_executor(
                executor_kind, device=dev, target=target
            ).evaluate(func)(x_data)
            tvm.testing.assert_allclose(op_res.numpy(), ref_res, rtol=1e-5)
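For reference, a couple of illustrative calls to this helper (the shape tuples and the "graph" executor kind are chosen here for illustration, not taken from the original test file); the expected output shapes follow from matching newshape against the input shape from the right, with 0 copying a dimension and -1 being inferred:

    # (2, 3, 4) with newshape (4, 0, 2): matched from the right, 0 keeps the 3,
    # giving (4, 3, 2).
    verify_reverse_reshape("graph", (2, 3, 4), (4, 0, 2), (4, 3, 2))
    # (2, 3, 4) with newshape (0, -1): 0 keeps the 3, -1 becomes 24 / 3 = 8,
    # and reversing back gives (3, 8).
    verify_reverse_reshape("graph", (2, 3, 4), (0, -1), (3, 8))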
Example #6
def test_vm_reshape_tensor():
    x_np = np.random.uniform(size=(8, 16)).astype("float32")
    x = relay.var("x", shape=(8, 16), dtype="float32")
    y = relay.reshape(x, [-1, 4, 8])
    mod = tvm.IRModule()
    mod["main"] = relay.Function([x], y)
    with tvm.transform.PassContext(opt_level=3):
        exec = relay.vm.compile(mod, "llvm")
    assert "reshape_tensor" in exec.bytecode
    check_result([x_np], x_np.reshape([4, 4, 8]), mod)

    x = relay.var("x", shape=(8, 16), dtype="float32")
    y = relay.reshape(x, [16, -1])
    y = relay.reverse_reshape(y, [-1, 4, 0])
    mod = tvm.IRModule()
    mod["main"] = relay.Function([x], y)
    with tvm.transform.PassContext(opt_level=3):
        exec = relay.vm.compile(mod, "llvm")
    assert exec.bytecode.count("reshape_tensor") == 1
    check_result([x_np], x_np.reshape([4, 4, 8]), mod)

    # reshape with symbolic/any shape
    for n in [tvm.tir.Any(), tvm.te.size_var("n")]:
        x = relay.var("x", shape=(n, 16), dtype="float32")
        y = relay.reshape(x, [-1, 4])
        y = relay.reshape(y, [0, 2, -1])
        mod = tvm.IRModule()
        mod["main"] = relay.Function([x], y)
        with tvm.transform.PassContext(opt_level=3):
            exec = relay.vm.compile(mod, "llvm")
        assert exec.bytecode.count("reshape_tensor") == 1
        check_result([x_np], x_np.reshape([32, 2, 2]), mod)

    # dyn.reshape: the target shape is passed in as a runtime tensor.
    x = relay.var("x", shape=(8, 16), dtype="float32")
    y = relay.var("y", shape=(3, ), dtype="int32")
    z = relay.reshape(x, [-1, 4, 8])
    z = relay.reshape(z, y)
    mod = tvm.IRModule()
    mod["main"] = relay.Function([x, y], z)
    with tvm.transform.PassContext(opt_level=3):
        exec = relay.vm.compile(mod, "llvm")
    assert exec.bytecode.count("reshape_tensor") == 2
    assert "reshape_tensor" in exec.bytecode
    y_np = np.array([8, 2, 8]).astype("int32")
    check_result([x_np, y_np], x_np.reshape([8, 2, 8]), mod)
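check_result is not defined in this excerpt; it comes from the surrounding VM test utilities. A minimal stand-in, assuming all it has to do is compile the module with the Relay VM, run it on CPU, and compare the output against the expected NumPy array (my sketch, not TVM's actual helper):

import tvm
import tvm.testing
from tvm import relay


def check_result(args, expected, mod, target="llvm"):
    # Compile with the Relay VM and run "main" on CPU.
    with tvm.transform.PassContext(opt_level=3):
        exe = relay.vm.compile(mod, target)
    vm = tvm.runtime.vm.VirtualMachine(exe, tvm.cpu())
    out = vm.run(*args)
    tvm.testing.assert_allclose(out.numpy(), expected, rtol=1e-5)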