Example #1
def test_reshape_like_infer_type():
    # concrete shape
    x = relay.var("x", relay.TensorType((1, 2, 3), "float32"))
    y = relay.var("y", relay.TensorType((1, 6), "float32"))
    z = relay.reshape_like(x, y)
    zz = run_infer_type(z)
    assert zz.checked_type == relay.TensorType((1, 6), "float32")

    # symbolic shape
    n, c, h, w = te.size_var("n"), 2, 3, te.size_var("w")
    x = relay.var("x", relay.TensorType((n, c, h, w), "float32"))
    y = relay.var("y", relay.TensorType((1, 8, 8), "float32"))
    z = relay.reshape_like(x, y)
    zz = run_infer_type(z)
    assert zz.checked_type == relay.TensorType((1, 8, 8), "float32")
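The run_infer_type helper above comes from TVM's own test utilities; outside the test suite, the same type check can be reproduced by wrapping the expression in an IRModule and running the InferType pass. A minimal standalone sketch (assuming a recent TVM build):

import tvm
from tvm import relay

x = relay.var("x", relay.TensorType((1, 2, 3), "float32"))
y = relay.var("y", relay.TensorType((1, 6), "float32"))
z = relay.reshape_like(x, y)
mod = tvm.IRModule.from_expr(relay.Function([x, y], z))
mod = relay.transform.InferType()(mod)
# The inferred return type follows the shape of y, not x.
assert mod["main"].ret_type == relay.TensorType((1, 6), "float32")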
Example #2
def test_reshape_like_infer_type():
    # concrete shape
    x = relay.var("x", relay.TensorType((1, 2, 3), "float32"))
    y = relay.var("y", relay.TensorType((1,6), "float32"))
    z = relay.reshape_like(x, y)
    zz = relay.ir_pass.infer_type(z)
    assert zz.checked_type == relay.TensorType((1, 6), "float32")

    # symbolic shape
    n, c, h, w = tvm.var("n"), 2, 3, tvm.var("w")
    x = relay.var("x", relay.TensorType((n, c, h, w), "float32"))
    y = relay.var("y", relay.TensorType((1, 8, 8), "float32"))
    z = relay.reshape_like(x, y)
    zz = relay.ir_pass.infer_type(z)
    assert zz.checked_type == relay.TensorType((1, 8, 8), "float32")
Example #3
def test_concretize_reshape_like_attrs():
    data = relay.var("data", shape=(2, 3, 4), dtype="float32")
    shape_like = relay.var("shape_like", shape=(6, 2, 2), dtype="float32")
    expr = relay.reshape_like(data, shape_like, lhs_begin=2, rhs_begin=1)

    expected = run_infer_type(relay.reshape(data, (2, 3, 2, 2)))
    actual = run_opt_pass(expr, relay.transform.SimplifyExpr())
    assert tvm.ir.structural_equal(actual, expected)
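The lhs_begin/rhs_begin attributes slice which axes take part in the reshape: dimensions of data in [lhs_begin, lhs_end) are replaced by dimensions of shape_like in [rhs_begin, rhs_end), while the remaining axes of data are kept. A rough plain-Python sketch (not TVM code) of how the expected (2, 3, 2, 2) shape above is composed:

# Shape composition behind the test above: keep data dims before lhs_begin,
# splice in shape_like dims from rhs_begin onward (lhs_end/rhs_end default
# to the end of the respective shapes).
data_shape, shape_like_shape = (2, 3, 4), (6, 2, 2)
lhs_begin, rhs_begin = 2, 1
out_shape = data_shape[:lhs_begin] + shape_like_shape[rhs_begin:]
assert out_shape == (2, 3, 2, 2)  # matches relay.reshape(data, (2, 3, 2, 2))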
Example #4
def test_any_reshape_like():
    mod = tvm.IRModule()
    dtype = "float32"
    data = relay.var("data", shape=(relay.Any(), 3, 10), dtype=dtype)
    shape_like = relay.var("data", shape=(relay.Any(), 5, 6), dtype=dtype)
    y = relay.reshape_like(data, shape_like)
    mod["main"] = relay.Function([data, shape_like], y)
    data_np = np.random.uniform(size=(3, 3, 10)).astype(dtype)
    shape_like_np = np.random.uniform(size=(3, 5, 6)).astype(dtype)
    check_result([data_np, shape_like_np], mod, shape_like_np.shape, assert_shape=True)
Example #5
def test_any_reshape_like():
    mod = tvm.IRModule()
    dtype = "float32"
    data = relay.var('data', shape=(relay.Any(), 3, 10), dtype=dtype)
    shape_like = relay.var('data', shape=(relay.Any(), 5, 6), dtype=dtype)
    y = relay.reshape_like(data, shape_like)
    mod["main"] = relay.Function([data, shape_like], y)
    data_np = np.random.uniform(size=(3, 3, 10)).astype(dtype)
    shape_like_np = np.random.uniform(size=(3, 5, 6)).astype(dtype)
    for kind in ["debug", "vm"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(data_np, shape_like_np)
        assert result.asnumpy().shape == shape_like_np.shape, \
            "Shape mismatch: expect %s but got %s." % (str(shape_like_np.shape), str(result.asnumpy().shape))
Example #6
    def verify_reshape_like(shape, oshape):
        x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32")
        y_data = np.random.uniform(low=-1, high=1, size=oshape).astype("float32")
        ref_res = np.reshape(x_data, y_data.shape)

        x = relay.var("x", relay.TensorType(shape, "float32"))
        y = relay.var("x", relay.TensorType(oshape, "float32"))
        z = relay.reshape_like(x, y)
        zz = run_infer_type(z)
        assert zz.checked_type == relay.ty.TensorType(ref_res.shape, "float32")

        func = relay.Function([x, y], z)

        for target, ctx in tvm.testing.enabled_targets():
            for kind in ["graph", "debug"]:
                intrp = relay.create_executor(kind, ctx=ctx, target=target)
                op_res = intrp.evaluate(func)(x_data, y_data)
                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)
Example #7
    def verify_reshape_like(shape, oshape):
        x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32")
        y_data = np.random.uniform(low=-1, high=1, size=oshape).astype("float32")
        ref_res = np.reshape(x_data, y_data.shape)

        x = relay.var("x", relay.TensorType(shape, "float32"))
        y = relay.var("x", relay.TensorType(oshape, "float32"))
        z = relay.reshape_like(x, y)
        zz = relay.ir_pass.infer_type(z)
        assert zz.checked_type == relay.ty.TensorType(ref_res.shape, "float32")

        func = relay.Function([x, y], z)

        for target, ctx in ctx_list():
            for kind in ["graph", "debug"]:
                intrp = relay.create_executor(kind, ctx=ctx, target=target)
                op_res = intrp.evaluate(func)(x_data, y_data)
                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)
Example #8
def test_reshape_like_grad():
    data = relay.var("data", shape=(2, 3, 4), dtype="float32")
    shape_like = relay.var("shape_like", shape=(6, 2, 2), dtype="float32")
    fwd_func = relay.Function([data, shape_like],
                              relay.reshape_like(data, shape_like))
    check_grad(fwd_func)
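check_grad here is presumably the numerical gradient checker exported by tvm.relay.testing; since reshape_like only rearranges elements, the gradient with respect to data is just the output gradient laid out back in data's shape, so the check is cheap. A self-contained sketch of the same test with its likely imports:

# Same test with explicit imports; assumes check_grad is the numerical
# gradient checker from tvm.relay.testing.
from tvm import relay
from tvm.relay.testing import check_grad

data = relay.var("data", shape=(2, 3, 4), dtype="float32")
shape_like = relay.var("shape_like", shape=(6, 2, 2), dtype="float32")
fwd_func = relay.Function([data, shape_like], relay.reshape_like(data, shape_like))
check_grad(fwd_func)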
Example #9
def get_net(input_shape=(1, 3, 24, 12), dtype="float32", wtype=None):
    """Get synthetic testing network.

    Parameters
    ----------
    input_shape : tuple, optional
        The input shape as (batch_size, channels, height, width).

    dtype : str, optional
        The data type for the input.

    wtype : str, optional
        The data type for weights. Defaults to `dtype`.

    Returns
    -------
    net : relay.Function
        The dataflow.
    """
    if wtype is None:
        wtype = dtype
    data = relay.var("data", shape=input_shape, dtype=dtype)
    dense_shape = [-1, input_shape[3]]
    dense = relay.nn.relu(
        relay.nn.dense(
            relay.reshape(data, dense_shape),
            relay.var("dense_weight",
                      shape=[input_shape[3], dense_shape[1]],
                      dtype=wtype),
        ))
    dense = relay.reshape_like(dense, data)
    conv_shape = [input_shape[1], input_shape[1], 3, 3]
    conv = relay.nn.softmax(
        relay.nn.conv2d(
            data,
            relay.var("conv_weight", shape=conv_shape, dtype=wtype),
            padding=1,
            kernel_size=3,
        ))
    added = relay.add(dense, conv)
    biased = layers.batch_norm_infer(relay.nn.bias_add(
        added, relay.var("bias", dtype=wtype)),
                                     name="batch_norm")
    dense = relay.nn.relu(
        relay.nn.dense(
            relay.reshape(biased, dense_shape),
            relay.var("dense2_weight",
                      shape=[input_shape[3], dense_shape[1]],
                      dtype=wtype),
        ))
    dense = relay.reshape_like(dense, data)
    conv = relay.nn.softmax(
        relay.nn.conv2d(
            biased,
            relay.var("conv2_weight", shape=conv_shape, dtype=wtype),
            padding=1,
            kernel_size=3,
        ))
    added = relay.add(dense, conv)
    args = relay.analysis.free_vars(added)
    return relay.Function(args, added)
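This helper mirrors the synthetic workload in tvm.relay.testing (the layers module providing batch_norm_infer lives there as well). A minimal usage sketch, assuming those testing utilities are available:

# Hedged usage sketch: wrap the network in a module with randomly
# initialized parameters, as tvm.relay.testing does for its workloads.
from tvm.relay.testing import create_workload

net = get_net(input_shape=(1, 3, 24, 12), dtype="float32")
mod, params = create_workload(net)
print(mod["main"])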