# Imports these snippets rely on (defined at module level in the original NNVM test files):
import numpy as np
import tvm
import tvm.testing
from tvm.contrib import graph_runtime
import topi.testing
import nnvm.symbol as sym
import nnvm.compiler
from nnvm.testing.config import ctx_list


def test_conv2d_transpose():
    x = sym.Variable("x")
    y = sym.conv2d_transpose(x,
                             channels=10,
                             kernel_size=(3, 3),
                             strides=(2, 2),
                             name="y",
                             padding=(1, 1),
                             output_padding=(2, 2))
    dtype = "float32"
    dshape = (1, 3, 18, 18)
    kshape = (3, 10, 3, 3)
    oshape = (1, 10, 37, 37)
    shape_dict = {"x": dshape}
    for target, ctx in ctx_list():
        graph, lib, _ = nnvm.compiler.build(y, target, shape_dict)
        m = graph_runtime.create(graph, lib, ctx)
        data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype))
        kernel = tvm.nd.array(np.random.uniform(size=kshape).astype(dtype))
        bias = tvm.nd.array(np.random.uniform(size=kshape[1]).astype(dtype))
        m.run(x=data, y_weight=kernel, y_bias=bias)
        out = m.get_output(0, tvm.nd.empty(oshape, dtype))
        # Reference result via topi (stride=2, padding=1); the python reference does
        # not apply output_padding, so the remaining rows/cols are zero-padded below.
        c_np = topi.testing.conv2d_transpose_nchw_python(
            data.asnumpy(), kernel.asnumpy(), 2, 1)
        c_np = c_np + bias.asnumpy().reshape(kshape[1], 1, 1)
        d_np = np.zeros(shape=oshape)
        d_np[:, :, 0:c_np.shape[2], 0:c_np.shape[3]] = c_np
        tvm.testing.assert_allclose(out.asnumpy(), d_np, rtol=1e-5)
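For reference, the spatial size of 37 in `oshape` above follows from the standard transposed-convolution size formula, out = (in - 1) * stride - 2 * padding + kernel + output_padding. A quick sanity check in plain Python (not part of the original test):

def conv2d_transpose_out_size(in_size, kernel, stride, padding, output_padding):
    # Standard transposed-convolution output-size formula.
    return (in_size - 1) * stride - 2 * padding + kernel + output_padding

# (18 - 1) * 2 - 2 * 1 + 3 + 2 == 37, matching oshape = (1, 10, 37, 37) above.
assert conv2d_transpose_out_size(18, kernel=3, stride=2, padding=1, output_padding=2) == 37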
Example #2
def test_conv2d_transpose():
    x = sym.Variable("data", shape=(1, 32, 512, 512))
    y = sym.conv2d_transpose(x, name="conv", channels=12,
                             kernel_size=(3,3), padding=(1,1), layout="NCHW")
    _, ldict = correct_layout(y)
    assert(ldict["data"][0] == "NCHW")
    assert(ldict["conv_weight"][0] == "OIHW")
    assert(ldict["conv_bias"][0] == "C")
    assert(ldict["conv"][0] == "NCHW")
Example #3
def test_conv2d_transpose():
    x = sym.Variable("data", shape=(1, 32, 512, 512))
    y = sym.conv2d_transpose(x,
                             name="conv",
                             channels=12,
                             kernel_size=(3, 3),
                             padding=(1, 1),
                             layout="NCHW")
    _, ldict = correct_layout(y)
    assert (ldict["data"][0] == "NCHW")
    assert (ldict["conv_weight"][0] == "OIHW")
    assert (ldict["conv_bias"][0] == "C")
    assert (ldict["conv"][0] == "NCHW")
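Both layout examples call `correct_layout`, a helper defined elsewhere in the NNVM test suite rather than in the snippets themselves. Below is a minimal sketch of such a helper, assuming the standard `nnvm.graph` API (`graph.create`, `Graph.apply("CorrectLayout")`, `Graph.json_attr`); the actual helper in the test suite may differ in details:

import nnvm.graph as graph

def correct_layout(symbol):
    # Build a graph from the symbol and run the CorrectLayout pass.
    g = graph.create(symbol)
    g = g.apply("CorrectLayout")
    # json_attr("layout") gives one layout string per graph entry; group
    # them by node name using the graph index.
    ldict = {}
    vlayout = g.json_attr("layout")
    entry_ptr = g.index.entry_ptr
    for i, node in enumerate(g.index.nodes):
        ldict[node["name"]] = vlayout[entry_ptr[i]:entry_ptr[i + 1]]
    return g, ldict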
Example #4
def test_conv2d_transpose():
    x = sym.Variable("x")
    y = sym.conv2d_transpose(x, channels=10, kernel_size=(3,3), strides=(2,2),
                             name="y", padding=(1,1), output_padding=(2,2))
    dtype = "float32"
    dshape = (1, 3, 18, 18)
    kshape = (3, 10, 3, 3)
    oshape = (1, 10, 37, 37)
    shape_dict = {"x": dshape}
    for target, ctx in ctx_list():
        graph, lib, _ = nnvm.compiler.build(y, target, shape_dict)
        m = graph_runtime.create(graph, lib, ctx)
        data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype))
        kernel = tvm.nd.array(np.random.uniform(size=kshape).astype(dtype))
        bias = tvm.nd.array(np.random.uniform(size=kshape[1]).astype(dtype))
        m.run(x=data, y_weight=kernel, y_bias=bias)
        out = m.get_output(0, tvm.nd.empty(oshape, dtype))
        c_np = topi.testing.conv2d_transpose_nchw_python(
            data.asnumpy(), kernel.asnumpy(), 2, 1)
        c_np = c_np + bias.asnumpy().reshape(kshape[1], 1, 1)
        d_np = np.zeros(shape=oshape)
        d_np[:,:,0:c_np.shape[2],0:c_np.shape[3]] = c_np
        tvm.testing.assert_allclose(out.asnumpy(), d_np, rtol=1e-5)
Example #5
def check(in_shape, out_shape, **kwargs):
    # infer_shape is a helper defined in the NNVM test module; it runs graph
    # shape inference and returns a dict of node name -> inferred shapes.
    x = sym.Variable("x", shape=in_shape)
    y = sym.conv2d_transpose(x, name="y", **kwargs)
    sdict = infer_shape(y)
    assert(tuple(sdict["y"][0]) == tuple(out_shape))
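A hypothetical invocation of `check` (not from the original test), reusing the shapes and parameters of Example #1 with the default NCHW layout; each spatial dimension works out to (18 - 1) * 2 - 2 * 1 + 3 + 2 = 37:

check((1, 3, 18, 18), (1, 10, 37, 37),
      channels=10, kernel_size=(3, 3), strides=(2, 2),
      padding=(1, 1), output_padding=(2, 2))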