Code Example #1
File: squeezenet.py  Project: LANHUIYING/tvm
def get_symbol(num_classes, version, **kwargs):
    """Get symbol of SqueezeNet

    Parameters
    ----------
    num_classes: int
        The number of classification results

    version : str
        SqueezeNet version; only "1.1" is currently supported
    """
    assert version == '1.1', ("Unsupported SqueezeNet version {version}:"
                              "1.1 expected".format(version=version))
    net = sym.Variable("data")

    net = sym.conv2d(net, channels=64, kernel_size=(3, 3), strides=(2, 2))
    net = sym.relu(net)
    net = sym.max_pool2d(net, pool_size=(3, 3), strides=(2, 2))
    net = _make_fire(net, 16, 64, 64)
    net = _make_fire(net, 16, 64, 64)
    net = sym.max_pool2d(net, pool_size=(3, 3), strides=(2, 2))
    net = _make_fire(net, 32, 128, 128)
    net = _make_fire(net, 32, 128, 128)
    net = sym.max_pool2d(net, pool_size=(3, 3), strides=(2, 2))
    net = _make_fire(net, 48, 192, 192)
    net = _make_fire(net, 48, 192, 192)
    net = _make_fire(net, 64, 256, 256)
    net = _make_fire(net, 64, 256, 256)

    net = sym.dropout(net, rate=0.5)
    net = sym.conv2d(net, channels=num_classes, kernel_size=(1, 1))
    net = sym.relu(net)
    net = sym.global_avg_pool2d(net)
    return sym.softmax(net, axis=1)
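The returned symbol can be compiled in the same way as in the test snippets below. A minimal sketch, assuming 1000 classes, a batch-1 NCHW input of shape (1, 3, 224, 224) (the input name "data" comes from the sym.Variable above), and an llvm target; all of these values are illustrative, not part of the original file:

import nnvm.compiler

# Illustrative settings: num_classes, input shape, and target are assumptions.
net = get_symbol(num_classes=1000, version='1.1')
shape_dict = {"data": (1, 3, 224, 224)}
graph, lib, params = nnvm.compiler.build(net, "llvm", shape_dict)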
Code Example #2
File: test_op_fusion.py  Project: LANHUIYING/tvm
def test_injective_conv2d():
    channels = 16
    data = sym.Variable(name="data")
    pool = sym.global_avg_pool2d(data=data)
    weight = sym.reshape(pool, shape=[1, channels, 1, 1])
    residual = sym.conv2d(data=data, kernel_size=(3, 3), channels=channels, padding=(1, 1),
                          layout="NCHW", kernel_layout="OIHW", use_bias=False, name="conv")
    net = weight * data + residual
    size = 56
    dtype="float32"
    dshape = (1, channels, size, size)
    kshape = (channels, channels, 3, 3)
    oshape = dshape
    shape_dict = {"data": dshape}

    for target, ctx in ctx_list():
        graph, lib, _ = nnvm.compiler.build(net, target, shape_dict)
        # data, global_avg_pool, conv weight, conv op, fused elemwise add
        assert graph.index.num_nodes == 5

        data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype))
        kernel = tvm.nd.array(np.random.uniform(size=kshape).astype(dtype))
        m = graph_runtime.create(graph, lib, ctx)
        m.run(data=data, conv_weight=kernel)
        # get output
        out = m.get_output(0, tvm.nd.empty(oshape, dtype))
        residual = topi.testing.conv2d_nchw_python(
            data.asnumpy(), kernel.asnumpy(), (1, 1), 'SAME')
        weight = np.mean(data.asnumpy(), axis=(2, 3))
        c_np = weight[:, :, np.newaxis, np.newaxis] * data.asnumpy() + residual
        tvm.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
Code Example #3
File: test_top_level2.py  Project: LANHUIYING/tvm
def test_global_avg_pool2d():
    x = sym.Variable("x")
    y = sym.global_avg_pool2d(x, name="y")
    dtype = "float32"
    dshape = (1, 1024, 7, 7)
    oshape = (1, 1024, 1, 1)
    shape_dict = {"x": dshape}
    for target, ctx in ctx_list():
        graph, lib, _ = nnvm.compiler.build(y, target, shape_dict)
        m = graph_runtime.create(graph, lib, ctx)
        data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype))
        m.run(x=data)
        out = m.get_output(0, tvm.nd.empty(oshape, dtype))
        b_np = np.mean(data.asnumpy(), axis=(2, 3), keepdims=True)
        tvm.testing.assert_allclose(out.asnumpy(), b_np, rtol=1e-5)
Code Example #4
File: test_top_level2.py  Project: yljason8888/nnvm
def test_global_avg_pool2d():
    x = sym.Variable("x")
    y = sym.global_avg_pool2d(x, name="y")
    dtype = "float32"
    dshape = (1, 1024, 7, 7)
    oshape = (1, 1024, 1, 1)
    shape_dict = {"x": dshape}
    for target, ctx in ctx_list():
        graph, lib, _ = nnvm.compiler.build(y, target, shape_dict)
        m = graph_runtime.create(graph, lib, ctx)
        data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype))
        m.run(x=data)
        out = m.get_output(0, tvm.nd.empty(oshape, dtype))
        b_np = np.mean(data.asnumpy(), axis=(2, 3), keepdims=True)
        np.testing.assert_allclose(out.asnumpy(), b_np, rtol=1e-5)
Code Example #5
def test_injective_conv2d():
    channels = 16
    data = sym.Variable(name="data")
    pool = sym.global_avg_pool2d(data=data)
    weight = sym.reshape(pool, shape=[1, channels, 1, 1])
    residual = sym.conv2d(data=data,
                          kernel_size=(3, 3),
                          channels=channels,
                          padding=(1, 1),
                          layout="NCHW",
                          kernel_layout="OIHW",
                          use_bias=False,
                          name="conv")
    net = weight * data + residual
    size = 56
    dtype = "float32"
    dshape = (1, channels, size, size)
    kshape = (channels, channels, 3, 3)
    oshape = dshape
    shape_dict = {"data": dshape}

    for target, ctx in ctx_list():
        graph, lib, _ = nnvm.compiler.build(net, target, shape_dict)
        # data, global_avg_pool, conv weight, conv op, fused elemwise add
        assert graph.index.num_nodes == 5

        data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype))
        kernel = tvm.nd.array(np.random.uniform(size=kshape).astype(dtype))
        m = graph_runtime.create(graph, lib, ctx)
        m.run(data=data, conv_weight=kernel)
        # get output
        out = m.get_output(0, tvm.nd.empty(oshape, dtype))
        residual = topi.testing.conv2d_nchw_python(data.asnumpy(),
                                                   kernel.asnumpy(), (1, 1),
                                                   'SAME')
        weight = np.mean(data.asnumpy(), axis=(2, 3))
        c_np = weight[:, :, np.newaxis, np.newaxis] * data.asnumpy() + residual
        np.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
Code Example #6
def alter_global_avg_pool2d_layout(attrs, inputs, tinfos):
    # Copy the existing attributes and rewrite the layout to the packed
    # NCHW16c form before re-emitting the operator.
    new_attrs = {k: attrs[k] for k in attrs.keys()}
    new_attrs["layout"] = "NCHW16c"
    return sym.global_avg_pool2d(inputs[0], **new_attrs)
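For the AlterOpLayout pass to pick up a rewrite like this, the callback is typically registered against the operator name. A minimal sketch, assuming NNVM's registry exposes register_alter_op_layout for this operator; the hook-up shown here mirrors how conv2d layout alteration is registered and is an assumption, not part of the snippet above:

from nnvm.top import registry as reg

# Assumed registration: the AlterOpLayout pass would then invoke the function
# above with (attrs, inputs, tinfos) for each global_avg_pool2d node it visits.
reg.register_alter_op_layout("global_avg_pool2d")(alter_global_avg_pool2d_layout)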