Example #1
def test_fuse_conv2d_elu():
    def elu(data):
        return -0.5 * sym.relu(1 - sym.exp(data)) + sym.relu(data)

    def get_sym(out_channel):
        data = sym.Variable(name="data")
        data = sym.conv2d(data=data, kernel_size=(3,3), channels=out_channel, padding=(1, 1),
                          layout="NCHW", kernel_layout="OIHW", use_bias=True)
        data = sym.batch_norm(data)
        data = elu(data)
        return data

    in_channel = 8
    out_channel = 16
    size = 64
    dshape = (1, in_channel, size, size)
    oshape = (1, out_channel, size, size)
    data = np.random.uniform(-1, 1, dshape).astype(np.float32)

    for target, ctx in ctx_list():
        sym1 = get_sym(out_channel)
        sym2 = get_sym(out_channel)
        _, params1 = utils.create_workload(sym1, 1, dshape[1:], seed=0)
        _, params2 = utils.create_workload(sym2, 1, dshape[1:], seed=0)
        output1, g1 = build_and_run(sym1, params1, data, oshape, target, ctx, opt_level=2)
        output2, g2 = build_and_run(sym2, params2, data, oshape, target, ctx, opt_level=0)
        np.testing.assert_allclose(output1, output2, rtol=1e-5, atol=1e-5)
        # data, conv weight, bias, batch norm gamma, batch norm beta, conv op
        assert g1.index.num_nodes == 6
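
The helpers `ctx_list` and `build_and_run` used above are defined elsewhere in the test file, and the snippet assumes the usual aliases (`sym` for `nnvm.symbol`, `utils` for `nnvm.testing.utils`, `np` for `numpy`). A minimal sketch of what `build_and_run` might look like under the NNVM-era API; the original helper may differ:

import nnvm.compiler
import tvm
from tvm.contrib import graph_runtime

def build_and_run(net, params, data, out_shape, target, ctx, opt_level=2):
    # Hypothetical reconstruction: compile at the requested opt_level and run once.
    with nnvm.compiler.build_config(opt_level=opt_level):
        graph, lib, params = nnvm.compiler.build(
            net, target, shape={"data": data.shape}, params=params)
    module = graph_runtime.create(graph, lib, ctx)
    module.set_input(**params)
    module.set_input("data", data)
    module.run()
    out = module.get_output(0, tvm.nd.empty(out_shape))
    # Returning the compiled graph lets the test inspect graph.index.num_nodes
    # after operator fusion.
    return out.asnumpy(), graph

`ctx_list` typically comes from `nnvm.testing.config` and yields a (target, ctx) pair for each enabled backend.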
Example #2
def test_fuse_conv2d_elu():
    def elu(data):
        return -0.5 * sym.relu(1 - sym.exp(data)) + sym.relu(data)

    def get_sym(out_channel):
        data = sym.Variable(name="data")
        data = sym.conv2d(data=data, kernel_size=(3,3), channels=out_channel, padding=(1, 1),
                          layout="NCHW", kernel_layout="OIHW", use_bias=True)
        data = sym.batch_norm(data)
        data = elu(data)
        return data

    in_channel = 8
    out_channel = 16
    size = 64
    dshape = (1, in_channel, size, size)
    oshape = (1, out_channel, size, size)
    data = np.random.uniform(-1, 1, dshape).astype(np.float32)

    for target, ctx in ctx_list():
        sym1 = get_sym(out_channel)
        sym2 = get_sym(out_channel)
        _, params1 = utils.create_workload(sym1, 1, dshape[1:], seed=0)
        _, params2 = utils.create_workload(sym2, 1, dshape[1:], seed=0)
        output1, g1 = build_and_run(sym1, params1, data, oshape, target, ctx, opt_level=2)
        output2, g2 = build_and_run(sym2, params2, data, oshape, target, ctx, opt_level=0)
        tvm.testing.assert_allclose(output1, output2, rtol=1e-5, atol=1e-5)
        # data, conv weight, bias, batch norm gamma, batch norm beta, conv op
        assert g1.index.num_nodes == 6
Example #3
def get_network(name, batch_size):
    """Get the symbol definition and random weight of a network"""
    input_shape = (batch_size, 3, 224, 224)
    output_shape = (batch_size, 1000)

    if "resnet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.resnet.get_workload(num_layers=n_layer,
                                                       batch_size=batch_size)
    elif "vgg" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.vgg.get_workload(num_layers=n_layer,
                                                    batch_size=batch_size)
    elif name == 'mobilenet':
        net, params = nnvm.testing.mobilenet.get_workload(
            batch_size=batch_size)
    elif name == 'squeezenet_v1.1':
        net, params = nnvm.testing.squeezenet.get_workload(
            batch_size=batch_size, version='1.1')
    elif name == 'inception_v3':
        input_shape = (1, 3, 299, 299)
        net, params = nnvm.testing.inception_v3.get_workload(
            batch_size=batch_size)
    elif name == 'custom':
        # an example of a custom network
        from nnvm.testing import utils
        net = nnvm.sym.Variable('data')
        net = nnvm.sym.conv2d(net,
                              channels=4,
                              kernel_size=(3, 3),
                              padding=(1, 1))
        net = nnvm.sym.flatten(net)
        net = nnvm.sym.dense(net, units=1000)
        net, params = utils.create_workload(net, batch_size, (3, 224, 224))
    elif name == 'mxnet':
        # an example of an mxnet model
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        net, params = nnvm.frontend.from_mxnet(block)
        net = nnvm.sym.softmax(net)
    elif name == "yolo":
        from flextensor.testing.net.yolo_v1 import get_workload
        data_shape = (3, 448, 448)
        input_shape = (batch_size, *data_shape)
        output_shape = (batch_size, 1470)
        # NOTE: `dtype` is not defined in this function; it is presumably a
        # module-level constant (e.g. "float32") in the original file.
        net, params = get_workload(batch_size=batch_size,
                                   image_shape=data_shape,
                                   dtype=dtype)
    elif name == "overfeat":
        from flextensor.testing.net.overfeat import get_workload
        data_shape = (3, 192, 192)
        input_shape = (batch_size, *data_shape)
        output_shape = (batch_size, 1000)
        net, params = get_workload(batch_size=batch_size,
                                   image_shape=data_shape,
                                   dtype=dtype)
    else:
        raise ValueError("Unsupported network: " + name)

    return net, params, input_shape, output_shape
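
A short usage sketch (illustrative values; assumes `nnvm` and `nnvm.compiler` are imported as in the rest of the file):

# Illustrative only: fetch a supported network and compile it for a CPU target.
net, params, input_shape, output_shape = get_network("resnet-18", batch_size=1)
with nnvm.compiler.build_config(opt_level=3):
    graph, lib, params = nnvm.compiler.build(
        net, target="llvm", shape={"data": input_shape}, params=params)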
Example #4
def get_network(name, batch_size):
    """Get the symbol definition and random weight of a network"""
    input_shape = (batch_size, 3, 224, 224)
    output_shape = (batch_size, 1000)

    if name == 'resnet-18':
        net, params = nnvm.testing.resnet.get_workload(num_layers=18, batch_size=batch_size)
    elif name == 'mobilenet':
        net, params = nnvm.testing.mobilenet.get_workload(batch_size=batch_size)
    elif name == 'squeezenet v1.1':
        net, params = nnvm.testing.squeezenet.get_workload(batch_size=batch_size, version='1.1')
    elif name == 'vgg-16':
        net, params = nnvm.testing.vgg.get_workload(num_layers=16, batch_size=batch_size)
    elif name == 'custom':
        # an example of a custom network
        from nnvm.testing import utils
        net = nnvm.sym.Variable('data')
        net = nnvm.sym.conv2d(net, channels=4, kernel_size=(3,3), padding=(1,1))
        net = nnvm.sym.flatten(net)
        net = nnvm.sym.dense(net, units=1000)
        net, params = utils.create_workload(net, batch_size, (3, 224, 224))
    elif name == 'mxnet':
        # an example of an mxnet model
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        net, params = nnvm.frontend.from_mxnet(block)
        net = nnvm.sym.softmax(net)
    else:
        raise ValueError("Unsupported network: " + name)

    return net, params, input_shape, output_shape
Example #5
    def test_duplex_data_transfer():
        """ This unittest tests duplex communication between the host and
        accelerator device. The network is as following:
                    data
                      |
                    conv2d  (acc)
                      |
                 batch_norm (cpu)
                      |
                    conv2d  (acc)
        """
        out_channels = 16
        data = symbol.Variable(name="data")
        simple_net = symbol.conv2d(data=data, kernel_size=(3, 3),
                                   channels=out_channels, padding=(1, 1),
                                   use_bias=False)
        simple_net = symbol.batch_norm(simple_net)
        simple_net = symbol.conv2d(data=simple_net, kernel_size=(3, 3),
                                   channels=out_channels, padding=(1, 1),
                                   use_bias=False)

        batch_size = 1
        data_shape = (batch_size, 3, 224, 224)
        shape_dict = {"data": data_shape}
        net, params = utils.create_workload(simple_net, batch_size,
                                            data_shape[1:])
        params["data"] = data = np.random.uniform(-1, 1,
                                                  size=data_shape).astype(
            "float32")

        check_graph(net, ['batch_norm'], shape_dict, params)
Example #6
def get_workload(batch_size=1, num_classes=1000, version='1.0',
                 image_shape=(3, 224, 224), dtype="float32", **kwargs):
    """Get benchmark workload for SqueezeNet

    Parameters
    ----------
    batch_size : int
        The batch size used in the model

    num_classes : int, optional
        Number of classes

    version : str, optional
        The SqueezeNet version, either "1.0" or "1.1"

    image_shape : tuple, optional
        The input image shape

    dtype : str, optional
        The data type

    kwargs : dict
        Extra arguments

    Returns
    -------
    net : nnvm.Symbol
        The computational graph

    params : dict of str to NDArray
        The parameters.
    """
    net = get_symbol(num_classes=num_classes, version=version, **kwargs)
    return create_workload(net, batch_size, image_shape, dtype)
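
A brief usage sketch, e.g. when this module is imported as `nnvm.testing.squeezenet`:

import nnvm.compiler
# Illustrative only: create the SqueezeNet v1.1 workload and compile it for CPU.
net, params = get_workload(batch_size=1, version='1.1')
graph, lib, params = nnvm.compiler.build(
    net, "llvm", shape={"data": (1, 3, 224, 224)}, params=params)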
Example #7
def get_network(name, batch_size):
    """Get the symbol definition and random weight of a network"""
    input_shape = (batch_size, 3, 224, 224)
    # output_shape not really used; so not changed here
    output_shape = (batch_size, 1000)

    if "resnet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.resnet.get_workload(num_layers=n_layer,
                                                       batch_size=batch_size)
    elif "vgg" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.vgg.get_workload(num_layers=n_layer,
                                                    batch_size=batch_size)
    elif name == 'mobilenet':
        net, params = nnvm.testing.mobilenet.get_workload(
            batch_size=batch_size)
    elif name == 'squeezenet_v1.1':
        net, params = nnvm.testing.squeezenet.get_workload(
            batch_size=batch_size, version='1.1')
    elif name == 'inception_v3':
        input_shape = (1, 3, 299, 299)
        net, params = nnvm.testing.inception_v3.get_workload(
            batch_size=batch_size)
    elif name == 'custom':
        # an example of a custom network
        from nnvm.testing import utils
        net = nnvm.sym.Variable('data')
        net = nnvm.sym.conv2d(net,
                              channels=4,
                              kernel_size=(3, 3),
                              padding=(1, 1))
        net = nnvm.sym.flatten(net)
        net = nnvm.sym.dense(net, units=1000)
        net, params = utils.create_workload(net, batch_size, (3, 224, 224))
    elif name == 'mxnet':
        # an example of an mxnet model
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        net, params = nnvm.frontend.from_mxnet(block)
        net = nnvm.sym.softmax(net)
    elif name == 'caffe_squeezenet':
        import coremltools
        coreml_model = coremltools.models.MLModel(
            os.path.join('../models', 'SqueezeNet_v1.1.mlmodel'))
        net, params = nnvm.frontend.from_coreml(coreml_model)
    else:
        raise ValueError("Unsupported network: " + name)

    return net, params, input_shape, output_shape
Example #8
def get_network(name, batch_size):
    """Get the symbol definition and random weight of a network"""
    input_shape = (batch_size, 3, 224, 224)
    output_shape = (batch_size, 1000)

    if "resnet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.resnet.get_workload(num_layers=n_layer,
                                                       batch_size=batch_size)
    elif "vgg" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.vgg.get_workload(num_layers=n_layer,
                                                    batch_size=batch_size)
    elif name == 'mobilenet':
        net, params = nnvm.testing.mobilenet.get_workload(
            batch_size=batch_size)
    elif name == 'squeezenet_v1.1':
        net, params = nnvm.testing.squeezenet.get_workload(
            batch_size=batch_size, version='1.1')
    elif name == 'inception_v3':
        input_shape = (1, 3, 299, 299)
        net, params = nnvm.testing.inception_v3.get_workload(
            batch_size=batch_size)
    elif name == 'custom':
        # an example of a custom network
        from nnvm.testing import utils
        net = nnvm.sym.Variable('data')
        net = nnvm.sym.conv2d(net,
                              channels=4,
                              kernel_size=(3, 3),
                              padding=(1, 1))
        net = nnvm.sym.flatten(net)
        net = nnvm.sym.dense(net, units=1000)
        net, params = utils.create_workload(net, batch_size, (3, 224, 224))
    elif name == 'mxnet':
        # an example of an mxnet model
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        net, params = nnvm.frontend.from_mxnet(block)
        net = nnvm.sym.softmax(net)
    else:
        onnx_model = onnx.load_model(
            'out/models/resnet50_conv_bs1_0/model.onnx')
        net, params = nnvm.frontend.from_onnx(onnx_model)
        output_shape = (batch_size, 6, 112, 112)

    return net, params, input_shape, output_shape
Example #9
def test_duplex_data_transfer(device, target):
    R""" This unittest tests duplex communication between the host and
    accelerator device. The network is as following:
                data
                  |
                conv2d  (acc)
                  |
             batch_norm (cpu)
                  |
                conv2d  (acc)
    """
    if not tvm.module.enabled(device):
        print("Skip test because %s is not enabled." % device)
        return

    out_channels = 16
    data = symbol.Variable(name="data")
    simple_net = symbol.conv2d(data=data,
                               kernel_size=(3, 3),
                               channels=out_channels,
                               padding=(1, 1),
                               use_bias=False)
    simple_net = symbol.batch_norm(simple_net)
    simple_net = symbol.conv2d(data=simple_net,
                               kernel_size=(3, 3),
                               channels=out_channels,
                               padding=(1, 1),
                               use_bias=False)

    batch_size = 1
    data_shape = (batch_size, 3, 224, 224)
    shape_dict = {"data": data_shape}
    net, params = utils.create_workload(simple_net, batch_size, data_shape[1:])
    params["data"] = data = np.random.uniform(
        -1, 1, size=data_shape).astype("float32")

    target = {"cpu": "llvm", device: target}
    op_name_device = {
        "conv2d": device,
        "batch_norm": "cpu",
        "broadcast_add": "cpu",
        "elemwise_mul": "cpu"
    }
    fallback_device = tvm.context("cpu")
    check_graph(net, target, op_name_device, fallback_device, shape_dict,
                params)
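
An invocation sketch; the device/target pair is a hypothetical example, and any enabled accelerator backend could be substituted:

if __name__ == "__main__":
    # e.g. run the duplex-transfer check with OpenCL as the accelerator device
    test_duplex_data_transfer("opencl", "opencl")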
Example #10
File: tvm_conv.py, Project: shinh/test
def get_network(name, batch_size):
    """Get the symbol definition and random weight of a network"""
    input_shape = (batch_size, 3, 224, 224)
    output_shape = (batch_size, 1000)

    if "resnet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.resnet.get_workload(num_layers=n_layer, batch_size=batch_size)
    elif "vgg" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.vgg.get_workload(num_layers=n_layer, batch_size=batch_size)
    elif name == 'mobilenet':
        net, params = nnvm.testing.mobilenet.get_workload(batch_size=batch_size)
    elif name == 'squeezenet_v1.1':
        net, params = nnvm.testing.squeezenet.get_workload(batch_size=batch_size, version='1.1')
    elif name == 'inception_v3':
        input_shape = (1, 3, 299, 299)
        net, params = nnvm.testing.inception_v3.get_workload(batch_size=batch_size)
    elif name == 'custom':
        # an example of a custom network
        from nnvm.testing import utils
        net = nnvm.sym.Variable('data')
        net = nnvm.sym.conv2d(net, channels=4, kernel_size=(3,3), padding=(1,1))
        net = nnvm.sym.flatten(net)
        net = nnvm.sym.dense(net, units=1000)
        net, params = utils.create_workload(net, batch_size, (3, 224, 224))
    elif name == 'mxnet':
        # an example of an mxnet model
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        net, params = nnvm.frontend.from_mxnet(block)
        net = nnvm.sym.softmax(net)
    else:
        onnx_model = onnx.load_model(
            'out/models/resnet50_conv_bs1_0/model.onnx')
        net, params = nnvm.frontend.from_onnx(onnx_model)
        output_shape = (batch_size, 6, 112, 112)

    return net, params, input_shape, output_shape
Example #11
def get_workload(batch_size=1,
                 num_classes=1000,
                 version='1.0',
                 image_shape=(3, 224, 224),
                 dtype="float32",
                 **kwargs):
    """Get benchmark workload for SqueezeNet

    Parameters
    ----------
    batch_size : int
        The batch size used in the model

    num_classes : int, optional
        Number of classes

    version : str, optional
        The SqueezeNet version, either "1.0" or "1.1"

    image_shape : tuple, optional
        The input image shape

    dtype : str, optional
        The data type

    kwargs : dict
        Extra arguments

    Returns
    -------
    net : nnvm.Symbol
        The computational graph

    params : dict of str to NDArray
        The parameters.
    """
    net = get_symbol(num_classes=num_classes, version=version, **kwargs)
    return create_workload(net, batch_size, image_shape, dtype)
Example #12
def main(conv_config):
    # Define conv2d network.
    N, H, W, CO, CI, KH, KW, strides, padding = conv_configs[conv_config]
    batch_size = N
    data_shape = (N, CI, H, W)
    data = sym.Variable(name="data")
    # NOTE: `strides` is unpacked from the config above but not passed here,
    # so conv2d falls back to its default stride of (1, 1).
    simple_net = sym.conv2d(data=data,
                            kernel_size=(KH, KW),
                            channels=CO,
                            padding=padding)

    # Use cuDNN as conv2d backend.
    net, params = utils.create_workload(simple_net, batch_size, data_shape[1:])
    target = "cuda -libs=cudnn"
    graph, lib, params = nnvm.compiler.build(net,
                                             target,
                                             shape={"data": data_shape},
                                             params=params)

    ctx = tvm.context(target, 0)
    data = np.random.uniform(-1, 1, size=data_shape).astype("float32")
    module = runtime.create(graph, lib, ctx)
    module.set_input(**params)
    module.set_input("data", data)
    module.run()
    # Output spatial size assumes stride 1 with "same"-style padding; the
    # (W, H) ordering only matches NCHW output when H == W.
    out_shape = (batch_size, CO, W, H)
    out = module.get_output(0, tvm.nd.empty(out_shape))
    out_cudnn = out.asnumpy()

    print('Time cost of cuDNN conv2d operator ({}):'.format(conv_config))
    costs = []
    for _ in range(10):
        evaluator = module.module.time_evaluator("run", ctx, number=1000)
        cost = evaluator().mean
        costs.append(cost)
        print('%.8f' % cost)
    print('Mean:', '%.8f' % np.mean(costs))
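
The `conv_configs` table and the `runtime` alias (presumably `tvm.contrib.graph_runtime`) are defined outside this snippet. A hypothetical entry matching the unpacking order `(N, H, W, CO, CI, KH, KW, strides, padding)` might look like:

conv_configs = {
    # name: (N, H, W, CO, CI, KH, KW, strides, padding) -- values are illustrative
    "conv_3x3_example": (1, 56, 56, 64, 64, 3, 3, (1, 1), (1, 1)),
}
main("conv_3x3_example")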
Example #13
######################################################################
# Create a simple network
# -----------------------
# Let's create a very simple network for demonstration.
# It consists of convolution, batch normalization, and ReLU activation.

out_channels = 16
data = sym.Variable(name="data")
simple_net = sym.conv2d(data=data, kernel_size=(3, 3), channels=out_channels,
                        padding=(1, 1), use_bias=True)
simple_net = sym.batch_norm(data=simple_net)
simple_net = sym.relu(data=simple_net)

batch_size = 1
data_shape = (batch_size, 3, 224, 224)
net, params = utils.create_workload(simple_net, batch_size, data_shape[1:])

######################################################################
# Build and run with cuda backend
# -------------------------------
# We build and run this network with cuda backend, as usual.
# By setting the logging level to DEBUG, the result of NNVM graph compilation will be dumped as pseudo code.
import logging
logging.basicConfig(level=logging.DEBUG) # to dump TVM IR after fusion

target = "cuda"
graph, lib, params = nnvm.compiler.build(
    net, target, shape={"data": data_shape}, params=params)

ctx = tvm.context(target, 0)
data = np.random.uniform(-1, 1, size=data_shape).astype("float32")
Example #14
def get_network(name, batch_size, dtype='float32'):
    """Get the symbol definition and random weight of a network
    
    Parameters
    ----------
    name: str
        The name of the network, can be 'resnet-18', 'resnet-50', 'vgg-16', 'inception_v3', 'mobilenet', ...
    batch_size: int
        batch size
    dtype: str
        Data type

    Returns
    -------
    net: nnvm.symbol
        The NNVM symbol of network definition
    params: dict
        The random parameters for benchmark
    input_shape: tuple
        The shape of input tensor
    output_shape: tuple
        The shape of output tensor
    """
    input_shape = (batch_size, 3, 224, 224)
    output_shape = (batch_size, 1000)

    if name == 'mobilenet':
        net, params = nnvm.testing.mobilenet.get_workload(
            batch_size=batch_size, dtype=dtype)
    elif name == 'mobilenet_v2':
        net, params = nnvm.testing.mobilenet_v2.get_workload(
            batch_size=batch_size, dtype=dtype)
    elif name == 'inception_v3':
        input_shape = (batch_size, 3, 299, 299)
        net, params = nnvm.testing.inception_v3.get_workload(
            batch_size=batch_size, dtype=dtype)
    elif "resnet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.resnet.get_workload(num_layers=n_layer,
                                                       batch_size=batch_size,
                                                       dtype=dtype)
    elif "vgg" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.vgg.get_workload(num_layers=n_layer,
                                                    batch_size=batch_size,
                                                    dtype=dtype)
    elif "densenet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.densenet.get_workload(num_layers=n_layer,
                                                         batch_size=batch_size,
                                                         dtype=dtype)
    elif "squeezenet" in name:
        version = name.split("_v")[1]
        net, params = nnvm.testing.squeezenet.get_workload(
            batch_size=batch_size, version=version, dtype=dtype)
    elif name == 'custom':
        # an example of a custom network
        from nnvm.testing import utils
        net = nnvm.sym.Variable('data')
        net = nnvm.sym.conv2d(net,
                              channels=4,
                              kernel_size=(3, 3),
                              padding=(1, 1))
        net = nnvm.sym.flatten(net)
        net = nnvm.sym.dense(net, units=1000)
        net, params = utils.create_workload(net,
                                            batch_size, (3, 224, 224),
                                            dtype=dtype)
    elif name == 'mxnet':
        # an example of an mxnet model
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        net, params = nnvm.frontend.from_mxnet(block)
        net = nnvm.sym.softmax(net)
    else:
        raise ValueError("Unsupported network: " + name)

    return net, params, input_shape, output_shape
Example #15
    # define network
    data = sym.Variable("data")
    y1 = sym.conv2d(data=data,
                    channels=1,
                    kernel_size=(3, 3),
                    padding=(0, 0),
                    use_bias=False,
                    out_layout='NCHW')
    y2 = sym.flatten(y1)
    #y3 = sym.dense(y2, units=10, use_bias=False)
    y3 = sym.dense(y2, weight=dense_weight, use_bias=False)
    y4 = sym.softmax(y3)
    out = y4  # this stands in for the loss function

    # create workload
    net, params = create_workload(out, batch_size, image_shape, dtype)
    #print(net.debug_str())

    target = tvm.target.create('llvm')
    #target = tvm.target.create('opencl')
    with nnvm.compiler.build_config(opt_level=0):
        graph, lib, params = nnvm.compiler.build(net,
                                                 target,
                                                 shape={"data": data_shape},
                                                 params=params)

    # create random input
    #ctx = tvm.opencl()
    ctx = tvm.context("llvm", 0)
    #data = np.random.uniform(-1, 1, size=data_shape).astype("float32")
    data = train_data[:1].swapaxes(1, 3).swapaxes(2, 3).astype("float32")
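
The snippet ends after preparing a single training image as input (`dense_weight` and `train_data` are defined outside the snippet). A hedged continuation at the same indentation, assuming `batch_size` is 1 to match that single sample and that the dense layer has 10 output units as the commented-out line suggests:

    # run the compiled graph on the prepared input
    from tvm.contrib import graph_runtime
    module = graph_runtime.create(graph, lib, ctx)
    module.set_input(**params)
    module.set_input("data", data)
    module.run()
    out = module.get_output(0, tvm.nd.empty((batch_size, 10), dtype))
    print(out.asnumpy())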
Example #16
File: util.py, Project: LANHUIYING/tvm
def get_network(name, batch_size, dtype='float32'):
    """Get the symbol definition and random weight of a network
    
    Parameters
    ----------
    name: str
        The name of the network, can be 'resnet-18', 'resnet-50', 'vgg-16', 'inception_v3', 'mobilenet', ...
    batch_size: int
        batch size
    dtype: str
        Data type

    Returns
    -------
    net: nnvm.symbol
        The NNVM symbol of network definition
    params: dict
        The random parameters for benchmark
    input_shape: tuple
        The shape of input tensor
    output_shape: tuple
        The shape of output tensor
    """
    input_shape = (batch_size, 3, 224, 224)
    output_shape = (batch_size, 1000)

    if name == 'mobilenet':
        net, params = nnvm.testing.mobilenet.get_workload(batch_size=batch_size, dtype=dtype)
    elif name == 'mobilenet_v2':
        net, params = nnvm.testing.mobilenet_v2.get_workload(batch_size=batch_size, dtype=dtype)
    elif name == 'inception_v3':
        input_shape = (batch_size, 3, 299, 299)
        net, params = nnvm.testing.inception_v3.get_workload(batch_size=batch_size, dtype=dtype)
    elif "resnet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.resnet.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
    elif "vgg" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.vgg.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
    elif "densenet" in name:
        n_layer = int(name.split('-')[1])
        net, params = nnvm.testing.densenet.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
    elif "squeezenet" in name:
        version = name.split("_v")[1]
        net, params = nnvm.testing.squeezenet.get_workload(batch_size=batch_size, version=version, dtype=dtype)
    elif name == 'custom':
        # an example of a custom network
        from nnvm.testing import utils
        net = nnvm.sym.Variable('data')
        net = nnvm.sym.conv2d(net, channels=4, kernel_size=(3,3), padding=(1,1))
        net = nnvm.sym.flatten(net)
        net = nnvm.sym.dense(net, units=1000)
        net, params = utils.create_workload(net, batch_size, (3, 224, 224), dtype=dtype)
    elif name == 'mxnet':
        # an example of an mxnet model
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        net, params = nnvm.frontend.from_mxnet(block)
        net = nnvm.sym.softmax(net)
    else:
        raise ValueError("Unsupported network: " + name)

    return net, params, input_shape, output_shape