Example #1
def test_alter_op():
    """Test directly replacing an operator with a new one"""
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var('weight', shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(x, weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        y = relay.Function([x, weight], y)
        return y

    @register_alter_op_layout("nn.conv2d", level=100)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        weight = relay.multiply(weight, relay.const(2.0, "float32"))
        return relay.nn.conv2d(data, weight, **attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var('weight', shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(x, relay.multiply(weight, relay.const(2.0, "float32")),
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        y = relay.Function([x, weight], y)
        return y

    a = before()
    a = run_opt_pass(a, transform.AlterOpLayout())
    b = run_opt_pass(expected(), transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
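
# Most snippets on this page call a `run_opt_pass` helper that the examples
# themselves do not define. A minimal sketch, assumed to match the utility in
# TVM's own test suite (tests/python/relay/test_pass_alter_op_layout.py):

import tvm
from tvm import relay
from tvm.relay import analysis, transform


def run_opt_pass(expr, passes):
    """Wrap `expr` in a module, run the given passes, and return "main"."""
    passes = passes if isinstance(passes, list) else [passes]
    mod = tvm.IRModule.from_expr(expr)
    seq = tvm.transform.Sequential(passes)
    with tvm.transform.PassContext(opt_level=3):
        mod = seq(mod)
    entry = mod["main"]
    return entry if isinstance(expr, relay.Function) else entry.body
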
    def partition():
        data = relay.var("data", relay.TensorType((1, 16, 224, 224),
                                                  "float32"))
        bn_gamma = relay.var("bn_gamma", relay.TensorType((16, ), "float32"))
        bn_beta = relay.var("bn_beta", relay.TensorType((16, ), "float32"))
        bn_mmean = relay.var("bn_mean", relay.TensorType((16, ), "float32"))
        bn_mvar = relay.var("bn_var", relay.TensorType((16, ), "float32"))

        bn_output = relay.nn.batch_norm(data, bn_gamma, bn_beta, bn_mmean,
                                        bn_mvar)

        func = relay.Function([data, bn_gamma, bn_beta, bn_mmean, bn_mvar],
                              bn_output.astuple())
        mod = tvm.IRModule()
        mod["main"] = func
        op_list = ["nn.batch_norm", "nn.conv2d"]
        mod = WhiteListAnnotator(op_list, "test_compiler")(mod)

        opt_pass = tvm.transform.Sequential([
            transform.InferType(),
            transform.PartitionGraph(),
            transform.SimplifyInference(),
            transform.FoldConstant(),
            transform.AlterOpLayout(),
            transform.Inline(),
        ])

        with relay.build_config(opt_level=3):
            mod = opt_pass(mod)

        return mod
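
# `WhiteListAnnotator` is used above but not shown. A condensed sketch, assumed
# to match the pass in TVM's test_pass_partition_graph.py: it wraps every call
# to a whitelisted op in compiler_begin/compiler_end annotations so that
# PartitionGraph can offload those regions to the named external compiler.

from tvm.relay.op.annotation import compiler_begin, compiler_end


@transform.function_pass(opt_level=0)
class WhiteListAnnotator:
    def __init__(self, op_list, compiler):
        assert isinstance(op_list, (list, tuple, set))
        self.op_list = op_list
        self.compiler = compiler

    def transform_function(self, func, mod, dev):
        annotator = self

        class Annotator(tvm.relay.ExprMutator):
            def visit_call(self, call):
                if call.op.name in annotator.op_list:
                    # Annotate each argument and the call itself.
                    new_args = []
                    for arg in call.args:
                        ann = compiler_begin(super().visit(arg), annotator.compiler)
                        new_args.append(ann)
                    new_call = relay.Call(call.op, new_args, call.attrs, call.type_args)
                    return compiler_end(new_call, annotator.compiler)
                return super().visit_call(call)

        return Annotator().visit(func)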

def test_alter_layout_strided_slice():
    """Test rewriting strided_slice during alter_op_layout"""
    def before():
        x = relay.var("x", shape=(1, 32, 28, 28))
        weight = relay.var('weight', shape=(32, 32, 3, 3))
        y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
        y = relay.strided_slice(y, begin=[0, 16], end=[None, None])
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW4c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 32, 28, 28))
        weight = relay.var("weight")
        x = relay.layout_transform(x, "NCHW", "NCHW4c")
        y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1),
                            data_layout="NCHW4c")
        y = relay.strided_slice(y, begin=[0, 4], end=[None, 8])
        y = relay.layout_transform(y, "NCHW4c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, [transform.CanonicalizeOps(),
                             transform.AlterOpLayout()])
        b = run_opt_pass(expected(), transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
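
# `TempOpAttr` temporarily overrides an operator attribute (here
# "FTVMAlterOpLayout") and restores the previous value on exit. A condensed
# sketch, assumed to match tvm.relay.testing.temp_op_attr.TempOpAttr:

class TempOpAttr(object):
    """Temporarily change an op attribute, restoring it on exit."""

    def __init__(self, op_name, attr_key, attr_value):
        self.op = relay.op.get(op_name)
        self.attr_key = attr_key
        self.attr_value = attr_value

    def __enter__(self):
        self.older_attr = self.op.get_attr(self.attr_key)
        self.op.reset_attr(self.attr_key)
        self.op.set_attr(self.attr_key, self.attr_value)
        return self

    def __exit__(self, ptype, value, trace):
        self.op.reset_attr(self.attr_key)
        if self.older_attr:
            self.op.set_attr(self.attr_key, self.older_attr)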

def test_alter_layout_depthwise_conv2d():
    """Test depthwise_conv2d operator"""
    def before():
        x = relay.var("x", shape=(1, 32, 56, 56))
        w = relay.var("w", shape=(32, 1, 3, 3))
        y = relay.nn.conv2d(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3), groups=32)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    from tvm import topi
    def alter_conv2d(attrs, inputs, tinfos, out_type):
        with tvm.target.Target("llvm"):
            return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)


    def expected():
        x = relay.var("x", shape=(1, 32, 56, 56))
        w = relay.var("w", shape=(32, 1, 3, 3))
        x = relay.layout_transform(x, "NCHW", "NCHW8c")
        w = relay.layout_transform(w, "OIHW", "OIHW1i8o")
        y = relay.nn.contrib_depthwise_conv2d_nchwc(x, w, padding=(1, 1, 1, 1), channels=32, kernel_size=(3, 3),
                                                    groups=32, data_layout="NCHW8c", kernel_layout="OIHW1i8o",
                                                    out_layout="NCHW8c")
        y = relay.layout_transform(y, "NCHW8c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, [transform.CanonicalizeOps(),
                             transform.AlterOpLayout()])
        b = run_opt_pass(expected(), transform.InferType())

    assert(tvm.ir.structural_equal(a, b))

def test_alter_layout_nhwc_arm():
    """Check that AlterOpLayout does not alter the NHWC data layout."""

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        from tvm import topi

        with tvm.target.Target("llvm -device=arm_cpu"):
            return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)

    # Check NHWC conversion.
    def before_nhwc():
        x = relay.var("x", shape=(1, 56, 56, 64))
        weight1 = relay.var("weight1", shape=(3, 3, 64, 64))
        weight2 = relay.var("weight2", shape=(3, 3, 64, 64))
        y = relay.nn.conv2d(
            x, weight1, channels=64, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO"
        )
        y = relay.nn.relu(y)
        y = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NHWC")
        y = relay.nn.conv2d(
            y, weight2, channels=64, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO"
        )
        y = relay.nn.relu(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def expected_nhwc():
        return before_nhwc()

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before_nhwc()
        a = run_opt_pass(a, transform.AlterOpLayout())
        b = run_opt_pass(expected_nhwc(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
Example #6
def test_simple_network():
    batch_size = 1
    dshape = (batch_size, 64, 56, 56)
    weight_conv = relay.var("weight_conv", shape=(64, 64, 3, 3))
    data1 = relay.var("data1", shape=dshape)
    data2 = relay.var("data2", shape=dshape)
    weight_dense = relay.var("weight_dense", shape=(1, 56 * 56 * 64))

    conv2d_1 = relay.nn.conv2d(data1,
                               weight_conv,
                               channels=64,
                               kernel_size=(3, 3),
                               padding=(1, 1))
    conv2d_2 = relay.nn.conv2d(data2,
                               weight_conv,
                               channels=64,
                               kernel_size=(3, 3),
                               padding=(1, 1))
    add = relay.add(conv2d_1, conv2d_2)
    flattened = relay.nn.batch_flatten(add)
    dense_1 = relay.nn.dense(flattened, weight_dense)

    func = relay.Function(
        [data1, data2, weight_conv, weight_dense],
        relay.Tuple(
            tvm.runtime.convert([conv2d_1, conv2d_2, dense_1, add,
                                 flattened])))
    # alter the CONV 2D data layout to test
    func = run_opt_pass(func, transform.AlterOpLayout())
    compute_count = analysis.get_total_mac_number(func)
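    # Expected MACs: two convs at 64*56*56 * (64*3*3) = 115,605,504 each, plus
    # the dense layer at 1 * (56*56*64) = 200,704, i.e. 231,411,712 in total.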
    expect_count = 231411712
    assert compute_count == expect_count

def test_alter_layout_nchw_upsamping_op():
    """Test upsampling operators"""
    def before():
        x = relay.var("x", shape=(1, 32, 28, 28))
        weight = relay.var('weight', shape=(32, 32, 3, 3))
        y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
        y = relay.nn.upsampling(y, scale_h=2, scale_w=2)
        y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2))
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 32, 28, 28))
        weight = relay.var("weight")
        x = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.nn.upsampling(y, scale_h=2, scale_w=2, layout="NCHW16c")
        y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2), layout='NCHW16c')
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, transform.AlterOpLayout())
        b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
Example #8
def test_alter_layout_depthwise_conv2d():
    """Test depthwise_conv2d operator"""
    def before():
        x = relay.var("x", shape=(1, 32, 56, 56))
        w = relay.var("w", shape=(32, 1, 3, 3))
        y = relay.nn.conv2d(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3), groups=32)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    import topi
    @register_alter_op_layout("nn.conv2d", level=110)
    def alter_conv2d(attrs, inputs, tinfos):
        with tvm.target.create("llvm"):
            return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, relay)

    def expected():
        x = relay.var("x", shape=(1, 32, 56, 56))
        w = relay.var("w", shape=(32, 1, 3, 3))
        x = relay.layout_transform(x, "NCHW", "NCHW8c")
        w = relay.layout_transform(w, "OIHW", "OIHW1i8o")
        y = relay.nn.contrib_depthwise_conv2d_nchwc(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3),
                                                    groups=32, data_layout="NCHW8c", kernel_layout="OIHW1i8o",
                                                    out_layout="NCHW8c")
        y = relay.layout_transform(y, "NCHW8c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    a = before()
    a = run_opt_pass(a, [transform.CanonicalizeOps(),
                         transform.AlterOpLayout()])

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert(analysis.alpha_equal(a, b))
Example #9
def test_alter_layout():
    """Test alternating the layout of a conv2d.
    The layout of broadcast operators and the weight should be changed accordingly.
    """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias")
        weight = relay.var("weight")
        y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
        y = relay.nn.bias_add(y, bias)
        # a useless tuple, which will be eliminated
        y = relay.Tuple([y])[0]
        y = relay.nn.relu(y)
        y = relay.nn.max_pool2d(y, pool_size=(2, 2))
        y = relay.cast(y, 'int32')
        y = relay.nn.batch_flatten(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    @register_alter_op_layout("nn.conv2d", level=102)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        new_attrs['kernel_layout'] = 'OIHW16i'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias", shape=(64,))
        weight = relay.var("weight", shape=(64, 64, 3, 3))

        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        w = relay.layout_transform(weight, "OIHW", "OIHW16i")
        y = relay.nn.conv2d(y, w,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            kernel_layout="OIHW16i",
                            data_layout="NCHW16c")
        b = relay.expand_dims(bias, axis=1, num_newaxis=2)
        b = relay.layout_transform(b, "CHW", "CHW16c")
        y = relay.add(y, b)

        y = relay.nn.relu(y)
        y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c")
        y = relay.cast(y, 'int32')
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.nn.batch_flatten(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    a = before()
    a = run_opt_pass(a, [transform.CanonicalizeOps(),
                         transform.AlterOpLayout()])

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
Example #10
def test_alter_layout_resnet():
    """Test alternating the layout of a residual block
    This also tests the elimination of duplicated transformation.
    If a same transformation applies to a same node twice, only one transformation will be created.
    """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        y = relay.nn.conv2d(x,
                            weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        y2 = relay.nn.conv2d(x, weight2, channels=32, kernel_size=(1, 1))
        y2 = relay.nn.relu(y2)
        y = y + y2
        y = relay.nn.global_max_pool2d(y)
        return relay.Function(analysis.free_vars(y), y)

    # Register alter op layout. "level" is used to override the previously registered functions.
    @register_alter_op_layout("nn.conv2d", level=104)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        x = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(x,
                            weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.nn.relu(y)
        y2 = relay.nn.conv2d(x,
                             weight2,
                             channels=32,
                             kernel_size=(1, 1),
                             data_layout='NCHW16c')
        y2 = relay.nn.relu(y2)
        y = y + y2
        y = relay.nn.global_max_pool2d(y, layout="NCHW16c")
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        return relay.Function(analysis.free_vars(y), y)

    a = before()
    a = run_opt_pass(a, transform.AlterOpLayout())

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
Example #11
def test_alter_layout_nhwc_nchw_arm():
    """ Check NHWC to NHCW conversion for a small sequence of ops."""
    # Register alter op layout. "level" is used to override the previously registered functions.
    @register_alter_op_layout("nn.conv2d", level=115)
    def alter_conv2d(attrs, inputs, tinfos):
        from topi.arm_cpu.conv2d import _alter_conv2d_layout_arm
        return _alter_conv2d_layout_arm(attrs, inputs, tinfos, tvm.relay)

    # Check NHWC conversion.
    def before_nhwc():
        x = relay.var("x", shape=(1, 56, 56, 64))
        weight1 = relay.var('weight1', shape=(3, 3, 64, 64))
        weight2 = relay.var('weight2', shape=(3, 3, 64, 64))
        y = relay.nn.conv2d(x, weight1,
                            channels=64,
                            kernel_size=(3, 3),
                            data_layout='NHWC',
                            kernel_layout='HWIO')
        y = relay.nn.relu(y)
        y = relay.nn.avg_pool2d(y,
                                pool_size=(1,1),
                                layout='NHWC')
        y = relay.nn.conv2d(y, weight2,
                            channels=64,
                            kernel_size=(3, 3),
                            data_layout='NHWC',
                            kernel_layout='HWIO')
        y = relay.nn.relu(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def expected_nhwc():
        x = relay.var("x", shape=(1, 56, 56, 64))
        weight1 = relay.var('weight1', shape=(3, 3, 64, 64))
        weight2 = relay.var('weight2', shape=(3, 3, 64, 64))
        y = relay.layout_transform(x, "NHWC", "NCHW")
        weight1 = relay.layout_transform(weight1, "HWIO", "OIHW")
        weight2 = relay.layout_transform(weight2, "HWIO", "OIHW")
        y = relay.nn.conv2d(y, weight1,
                            channels=64,
                            kernel_size=(3, 3))
        y = relay.nn.relu(y)
        y = relay.nn.avg_pool2d(y,
                                pool_size=(1,1))
        y = relay.nn.conv2d(y, weight2,
                            channels=64,
                            kernel_size=(3, 3))
        y = relay.nn.relu(y)
        y = relay.layout_transform(y, "NCHW", "NHWC")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    a = before_nhwc()
    a = run_opt_pass(a, transform.AlterOpLayout())

    b = expected_nhwc()
    b = run_opt_pass(b, transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
Example #12
def test_alter_layout_conv2d():
    """Additional layout transformations should occour on the graph.
    """
    def convnet():
        """Alternating layout of simple convnet (from image super-resolution).
        """
        bias1 = relay.var('bias1', shape=(64, ))
        bias2 = relay.var('bias2', shape=(64, ))
        bias3 = relay.var('bias3', shape=(64, ))
        bias4 = relay.var('bias4', shape=(64, ))
        weight1 = relay.var('weight1', shape=(64, 1, 5, 5))
        weight2 = relay.var('weight2', shape=(64, 64, 3, 3))
        weight3 = relay.var('weight3', shape=(64, 64, 3, 3))
        weight4 = relay.var('weight4', shape=(64, 64, 3, 3))
        data = relay.var("x", shape=(1, 1, 224, 224))
        n00 = relay.nn.conv2d(data,
                              weight1,
                              padding=[2, 2],
                              kernel_size=[5, 5])
        n01 = relay.expand_dims(bias1, axis=1, num_newaxis=2)
        n02 = relay.add(n00, n01)
        n03 = relay.nn.relu(n02)
        n04 = relay.nn.conv2d(n03, weight2, padding=[1, 1], kernel_size=[3, 3])
        n05 = relay.expand_dims(bias2, axis=1, num_newaxis=2)
        n06 = relay.add(n04, n05)
        n07 = relay.nn.relu(n06)
        n08 = relay.nn.conv2d(n07, weight3, padding=[1, 1], kernel_size=[3, 3])
        n09 = relay.expand_dims(bias3, axis=1, num_newaxis=2)
        n10 = relay.add(n08, n09)
        n11 = relay.nn.relu(n10)
        n12 = relay.nn.conv2d(n11, weight4, padding=[1, 1], kernel_size=[3, 3])
        n13 = relay.expand_dims(bias4, axis=1, num_newaxis=2)
        n14 = relay.add(n12, n13)
        n15 = relay.reshape(n14, newshape=[1, 1, 3, 3, 224, 224])
        n16 = relay.transpose(n15, axes=[0, 1, 4, 2, 5, 3])
        net = relay.reshape(n16, newshape=[1, 1, 672, 672])
        args = relay.analysis.free_vars(net)
        return relay.Function(args, net)

    # orig net
    N = convnet()

    # trigger a test
    # for each known alter_conv2d
    targets = [
        'cuda', 'opencl -device=mali', 'opencl -device=intel_graphics',
        'llvm -device=arm_cpu', 'llvm -device=core-avx-ii'
    ]

    for tgt in targets:
        with tvm.target.create(tgt) as target:
            with autotvm.tophub.context(target):
                mod = relay.Module.from_expr(N)
                mod = transform.AlterOpLayout()(mod)
                O = mod[mod.entry_func]

                # graph should differ
                assert not relay.analysis.alpha_equal(N, O)

def test_alter_layout_broadcast_scalar_op():
    """Test altering the layout of a conv2d.
    The layout of broadcast operators and the weight should be changed accordingly.
    """

    def before():
        x = relay.var("x", shape=(1, 500, 500, 64))
        kernel = relay.var("kernel", shape=(3, 3, 64, 64), dtype="float32")
        bias = relay.var("bias", shape=(64,))
        multiplier1 = relay.var("multiplier1", shape=(1,), dtype="float32")
        multiplier2 = relay.var("multiplier2", shape=(1, 1), dtype="float32")

        y = relay.nn.conv2d(x, kernel, data_layout="NHWC", kernel_layout="HWIO", kernel_size=(3, 3))
        y = relay.add(bias, y)
        y = relay.nn.relu(y)

        y = relay.multiply(multiplier1, y)
        y = relay.multiply(y, multiplier2)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs["data_layout"] = "NCHW16c"
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 500, 500, 64))
        kernel = relay.var("kernel", shape=(3, 3, 64, 64), dtype="float32")
        bias = relay.var("bias", shape=(64,))
        multiplier1 = relay.var("multiplier1", shape=(1,), dtype="float32")
        multiplier2 = relay.var("multiplier2", shape=(1, 1), dtype="float32")

        b = relay.expand_dims(bias, axis=0, num_newaxis=3)
        b = relay.layout_transform(b, "NHWC", "NCHW16c")

        y = relay.layout_transform(x, "NHWC", "NCHW16c")
        y = relay.nn.conv2d(
            y, kernel, data_layout="NCHW16c", kernel_layout="HWIO", kernel_size=(3, 3)
        )

        y = relay.add(b, y)
        y = relay.nn.relu(y)

        y = relay.multiply(multiplier1, y)
        y = relay.multiply(y, multiplier2)
        y = relay.layout_transform(y, "NCHW16c", "NHWC")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
        b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
Example #14
def test_alter_layout_broadcast_op():
    """Test boradcast operators """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias", shape=(64, ))
        scale = relay.var("scale", shape=(64, 1, 1))
        weight = relay.var("weight")
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.bias_add(y, bias)  # test broadcasting to lhs
        y = relay.multiply(scale, y)  # test broadcasting to rhs
        y = relay.Function(analysis.free_vars(y), y)
        return y

    # Register alter op layout. "level" is used to override the previously registered functions.
    @register_alter_op_layout("nn.conv2d", level=105)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias", shape=(64, ))
        scale = relay.var("scale", shape=(64, 1, 1))
        weight = relay.var("weight")
        x = relay.layout_transform(x, "NCHW", "NCHW16c")
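        # bias (64,) and scale (64, 1, 1) are both expanded to (1, 64, 1, 1),
        # i.e. NCHW, so they can be tiled to NCHW16c for broadcasting.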
        bias = relay.expand_dims(bias, 1, 2)
        bias = relay.expand_dims(bias, 0, 1)
        bias = relay.layout_transform(bias, "NCHW", "NCHW16c")
        scale = relay.expand_dims(scale, 0, 1)
        scale = relay.layout_transform(scale, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.add(y, bias)  # test broadcasting to lhs
        y = relay.multiply(scale, y)  # test broadcasting to rhs
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    a = before()
    a = run_opt_pass(a,
                     [transform.CanonicalizeOps(),
                      transform.AlterOpLayout()])

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)

def test_alter_layout_concatenate():
    """Test the concatenate operator"""
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        y = relay.nn.conv2d(x,
                            weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y1 = relay.nn.conv2d(y,
                             weight2,
                             channels=32,
                             kernel_size=(3, 3),
                             padding=(1, 1))
        ret = relay.concatenate([y, y1], axis=1)
        y = relay.Function(analysis.free_vars(ret), ret)
        return y

    @register_alter_op_layout("nn.conv2d", level=107)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(y,
                            weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y1 = relay.nn.conv2d(y,
                             weight2,
                             channels=32,
                             kernel_size=(3, 3),
                             padding=(1, 1),
                             data_layout='NCHW16c')
        ret = relay.concatenate([y, y1], axis=1)
        ret = relay.layout_transform(ret, "NCHW16c", "NCHW")
        y = relay.Function(analysis.free_vars(ret), ret)
        return y

    a = before()
    a = run_opt_pass(a, transform.AlterOpLayout())

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
Example #16
def test_alter_layout_resnet():
    """Test alternating the layout of a residual block
    This also tests the elimination of duplicated transformation.
    If a same transformation applies to a same node twice, only one transformation will be created.
    """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        y = relay.nn.conv2d(x,
                            weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        y2 = relay.nn.conv2d(x, weight2, channels=32, kernel_size=(1, 1))
        y2 = relay.nn.relu(y2)
        y = y + y2
        y = relay.nn.global_max_pool2d(y)
        return relay.Function(analysis.free_vars(y), y)

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        x = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(x,
                            weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.nn.relu(y)
        y2 = relay.nn.conv2d(x,
                             weight2,
                             channels=32,
                             kernel_size=(1, 1),
                             data_layout='NCHW16c')
        y2 = relay.nn.relu(y2)
        y = y + y2
        y = relay.nn.global_max_pool2d(y, layout="NCHW16c")
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        return relay.Function(analysis.free_vars(y), y)

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, transform.AlterOpLayout())
        b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
Example #17
def test_alter_layout_broadcast_op():
    """Test boradcast operators """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias", shape=(64, ))
        scale = relay.var("scale", shape=(64, 1, 1))
        weight = relay.var("weight")
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.bias_add(y, bias)  # test broadcasting to lhs
        y = relay.multiply(scale, y)  # test broadcasting to rhs
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias", shape=(64, ))
        scale = relay.var("scale", shape=(64, 1, 1))
        weight = relay.var("weight")
        x = relay.layout_transform(x, "NCHW", "NCHW16c")
        bias = relay.expand_dims(bias, 1, 2)
        bias = relay.expand_dims(bias, 0, 1)
        bias = relay.layout_transform(bias, "NCHW", "NCHW16c")
        scale = relay.expand_dims(scale, 0, 1)
        scale = relay.layout_transform(scale, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.add(y, bias)  # test broadcasting to lhs
        y = relay.multiply(scale, y)  # test broadcasting to rhs
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(
            a, [transform.CanonicalizeOps(),
                transform.AlterOpLayout()])
        b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
Example #18
def test_alter_layout_lrn():
    """Test alternating the layout of a conv2d.
    The layout of broadcast operators and the weight should be changed accordingly.
    """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias")
        weight = relay.var("weight")
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.max_pool2d(y, pool_size=(2, 2))
        y = relay.nn.lrn(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        new_attrs['kernel_layout'] = 'OIHW16i'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias", shape=(64, ))
        weight = relay.var("weight", shape=(64, 64, 3, 3))

        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        w = relay.layout_transform(weight, "OIHW", "OIHW16i")
        y = relay.nn.conv2d(y,
                            w,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            kernel_layout="OIHW16i",
                            data_layout="NCHW16c")
        y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c")
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.nn.lrn(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(
            a, [transform.CanonicalizeOps(),
                transform.AlterOpLayout()])
        b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
Example #19
def test_alter_layout_nhwc_nchw_arm():
    """ Check NHWC to NHCW conversion for a small sequence of ops."""
    def alter_conv2d(attrs, inputs, tinfos, out_type):
        import topi
        with tvm.target.create("llvm -device=arm_cpu"):
            return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)

    # Check NHWC conversion.
    def before_nhwc():
        x = relay.var("x", shape=(1, 56, 56, 64))
        weight1 = relay.var('weight1', shape=(3, 3, 64, 64))
        weight2 = relay.var('weight2', shape=(3, 3, 64, 64))
        y = relay.nn.conv2d(x,
                            weight1,
                            channels=64,
                            kernel_size=(3, 3),
                            data_layout='NHWC',
                            kernel_layout='HWIO')
        y = relay.nn.relu(y)
        y = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout='NHWC')
        y = relay.nn.conv2d(y,
                            weight2,
                            channels=64,
                            kernel_size=(3, 3),
                            data_layout='NHWC',
                            kernel_layout='HWIO')
        y = relay.nn.relu(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def expected_nhwc():
        x = relay.var("x", shape=(1, 56, 56, 64))
        weight1 = relay.var('weight1', shape=(3, 3, 64, 64))
        weight2 = relay.var('weight2', shape=(3, 3, 64, 64))
        y = relay.layout_transform(x, "NHWC", "NCHW")
        weight1 = relay.layout_transform(weight1, "HWIO", "OIHW")
        weight2 = relay.layout_transform(weight2, "HWIO", "OIHW")
        y = relay.nn.conv2d(y, weight1, channels=64, kernel_size=(3, 3))
        y = relay.nn.relu(y)
        y = relay.nn.avg_pool2d(y, pool_size=(1, 1))
        y = relay.nn.conv2d(y, weight2, channels=64, kernel_size=(3, 3))
        y = relay.nn.relu(y)
        y = relay.layout_transform(y, "NCHW", "NHWC")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before_nhwc()
        a = run_opt_pass(a, transform.AlterOpLayout())
        b = run_opt_pass(expected_nhwc(), transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
Example #20
def test_alter_layout_nchw_upsamping_op():
    """Test upsamping operators """
    def before():
        x = relay.var("x", shape=(1, 32, 28, 28))
        weight = relay.var('weight', shape=(32, 32, 3, 3))
        y = relay.nn.conv2d(x,
                            weight,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.upsampling(y, scale=2)
        y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2))
        y = relay.Function(analysis.free_vars(y), y)
        return y

    # Register alter op layout. "level" is used to override the previously registered functions.
    @register_alter_op_layout("nn.conv2d", level=108)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 32, 28, 28))
        weight = relay.var("weight")
        x = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(x,
                            weight,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.nn.upsampling(y, scale=2, layout="NCHW16c")
        y = relay.nn.avg_pool2d(y,
                                pool_size=(2, 2),
                                strides=(2, 2),
                                layout='NCHW16c')
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    a = before()
    a = run_opt_pass(a,
                     [transform.CanonicalizeOps(),
                      transform.AlterOpLayout()])

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)

def test_alter_layout_dual_path():
    """
    Test altering the layout with two outputs.
    One path continues to use the new layout while the other falls back to the old layout.
    """

    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var("weight1")
        weight2 = relay.var("weight2")
        y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
        y = relay.nn.relu(y)
        y1 = relay.nn.conv2d(y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1))
        y1 = relay.nn.relu(y1)
        y2 = relay.nn.batch_flatten(y)
        ret = relay.Tuple([y1, y2])
        y = relay.Function(analysis.free_vars(ret), ret)
        return y

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs["data_layout"] = "NCHW16c"
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var("weight1")
        weight2 = relay.var("weight2")
        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(
            y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
        )
        y = relay.nn.relu(y)
        y1 = relay.nn.conv2d(
            y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
        )
        y1 = relay.nn.relu(y1)
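        # The batch_flatten path falls back to NCHW, so a layout_transform is
        # inserted before it; the conv output is converted back as well so both
        # tuple fields leave the function in the original layout.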
        y1 = relay.layout_transform(y1, "NCHW16c", "NCHW")
        y2 = relay.layout_transform(y, "NCHW16c", "NCHW")
        y2 = relay.nn.batch_flatten(y2)
        ret = relay.Tuple([y1, y2])
        y = relay.Function(analysis.free_vars(ret), ret)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, transform.AlterOpLayout())
        b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
Example #22
def test_alter_layout_scalar():
    """Test alternating the layout of a conv2d.
    The layout of broadcast operators and the weight should be changed accordingly.
    """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var("weight")
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.add(y, relay.const(1, "float32"))
        y = relay.Function(analysis.free_vars(y), y)
        return y

    # Register alter op layout. "level" is used to override the previously registered functions.
    @register_alter_op_layout("nn.conv2d", level=106)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        w = relay.var("weight")

        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(y,
                            w,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.add(y, relay.const(1.0, "float32"))

        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.Function(analysis.free_vars(y), y)
        return y

    a = before()
    a = run_opt_pass(a,
                     [transform.CanonicalizeOps(),
                      transform.AlterOpLayout()])

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
Example #23
def test_alter_layout_prelu():
    """Test PRelu operator"""
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var("weight")
        alpha = relay.var("alpha", relay.IncompleteType())
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.prelu(y, alpha)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    # Register alter op layout. "level" is used to override the previously registered functions.
    @register_alter_op_layout("nn.conv2d", level=111)
    def alter_conv2d(attrs, inputs, tinfos):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        w = relay.var("weight")
        alpha = relay.var("alpha", relay.IncompleteType())

        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(y,
                            w,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.nn.prelu(y, alpha)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    a = before()
    a = run_opt_pass(a,
                     [transform.CanonicalizeOps(),
                      transform.AlterOpLayout()])

    b = expected()
    b = run_opt_pass(b, transform.InferType())

    assert (analysis.alpha_equal(a, b))
Example #24
def test_alter_layout_prelu():
    """Test PRelu operator"""
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var("weight")
        alpha = relay.var("alpha", relay.IncompleteType())
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.prelu(y, alpha)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        w = relay.var("weight")
        alpha = relay.var("alpha", relay.IncompleteType())

        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(y,
                            w,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.nn.prelu(y, alpha)
        y = relay.Function(analysis.free_vars(y), y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(
            a, [transform.CanonicalizeOps(),
                transform.AlterOpLayout()])
        b = run_opt_pass(expected(), transform.InferType())

    assert (analysis.alpha_equal(a, b))
Example #25
def test_alter_op_with_global_var():
    """Test directly replacing an operator with a new one"""
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var('weight', shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(x,
                            weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        mod = tvm.IRModule()
        foo = relay.GlobalVar('foo')
        mod[foo] = relay.Function([x, weight], y)
        mod["main"] = relay.Function([x, weight], foo(x, weight))
        return mod

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        weight = relay.multiply(weight, relay.const(2.0, "float32"))
        return relay.nn.conv2d(data, weight, **attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var('weight', shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(x,
                            relay.multiply(weight, relay.const(2.0,
                                                               "float32")),
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        mod = tvm.IRModule()
        foo = relay.GlobalVar('foo')
        mod[foo] = relay.Function([x, weight], y)
        mod["main"] = relay.Function([x, weight], foo(x, weight))
        return mod

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = transform.AlterOpLayout()(a)
        b = transform.InferType()(expected())

    assert tvm.ir.structural_equal(a, b,
                                   map_free_vars=True), "Actual = \n" + str(a)
Example #26
def test_alter_return_none():
    """Test doing nothing by returning 'None' """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        y = relay.nn.global_max_pool2d(x)
        y = relay.Function([x], y)
        return y

    called = [False]

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        called[0] = True
        return None

    with TempOpAttr("nn.global_max_pool2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, transform.AlterOpLayout())
        b = run_opt_pass(before(), transform.InferType())

    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
    assert (called[0])
Example #27
def partition_for_bnns(mod, params=None):
    """Partition the graph greedily offloading supported
    operators to BNNS.

    Parameters
    ----------
    mod : Module
        The module to run passes on.
    params : Optional[Dict[str, NDArray]]
        Constant input parameters.

    Returns
    -------
    ret : tvm.IRModule
        The annotated and partitioned module.
    """
    if params:
        mod["main"] = bind_params_by_name(mod["main"], params)

    seq = tvm.transform.Sequential(
        [
            transform.InferType(),
            transform.FoldConstant(),
            transform.FoldScaleAxis(),
            transform.DynamicToStatic(),
            transform.AlterOpLayout(),
            # TODO(apeskov): WA. The AlterOpLayout call leads to constant shape
            #   transformations. Some expand_dims ops may appear after constants,
            #   which breaks BNNS fusing, so FoldConstant has to run again right
            #   before the BNNS composite passes.
            transform.FoldConstant(),
            transform.MergeComposite(get_pattern_table("bnns")),
            transform.AnnotateTarget("bnns"),
            #   If you do not need per-layer performance statistics, you can
            #   uncomment the next line.
            # transform.MergeCompilerRegions(),
            transform.PartitionGraph(),
        ]
    )

    return seq(mod)
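
# Minimal usage sketch (illustrative only; the MobileNet workload is just an
# assumption, any Relay module works):

from tvm.relay.testing import mobilenet

mod, params = mobilenet.get_workload(batch_size=1)
mod = partition_for_bnns(mod, params)
# Offloaded subgraphs now appear as separate functions annotated with
# Compiler="bnns"; everything else stays in "main".
print(mod["main"])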
Example #28
def test_alter_return_none():
    """Test doing nothing by returning 'None' """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        y = relay.nn.global_max_pool2d(x)
        y = relay.Function([x], y)
        return y

    called = [False]

    # Register alter op layout. "level" is used to override the previously registered functions.
    @register_alter_op_layout("nn.global_max_pool2d", level=101)
    def alter_conv2d(attrs, inputs, tinfos):
        called[0] = True
        return None

    a = before()
    a = run_opt_pass(a, transform.AlterOpLayout())

    b = before()
    b = run_opt_pass(b, transform.InferType())
    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
    assert (called[0])
Example #29
    def partition():
        data = relay.var("data", relay.TensorType((1, 3, 224, 224), "float32"))
        weight = relay.var("weight", relay.TensorType((16, 3, 3, 3),
                                                      "float32"))
        bn_gamma = relay.var("bn_gamma", relay.TensorType((16, ), "float32"))
        bn_beta = relay.var("bn_beta", relay.TensorType((16, ), "float32"))
        bn_mmean = relay.var("bn_mean", relay.TensorType((16, ), "float32"))
        bn_mvar = relay.var("bn_var", relay.TensorType((16, ), "float32"))

        conv = relay.nn.conv2d(data=data,
                               weight=weight,
                               kernel_size=(3, 3),
                               channels=16,
                               padding=(1, 1))
        bn_output = relay.nn.batch_norm(conv, bn_gamma, bn_beta, bn_mmean,
                                        bn_mvar)

        func = relay.Function(
            [data, weight, bn_gamma, bn_beta, bn_mmean, bn_mvar],
            bn_output.astuple())
        mod = tvm.IRModule()
        mod["main"] = func
        op_list = ["nn.batch_norm", "nn.conv2d"]
        mod = WhiteListAnnotator(op_list, "test_compiler")(mod)

        opt_pass = tvm.transform.Sequential([
            transform.InferType(),
            transform.PartitionGraph(),
            transform.SimplifyInference(),
            transform.FoldConstant(),
            transform.AlterOpLayout(),
        ])

        with tvm.transform.PassContext(opt_level=3):
            mod = opt_pass(mod)

        return mod

def test_alter_op():
    """Test directly replacing an operator with a new one"""

    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var("weight", shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
        y = relay.nn.relu(y)
        y = relay.Function([x, weight], y)
        return y

    def alter_conv2d(attrs, inputs, tinfos, out_type):
        data, weight = inputs
        weight = relay.multiply(weight, relay.const(2.0, "float32"))
        return relay.nn.conv2d(data, weight, **attrs)

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var("weight", shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(
            x,
            relay.multiply(weight, relay.const(2.0, "float32")),
            channels=64,
            kernel_size=(3, 3),
            padding=(1, 1),
        )
        y = relay.nn.relu(y)
        y = relay.Function([x, weight], y)
        return y

    with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
        a = before()
        a = run_opt_pass(a, transform.AlterOpLayout())
        b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)