Example #1
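A pytest case exercising TVM's comparison-operator overloads on TIR immediates: tir.PrimExpr overloads == and != to build expression nodes, so comparing one against None raises ValueError, while tir.StringImm compares by value and returns ordinary booleans.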
import pytest
from tvm import tir


def test_eq_ops():
    a = tir.IntImm("int8", 1)
    # PrimExpr overloads == and != to build symbolic expressions, so a
    # comparison against None cannot produce a bool and raises ValueError.
    with pytest.raises(ValueError):
        assert a != None
    with pytest.raises(ValueError):
        assert not a == None
    # StringImm instead compares by value, so None comparisons are plain bools.
    b = tir.StringImm("abc")
    assert b != None
    assert not b == None
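The asymmetry comes from the overloads themselves: StringImm overrides __eq__ and __ne__ to compare the underlying string. A minimal sketch of the value-comparison side (the literals are illustrative):

from tvm import tir

s = tir.StringImm("abc")
# StringImm compares by value, so these are ordinary Python bools:
assert s == "abc"
assert s != "xyz"
assert s is not None  # the idiomatic way to test against None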
Example #2
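An expected() reference graph in the style of Relay's MergeComposite pass tests: it hand-builds the post-pass form in which a conv2d -> bias_add -> relu chain and an add -> relu chain have each been outlined into functions carrying the Primitive and Composite attributes, then called from a merged top-level function.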
from tvm import relay, tir


def expected():
    data = relay.var('data', shape=(1, 512, 28, 28))
    kernel = relay.var('kernel', shape=(256, 512, 1, 1))
    bias = relay.var('bias', shape=(256, ))
    a = relay.var('a', shape=(1, 256, 28, 28))
    b = relay.var('b', shape=(1, 256, 28, 28))

    # conv_bias_relu function: conv2d -> bias_add -> relu
    in_1 = relay.var('in_1', shape=(1, 512, 28, 28))
    in_2 = relay.var('in_2', shape=(256, 512, 1, 1))
    in_3 = relay.var('in_3', shape=(256, ))

    conv_node = relay.nn.conv2d(in_1,
                                in_2,
                                kernel_size=(1, 1),
                                padding=(0, 0),
                                strides=(1, 1))

    bias_node = relay.nn.bias_add(conv_node, in_3)
    r = relay.nn.relu(bias_node)
    conv_bias_add_relu = relay.Function([in_1, in_2, in_3], r)
    conv_bias_add_relu = conv_bias_add_relu.set_attribute(
        "Primitive", tir.IntImm("int32", 1))
    conv_bias_add_relu = conv_bias_add_relu.set_attribute(
        "Composite", tir.StringImm("conv2d_bias_relu"))

    # add_relu function: add -> relu
    in_4 = relay.var('in_4', shape=(1, 256, 28, 28))
    in_5 = relay.var('in_5', shape=(1, 256, 28, 28))
    add_node = relay.add(in_4, in_5)
    r = relay.nn.relu(add_node)
    add_relu = relay.Function([in_4, in_5], r)
    add_relu = add_relu.set_attribute("Primitive", tir.IntImm("int32", 1))
    add_relu = add_relu.set_attribute("Composite",
                                      tir.StringImm("add_relu"))

    # merged function calling both composite functions
    conv_bias_add_relu_1 = relay.Call(conv_bias_add_relu,
                                      [data, kernel, bias])
    add_relu_1 = relay.Call(add_relu, [conv_bias_add_relu_1, a])
    r = relay.multiply(add_relu_1, b)
    return relay.Function([data, kernel, bias, a, b], r)
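For context, a graph in this shape is what the MergeComposite pass produces when given patterns for the two fused sequences. A minimal sketch of driving the pass, assuming the tuple-based pattern-table API of the same TVM era; the pattern helpers and the before_graph input are assumptions, not part of the original:

import tvm
from tvm import relay

def make_conv_bias_relu_pattern():
    # Pattern: conv2d -> bias_add -> relu
    x = relay.var('x')
    y = relay.var('y')
    z = relay.var('z')
    return relay.nn.relu(relay.nn.bias_add(relay.nn.conv2d(x, y), z))

def make_add_relu_pattern():
    # Pattern: add -> relu
    x = relay.var('x')
    y = relay.var('y')
    return relay.nn.relu(relay.add(x, y))

pattern_table = [
    ("conv2d_bias_relu", make_conv_bias_relu_pattern()),
    ("add_relu", make_add_relu_pattern()),
]
mod = tvm.IRModule.from_expr(before_graph)  # before_graph: hypothetical unfused graph
mod = relay.transform.MergeComposite(pattern_table)(mod)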
Example #3
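A small coercion helper that maps Python literals onto the matching TIR immediate node, choosing a default dtype when none is given. Note that the bool check must come before the int check, because bool is a subclass of int in Python.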
from typing import Optional

from tvm import tir


def tir_imm(obj, dtype: Optional[str] = None) -> tir.PrimExpr:
    """Coerce a Python literal (or pass through a PrimExpr) to a TIR immediate."""
    if isinstance(obj, tir.PrimExpr):
        return obj
    # bool must be checked before int: bool is a subclass of int in Python.
    if isinstance(obj, bool):
        return tir.IntImm(dtype=dtype or 'bool', value=obj)
    if isinstance(obj, float):
        return tir.FloatImm(dtype=dtype or 'float32', value=obj)
    if isinstance(obj, int):
        return tir.IntImm(dtype=dtype or 'int32', value=obj)
    if isinstance(obj, str):
        return tir.StringImm(obj)
    raise TypeError('cannot convert {!r} to tir.PrimExpr'.format(obj))
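A few illustrative calls (results paraphrased in the comments):

tir_imm(True)         # tir.IntImm('bool', 1)
tir_imm(7)            # tir.IntImm('int32', 7)
tir_imm(7, 'int64')   # tir.IntImm('int64', 7)
tir_imm(1.5)          # tir.FloatImm('float32', 1.5)
tir_imm('abc')        # tir.StringImm('abc')

Separately, an after_B() builder in the style of Relay's MergeComposite tests follows: it wires four Composite-annotated add_relu functions into a larger graph of add, subtract, and multiply nodes.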
from tvm import relay, tir


def after_B():
    inputs = [relay.var('input_' + str(i), shape=(10, 10)) for i in range(8)]
    add_relu_calls = []
    for i in range(4):
        # Build one Composite-annotated add_relu function per input pair.
        x = relay.var('x' + str(i))
        y = relay.var('y' + str(i))
        add_relu = relay.add(x, y)
        add_relu = relay.nn.relu(add_relu)
        add_relu = relay.Function([x, y], add_relu)
        add_relu = add_relu.set_attribute('Composite', tir.StringImm('add_relu'))
        add_relu_call = relay.Call(add_relu, [inputs[i*2], inputs[i*2+1]])
        add_relu_calls.append(add_relu_call)

    add = relay.add(add_relu_calls[0], add_relu_calls[1])
    sub = relay.subtract(add_relu_calls[2], add_relu_calls[3])
    out = relay.multiply(add, sub)
    return relay.Function(inputs, out)
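A minimal sketch of type-checking the resulting graph, assuming the standard Relay pass entry points (wrapping in an IRModule and running InferType is an assumption about how such a graph would be exercised):

import tvm
from tvm import relay

mod = tvm.IRModule.from_expr(after_B())
mod = relay.transform.InferType()(mod)
print(mod['main'])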