Example No. 1
def maximum_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for maximum with optional fused RELU activation.
    """
    pattern = is_op("maximum")(wildcard(), wildcard())
    pattern = pattern.optional(is_op("clip"))
    return pattern
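Here `pattern.optional(is_op("clip"))` is shorthand for `pattern | is_op("clip")(pattern)`, so the result matches a bare maximum as well as a maximum followed by a fused clip. A minimal sketch of exercising the pattern, assuming a standard TVM installation (variable names and shapes are illustrative):

from tvm import relay
from tvm.relay.dataflow_pattern import is_op, wildcard

# Rebuild the pattern exactly as above.
pattern = is_op("maximum")(wildcard(), wildcard())
pattern = pattern.optional(is_op("clip"))

# Two candidate graphs: a plain maximum and a maximum fused with clip.
a = relay.var("a", shape=(1, 4))
b = relay.var("b", shape=(1, 4))
plain = relay.maximum(a, b)
clipped = relay.clip(plain, a_min=0.0, a_max=6.0)

assert pattern.match(plain)    # matches without the activation
assert pattern.match(clipped)  # matches with the fused clip as well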
Example No. 2
def qnn_maxpool2d_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for nn.max_pool2d with optional fused RELU activation.
    """
    pattern = is_op("nn.max_pool2d")(wildcard())
    pattern = pattern.optional(is_op("clip"))
    return pattern
Example No. 3
def shl_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for left_shift with optional fused RELU activation.
    """
    pattern = is_op("left_shift")(wildcard(), wildcard())
    pattern = pattern.optional(is_op("clip"))
    return pattern
Example No. 4
def pattern_A():
    x = wildcard()
    y = wildcard()
    out = is_op('add')(x, y)
    out = is_op('abs')(out)
    out = is_op('nn.relu')(out)
    return out
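A quick structural check of the chained pattern, assuming the function above is in scope and TVM is installed (shapes are illustrative):

from tvm import relay

a = relay.var("a", shape=(1, 4))
b = relay.var("b", shape=(1, 4))
graph = relay.nn.relu(relay.abs(relay.add(a, b)))  # add -> abs -> relu
assert pattern_A().match(graph)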
Example No. 5
def _get_breakpoint_patterns() -> List[dfp.DFPattern]:
    norm = dfp.is_op('concatenate')(dfp.is_tuple(
        (dfp.wildcard(), dfp.wildcard(), dfp.wildcard(), dfp.wildcard(),
         dfp.wildcard(), dfp.wildcard())))
    red = dfp.is_op('concatenate')(dfp.is_tuple(
        (dfp.wildcard(), dfp.wildcard(), dfp.wildcard(), dfp.wildcard())))
    return [norm, red]
Example No. 6
def pattern_A():
    x = wildcard()
    y = wildcard()
    out = is_op("add")(x, y)
    out = is_op("abs")(out)
    out = is_op("nn.relu")(out)
    return out
Example No. 7
    def __init__(self, params: Dict[str, np.ndarray]):
        super(AvgAddSubst, self).__init__(params)

        self.x = dfp.wildcard()
        avg1 = dfp.is_op('nn.avg_pool2d')(self.x)
        avg2 = dfp.is_op('nn.avg_pool2d')(self.x)
        x = avg1 + avg2
        self.pattern = x
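Note that `avg1 + avg2` relies on DFPattern operator overloading: adding two patterns builds the same structure as an explicit `is_op('add')` call pattern. An equivalent formulation, shown only for illustration:

import tvm.relay.dataflow_pattern as dfp

x = dfp.wildcard()
avg1 = dfp.is_op('nn.avg_pool2d')(x)
avg2 = dfp.is_op('nn.avg_pool2d')(x)
explicit = dfp.is_op('add')(avg1, avg2)  # same structure as `avg1 + avg2`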
Example No. 8
File: ethosu.py Project: a1nc/tvm
def resize2d_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for image.resize2d.
    """
    dequant = is_op("qnn.dequantize")(wildcard(), is_constant(), is_constant())
    resize_2d = is_op("image.resize2d")(dequant).has_attr({"method": "linear"})
    quant = is_op("qnn.quantize")(resize_2d, is_constant(), is_constant())
    return quant | is_op("image.resize2d")(wildcard()).has_attr({"method": "nearest_neighbor"})
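The `|` operator combines two patterns into an alternative that matches if either branch matches. A minimal sketch with simpler operators (relu and tanh chosen only for illustration):

from tvm import relay
from tvm.relay.dataflow_pattern import is_op, wildcard

alt = is_op("nn.relu")(wildcard()) | is_op("tanh")(wildcard())
x = relay.var("x", shape=(1, 4))
assert alt.match(relay.nn.relu(x))  # left alternative
assert alt.match(relay.tanh(x))     # right alternative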
Example No. 9
def mean_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for mean.
    """
    pattern = is_op("cast")(wildcard())
    pattern = is_op("mean")(pattern)
    pattern = is_op("qnn.requantize")(pattern, is_constant(), is_constant(),
                                      is_constant(), is_constant())
    return pattern
Example No. 10
def qnn_conv2d_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for qnn.conv2d with optional fused RELU activation.
    """
    qnn_conv2d = is_op("qnn.conv2d")(
        wildcard(), is_constant(), is_constant(), is_constant(), is_constant(),
        is_constant()).has_attr({"kernel_layout": "HWIO"})
    bias_add = is_op("nn.bias_add")(qnn_conv2d, is_constant())
    req = is_op("qnn.requantize")(qnn_conv2d | bias_add, is_constant(),
                                  is_constant(), is_constant(), is_constant())
    clip_or_req = req.optional(is_op("clip"))
    return clip_or_req
Example No. 11
    def __init__(self, params: Dict[str, np.ndarray]):
        super(ConvBnSubst, self).__init__(params)

        self.x = dfp.wildcard()
        self.weight = dfp.is_var()
        self.conv = dfp.is_op('nn.conv2d')(self.x, self.weight)
        self.gamma = dfp.is_var()
        self.beta = dfp.is_var()
        self.moving_mean = dfp.is_var()
        self.moving_var = dfp.is_var()
        x = dfp.is_op('nn.batch_norm')(self.conv, self.gamma, self.beta,
                                       self.moving_mean, self.moving_var)
        x = dfp.is_tuple_get_item(x, 0)
        self.pattern = x
Example No. 12
    def __init__(self, params: Dict[str, np.ndarray]):
        super(ConvAddSubst, self).__init__(params)

        self.x1 = dfp.wildcard()
        self.w1 = dfp.is_var()
        x1 = dfp.is_op('nn.conv2d')(self.x1, self.w1)
        self.b1 = dfp.is_var()
        x1 = dfp.is_op('nn.bias_add')(x1, self.b1)
        self.x2 = dfp.wildcard()
        self.w2 = dfp.is_var()
        x2 = dfp.is_op('nn.conv2d')(self.x2, self.w2)
        self.b2 = dfp.is_var()
        x2 = dfp.is_op('nn.bias_add')(x2, self.b2)
        x = x1 + x2
        self.pattern = x
Example No. 13
def make_pattern_with_optional():
    r"""Create a pattern to match the following graph. Note that relu is optinal.

       conv2d
         |
      bias_add
         |
       (relu)
    """
    x = wildcard()
    y = wildcard()
    z = wildcard()
    conv_node = is_op('nn.conv2d')(x, y)
    bias_node = is_op('nn.bias_add')(conv_node, z)
    r = bias_node.optional(lambda x: is_op('nn.relu')(x))
    return r
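A short check that the relu really is optional, assuming the function above is in scope (shapes are illustrative):

from tvm import relay

pattern = make_pattern_with_optional()

data = relay.var("data", shape=(1, 3, 8, 8))
weight = relay.var("weight", shape=(4, 3, 3, 3))
bias = relay.var("bias", shape=(4,))
biased = relay.nn.bias_add(relay.nn.conv2d(data, weight), bias)

assert pattern.match(biased)                 # relu absent
assert pattern.match(relay.nn.relu(biased))  # relu present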
Example No. 14
def requantize_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for qnn.requantize.
    """
    return is_op("qnn.requantize")(
        wildcard(), is_constant(), is_constant(), is_constant(), is_constant()
    )
Example No. 15
def concat_pattern():
    """Create pattern for concat"""
    tensors = is_tuple(None)
    scales = is_tuple(None)
    zero_points = is_tuple(None)
    concat = is_op("qnn.concatenate")(tensors, scales, zero_points, is_constant(), is_constant())
    return concat
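Passing `None` to `is_tuple` makes the tuple pattern match regardless of how many tensors are being concatenated. A hedged sketch of that behaviour (variable names are illustrative):

from tvm import relay
from tvm.relay.dataflow_pattern import is_tuple

any_tuple = is_tuple(None)
two = relay.Tuple([relay.var("a"), relay.var("b")])
three = relay.Tuple([relay.var("a"), relay.var("b"), relay.var("c")])
assert any_tuple.match(two)
assert any_tuple.match(three)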
Example No. 16
def make_conv_bias_relu_pattern():
    r"""Create a pattern to match the following graph.

       conv2d
         |
      bias_add
         |
       relu
    """
    x = wildcard()
    y = wildcard()
    z = wildcard()
    conv_node = is_op('nn.conv2d')(x, y)
    bias_node = is_op('nn.bias_add')(conv_node, z)
    r = is_op('nn.relu')(bias_node)
    return r
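Patterns like this are typically consumed by the MergeComposite pass, which wraps every match in a composite function for a downstream codegen. A hedged sketch of that flow; the composite name "demo.conv_bias_relu" is illustrative, not a real backend's naming scheme:

import tvm
from tvm import relay

data = relay.var("data", shape=(1, 3, 8, 8))
weight = relay.var("weight", shape=(4, 3, 3, 3))
bias = relay.var("bias", shape=(4,))
out = relay.nn.relu(relay.nn.bias_add(relay.nn.conv2d(data, weight), bias))
mod = tvm.IRModule.from_expr(out)

pattern_table = [("demo.conv_bias_relu", make_conv_bias_relu_pattern())]
seq = tvm.transform.Sequential([
    relay.transform.InferType(),
    relay.transform.MergeComposite(pattern_table),
])
mod = seq(mod)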
Example No. 17
def test_shape_func_nested_function():
    @tvm.register_func("relay.ext.test2")
    def relay_ext_test(func):
        return None

    data_shape = (relay.Any(), 16)
    weight_shape = (relay.Any(), 16)

    dense = relay.nn.dense(
        relay.var("data", shape=data_shape), relay.var("weight", shape=weight_shape)
    )
    mod = tvm.IRModule.from_expr(dense)

    patterns = [("test.dense", is_op("nn.dense")(wildcard(), wildcard()))]
    passes = tvm.transform.Sequential(
        [
            relay.transform.MergeComposite(patterns),
            relay.transform.AnnotateTarget(["test2"]),
            relay.transform.PartitionGraph(),
        ]
    )

    mod = passes(mod)

    compiler = VMCompiler()
    compiler.lower(mod, "llvm")
Example No. 18
def qnn_mul_pattern() -> tvm.relay.dataflow_pattern.DFPattern:
    """
    This function creates the pattern for qnn.mul with optional fused RELU activation.
    """
    pattern = is_op("qnn.mul")(
        wildcard(),
        wildcard(),
        is_constant(),
        is_constant(),
        is_constant(),
        is_constant(),
        is_constant(),
        is_constant(),
    )
    pattern = pattern.optional(is_op("clip"))
    return pattern
Example No. 19
def make_relu_pattern():
    r"""Create a pattern to match the following graph
     a
     |
    relu
     |
    """
    pattern = is_op("nn.relu")(wildcard())
    return pattern
Example No. 20
def make_add_pattern():
    r"""Create a pattern to match the following graph
    a  b
    \  /
    add
     |
    """
    pattern = is_op("add")(wildcard(), wildcard())
    return pattern
Example No. 21
def make_add_add_add_pattern():
    r"""Create a pattern to match the following graph.
       Useful for testing re-using a call node.

        x    y
      /  \  /
      |  add
       \  |  \
         add |
          | /
         add
    """
    x = wildcard()
    y = wildcard()
    add_node = is_op('add')(x, y)
    add_node_1 = is_op('add')(x, add_node)
    r = is_op('add')(add_node_1, add_node)
    return r
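Because `add_node` appears twice in the pattern, both uses have to bind to the same call in the graph. A minimal graph that satisfies this, assuming the function above is in scope:

from tvm import relay

x = relay.var("x")
y = relay.var("y")
first = relay.add(x, y)
second = relay.add(x, first)
out = relay.add(second, first)  # `first` feeds two different adds
assert make_add_add_add_pattern().match(out)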
Example No. 22
def make_bn_relu_pattern():
    r"""Create a pattern to match the following graph.

     batch_norm
         |
    TupleGetItem(0)
         |
       relu
    """
    x = wildcard()
    gamma = wildcard()
    beta = wildcard()
    moving_mean = wildcard()
    moving_var = wildcard()
    bn_node = is_op('nn.batch_norm')(x, gamma, beta, moving_mean, moving_var)
    tuple_get_item_node = TupleGetItemPattern(bn_node, 0)
    r = is_op('nn.relu')(tuple_get_item_node)
    return r
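`TupleGetItemPattern(bn_node, 0)` (also available via the `is_tuple_get_item` helper) selects the first output of batch_norm, which returns a tuple of (output, moving_mean, moving_var). A sketch of a graph it matches, assuming the function above is in scope:

from tvm import relay

x = relay.var("x", shape=(1, 4, 8, 8))
gamma = relay.var("gamma", shape=(4,))
beta = relay.var("beta", shape=(4,))
mean = relay.var("mean", shape=(4,))
var = relay.var("var", shape=(4,))
bn = relay.nn.batch_norm(x, gamma, beta, mean, var)
out = relay.nn.relu(bn[0])  # take the normalized output from the result tuple
assert make_bn_relu_pattern().match(out)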
Example No. 23
def make_add_relu_pattern():
    r"""Create a pattern to match the following graph.

        add
         |
       relu
    """
    add_node = wildcard() + wildcard()
    r = is_op('nn.relu')(add_node)
    return r
Example No. 24
def make_qnn_add_pattern():
    from tvm.relay.dataflow_pattern import wildcard, is_op

    lhs = wildcard()
    rhs = wildcard()
    lhs_scale = wildcard()
    lhs_zero_point = wildcard()
    rhs_scale = wildcard()
    rhs_zero_point = wildcard()
    output_scale = wildcard()
    output_zero_point = wildcard()
    qadd = is_op("qnn.add")(
        lhs,
        rhs,
        lhs_scale,
        lhs_zero_point,
        rhs_scale,
        rhs_zero_point,
        output_scale,
        output_zero_point,
    )
    return qadd.optional(is_op("clip"))
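A sketch of an expression this pattern accepts, assuming the function above is in scope (scale and zero-point values are arbitrary placeholders):

from tvm import relay

lhs = relay.var("lhs", shape=(1, 4), dtype="int8")
rhs = relay.var("rhs", shape=(1, 4), dtype="int8")
scale = relay.const(0.25, "float32")
zero_point = relay.const(0, "int32")
qadd = relay.qnn.op.add(lhs, rhs, scale, zero_point,
                        scale, zero_point, scale, zero_point)

pattern = make_qnn_add_pattern()
assert pattern.match(qadd)                          # bare qnn.add
assert pattern.match(relay.clip(qadd, 0.0, 127.0))  # with the optional clip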
Example No. 25
def get_pattern():
    conv = make_conv_bias_relu_pattern()
    clip = is_op('clip')(conv, wildcard(), wildcard())
    return is_op('multiply')(conv, clip)
Example No. 26
    def __init__(self):
        super().__init__(require_type=True, rewrite_once=True)
        self.reshape = is_op("reshape")(wildcard())
        self.strided_slice = is_op("strided_slice")(wildcard())
        self.pattern = self.reshape | self.strided_slice
Example No. 27
    def __init__(self):
        super().__init__(require_type=True)
        self.split_in = wildcard()
        self.pattern = is_op("split")(self.split_in)
Example No. 28
def strided_slice_pattern():
    """Create pattern for strided_slice"""
    pattern = is_op("strided_slice")(wildcard())
    return pattern
Example No. 29
def reshape_pattern():
    """Create pattern for reshape"""
    pattern = is_op("reshape")(wildcard())
    return pattern
Example No. 30
def pattern_C():
    x = wildcard()
    out = is_op('abs')(x)
    out = is_op('nn.relu')(out)
    return out