# Assumes TVM's Relay test imports plus the `before`, `expected`,
# `run_opt_pass`, and `_get_positive_scale` helpers from the test file.
import tvm
from tvm import relay
from tvm.relay import transform

def check(shape, channels, out_scale):
    x = relay.var("x", shape=shape)
    in_channels = shape[1]
    weight = relay.var("weight")
    y1 = before(x, weight, out_scale, channels)
    y1 = run_opt_pass(y1, transform.InferType())
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    # Failure case: the scale cannot be folded, so the expression is unchanged.
    assert tvm.ir.structural_equal(y1, y1_folded)
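Every example relies on a `run_opt_pass` helper that is not shown on this page. A minimal sketch, matching the convention commonly used in TVM's Relay pass tests:

import tvm
from tvm import relay

def run_opt_pass(expr, opt_pass):
    # Wrap the expression in an IRModule, run the pass, and unwrap
    # the result so the caller gets an expression back.
    assert isinstance(opt_pass, tvm.transform.Pass)
    mod = tvm.IRModule.from_expr(expr)
    mod = opt_pass(mod)
    entry = mod["main"]
    return entry if isinstance(expr, relay.Function) else entry.body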
Example #2
def check(shape, channels, blocking, out_scale):
    x = relay.var("x", shape=shape)
    in_channels = shape[1]
    weight = relay.var("weight")
    y1 = before(x, weight, out_scale, channels, blocking)
    y1 = run_opt_pass(y1, transform.InferType())
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    assert tvm.ir.structural_equal(y1, y1_folded)
def check(shape, channels, fbefore):
    x = relay.var("x", shape=shape)
    in_channels = shape[1]
    weight = relay.var("weight")
    out_bias = relay.var("out_bias", shape=(channels, ))
    out_scale = relay.const(_get_positive_scale((channels, 1, 1)))
    y1 = fbefore(x, weight, out_bias, out_scale, channels)
    y1 = run_opt_pass(y1, transform.InferType())
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    # Failure case: folding must not change the expression.
    assert tvm.ir.structural_equal(y1_folded, y1)
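The `_get_positive_scale` helper is also assumed. A sketch of the usual definition, which draws strictly positive random values (positive scales commute with ops like relu, which the pass relies on when propagating scales):

import numpy as np

def _get_positive_scale(size):
    # Strictly positive values so the scale is always safe to fold.
    return np.random.uniform(0.5, 1, size=size).astype("float32")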
def check(shape, channels):
    x = relay.var("x", shape=shape)
    weight = relay.var("weight")
    # A negative scale can still be folded backward into the conv weight.
    out_scale = relay.const(-_get_positive_scale((channels, 1, 1)))
    y1 = before(x, weight, out_scale, channels)
    y1 = run_opt_pass(y1, transform.InferType())
    type_dict = {x.name_hint: x.checked_type for x in y1.params}
    weight = relay.var("weight", type_dict["weight"])
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    y1_expected = expected(x, weight, out_scale, channels)
    y1_expected = run_opt_pass(y1_expected, transform.InferType())
    assert tvm.ir.structural_equal(y1_folded, y1_expected)
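The `before`/`expected` pair is defined next to each check. For the negative-scale case above, a plausible sketch, assuming the conv2d-then-multiply pattern that BackwardFoldScaleAxis rewrites (names and attributes are illustrative):

def before(x, conv_weight, out_scale, channels):
    y = relay.nn.conv2d(x, conv_weight, channels=channels,
                        kernel_size=(3, 3), padding=(1, 1))
    y = relay.multiply(y, out_scale)
    return relay.Function(relay.analysis.free_vars(y), y)

def expected(x, conv_weight, out_scale, channels):
    # The pass folds the scale into the weight and deletes the multiply.
    squeezed_scale = relay.squeeze(out_scale, axis=[1, 2])
    conv_weight = relay.multiply(
        conv_weight,
        relay.expand_dims(squeezed_scale, axis=1, num_newaxis=3))
    y = relay.nn.conv2d(x, conv_weight, channels=channels,
                        kernel_size=(3, 3), padding=(1, 1))
    return relay.Function(relay.analysis.free_vars(y), y)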
def check(data_shape, weight_shape):
    x = relay.var("x", shape=data_shape)
    weight = relay.var("weight", shape=weight_shape)
    out_channels = weight_shape[0]
    in_bias = relay.var("in_bias", shape=(out_channels, ))
    in_scale = relay.const(_get_positive_scale((out_channels, )))
    y1 = before(x, weight, in_bias, in_scale)
    y1 = run_opt_pass(y1, transform.InferType())
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    y1_expected = expected(x, weight, in_bias, in_scale)

    y1_folded = run_opt_pass(y1_folded, transform.InferType())
    y1_expected = run_opt_pass(y1_expected, transform.InferType())
    assert tvm.ir.structural_equal(y1_folded, y1_expected)
def check(shape, channels):
    x = relay.var("x", shape=shape)
    in_channels = shape[1]
    weight = relay.var("weight")
    out_bias = relay.var("out_bias", shape=(channels, ))
    out_scale = relay.const(_get_positive_scale((channels, 1, 1)))

    y1 = before(x, weight, out_bias, out_scale, channels)
    y1 = run_opt_pass(y1, transform.InferType())
    type_dict = {x.name_hint: x.checked_type for x in y1.params}
    weight = relay.var("weight", type_dict["weight"])
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    y1_expected = expected(x, weight, out_bias, out_scale, channels)
    y1_expected = run_opt_pass(y1_expected, transform.InferType())
    assert tvm.ir.structural_equal(y1_folded, y1_expected)
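Each check helper is then invoked with concrete shapes; an illustrative call (the exact shape here is hypothetical):

check((2, 4, 10, 10), 8)  # NCHW input: batch 2, 4 input channels, 8 output channels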
def check(shape, in_channels, channels, blocking, fbefore):
    x = relay.var("x", shape=shape)
    weight = relay.var("weight")
    if blocking:
        out_bias = relay.var("out_bias", shape=(channels // blocking[1], 1, 1, blocking[1]))
        out_scale = relay.const(
            _get_positive_scale((channels // blocking[1], 1, 1, blocking[1]))
        )
    else:
        out_bias = relay.var("out_bias", shape=(channels, 1, 1))
        out_scale = relay.const(_get_positive_scale((channels, 1, 1)))
    y1 = fbefore(x, weight, out_bias, out_scale, in_channels, channels, blocking)
    y1 = run_opt_pass(y1, transform.InferType())
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    # Failure case: the fold must leave the expression unchanged.
    assert tvm.ir.structural_equal(y1_folded, y1)
Example #8
def check(shape, channels, blocking):
    x = relay.var("x", shape=shape)
    weight = relay.var("weight")
    if blocking:
        out_scale = relay.const(-_get_positive_scale(
            (1, channels // blocking[1], 1, 1, blocking[1])))
    else:
        out_scale = relay.const(-_get_positive_scale((channels, 1, 1)))
    y1 = before(x, weight, out_scale, channels, blocking)
    y1 = run_opt_pass(y1, transform.InferType())
    type_dict = {x.name_hint: x.checked_type for x in y1.params}
    weight = relay.var("weight", type_dict["weight"])
    y1_folded = run_opt_pass(y1, transform.BackwardFoldScaleAxis())
    y1_expected = expected(x, weight, out_scale, channels, blocking)
    y1_expected = run_opt_pass(y1_expected, transform.InferType())
    assert tvm.ir.structural_equal(y1_folded, y1_expected)
Example #9
def match_pass_name(name):
    """Map a pass name string to the corresponding Relay transform instance."""
    factories = {
        'FoldScaleAxis': transform.FoldScaleAxis,
        'BackwardFoldScaleAxis': transform.BackwardFoldScaleAxis,
        'ForwardFoldScaleAxis': transform.ForwardFoldScaleAxis,
        'FuseOps': lambda: transform.FuseOps(3),
        'FoldConstant': transform.FoldConstant,
        'CombineParallelConv2d': transform.CombineParallelConv2D,
        'AlterOpLayout': transform.AlterOpLayout,
        'EliminateCommonSubexpr': transform.EliminateCommonSubexpr,
        'PartialEvaluate': transform.PartialEvaluate,
        'CanonicalizeCast': transform.CanonicalizeCast,
        'CanonicalizeOps': transform.CanonicalizeOps,
    }
    if name not in factories:
        raise ValueError('Name {} does not match any pass'.format(name))
    return factories[name]()
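A usage sketch for `match_pass_name`, composing named passes into a pipeline (the name list and `mod` are placeholders):

names = ['FoldConstant', 'BackwardFoldScaleAxis', 'FuseOps']
seq = tvm.transform.Sequential([match_pass_name(n) for n in names])
with tvm.transform.PassContext(opt_level=3):
    mod = seq(mod)  # mod: an existing tvm.IRModule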