Code Example #1
 def single_bn_pass():
     """
     Substitute a BatchNorm that does NOT take a Conv2D as input with a ReLU6.
     """
     conv2d_prim = IsPrimTypeOf("Conv2D")
     conv2d = CallWith(conv2d_prim)
     # pattern_0 matches any node that is NOT a Conv2D call
     pattern_0 = IsNot(conv2d)
     pattern = CallWith(P.BatchNorm(), inputs=[pattern_0])
     target = CallWith(P.ReLU6(), inputs=[pattern_0])
     return pattern, target
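This pattern/target pair drives MindSpore's experimental Python-pass engine: every BatchNorm whose input is not produced by a Conv2D call is rewritten into a ReLU6 over that same input. A minimal registration sketch follows; the import path and decorator name are assumptions inferred from the era of this API and are not shown in the snippet:

    # Hedged sketch: import path and decorator name are assumptions, matching
    # the pre-rename API (IsPrimTypeOf/CallWith/IsNot) used above.
    from mindspore.graph_utils.python_pass import registe_pass  # spelling as it appeared upstream

    @registe_pass(run_only_once=True)  # assumption: apply the rewrite once during compilation
    def single_bn_pass():
        ...  # body as in Code Example #1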
Code Example #2
 def single_bn_pass():
     """
     Substitute a BatchNorm that does NOT take a Conv2D as input with a ReLU6.
     """
     conv2d_prim = Prim("Conv2D")
     conv2d = Call(conv2d_prim)
     pattern_0 = NoneOf(conv2d)
     pattern = Call(P.BatchNorm(), [pattern_0])
     target = Call(P.ReLU6(), [pattern_0])
     return pattern, target
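Code Example #2 is the same pass as Code Example #1 written against the renamed pattern API: IsPrimTypeOf became Prim, CallWith became Call, IsNot became NoneOf, and the inputs keyword turned into a positional list. The rewrite it describes is unchanged.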
Code Example #3
 def single_bn_pass():
     """
     Substitute a Softmax that does NOT take a MatMul as input with a ReLU6.
     """
     matmul = Prim("MatMul")
     pattern_0 = NoneOf(matmul)
     softmax = P.Softmax()
     pattern = Call(softmax, [pattern_0])
     relu6 = P.ReLU6()
     target = Call(relu6, [pattern_0])
     return pattern, target
Code Example #4
    def softmax_relu_pass():
        x = Any()
        softmax_pattern = Prim(P.Softmax())
        call_softmax = Call(softmax_pattern, [x])
        relu_pattern = Prim(P.ReLU())
        call_relu = Call(relu_pattern, [x])

        pattern = OneOf([call_softmax, call_relu])
        relu6_pattern = Prim(P.ReLU6())
        target = Call(relu6_pattern, [x])
        return pattern, target
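OneOf lets a single pass match alternatives: the source pattern here matches either Softmax(x) or ReLU(x), and both are replaced by ReLU6(x) over the same input x. Below is a hedged sketch of a cell such a pass would rewrite; the class is illustrative and not taken from the snippet:

    import mindspore.nn as nn
    from mindspore.ops import operations as P

    class ToyNet(nn.Cell):  # hypothetical cell, for illustration only
        def __init__(self):
            super(ToyNet, self).__init__()
            self.softmax = P.Softmax()  # would match the call_softmax branch
            self.relu = P.ReLU()        # would match the call_relu branch

        def construct(self, x):
            # after the pass, both calls below would compute ReLU6(x)
            return self.softmax(x) + self.relu(x)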
Code Example #5
File: quant.py  Project: lvleysuper/mindspore
 def __init__(self, num_bits=8, quant_delay=0, symmetric=False,
              narrow_range=False):
     super(ReLU6Quant, self).__init__()
     self.fake_quant_act = nn.FakeQuantWithMinMax(min_init=0,
                                                  max_init=6,
                                                  num_bits=num_bits,
                                                  quant_delay=quant_delay,
                                                  ema=True,
                                                  symmetric=symmetric,
                                                  narrow_range=narrow_range)
     self.relu6 = P.ReLU6()
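Only the constructor survives this excerpt; the forward step is missing. A hedged reconstruction of the likely construct method, assuming the usual quantization-aware ordering (apply the activation first, then fake-quantize its [0, 6] output):

 def construct(self, x):
     # assumption: clamp with ReLU6 first, then simulate quantization on [0, 6]
     x = self.relu6(x)
     x = self.fake_quant_act(x)
     return x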
Code Example #6
    def softmax_relu_pass():
        x = AnyPattern()
        softmax_pattern = IsPrimTypeOf(P.Softmax())
        call_softmax = CallWith(softmax_pattern, inputs=[x])
        relu_pattern = IsPrimTypeOf(P.ReLU())
        call_relu = CallWith(relu_pattern, inputs=[x])

        pattern = IsIn([call_softmax, call_relu])
        relu6_pattern = IsPrimTypeOf(P.ReLU6(), should_replace=False)
        target = CallWith(relu6_pattern, inputs=[x])
        return pattern, target
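Code Example #6 is the earlier-API spelling of Code Example #4 (AnyPattern/IsPrimTypeOf/CallWith/IsIn instead of Any/Prim/Call/OneOf), with one addition: should_replace=False on the target's ReLU6 primitive. The snippet does not document that flag; it appears only on target-side patterns here and in Code Example #7, so it is presumably an option for how the replacement node is constructed rather than part of the match itself.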
Code Example #7
 def single_bn_pass():
     """
     Substitute a Softmax that does NOT take a MatMul as input with a ReLU6.
     """
     matmul = IsPrimTypeOf("MatMul")
     pattern_0 = IsNot(matmul)
     softmax = P.Softmax()
     pattern = CallWith(softmax, inputs=[pattern_0])
     relu6 = P.ReLU6()
     target = CallWith(relu6, inputs=[pattern_0], should_replace=False)
     return pattern, target
Code Example #8
File: test_relu6_op.py  Project: zuoshou030/mindspore
 def __init__(self):
     super(NetReLU6, self).__init__()
     self.relu6 = P.ReLU6()
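The excerpt shows only __init__; a hedged sketch of the test cell's construct method, plus an illustrative invocation (the values are chosen to show the [0, 6] clamp and are not taken from the test file):

 def construct(self, x):
     return self.relu6(x)  # element-wise min(max(x, 0), 6)

    # illustrative usage:
    # net = NetReLU6()
    # out = net(Tensor(np.array([-1.0, 4.0, 7.0], np.float32)))  # -> [0., 4., 6.]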
Code Example #9
File: test_ops.py  Project: smartxcat/mindspore
     'skip': ['backward']}),
 ('Tanh', {
     'block': P.Tanh(),
     'desc_inputs': [[1, 3, 4, 4]],
     'desc_bprop': [[1, 3, 4, 4]]}),
 ('TanhGrad', {
     'block': G.TanhGrad(),
     'desc_inputs': [[1, 3, 4, 4], [1, 3, 4, 4]],
     'desc_bprop': [[1, 3, 4, 4]],
     'skip': ['backward']}),
 ('ReLU', {
     'block': P.ReLU(),
     'desc_inputs': [[1, 3, 4, 4]],
     'desc_bprop': [[1, 3, 4, 4]]}),
 ('ReLU6', {
     'block': P.ReLU6(),
     'desc_inputs': [[1, 3, 4, 4]],
     'desc_bprop': [[1, 3, 4, 4]]}),
 ('ReLUV2', {
     'block': P.ReLUV2(),
     'desc_inputs': [[1, 3, 4, 4]],
     'desc_bprop': [[1, 3, 4, 4], [1, 3, 4, 4]]}),
 ('ReLUGrad', {
     'block': G.ReluGrad(),
     'desc_inputs': [[1, 3, 4, 4], [1, 3, 4, 4]],
     'skip': ['backward']}),
 ('Elu', {
     'block': P.Elu(),
     'desc_inputs': [[2, 3, 4]],
     'desc_bprop': [[2, 3, 4]]}),
 ('EluGrad', {
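Each tuple in this table registers one operator test: 'block' is the op instance under test, 'desc_inputs' gives the input shapes (nested int lists stand for tensors of that shape, which the harness generates), 'desc_bprop' gives the gradient shapes for the backward check, and 'skip' disables stages such as 'backward'. A hedged example of a new entry in the same schema (the op choice is illustrative, not from the file):

 ('Sigmoid', {
     'block': P.Sigmoid(),
     'desc_inputs': [[1, 3, 4, 4]],
     'desc_bprop': [[1, 3, 4, 4]]}),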
Code Example #10
        'desc_inputs': [5.0],
        'skip': ['backward']
    }),
    # input is Tensor(Bool)
    ('ReLU1', {
        'block': (P.ReLU(), {
            'exception': TypeError,
            'error_keywords': ['ReLU']
        }),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.bool_))],
        'skip': ['backward']
    }),

    # input is scalar
    ('ReLU60', {
        'block': (P.ReLU6(), {
            'exception': TypeError,
            'error_keywords': ['ReLU6']
        }),
        'desc_inputs': [5.0],
        'skip': ['backward']
    }),
    # input is Tensor(int32)
    ('ReLU61', {
        'block': (P.ReLU6(), {
            'exception': TypeError,
            'error_keywords': ['ReLU6']
        }),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.int32))],
        'skip': ['backward']
    }),
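These entries assert that P.ReLU rejects a bool tensor and that P.ReLU6 rejects both a bare Python scalar and an int32 tensor, each raising a TypeError whose message names the op. A hedged standalone reproduction of the int32 case (the test name and harness are illustrative; float16/float32 are assumed to be the supported dtypes):

    import numpy as np
    import pytest
    from mindspore import Tensor
    from mindspore.ops import operations as P

    def test_relu6_rejects_int32():  # hypothetical test, for illustration
        relu6 = P.ReLU6()
        with pytest.raises(TypeError):
            relu6(Tensor(np.ones([3, 4]).astype(np.int32)))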
Code Example #11
File: test_relu6_op.py  Project: yrpang/mindspore
 def __init__(self):
     super(NetRelu6Dynamic, self).__init__()
     self.test_dynamic = inner.GpuConvertToDynamicShape()
     self.relu6 = P.ReLU6()
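As in Code Example #8, only __init__ is excerpted. A hedged sketch of the construct method, consistent with the GpuConvertToDynamicShape helper created in the constructor (the input is marked as dynamically shaped before ReLU6 runs):

 def construct(self, x):
     x = self.test_dynamic(x)  # assumption: convert x to a dynamic-shape tensor
     return self.relu6(x)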