Example #1
def test_softmax_relu_sigmoid():
    """
    Use a Python pass to transform Softmax(x) into ReLU(Sigmoid(x)).

    NOTE:
        The Sigmoid pattern exists only in the target.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_relu_pass():
        x = Any()
        softmax_pattern = Prim(P.Softmax())
        pattern = Call(softmax_pattern, [x])
        sigmoid_pattern = Prim(P.Sigmoid())
        call_sigmoid = Call(sigmoid_pattern, [x])
        relu_pattern = Prim(P.ReLU())
        target = Call(relu_pattern, [call_sigmoid])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(3)
    unregiste_pass(softmax_relu_pass)
    assert "ReLU" in transformed_repr
    assert "Sigmoid" in transformed_repr
    assert "Softmax" not in transformed_repr
Example #2
def test_newparameter_pattern():
    """
    Test NewParameter pattern in the target
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    set_renorm(False)
    set_reopt(False)
    @registe_pass(requires_grad=False, run_only_once=True)
    def softmax_addn_pass():
        x = Any()
        pattern = Call(P.Softmax(), [x])

        default_tensor0 = Tensor(np.ones((4, 4)), mindspore.float32)
        default_tensor1 = Tensor(np.ones((4, 4)), mindspore.float32)
        new_para_0 = NewParameter("Merlin", default_tensor0)
        new_para_1 = NewParameter("Arthur", default_tensor1)
        target_0 = Call(P.MatMul(), [new_para_0, new_para_1])
        target = Call("make_tuple", [target_0])
        return pattern, target
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    unregiste_pass(softmax_addn_pass)
    assert "MatMul" in transformed_repr
    assert "make_tuple" in transformed_repr
    assert "Softmax" not in transformed_repr
Example #3
def test_gen_new_parameter():
    """
    Test gen_new_parameter and cancel_new_parameter.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    default_tensor = Tensor(np.ones((4, 4)), mindspore.float32)
    new_para = NewParameter("Merlin", default_tensor)
    set_renorm(False)
    set_reopt(False)
    gen_new_parameter(new_para)
    @registe_pass(requires_grad=False, run_only_once=True)
    def softmax_make_tuple_pass():
        x = Any()
        softmax = P.Softmax()
        pattern = Call(softmax, [x])

        target = Call("make_tuple", [pattern, new_para])
        return pattern, target
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    assert "Merlin" in transformed_repr
    unregiste_pass(softmax_make_tuple_pass)
    cancel_new_parameter(new_para)
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    assert "Merlin" not in transformed_repr
Example #4
def test_imm_target():
    """
    Test Imm (an immediate value) in the target.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    set_renorm(False)
    set_reopt(False)

    @registe_pass(run_only_once=True)
    def softmax_pass():
        x = Any()
        pattern = Call(P.Softmax(), [x])
        imm = Imm(0)
        target_0 = Call("make_tuple", [pattern])
        target = Call("tuple_getitem", [target_0, imm])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model,
                                      inputs).get_return().expanded_str(5)
    unregiste_pass(softmax_pass)
    assert "make_tuple" in transformed_repr
    assert "tuple_getitem" in transformed_repr
    assert "Softmax" in transformed_repr
Example #5
def test_isnot_pattern_1():
    """
    Test the IsNot (NoneOf) pattern, which expresses is-not semantics.
    Case: the IsNot pattern matches the graph.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def single_bn_pass():
        """
        Substitute a Softmax which does NOT take MatMul as its input with ReLU6.
        """
        matmul = Prim("MatMul")
        pattern_0 = NoneOf(matmul)
        softmax = P.Softmax()
        pattern = Call(softmax, [pattern_0])
        relu6 = P.ReLU6()
        target = Call(relu6, [pattern_0])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    unregiste_pass(single_bn_pass)
    assert "ReLU6" in transformed_repr
    assert "Softmax" not in transformed_repr
Example #6
def test_newtensor_pattern():
    """
    Test NewTensor pattern in the target
    """
    set_renorm(False)
    set_reopt(False)
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_addn_pass():
        x = Any()
        pattern = Call(P.Softmax(), [x])

        weight_tensor = Tensor(np.zeros([42]), mindspore.float16)
        new_weight = NewTensor(weight_tensor)
        target = Call(P.AddN(), [x, new_weight])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model,
                                      inputs).get_return().expanded_str(2)
    unregiste_pass(softmax_addn_pass)
    assert "AddN" in transformed_repr
    assert "Softmax" not in transformed_repr
    set_renorm(True)
Example #7
def test_isin_pattern_1():
    """
    Test IsIn (OneOf). The matched pattern is used as a nested input of the target in this case.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_neg_pass():
        x = Any()
        softmax_pattern = Prim(P.Softmax())
        call_softmax = Call(softmax_pattern, [x])
        relu_pattern = Prim(P.ReLU())
        call_relu = Call(relu_pattern, [x])

        pattern = OneOf([call_softmax, call_relu])
        neg_ops = Prim(P.Neg())
        target = Call(neg_ops, [pattern])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model,
                                      inputs).get_return().expanded_str(4)
    unregiste_pass(softmax_neg_pass)
    assert "Neg" in transformed_repr
    assert "Softmax" in transformed_repr
Example #8
def test_isin_pattern_0():
    """
    Test the IsIn (OneOf) pattern, which expresses one-of semantics.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_relu_pass():
        x = Any()
        softmax_pattern = Prim(P.Softmax())
        call_softmax = Call(softmax_pattern, [x])
        relu_pattern = Prim(P.ReLU())
        call_relu = Call(relu_pattern, [x])

        pattern = OneOf([call_softmax, call_relu])
        relu6_pattern = Prim(P.ReLU6())
        target = Call(relu6_pattern, [x])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model,
                                      inputs).get_return().expanded_str(2)
    unregiste_pass(softmax_relu_pass)
    assert "ReLU6" in transformed_repr
    assert "Softmax" not in transformed_repr
Example #9
def test_imm_pattern():
    """
    Test Imm (an immediate value) in the target.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_addn_pass():
        x = AnyPattern()
        softmax = P.Softmax()
        pattern = CallWith(softmax, inputs=[x])
        imm = Imm(0)
        target_0 = CallWith("make_tuple",
                            inputs=[pattern],
                            should_replace=False)
        target = CallWith("tuple_getitem",
                          inputs=[target_0, imm],
                          should_replace=False)
        return pattern, target

    transformed_repr = get_func_graph(softmax_model,
                                      inputs).get_return().expanded_str(5)
    print(transformed_repr)
    unregiste_pass(softmax_addn_pass)
    assert "make_tuple" in transformed_repr
    assert "tuple_getitem" in transformed_repr
    assert "Softmax" in transformed_repr
Example #10
def test_isnot_pattern_0():
    """
    Test the IsNot pattern, which expresses is-not semantics.
    Case: the IsNot pass fails to match.
    """
    set_renorm(False)

    class ConvBN(nn.Cell):
        def __init__(self):
            super(ConvBN, self).__init__()
            self.conv = P.Conv2D(32, 3)
            self.conv_weight = Tensor(np.ones([32, 32, 3, 3]),
                                      mindspore.float32)
            self.scale = Tensor(np.ones([32]), mindspore.float32)
            self.bias = Tensor(np.ones([32]), mindspore.float32)
            self.mean = Tensor(np.ones([32]), mindspore.float32)
            self.variance = Tensor(np.ones([32]), mindspore.float32)
            self.bn = P.BatchNorm()

        def construct(self, x):
            x = self.conv(x, self.conv_weight)
            x = self.bn(x, self.scale, self.bias, self.mean, self.variance)
            return x

    inputs = Tensor(np.random.normal(0, 1, (10, 32, 32, 32)),
                    mindspore.float32)
    conv_bn_model = ConvBN()

    @registe_pass(run_only_once=True)
    def single_bn_pass():
        """
        Substitute a BN which does NOT take Conv2D as its input with ReLU6.
        """
        conv2d_prim = IsPrimTypeOf("Conv2D")
        conv2d = CallWith(conv2d_prim)
        pattern_0 = IsNot(conv2d)
        pattern = CallWith(P.BatchNorm(), inputs=[pattern_0])
        target = CallWith(P.ReLU6(), inputs=[pattern_0])
        return pattern, target

    @registe_pass(run_only_once=True)
    def bn_pass():
        """
        Substitute a BN with Softmax.
        """
        bn = P.BatchNorm()
        pattern = CallWith(bn)
        softmax = P.Softmax()
        target = CallWith(softmax, should_replace=False)
        return pattern, target

    transformed_repr = get_func_graph(conv_bn_model,
                                      inputs).get_return().expanded_str(5)
    unregiste_pass(single_bn_pass)
    unregiste_pass(bn_pass)
    assert "ReLU6" not in transformed_repr
    assert "Softmax" in transformed_repr
    set_renorm(True)
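
Examples #9 and #10 are written against an earlier spelling of the same pattern API: AnyPattern, CallWith(prim, inputs=[...]), IsPrimTypeOf and IsNot, plus a should_replace flag, where the other snippets use Any, Call(prim, [...]), Prim, NoneOf and OneOf. As a rough illustration of the mapping, inferred only from the usage shown on this page, the simple Softmax-to-ReLU pass of Example #12 below would read as follows under the older names:

@registe_pass(run_only_once=True)
def softmax_relu_pass_old_api():
    # AnyPattern() corresponds to Any(); CallWith(prim, inputs=[...]) to Call(prim, [...])
    x = AnyPattern()
    pattern = CallWith(P.Softmax(), inputs=[x])
    target = CallWith(P.ReLU(), inputs=[x])
    return pattern, target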
Example #11
def test_prim():
    """
    Test Prim with a list of primitives: match a call to either Sigmoid or Softmax
    and replace it with ReLU.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_relu_pass():
        x = Any()
        sigmoid_softmax_pattern = Prim([P.Sigmoid(), P.Softmax()])
        pattern = Call(sigmoid_softmax_pattern, [x])
        target = Call(P.ReLU(), [x])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(3)
    unregiste_pass(softmax_relu_pass)
    assert "ReLU" in transformed_repr
    assert "Softmax" not in transformed_repr
Example #12
def test_softmax_relu():
    """
    Use a Python pass to transform Softmax into ReLU.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_relu_pass():
        x = Any()
        pattern = Call(P.Softmax(), [x])
        target = Call(P.ReLU(), [x])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(2)
    unregiste_pass(softmax_relu_pass)
    assert "ReLU" in transformed_repr
    assert "Softmax" not in transformed_repr