Example 1
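All five examples appear to share a common preamble that the excerpts omit. A plausible reconstruction is sketched below; the exact module paths and helper names varied across MindSpore releases, and get_func_graph is a helper from the test suite that compiles a cell and returns its function graph for inspection, so treat this as an assumption rather than a pinned API. Note that registe_pass/unregiste_pass are spelled this way in the library itself.

# Assumed preamble; module paths may differ between MindSpore versions.
import numpy as np

import mindspore
import mindspore.nn as nn
import mindspore.ops.operations as P
from mindspore import Tensor
from mindspore.graph_utils.graph_pattern import (
    Any, Call, Imm, NewParameter, NewTensor, NoneOf, Prim)
from mindspore.graph_utils.python_pass import (
    registe_pass, unregiste_pass, gen_new_parameter, cancel_new_parameter,
    _set_renorm, _set_reopt)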
def test_gen_new_parameter():
    """
    Test gen_new_parameter
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    default_tensor = Tensor(np.ones((4, 4)), mindspore.float32)
    new_para = NewParameter("Merlin", default_tensor)
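    # Disable renormalization and re-optimization after the pass runs, so the
    # rewritten graph can be inspected as-is.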
    _set_renorm(False)
    _set_reopt(False)
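    # Register the NewParameter globally so that pass targets may reference it;
    # a matching pass then adds "Merlin" as a real parameter of the graph.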
    gen_new_parameter(new_para)
    @registe_pass(requires_grad=False, run_only_once=True)
    def softmax_make_tuple_pass():
        x = Any()
        softmax = P.Softmax()
        pattern = Call(softmax, [x])

        target = Call("make_tuple", [pattern, new_para])
        return pattern, target
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    assert "Merlin" in transformed_repr
    unregiste_pass(softmax_make_tuple_pass)
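    # Unregister the NewParameter: recompiling afterwards no longer embeds
    # "Merlin" in the graph, as the second assertion checks.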
    cancel_new_parameter(new_para)
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    assert "Merlin" not in transformed_repr
Example 2
def test_newparameter_pattern():
    """
    Test NewParameter pattern in the target
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    _set_renorm(False)
    _set_reopt(False)
    @registe_pass(requires_grad=False, run_only_once=True)
    def softmax_addn_pass():
        x = Any()
        pattern = Call(P.Softmax(), [x])

        default_tensor0 = Tensor(np.ones((4, 4)), mindspore.float32)
        default_tensor1 = Tensor(np.ones((4, 4)), mindspore.float32)
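        # NewParameter nodes used directly in a target become fresh graph
        # parameters, initialized from the given default tensors, when the
        # pass fires; no separate gen_new_parameter call is needed here.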
        new_para_0 = NewParameter("Merlin", default_tensor0)
        new_para_1 = NewParameter("Arthur", default_tensor1)
        target_0 = Call(P.MatMul(), [new_para_0, new_para_1])
        target = Call("make_tuple", [target_0])
        return pattern, target
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    unregiste_pass(softmax_addn_pass)
    assert "MatMul" in transformed_repr
    assert "make_tuple" in transformed_repr
    assert "Softmax" not in transformed_repr
Example 3
def test_isnot_pattern_0():
    """
    Test IsNot pattern which expresses the IsNot semantics.
    Case: IsNot pass failed to match
    """
    _set_renorm(False)
    _set_reopt(False)
    class ConvBN(nn.Cell):
        def __init__(self):
            super(ConvBN, self).__init__()
            self.conv = P.Conv2D(32, 3)
            self.conv_weight = Tensor(np.ones([32, 32, 3, 3]), mindspore.float32)
            self.scale = Tensor(np.ones([32]), mindspore.float32)
            self.bias = Tensor(np.ones([32]), mindspore.float32)
            self.mean = Tensor(np.ones([32]), mindspore.float32)
            self.variance = Tensor(np.ones([32]), mindspore.float32)
            self.bn = P.BatchNorm()
        def construct(self, x):
            x = self.conv(x, self.conv_weight)
            x = self.bn(x, self.scale, self.bias, self.mean, self.variance)
            return x
    inputs = Tensor(np.random.normal(0, 1, (10, 32, 32, 32)), mindspore.float32)
    conv_bn_model = ConvBN()

    @registe_pass(requires_grad=False, run_only_once=True)
    def single_bn_pass():
        """
        Sub a BN which does NOT take Conv as inputs to ReLU6.
        """
        conv2d_prim = Prim("Conv2D")
        conv2d = Call(conv2d_prim)
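        # NoneOf(conv2d) matches any node that is NOT a Conv2D call, so the
        # pattern only captures a BatchNorm whose input bypasses Conv2D.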
        pattern_0 = NoneOf(conv2d)
        pattern = Call(P.BatchNorm(), [pattern_0])
        target = Call(P.ReLU6(), [pattern_0])
        return pattern, target

    @registe_pass(requires_grad=False, run_only_once=True)
    def bn_pass():
        """
        Sub a BN to Softmax.
        """
        pattern = Call(P.BatchNorm())
        target = Call(P.Softmax())
        return pattern, target

    transformed_repr = get_func_graph(conv_bn_model, inputs).get_return().expanded_str(5)
    unregiste_pass(single_bn_pass)
    unregiste_pass(bn_pass)
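    # The BN inside ConvBN does take the Conv2D output, so single_bn_pass never
    # matches (hence no ReLU6), while the unconditional bn_pass rewrites the BN
    # into Softmax.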
    assert "ReLU6" not in transformed_repr
    assert "Softmax" in transformed_repr
    _set_renorm(True)
Example 4
def test_imm_target():
    """
    Test NewParameter pattern in the target
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    _set_renorm(False)
    _set_reopt(False)
    @registe_pass(requires_grad=False, run_only_once=True)
    def softmax_pass():
        x = Any()
        pattern = Call(P.Softmax(), [x])
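        # Imm wraps an immediate Python value (here the constant index 0) so it
        # can appear as a literal argument in the target.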
        imm = Imm(0)
        target_0 = Call("make_tuple", [pattern])
        target = Call("tuple_getitem", [target_0, imm])
        return pattern, target
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    unregiste_pass(softmax_pass)
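    # The Softmax output is now consumed as tuple_getitem(make_tuple(Softmax(x)), 0).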
    assert "make_tuple" in transformed_repr
    assert "tuple_getitem" in transformed_repr
    assert "Softmax" in transformed_repr
Example 5
def test_newtensor_pattern():
    """
    Test NewTensor pattern in the target
    """
    _set_renorm(False)
    _set_reopt(False)
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(requires_grad=False, run_only_once=True)
    def softmax_addn_pass():
        x = Any()
        pattern = Call(P.Softmax(), [x])

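        # NewTensor embeds a constant tensor in the target: the rewritten graph
        # computes AddN(x, zeros) in place of Softmax(x).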
        weight_tensor = Tensor(np.zeros([42]), mindspore.float16)
        new_weight = NewTensor(weight_tensor)
        target = Call(P.AddN(), [x, new_weight])
        return pattern, target
    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(2)
    unregiste_pass(softmax_addn_pass)
    assert "AddN" in transformed_repr
    assert "Softmax" not in transformed_repr
    _set_renorm(True)