def softmax_relu_pass():
    """Pattern pass: match Softmax(x) and rewrite it to ReLU(x).

    Returns the (pattern, target) pair expected by the pass registry.
    """
    any_input = AnyPattern()
    # Match: a call of Softmax over a single arbitrary input.
    pattern = CallWith(IsPrimTypeOf(P.Softmax()), inputs=[any_input])
    # Target: ReLU applied to the same captured input.
    # should_replace=False — presumably controls whether this prim node itself
    # is substituted; confirm against the pattern API.
    relu_prim = IsPrimTypeOf(P.ReLU(), should_replace=False)
    target = CallWith(relu_prim, inputs=[any_input])
    return pattern, target
def softmax_make_tuple_pass():
    """Pattern pass: wrap a matched Softmax call into a make_tuple with a new parameter.

    Returns the (pattern, target) pair expected by the pass registry.
    """
    x = AnyPattern()
    softmax = P.Softmax()
    # Match: a call of Softmax over a single arbitrary input.
    pattern = CallWith(softmax, inputs=[x])
    # NOTE(review): `new_para` is a free variable — not defined in this function.
    # Presumably a NewParameter created in an enclosing scope not visible in this
    # chunk; confirm it exists, otherwise this raises NameError at call time.
    target = CallWith("make_tuple", inputs=[pattern, new_para], should_replace=False)
    return pattern, target
def bn_pass():
    """Sub a BN to Softmax.

    Returns the (pattern, target) pair expected by the pass registry.
    """
    # Match: any BatchNorm call (no input constraints).
    pattern = CallWith(P.BatchNorm())
    # Target: a Softmax call in its place.
    target = CallWith(P.Softmax(), should_replace=False)
    return pattern, target
def single_bn_pass():
    """Sub a BN which does NOT take Conv as inputs to ReLU6.

    NOTE(review): another function named `single_bn_pass` appears later in this
    file; if both are at module level, the later definition shadows this one —
    confirm they live in separate scopes (e.g. nested in different tests).
    """
    # Input constraint: anything that is NOT a Conv2D call.
    not_conv = IsNot(CallWith(IsPrimTypeOf("Conv2D")))
    # Match: BatchNorm whose input satisfies the constraint above.
    pattern = CallWith(P.BatchNorm(), inputs=[not_conv])
    # Target: ReLU6 over the same constrained input.
    target = CallWith(P.ReLU6(), inputs=[not_conv])
    return pattern, target
def single_bn_pass():
    """
    Sub a Softmax which does NOT take MatMul as inputs to ReLU6.

    NOTE(review): the original docstring said "BN", but the pattern below
    matches P.Softmax, not BatchNorm — wording corrected; confirm intent.
    Also note a same-named `single_bn_pass` is defined earlier in this file;
    verify both are registered from separate scopes.
    """
    matmul = IsPrimTypeOf("MatMul")
    # Input constraint: anything that is NOT a MatMul.
    pattern_0 = IsNot(matmul)
    softmax = P.Softmax()
    # Match: Softmax whose input satisfies the constraint above.
    pattern = CallWith(softmax, inputs=[pattern_0])
    relu6 = P.ReLU6()
    # Target: ReLU6 over the same constrained input.
    target = CallWith(relu6, inputs=[pattern_0], should_replace=False)
    return pattern, target
def softmax_neg_pass():
    """Pattern pass: match Softmax(x) OR ReLU(x) and rewrite to Neg(<match>).

    Returns the (pattern, target) pair expected by the pass registry.
    """
    inp = AnyPattern()
    softmax_call = CallWith(IsPrimTypeOf(P.Softmax()), inputs=[inp])
    relu_call = CallWith(IsPrimTypeOf(P.ReLU()), inputs=[inp])
    # The pattern matches when the node is either of the two calls above.
    pattern = IsIn([softmax_call, relu_call])
    # Target: Neg applied to whichever call matched.
    neg_prim = IsPrimTypeOf(P.Neg(), should_replace=False)
    target = CallWith(neg_prim, inputs=[pattern])
    return pattern, target
def softmax_addn_pass():
    """Pattern pass: rewrite Softmax(x) to tuple_getitem(make_tuple(<match>), 0).

    NOTE(review): two more functions named `softmax_addn_pass` follow in this
    file; if all are at module level the last shadows the others — confirm
    they live in separate scopes.
    """
    inp = AnyPattern()
    # Match: a Softmax call over a single arbitrary input.
    pattern = CallWith(P.Softmax(), inputs=[inp])
    # Wrap the match in a one-element tuple, then index element 0 back out.
    wrapped = CallWith("make_tuple", inputs=[pattern], should_replace=False)
    index0 = Imm(0)
    target = CallWith("tuple_getitem", inputs=[wrapped, index0], should_replace=False)
    return pattern, target
def softmax_addn_pass():
    """Pattern pass: rewrite Softmax(x) to AddN(x, <new zeros tensor>).

    NOTE(review): same name as the adjacent `softmax_addn_pass` definitions —
    confirm each is registered from its own scope.
    """
    inp = AnyPattern()
    # Match: a Softmax call over a single arbitrary input.
    pattern = CallWith(P.Softmax(), inputs=[inp])
    # Fresh constant injected into the graph as AddN's second operand:
    # a float16 zeros tensor of shape [42].
    zeros = NewTensor(Tensor(np.zeros([42]), mindspore.float16))
    target = CallWith(P.AddN(), inputs=[inp, zeros], should_replace=False)
    return pattern, target
def softmax_addn_pass():
    """Pattern pass: rewrite Softmax(x) to make_tuple(MatMul(p0, p1)) where
    p0/p1 are new graph parameters.

    NOTE(review): same name as the adjacent `softmax_addn_pass` definitions —
    confirm each is registered from its own scope.
    """
    inp = AnyPattern()
    # Match: a Softmax call over a single arbitrary input.
    pattern = CallWith(P.Softmax(), inputs=[inp])
    # Two fresh 4x4 float32 parameters (distinct default tensors, as in the
    # original) fed into a newly created MatMul node.
    init_a = Tensor(np.ones((4, 4)), mindspore.float32)
    init_b = Tensor(np.ones((4, 4)), mindspore.float32)
    param_a = NewParameter("Merlin", init_a)
    param_b = NewParameter("Arthur", init_b)
    matmul_call = CallWith(P.MatMul(), inputs=[param_a, param_b], should_replace=False)
    target = CallWith("make_tuple", inputs=[matmul_call], should_replace=False)
    return pattern, target