def softmax_make_tuple_pass():
    """Pattern: Softmax(x). Target: make_tuple(Softmax(x), new_para)."""
    any_input = Any()
    softmax_call = Call(P.Softmax(), [any_input])
    # `new_para` is a NewParameter defined elsewhere in this file.
    target = Call("make_tuple", [softmax_call, new_para])
    return softmax_call, target
def bn_pass():
    """Sub a BN to Softmax."""
    bn_pattern = Call(P.BatchNorm())
    softmax_target = Call(P.Softmax())
    return bn_pattern, softmax_target
def softmax_pass():
    """Pattern: Softmax(x). Target: tuple_getitem(make_tuple(Softmax(x)), 0).

    NOTE(review): another `softmax_pass` appears later in this file; the
    later definition shadows this one at module level.
    """
    inp = Any()
    softmax_call = Call(P.Softmax(), [inp])
    wrapped = Call("make_tuple", [softmax_call])
    index = Imm(0)
    target = Call(Constants.kTupleGetItem, [wrapped, index])
    return softmax_call, target
def softmax_pass():
    """Pattern: Softmax(x). Target: "tuple_getitem"(make_tuple(Softmax(x)), 0)."""
    inp = Any()
    softmax_call = Call(P.Softmax(), [inp])
    wrapped = Call("make_tuple", [softmax_call])
    zero = Imm(0)
    target = Call("tuple_getitem", [wrapped, zero])
    return softmax_call, target
def softmax_addn_pass():
    """Pattern: Softmax(x). Target: AddN(x, new float16 zeros tensor of shape [42])."""
    inp = Any()
    softmax_call = Call(P.Softmax(), [inp])
    zeros = Tensor(np.zeros([42]), mindspore.float16)
    target = Call(P.AddN(), [inp, NewTensor(zeros)])
    return softmax_call, target
def softmax_relu_pass():
    """Pattern: Softmax(x). Target: ReLU(Sigmoid(x)).

    NOTE(review): other functions named `softmax_relu_pass` appear later in
    this file; the last definition shadows the earlier ones.
    """
    inp = Any()
    pattern = Call(Prim(P.Softmax()), [inp])
    sigmoid_call = Call(Prim(P.Sigmoid()), [inp])
    target = Call(Prim(P.ReLU()), [sigmoid_call])
    return pattern, target
def single_bn_pass():
    """Sub a BN which does NOT take Conv as inputs to ReLU6."""
    conv_call = Call(Prim("Conv2D"))
    # NoneOf: match an input that is NOT the Conv2D call.
    non_conv_input = NoneOf(conv_call)
    bn_pattern = Call(P.BatchNorm(), [non_conv_input])
    relu6_target = Call(P.ReLU6(), [non_conv_input])
    return bn_pattern, relu6_target
def softmax_addn_pass():
    """Pattern: Softmax(x). Target: make_tuple(MatMul(Merlin, Arthur)) with two new parameters."""
    inp = Any()
    pattern = Call(P.Softmax(), [inp])
    ones_a = Tensor(np.ones((4, 4)), mindspore.float32)
    ones_b = Tensor(np.ones((4, 4)), mindspore.float32)
    param_a = NewParameter("Merlin", ones_a)
    param_b = NewParameter("Arthur", ones_b)
    matmul_call = Call(P.MatMul(), [param_a, param_b])
    target = Call("make_tuple", [matmul_call])
    return pattern, target
def single_bn_pass():
    """Sub a Softmax which does NOT take MatMul as inputs to ReLU6.

    Docstring fixed: the original claimed it matched a BN, but the pattern
    below is built from ``P.Softmax()``, not BatchNorm.
    NOTE(review): the function name ``single_bn_pass`` is also misleading for
    the same reason, but is kept to preserve the external interface.
    """
    matmul = Prim("MatMul")
    # NoneOf: match an input that is NOT a MatMul call.
    pattern_0 = NoneOf(matmul)
    softmax = P.Softmax()
    pattern = Call(softmax, [pattern_0])
    relu6 = P.ReLU6()
    target = Call(relu6, [pattern_0])
    return pattern, target
def softmax_neg_pass():
    """Pattern: Softmax(x) or ReLU(x). Target: Neg applied to the matched call."""
    inp = Any()
    softmax_call = Call(Prim(P.Softmax()), [inp])
    relu_call = Call(Prim(P.ReLU()), [inp])
    either = OneOf([softmax_call, relu_call])
    target = Call(Prim(P.Neg()), [either])
    return either, target
def softmax_relu_pass():
    """Pattern: Softmax(x) or ReLU(x). Target: ReLU6(x)."""
    inp = Any()
    softmax_call = Call(Prim(P.Softmax()), [inp])
    relu_call = Call(Prim(P.ReLU()), [inp])
    either = OneOf([softmax_call, relu_call])
    target = Call(Prim(P.ReLU6()), [inp])
    return either, target
def softmax_relu_pass():
    """Pattern: Sigmoid(x) or Softmax(x) via a Prim list. Target: ReLU(x)."""
    inp = Any()
    # A Prim built from a list matches any of the listed primitives.
    either_prim = Prim([P.Sigmoid(), P.Softmax()])
    pattern = Call(either_prim, [inp])
    target = Call(P.ReLU(), [inp])
    return pattern, target
def softmax_relu_pass():
    """Pattern: Softmax(x). Target: ReLU(x)."""
    inp = Any()
    softmax_pattern = Call(P.Softmax(), [inp])
    relu_target = Call(P.ReLU(), [inp])
    return softmax_pattern, relu_target