def __init__(self, mul_weight, strategy1=None, strategy2=None):
    """Build a net pairing a sharded Mul with dropout mask primitives.

    strategy1/strategy2 are shard strategies for Mul and DropoutDoMask;
    None leaves the primitive unsharded (framework default).
    """
    super().__init__()
    # Fixed keep probability for dropout (i.e. drop rate 0.1).
    self.keep_prob = Tensor(0.9)
    self.mul_weight = Parameter(mul_weight, "w1")
    # Helpers used when generating the mask from the input's shape.
    self.get_shape = P.Shape()
    self.cast = P.Cast()
    # Mask generation is unsharded; mask application follows strategy2.
    self.dropout_gen_mask = P.DropoutGenMask()
    self.dropout_do_mask = P.DropoutDoMask().shard(strategy2)
    self.mul = P.Mul().shard(strategy1)
def test_dropout():
    """Return a dropout closure built from gen-mask / do-mask primitives."""
    gen_mask_op = P.DropoutGenMask()
    do_mask_op = P.DropoutDoMask()
    shape_op = P.Shape()

    def get_dropout(x, prob):
        # Generate a keep-mask matching x's shape, then apply it to x.
        keep_mask = gen_mask_op(shape_op(x), prob)
        return do_mask_op(x, keep_mask, prob)

    return get_dropout
def __init__(self, keep_prob=0.5, seed0=0, seed1=0, dtype=mstype.float32):
    """Dropout cell with explicit seeds.

    keep_prob must lie in (0, 1]; seed0/seed1 parameterize DropoutGenMask.
    """
    super(Dropout, self).__init__()
    # Validate arguments before storing any state.
    if keep_prob <= 0 or keep_prob > 1:
        raise ValueError("dropout probability should be a number in range (0, 1], but got {}".format(keep_prob))
    validator.check_subclass("dtype", dtype, mstype.number_type, self.cls_name)
    # Record configuration; keep_prob is wrapped as a Tensor for the ops.
    self.keep_prob = Tensor(keep_prob)
    self.seed0 = seed0
    self.seed1 = seed1
    self.dtype = dtype
    # Primitives driving the mask-generate / mask-apply pipeline.
    self.cast = P.Cast()
    self.get_shape = P.Shape()
    self.dropout_do_mask = P.DropoutDoMask()
    self.dropout_gen_mask = P.DropoutGenMask(Seed0=seed0, Seed1=seed1)
def __init__(self, keep_prob=0.5, dtype=mstype.float32):
    """Dropout cell: validates keep_prob and derives seeds from the graph.

    keep_prob must be a float in (0, 1]. On GPU the fused P.Dropout
    primitive is available via self.dropout / self.is_gpu.
    """
    super(Dropout, self).__init__()
    # Validation first: range, dtype subclass, then value type.
    if keep_prob <= 0 or keep_prob > 1:
        raise ValueError("dropout probability should be a number in range (0, 1], but got {}".format(keep_prob))
    Validator.check_subclass("dtype", dtype, mstype.number_type, self.cls_name)
    Validator.check_value_type('keep_prob', keep_prob, [float], self.cls_name)
    self.keep_prob = keep_prob
    # Seeds come from the global graph-seed manager.
    self.seed0, self.seed1 = _get_graph_seed(0, "dropout")
    self.dtype = dtype
    # Primitives for the gen-mask / do-mask path.
    self.cast = P.Cast()
    self.get_shape = P.Shape()
    self.dropout_do_mask = P.DropoutDoMask()
    self.dropout_gen_mask = P.DropoutGenMask(Seed0=self.seed0, Seed1=self.seed1)
    # GPU backend uses the fused dropout primitive instead.
    self.is_gpu = context.get_context('device_target') in ["GPU"]
    self.dropout = P.Dropout(keep_prob)
# NOTE(review): fragment of an exception-raising test-case table; the
# enclosing list opens before this chunk and continues after it. Each
# entry pairs a lambda that constructs an op with the exception type the
# constructor is expected to raise — presumably consumed by a shared
# test harness; confirm against the table's driver.
    'desc_inputs': [0],
}),
('Softmax_ValueError_1', {
    'block': (lambda _: P.Softmax("1"), {'exception': TypeError}),
    'desc_inputs': [0],
}),
('Softmax_ValueError_2', {
    'block': (lambda _: P.Softmax(1.1), {'exception': TypeError}),
    'desc_inputs': [0],
}),
('Softmax_ValueError_3', {
    'block': (lambda _: P.Softmax(axis="1"), {'exception': TypeError}),
    'desc_inputs': [0],
}),
('DropoutGenMask_ValueError_1', {
    'block': (lambda _: P.DropoutGenMask(Seed0="seed0"), {'exception': TypeError}),
    'desc_inputs': [0],
}),
('DropoutGenMask_ValueError_2', {
    'block': (lambda _: P.DropoutGenMask(Seed0=1.0), {'exception': TypeError}),
    'desc_inputs': [0],
}),
('DropoutGenMask_ValueError_3', {
    'block': (lambda _: P.DropoutGenMask(Seed1="seed1"), {'exception': TypeError}),
    'desc_inputs': [0],
}),
('DropoutGenMask_ValueError_4', {
    'block': (lambda _: P.DropoutGenMask(Seed1=2.0), {'exception': TypeError}),
    'desc_inputs': [0],
}),
('MaxPool2d_ValueError_1', {
def __init__(self):
    """Net exposing Shape and a DropoutGenMask seeded with (10, 28)."""
    super(Net, self).__init__()
    self.shape = P.Shape()
    # Positional args map to DropoutGenMask's Seed0=10, Seed1=28.
    self.mask = P.DropoutGenMask(10, 28)
# NOTE(review): fragment of an op forward/backward test table; the
# enclosing list opens before this chunk and continues after it.
# 'desc_inputs'/'desc_const' feed the op, 'desc_bprop' gives gradient
# specs, and 'skip': ['backward'] disables the bprop check — assumed
# harness semantics; confirm against the table's driver.
    'desc_inputs': [[1152], Tensor(np.array(10).astype(np.int32))],
    'desc_bprop': [Tensor(np.array(10).astype(np.float32))]}),
('UnsortedSegmentSum', {
    'block': P.UnsortedSegmentSum(),
    'desc_const': [1280],
    'desc_inputs': [[1280,1024], Tensor(np.ones(1280).astype(np.int32))],
    'desc_bprop': [[8192,1024]],
    'skip': ['backward']}),
('UnsortedSegmentSum_1', {
    'block': P.UnsortedSegmentSum(),
    'desc_const': [4],
    'desc_inputs': [[3, 2, 1, 3], Tensor(np.array([[0, 1], [0, 1], [0, 1]]).astype(np.int32))],
    'desc_bprop': [[4, 1, 3]],
    'skip': ['backward']}),
('DropoutGenMask', {
    'block': P.DropoutGenMask(),
    'desc_const': [(2, 2), Tensor(0.5, mstype.float32)],
    'desc_inputs': [],
    'desc_bprop': [Tensor(np.ones(1).astype(np.int8))],
    'skip': ['backward']}),
('DropoutDoMask', {
    'block': P.DropoutDoMask(),
    'desc_const': [Tensor(0.5)],
    'desc_inputs': [[64, 12, 128, 128], Tensor(np.ones(1572864).astype(np.uint8))],
    'desc_bprop': [[64, 12, 128, 128]]}),
('Dropout', {
    'block': nn.Dropout(0.5),
    'desc_inputs': [[64, 12, 128, 128]],
    'desc_bprop': [[64, 12, 128, 128]]}),
('ReduceMean0', {
    'block': P.ReduceMean(),
# NOTE(review): fragment of an exception-raising test-case table (same
# entry shape as the other Softmax/DropoutGenMask ValueError entries in
# this repository); the enclosing list opens before this chunk and the
# final entry continues past it.
    'desc_inputs': [0],
}),
('Softmax_ValueError_2', {
    'block': (lambda _: P.Softmax(1.1), {
        'exception': TypeError
    }),
    'desc_inputs': [0],
}),
('Softmax_ValueError_3', {
    'block': (lambda _: P.Softmax(axis="1"), {
        'exception': TypeError
    }),
    'desc_inputs': [0],
}),
('DropoutGenMask_ValueError_1', {
    'block': (lambda _: P.DropoutGenMask(Seed0="seed0"), {
        'exception': TypeError
    }),
    'desc_inputs': [0],
}),
('DropoutGenMask_ValueError_2', {
    'block': (lambda _: P.DropoutGenMask(Seed0=1.0), {
        'exception': TypeError
    }),
    'desc_inputs': [0],
}),
('DropoutGenMask_ValueError_3', {
    'block': (lambda _: P.DropoutGenMask(Seed1="seed1"), {
        'exception': TypeError
    }),
    'desc_inputs': [0],
def __init__(self, shape):
    """Store a target shape and a zero-seeded DropoutGenMask primitive."""
    super(DropoutGenMaskNet, self).__init__()
    self.dropout_gen_mask = P.DropoutGenMask(Seed0=0, Seed1=0)
    self.shape = shape
# Module-level fixtures for IR-pass tests: front-end primitives (P.*, G.*)
# paired with their backend Primitive('...') counterparts, plus constant
# tensors used as OneHot/test inputs.
backend_reshape = Primitive('Reshape')
cast = P.Cast()
backend_cast = Primitive('Cast')
transpose = P.Transpose()
backend_transpose = Primitive('Transpose')
onehot1 = P.OneHot()
onehot2 = P.OneHot()
backend_onehot1 = Primitive('OneHot')
backend_onehot2 = Primitive('OneHot')
stridedslicegrad = G.StridedSliceGrad()
backend_stridedslicegrad = Primitive('StridedSliceGrad')
on_value = Tensor(1.0, mstype.float32)
off_value = Tensor(0.0, mstype.float32)
depth = Tensor(2, mstype.int32)
shape = (2, 4, 2, 2)
dropout_gen_mask = P.DropoutGenMask()


class FnDict:
    """Tiny registry: use an instance as a decorator to store functions
    by their __name__, then retrieve them with indexing."""

    def __init__(self):
        # Maps function __name__ -> function object.
        self.fnDict = {}

    def __call__(self, fn):
        # Decorator protocol: register fn under its own name.
        self.fnDict[fn.__name__] = fn

    def __getitem__(self, name):
        return self.fnDict[name]


# NOTE(review): this def's body continues beyond the visible chunk.
def test_convert_reshape_input_to_attr(tag):
    fns = FnDict()