Example #1
    def __init__(self, params, learning_rate=0.1, momentum=0.0, dampening=0.0, weight_decay=0.0, nesterov=False,
                 loss_scale=1.0):

        super(SGD, self).__init__(learning_rate, params, weight_decay, loss_scale)

        if isinstance(momentum, int):
            momentum = float(momentum)
        if not isinstance(momentum, float):
            raise TypeError("momentum should be float number!")

        if isinstance(momentum, float) and momentum < 0.0:
            raise ValueError("momentum should be at least 0.0, but got momentum {}".format(momentum))

        if isinstance(dampening, int):
            dampening = float(dampening)
        if not isinstance(dampening, float):
            raise TypeError("dampening should be float number")

        if dampening < 0.0:
            raise ValueError("dampening should be at least 0.0, but got dampening {}".format(dampening))
        self.dampening = dampening

        if isinstance(weight_decay, int):
            weight_decay = float(weight_decay)

        validator.check_value_type("nesterov", nesterov, [bool], self.cls_name)
        self.nesterov = nesterov

        self.opt = P.SGD(dampening, weight_decay, nesterov)

        # Per-parameter state consumed by the P.SGD kernel: `accum` holds the
        # momentum buffers, `stat` (initialized to ones) marks parameters that
        # have not been updated yet.
        self.momentum = Parameter(Tensor(momentum, mstype.float32), name="momentum")
        self.accum = self.parameters.clone(prefix="accum", init='zeros')
        self.stat = self.parameters.clone(prefix="stat", init='ones')
        self.hyper_map = C.HyperMap()
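
The `__init__` above belongs to an SGD class built on MindSpore's `Optimizer` base class. As a minimal sketch of how such an optimizer is typically constructed and wired into a training step; the `LeNet5` network and the hyperparameter values are placeholders for illustration, not taken from the excerpt:

import mindspore.nn as nn

net = LeNet5()  # hypothetical model; any nn.Cell exposing trainable parameters works
loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)

# `params` in the excerpt above is the iterable of parameters to optimize.
optimizer = SGD(net.trainable_params(), learning_rate=0.01,
                momentum=0.9, weight_decay=1e-4, nesterov=True)

# Wrap network, loss and optimizer into a single training-step cell.
train_net = nn.TrainOneStepCell(nn.WithLossCell(net, loss_fn), optimizer)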
Example #2
    def __init__(self,
                 params,
                 learning_rate=0.1,
                 momentum=0.0,
                 dampening=0.0,
                 weight_decay=0.0,
                 nesterov=False,
                 loss_scale=1.0):

        super(SGD, self).__init__(learning_rate, params)

        if isinstance(momentum, float) and momentum < 0.0:
            raise ValueError(
                "momentum should be at least 0.0, but got momentum {}".format(
                    momentum))

        if dampening < 0.0:
            raise ValueError(
                "dampening should be at least 0.0, but got dampening {}".
                format(dampening))
        self.dampening = dampening

        if weight_decay < 0.0:
            raise ValueError(
                "weight_decay should be at least 0.0, but got weight_decay {}".
                format(weight_decay))
        self.weight_decay = weight_decay

        validator.check_type("nesterov", nesterov, [bool])
        self.nesterov = nesterov

        self.opt = P.SGD(dampening, weight_decay, nesterov)

        self.dynamic_lr = False
        self.gather = None
        self.global_step = None
        self.axis = None
        if not isinstance(learning_rate, float):
            self.dynamic_lr = True
            self.gather = P.GatherV2()
            self.assignadd = P.AssignAdd()
            self.global_step = Parameter(initializer(0, [1], mstype.int32),
                                         name="global_step")
            self.axis = 0
        self.momentum = Parameter(momentum, name="momentum")
        self.params = self.parameters
        self.accum = self.params.clone(prefix="accum", init='zeros')
        self.stat = self.params.clone(prefix="stat", init='ones')
        self.hyper_map = C.HyperMap()

        self.weight_decay = weight_decay * loss_scale
        self.reciprocal_scale = 1.0 / loss_scale
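
This older variant also manages learning-rate schedules itself: when `learning_rate` is not a plain float, it enables `dynamic_lr` and indexes the schedule with `GatherV2` at `global_step`. A minimal sketch of that distinction, assuming the hypothetical `net` from the earlier sketch and illustrative per-step values:

import numpy as np
from mindspore import Tensor
from mindspore.common import dtype as mstype

# A float keeps the static-LR path (dynamic_lr stays False).
opt_static = SGD(net.trainable_params(), learning_rate=0.1)

# A 1-D Tensor of per-step rates takes the dynamic-LR branch, which
# gathers schedule[global_step] on every update.
schedule = Tensor(np.array([0.1, 0.05, 0.01]), mstype.float32)
opt_dynamic = SGD(net.trainable_params(), learning_rate=schedule)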
Example #3
    def __init__(self, var):
        super().__init__()
        self.sgd = P.SGD()
        self.var = Parameter(var, name="var")
        self.mul = P.Mul()
Example #4
    def __init__(self, var):
        super(SGDNet, self).__init__()
        self.sgd = P.SGD()
        self.var = Parameter(var, name="var")
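
Examples #3 and #4 wrap the raw `P.SGD` operator in a small `nn.Cell`, as used in operator tests. A minimal self-contained sketch of how the `SGDNet` wrapper might be completed with a `construct` method and invoked; the `construct` signature and the concrete tensor values are assumptions for illustration, following the operator's documented argument order (parameters, gradient, learning_rate, accum, momentum, stat):

import numpy as np
import mindspore
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.ops import operations as P

class SGDNet(nn.Cell):
    def __init__(self, var):
        super(SGDNet, self).__init__()
        self.sgd = P.SGD()
        self.var = Parameter(var, name="var")

    def construct(self, grad, lr, accum, momentum, stat):
        # Apply one SGD update to the wrapped variable.
        return self.sgd(self.var, grad, lr, accum, momentum, stat)

var = Tensor(np.array([2.0, -0.5, 1.7, 4.0]), mindspore.float32)
net = SGDNet(var)
out = net(Tensor(np.array([1.0, -1.0, 0.5, 2.0]), mindspore.float32),  # gradient
          Tensor(0.01, mindspore.float32),                             # learning rate
          Tensor(np.array([0.1, 0.3, -0.2, -0.1]), mindspore.float32), # accum
          Tensor(0.1, mindspore.float32),                              # momentum
          Tensor(np.array([1.5, -0.3, 0.2, -0.7]), mindspore.float32)) # stat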