Code Example #1
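Both snippets show the `__init__` of a MindSpore `Optimizer` base class (a subclass of `Cell`, judging by the `super(Optimizer, self).__init__()` call) and omit the surrounding imports. The following is a sketch of what they appear to rely on, assuming an older MindSpore release in which the validator helpers live under `mindspore._checkparam`; exact module paths may differ between versions:

    from collections.abc import Iterable

    import numpy as np

    import mindspore
    from mindspore import log as logger
    from mindspore.common.initializer import initializer
    from mindspore.common.parameter import Parameter, ParameterTuple
    from mindspore.common.tensor import Tensor
    from mindspore.ops import operations as P
    from mindspore._checkparam import ParamValidator as validator  # assumed path
    from mindspore._checkparam import Rel  # assumed path
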
    def __init__(self,
                 learning_rate,
                 parameters,
                 weight_decay=0.0,
                 loss_scale=1.0,
                 # By default, exclude normalization parameters
                 # ('beta'/'gamma') from weight decay.
                 decay_filter=lambda x: 'beta' not in x.name and 'gamma' not in x.name):
        super(Optimizer, self).__init__()
        if isinstance(learning_rate, float):
            # Static learning rate: no per-step lookup is needed.
            self.dynamic_lr = False
            self.gather = None
            self.assignadd = None
            self.global_step = None
            validator.check_number_range("learning rate", learning_rate, 0.0,
                                         float("inf"), Rel.INC_LEFT)
        else:
            # Dynamic learning rate: keep a global step counter and gather
            # the current value from a 1-D schedule tensor at run time.
            self.dynamic_lr = True
            self.gather = P.GatherV2()
            self.assignadd = P.AssignAdd()
            self.global_step = Parameter(initializer(0, [1], mindspore.int32),
                                         name='global_step')
            if isinstance(learning_rate, Iterable):
                # A list or tuple schedule is converted to a float32 Tensor.
                learning_rate = Tensor(
                    np.array(list(learning_rate)).astype(np.float32))
            elif isinstance(learning_rate, Tensor):
                if learning_rate.dim() > 1:
                    raise ValueError(
                        "Learning rate should be a 0 or 1 dim `Tensor`, "
                        f"but got {learning_rate.dim()}.")
                if learning_rate.dim() == 1 and learning_rate.size() < 2:
                    logger.warning(
                        "If you want to use a dynamic learning rate, make sure the "
                        "list, tuple or tensor you pass has more than one element."
                    )
            else:
                raise TypeError(
                    "Learning rate should be float, Tensor or Iterable.")

        if loss_scale <= 0.0:
            raise ValueError(
                "Loss scale should be greater than 0, but got {}".format(
                    loss_scale))
        if weight_decay < 0.0:
            raise ValueError(
                "Weight decay should be greater than or equal to 0, "
                "but got {}".format(weight_decay))

        self.learning_rate = Parameter(learning_rate, name="learning_rate")
        self.parameters = ParameterTuple(parameters)
        if not self.parameters:
            raise ValueError("optimizer got an empty parameter list.")
        # Reciprocal of the loss scale, used to unscale gradients; weight
        # decay is pre-scaled so the decay term stays consistent with them.
        self.reciprocal_scale = 1.0 / loss_scale
        self.weight_decay = weight_decay * loss_scale
        # One flag per parameter: whether weight decay applies to it.
        self.decay_flags = tuple(decay_filter(x) for x in self.parameters)
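
To make the two learning-rate modes concrete, here is a minimal usage sketch. `SomeOptimizer` and `Net` are hypothetical stand-ins for a concrete subclass and a network; they are not part of the snippet above.

    net = Net()  # hypothetical network with trainable parameters
    # A plain float keeps dynamic_lr False: one fixed learning rate.
    opt = SomeOptimizer(0.01, net.trainable_params())
    # An iterable of per-step values is converted to a 1-D float32
    # schedule Tensor and indexed by global_step at run time.
    opt = SomeOptimizer([0.1, 0.01, 0.001], net.trainable_params())
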
Code Example #2
    def __init__(self, learning_rate, parameters):
        super(Optimizer, self).__init__()
        if isinstance(learning_rate, float):
            validator.check_number_range("learning rate", learning_rate, 0.0, float("inf"), Rel.INC_LEFT)
        elif isinstance(learning_rate, Iterable):
            learning_rate = Tensor(np.array(list(learning_rate)).astype(np.float32))
        elif isinstance(learning_rate, Tensor):
            if learning_rate.dim() > 1:
                raise ValueError("Learning rate should be a 0 or 1 dim `Tensor`, "
                                 f"but got {learning_rate.dim()}.")
        else:
            raise TypeError("Learning rate should be float, Tensor or Iterable.")

        if isinstance(learning_rate, Tensor) and learning_rate.dim() == 1 and learning_rate.size() < 2:
            logger.warning("If you want to use a dynamic learning rate, make sure the "
                           "list, tuple or tensor you pass has more than one element.")
        self.learning_rate = Parameter(learning_rate, name="learning_rate")
        self.parameters = ParameterTuple(parameters)
        if not self.parameters:
            raise ValueError("optimizer got an empty parameter list.")
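
Compared with Example #1, this version flattens the type dispatch into a single if/elif chain and hoists the single-element warning out of the Tensor branch, so it also fires for a one-element iterable after conversion. A quick sketch of the type validation, reusing the hypothetical names from the earlier usage example:

    # Anything other than a float, Tensor, or Iterable is rejected;
    # note that even an int learning rate raises TypeError here.
    try:
        SomeOptimizer(1, net.trainable_params())
    except TypeError as err:
        print(err)  # Learning rate should be float, Tensor or Iterable.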