Code example #1
# Imports this snippet needs to run; the base class is assumed to be the
# TF2 Keras OptimizerV2 (exposed as tf.keras.optimizers.Optimizer), since
# the constructor relies on its `_set_hyper` helper.
from typing import Callable, Optional, Union

import tensorflow as tf
from tensorflow.keras.optimizers.schedules import deserialize
from tensorflow.python.keras import backend_config


class Ranger21(tf.keras.optimizers.Optimizer):

    def __init__(self,
                 learning_rate: Union[float, Callable, dict] = 1e-3,
                 beta_1: Union[float, Callable] = 0.9,
                 beta_2: Union[float, Callable] = 0.999,
                 beta_3: Union[float, Callable] = 0.9,
                 num_epochs: int = 1000,
                 steps_per_epoch: int = 100,
                 epsilon: float = 1e-8,
                 use_softplus: bool = False,
                 beta_softplus: float = 50.0,
                 eps_clipping: float = 1e-3,
                 threshold_clipping: float = 1e-2,
                 weight_decay: float = 1e-4,
                 beta_lookahead: float = 0.5,
                 lookahead_every_nth_iter: float = 5.0,
                 nb_warmup_iterations: Optional[float] = None,
                 nb_warmdown_iterations: Optional[float] = None,
                 centralize_gradients: bool = True,
                 normalize_gradients: bool = True,
                 name: str = 'Ranger21',
                 **kwargs):
        super().__init__(name, **kwargs)
        self.num_epochs = num_epochs
        self.steps_per_epoch = steps_per_epoch
        # Total step budget; warmup/warmdown lengths default to fractions of it.
        self.nb_iterations = float(num_epochs * steps_per_epoch)
        self.use_softplus = use_softplus
        self.centralize_gradients = centralize_gradients
        self.normalize_gradients = normalize_gradients
        # A dict is treated as a serialized learning-rate schedule.
        if isinstance(learning_rate, dict):
            learning_rate = deserialize(learning_rate)
        # Honor the legacy 'lr' alias if the caller passed one.
        self._set_hyper('learning_rate', kwargs.get('lr', learning_rate))
        self._set_hyper('beta_1', beta_1)
        self._set_hyper('beta_2', beta_2)
        self._set_hyper('beta_3', beta_3)
        self.epsilon = epsilon or backend_config.epsilon()
        self._set_hyper('nb_iterations', self.nb_iterations)
        # Defaults: warm up for 22% and warm down for 28% of all iterations.
        self._set_hyper(
            'nb_warmup_iterations',
            float(0.22 * self.nb_iterations)
            if nb_warmup_iterations is None else nb_warmup_iterations)
        self._set_hyper(
            'nb_warmdown_iterations',
            float(0.28 * self.nb_iterations)
            if nb_warmdown_iterations is None else nb_warmdown_iterations)
        self._set_hyper('beta_softplus', beta_softplus)
        self._set_hyper('eps_clipping', eps_clipping)
        self._set_hyper('threshold_clipping', threshold_clipping)
        self._set_hyper('weight_decay', weight_decay)
        self._set_hyper('beta_lookahead', beta_lookahead)
        self._set_hyper('lookahead_every_nth_iter', lookahead_every_nth_iter)
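
A minimal usage sketch, assuming the complete Ranger21 class (its update-step methods are not shown above) is importable, e.g. from a local ranger21 module:

import tensorflow as tf

from ranger21 import Ranger21  # assumed module location

model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
# num_epochs * steps_per_epoch should match the real training budget,
# because the 22% warmup and 28% warmdown spans are derived from it.
optimizer = Ranger21(learning_rate=1e-3, num_epochs=20, steps_per_epoch=500)
model.compile(optimizer=optimizer, loss='mse')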
Code example #2
File: optimizer_v2.py  Project: fraudies/tensorflow
  def variables(self):
    """A list of variables which encode the current state of `Optimizer`.

    Includes slot variables and additional global variables created by the
    optimizer in the current default graph.

    Returns:
      A list of variables.
    """
    if "lr" in config:
      config["learning_rate"] = config.pop("lr")
    if "learning_rate" in config:
      if isinstance(config["learning_rate"], dict):
        config["learning_rate"] = learning_rate_schedule.deserialize(
            config["learning_rate"])
    return cls(**config)
Code example #3
File: optimizer_v2.py  Project: z-zroud/tensorflow
  @classmethod
  def from_config(cls, config, custom_objects=None):
    """Creates an optimizer from its config.

    This method is the reverse of `get_config`,
    capable of instantiating the same optimizer from the config
    dictionary.

    Arguments:
        config: A Python dictionary, typically the output of get_config.
        custom_objects: A Python dictionary mapping names to additional Python
          objects used to create this optimizer, such as a function used for a
          hyperparameter.

    Returns:
        An optimizer instance.
    """
    if "lr" in config:
      config["learning_rate"] = config.pop("lr")
    if "learning_rate" in config:
      if isinstance(config["learning_rate"], dict):
        config["learning_rate"] = learning_rate_schedule.deserialize(
            config["learning_rate"])
    return cls(**config)
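
The same lr-aliasing and schedule-deserialization behavior can be exercised through the public TF2 Keras API; a small illustrative round trip with the built-in Adam optimizer:

import tensorflow as tf

schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=1e-3, decay_steps=1000, decay_rate=0.96)
opt = tf.keras.optimizers.Adam(learning_rate=schedule)

# get_config() stores the schedule as a nested dict; from_config()
# deserializes it back into a LearningRateSchedule instance.
config = opt.get_config()
restored = tf.keras.optimizers.Adam.from_config(config)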
Code example #4
# `learning_rate_schedule` is TF's internal Keras module that provides
# serialize()/deserialize() for LearningRateSchedule objects.
from tensorflow.python.keras.optimizer_v2 import learning_rate_schedule


def _maybe_serialized(lr_decay, serialize_and_deserialize):
    # Optionally round-trip the schedule through serialize/deserialize so a
    # test can exercise both the original and the reconstructed object.
    if serialize_and_deserialize:
        serialized = learning_rate_schedule.serialize(lr_decay)
        return learning_rate_schedule.deserialize(serialized)
    else:
        return lr_decay
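
A sketch of how a test might call this helper to cover both paths, using ExponentialDecay as one concrete LearningRateSchedule (names and tolerance are illustrative):

import tensorflow as tf

decay = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=0.1, decay_steps=100, decay_rate=0.5)

for round_trip in (False, True):
    lr = _maybe_serialized(decay, round_trip)
    # The round-tripped schedule must compute the same value at a given step.
    assert abs(float(lr(200)) - float(decay(200))) < 1e-7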