Example #1
    def test_gradients_clip_option(self):
        gradients_clip_option = optimizer_lib.GradientsClipOption(clipnorm=1,
                                                                  clipvalue=1)
        self.assertEqual(gradients_clip_option.clipnorm, 1)
        self.assertEqual(gradients_clip_option.clipvalue, 1)
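        # clipnorm and global_clipnorm are mutually exclusive.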
        with self.assertRaisesRegex(ValueError, "At most one of.*"):
            _ = optimizer_lib.GradientsClipOption(clipnorm=1,
                                                  global_clipnorm=1)

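        # Clip values must be positive; -1 is rejected.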
        with self.assertRaisesRegex(ValueError,
                                    ".*should be a positive number.*"):
            _ = optimizer_lib.GradientsClipOption(clipnorm=-1)
Example #2
    def testSaveAndLoadOptimizerWithModel(self, optimizer_fn):
        model = keras.Sequential(
            [keras.layers.Input(shape=(1, )),
             keras.layers.Dense(1)])
        optimizer = optimizer_fn()
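        # optimizer_fn is expected to build an optimizer with a learning rate
        # of 0.002, which is asserted on the restored optimizers below.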
        gradients_clip_option = optimizer_lib.GradientsClipOption(clipnorm=0.1)
        optimizer._gradients_clip_option = gradients_clip_option
        x = tf.expand_dims(tf.convert_to_tensor([1, 1, 1, 0, 0, 0]), axis=1)
        y = tf.expand_dims(tf.convert_to_tensor([1, 1, 1, 0, 0, 0]), axis=1)
        model.compile(loss="mse", optimizer=optimizer)
        model.fit(x, y)

        # Save in h5 format.
        path = os.path.join(self.get_temp_dir(), "model.h5")
        model.save(path)
        loaded_model = keras.models.load_model(path)
        loaded_model.load_weights(path)
        loaded_optimizer = loaded_model.optimizer
        self.assertEqual(type(optimizer), type(loaded_optimizer))
        self.assertEqual(loaded_optimizer.learning_rate, 0.002)
        self.assertEqual(loaded_optimizer._gradients_clip_option.clipnorm, 0.1)

        # Save in Keras SavedModel format.
        model.fit(x, y)
        path = os.path.join(self.get_temp_dir(), "model")
        model.save(path)
        loaded_model = keras.models.load_model(path)
        loaded_model.load_weights(path)
        loaded_optimizer = loaded_model.optimizer
        self.assertEqual(type(optimizer), type(loaded_optimizer))
        self.assertEqual(loaded_optimizer.learning_rate, 0.002)
        self.assertEqual(loaded_optimizer._gradients_clip_option.clipnorm, 0.1)
Example #3
    def _process_kwargs(self, kwargs):
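        # Known legacy kwargs only trigger a deprecation warning; anything else
        # raises a TypeError. The clipping-related kwargs are additionally
        # translated into a `GradientsClipOption` further below.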
        legacy_gradients_clip_kwargs = {
            "clipnorm", "clipvalue", "global_clipnorm"
        }
        other_legacy_kwargs = {
            "lr", "decay", "gradient_transformers", "gradient_aggregator"
        }
        for k in kwargs:
            if k in legacy_gradients_clip_kwargs:
                logging.warning(
                    "%s is deprecated in `optimizer_experimental.Optimizer`"
                    ", please use `GradientsClipOption` instead to specify "
                    "your gradients clipping logic.", k)
            elif k in other_legacy_kwargs:
                logging.warning(
                    "%s is deprecated in `optimizer_experimental.Optimizer`"
                    ", please check the docstring for valid arguments.", k)
            else:
                raise TypeError(
                    f"{k} is not a valid argument, kwargs should be empty "
                    "for `optimizer_experimental.Optimizer`.")

        # TODO(b/208301504): gradients clipping options are populated for
        # backward compatibility, we should delete it when the migration is done.
        clipnorm = kwargs.pop("clipnorm", None)
        clipvalue = kwargs.pop("clipvalue", None)
        global_clipnorm = kwargs.pop("global_clipnorm", None)
        if self._gradients_clip_option is None and (clipnorm or clipvalue
                                                    or global_clipnorm):
            self._gradients_clip_option = optimizer_lib.GradientsClipOption(
                clipnorm, clipvalue, global_clipnorm)
Example #4
 def testGetAndFromConfig(self):
     gradients_clip_option = optimizer_lib.GradientsClipOption(clipnorm=0.5)
     ema_option = optimizer_lib.EMAOption(use_ema=True,
                                          ema_momentum=0.5,
                                          ema_overwrite_frequency=50)
     optimizer = adam_new.Adam(learning_rate=np.float64(0.05),
                               beta_1=0.7,
                               beta_2=0.77,
                               amsgrad=True,
                               epsilon=0.001,
                               gradients_clip_option=gradients_clip_option,
                               ema_option=ema_option)
     config = optimizer.get_config()
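     # get_config reports the learning rate as a float32 value, so the expected
     # dict below uses np.float32(0.05) rather than the np.float64 that was
     # passed to the constructor.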
     self.assertDictEqual(
         config, {
             "learning_rate": np.float32(0.05),
             "beta_1": 0.7,
             "beta_2": 0.77,
             "epsilon": 0.001,
             "amsgrad": True,
             "gradients_clip_option": {
                 "clipnorm": 0.5,
                 "global_clipnorm": None,
                 "clipvalue": None,
             },
             "ema_option": {
                 "use_ema": True,
                 "ema_momentum": 0.5,
                 "ema_overwrite_frequency": 50,
             }
         })
     restored_optimizer = adam_new.Adam.from_config(config)
     self.assertDictEqual(restored_optimizer.get_config(),
                          optimizer.get_config())
Example #5
 def testClipGlobalNorm(self):
   gradients_clip_option = optimizer_lib.GradientsClipOption(global_clipnorm=1)
   optimizer = adam_new.Adam(gradients_clip_option=gradients_clip_option)
   grad = [
       tf.cast([100.0, 100.0], dtype=tf.float32),
       tf.cast([100.0, 100.0], dtype=tf.float32)
   ]
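    # The global norm of the gradients is sqrt(4 * 100^2) = 200; with
    # global_clipnorm=1 every element is scaled by 1/200, giving 0.5.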
   clipped_grad = optimizer._clip_gradients(grad)
   self.assertAllClose(clipped_grad[0], [0.5, 0.5])
Example #6
  def from_config(cls, config):
    """Creates an optimizer from its config.

    This method is the reverse of `get_config`, capable of instantiating the
    same optimizer from the config dictionary.

    Args:
        config: A Python dictionary, typically the output of `get_config`.

    Returns:
        An optimizer instance.
    """
    if "learning_rate" in config:
      if isinstance(config["learning_rate"], dict):
        config["learning_rate"] = learning_rate_schedule.deserialize(
            config["learning_rate"])
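    # The option entries are likewise serialized as plain dicts and need to be
    # rebuilt into their corresponding objects before calling the constructor.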
    if "gradients_clip_option" in config:
      config["gradients_clip_option"] = optimizer_lib.GradientsClipOption(
          **config["gradients_clip_option"])
    if "ema_option" in config:
      config["ema_option"] = optimizer_lib.EMAOption(**config["ema_option"])
    return cls(**config)
Example #7
 def test_get_and_from_config(self):
     gradients_clip_option = optimizer_lib.GradientsClipOption(clipnorm=1,
                                                               clipvalue=1)
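     # The config dict should round-trip through the constructor unchanged.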
     config = gradients_clip_option.get_config()
     restored = optimizer_lib.GradientsClipOption(**config)
     self.assertDictEqual(restored.get_config(), config)
Example #8
 def testClipValue(self):
     gradients_clip_option = optimizer_lib.GradientsClipOption(clipvalue=1)
     optimizer = adam_new.Adam(gradients_clip_option=gradients_clip_option)
     grad = [tf.convert_to_tensor([100.0, 100.0])]
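     # clipvalue=1 caps every gradient element at 1.0.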
     clipped_grad = optimizer._clip_gradients(grad)
     self.assertAllEqual(clipped_grad[0], [1.0, 1.0])
Example #9
 def testClipNorm(self):
     gradients_clip_option = optimizer_lib.GradientsClipOption(clipnorm=1)
     optimizer = adam_new.Adam(gradients_clip_option=gradients_clip_option)
     grad = [tf.convert_to_tensor([100.0, 100.0])]
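     # The l2 norm of [100.0, 100.0] is 100 * sqrt(2); rescaling to norm 1
     # leaves sqrt(2) / 2 ~= 0.707 in each element.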
     clipped_grad = optimizer._clip_gradients(grad)
     self.assertAllClose(clipped_grad[0], [2**0.5 / 2, 2**0.5 / 2])