def _create_optimizer(self):
  """Creates optimizer."""
  params = self.params
  opt = optimizer.LazyAdam(
      params["learning_rate"],
      params["optimizer_adam_beta1"],
      params["optimizer_adam_beta2"],
      epsilon=params["optimizer_adam_epsilon"])
  return opt
def _create_optimizer(self):
  """Creates optimizer."""
  params = self.params
  opt = optimizer.LazyAdam(
      params["learning_rate"],
      params["optimizer_adam_beta1"],
      params["optimizer_adam_beta2"],
      epsilon=params["optimizer_adam_epsilon"])

  if params["dtype"] == tf.float16:
    # Wrap the optimizer with dynamic loss scaling so small fp16 gradients
    # do not underflow during training.
    opt = tf.keras.mixed_precision.experimental.LossScaleOptimizer(
        opt,
        loss_scale=flags_core.get_loss_scale(
            self.flags_obj, default_for_fp16="dynamic"))
  return opt
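# A minimal, self-contained sketch of the fp16 loss-scaling pattern used above,
# runnable without the surrounding class. Assumptions: tf.keras.optimizers.Adam
# stands in for optimizer.LazyAdam, the params dict is built by hand instead of
# coming from flags_core, and the hyperparameter values are illustrative only.
import tensorflow as tf

params = {
    "learning_rate": 1e-3,          # illustrative value
    "optimizer_adam_beta1": 0.9,    # illustrative value
    "optimizer_adam_beta2": 0.997,  # illustrative value
    "optimizer_adam_epsilon": 1e-9, # illustrative value
    "dtype": tf.float16,
}

opt = tf.keras.optimizers.Adam(
    params["learning_rate"],
    params["optimizer_adam_beta1"],
    params["optimizer_adam_beta2"],
    epsilon=params["optimizer_adam_epsilon"])

if params["dtype"] == tf.float16:
  # "dynamic" loss scaling adjusts the scale factor automatically, which is
  # what get_loss_scale defaults to for fp16 in the original code.
  opt = tf.keras.mixed_precision.experimental.LossScaleOptimizer(
      opt, loss_scale="dynamic")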