def lr_decay_fn(learning_rate, global_step):
    """Learning rate decay function.

    Args:
        learning_rate (float or Tensor): The original learning rate.
        global_step (int or scalar int Tensor): optimization step counter.

    Returns:
        scalar float Tensor: decayed learning rate.
    """
    # Clamp the step to `end_step`, then shift so decay counts from zero
    # once `start_step` is reached (no decay before `start_step`).
    clipped_step = tf.minimum(tf.cast(global_step, tf.int32), end_step)
    offset_global_step = tf.maximum(clipped_step - start_step, 0)

    if decay_fn == tf.train.piecewise_constant:
        # piecewise_constant takes the step as `x` and no learning rate.
        decayed_lr = decay_fn(x=offset_global_step, **fn_kwargs)
    else:
        # Entries in `fn_kwargs` override the defaults supplied here.
        merged_kwargs = {
            "learning_rate": learning_rate,
            "global_step": offset_global_step,
        }
        merged_kwargs.update(fn_kwargs)
        decayed_lr = utils.call_function_with_redundant_kwargs(
            decay_fn, merged_kwargs)

    # Never decay below the configured floor.
    return tf.maximum(decayed_lr, hparams["min_learning_rate"])
def _connect(self, encoder_results, features, labels, mode):
    """Transforms encoder final state into decoder initial state.
    """
    # `features`, `labels`, and `mode` are part of the framework-imposed
    # signature and are intentionally unused here.
    final_state = encoder_results["final_state"]
    candidate_kwargs = {
        "inputs": final_state,
        "batch_size": get_batch_size(final_state),
    }
    # Let the helper drop whichever kwargs the connector does not accept.
    return utils.call_function_with_redundant_kwargs(
        self._connector._build, candidate_kwargs)