Example #1
from object_detection.utils import learning_schedules

def graph_fn(global_step):
    # Piecewise-constant schedule: 1.0 before step 2, 2.0 for step 2,
    # 3.0 for steps 3-6, and 4.0 from step 7 onwards.
    boundaries = [2, 3, 7]
    rates = [1.0, 2.0, 3.0, 4.0]
    learning_rate = learning_schedules.manual_stepping(
        global_step, boundaries, rates)
    assert learning_rate.op.name.endswith('learning_rate')
    return learning_rate,  # one-element tuple, as expected by the test harness
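For context, here is a minimal driver sketch, not part of the original test, showing how this schedule could be evaluated. It assumes TF 1.x graph mode (the snippet itself relies on graph-mode APIs such as learning_rate.op) and that graph_fn above is in scope.

import tensorflow as tf

# Hypothetical driver: feed a few global steps and print the stepped rates.
with tf.Graph().as_default():
    global_step = tf.placeholder(tf.int64, shape=[])
    (learning_rate,) = graph_fn(global_step)
    with tf.Session() as sess:
        for step in [0, 1, 2, 3, 7, 10]:
            print(step, sess.run(learning_rate, feed_dict={global_step: step}))
# Expected: 1.0, 1.0, 2.0, 3.0, 4.0, 4.0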
Example #2
from object_detection.utils import learning_schedules

def graph_fn(global_step):
    # With warmup=True, the rate ramps linearly from rates[0] to rates[1]
    # over the first boundaries[0] steps before following the step schedule.
    boundaries = [4, 6, 8]
    rates = [0.02, 0.10, 0.01, 0.001]
    learning_rate = learning_schedules.manual_stepping(global_step,
                                                       boundaries,
                                                       rates,
                                                       warmup=True)
    assert learning_rate.op.name.endswith('learning_rate')
    return learning_rate,
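Under the same TF 1.x assumptions as the sketch above, the warmup segment interpolates as rates[0] + step * (rates[1] - rates[0]) / boundaries[0] for steps below 4; a hypothetical check:

import tensorflow as tf

# Assumes graph_fn above is in scope.
with tf.Graph().as_default():
    global_step = tf.placeholder(tf.int64, shape=[])
    (learning_rate,) = graph_fn(global_step)
    with tf.Session() as sess:
        values = [sess.run(learning_rate, feed_dict={global_step: s})
                  for s in range(10)]
print(values)  # ~[0.02, 0.04, 0.06, 0.08, 0.10, 0.10, 0.01, 0.01, 0.001, 0.001]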
Example #3
import tensorflow as tf

from object_detection.utils import learning_schedules


def _create_learning_rate(learning_rate_config):
    """Create optimizer learning rate based on config.

    Args:
      learning_rate_config: A LearningRate proto message.

    Returns:
      A learning rate.

    Raises:
      ValueError: when using an unsupported input data type.
    """
    learning_rate = None
    learning_rate_type = learning_rate_config.WhichOneof('learning_rate')
    if learning_rate_type == 'constant_learning_rate':
        config = learning_rate_config.constant_learning_rate
        learning_rate = tf.constant(config.learning_rate,
                                    dtype=tf.float32,
                                    name='learning_rate')

    if learning_rate_type == 'exponential_decay_learning_rate':
        config = learning_rate_config.exponential_decay_learning_rate
        learning_rate = learning_schedules.exponential_decay_with_burnin(
            tf.train.get_or_create_global_step(),
            config.initial_learning_rate,
            config.decay_steps,
            config.decay_factor,
            burnin_learning_rate=config.burnin_learning_rate,
            burnin_steps=config.burnin_steps,
            min_learning_rate=config.min_learning_rate,
            staircase=config.staircase)

    if learning_rate_type == 'manual_step_learning_rate':
        config = learning_rate_config.manual_step_learning_rate
        if not config.schedule:
            raise ValueError('Empty learning rate schedule.')
        learning_rate_step_boundaries = [x.step for x in config.schedule]
        learning_rate_sequence = [config.initial_learning_rate]
        learning_rate_sequence += [x.learning_rate for x in config.schedule]
        learning_rate = learning_schedules.manual_stepping(
            tf.train.get_or_create_global_step(),
            learning_rate_step_boundaries, learning_rate_sequence,
            config.warmup)

    if learning_rate_type == 'cosine_decay_learning_rate':
        config = learning_rate_config.cosine_decay_learning_rate
        learning_rate = learning_schedules.cosine_decay_with_warmup(
            tf.train.get_or_create_global_step(), config.learning_rate_base,
            config.total_steps, config.warmup_learning_rate,
            config.warmup_steps, config.hold_base_rate_steps)

    if learning_rate is None:
        raise ValueError('Learning_rate %s not supported.' %
                         learning_rate_type)

    return learning_rate
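As a hypothetical usage sketch: assuming learning_rate_config is a LearningRate message from object_detection/protos/optimizer.proto (the config type this function consumes in the TensorFlow Object Detection API), a constant schedule could be selected like this.

from object_detection.protos import optimizer_pb2

# Populate the constant_learning_rate branch of the oneof.
learning_rate_config = optimizer_pb2.LearningRate()
learning_rate_config.constant_learning_rate.learning_rate = 0.01
learning_rate = _create_learning_rate(learning_rate_config)
# learning_rate is tf.constant(0.01, dtype=tf.float32, name='learning_rate')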
Example #4
from object_detection.utils import learning_schedules

def graph_fn(global_step):
    # With no boundaries, exactly one rate is allowed and the schedule is
    # constant at 0.01 for every global step.
    boundaries = []
    rates = [0.01]
    learning_rate = learning_schedules.manual_stepping(
        global_step, boundaries, rates)
    return learning_rate,
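For completeness, a minimal check under the same TF 1.x assumptions as the earlier sketches: the schedule evaluates to 0.01 regardless of the step.

import tensorflow as tf

with tf.Graph().as_default():
    (learning_rate,) = graph_fn(tf.constant(100, dtype=tf.int64))
    with tf.Session() as sess:
        assert abs(sess.run(learning_rate) - 0.01) < 1e-6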