Example #1
    def test_switching(self):
        with self.test_session() as sess:
            # Create 100 phony x, y data points in NumPy, y = x * 0.1 + 0.3
            x_data = np.random.rand(100).astype(np.float32)
            y_data = x_data * 0.1 + 0.3

            # Try to find values for w and b that compute y_data = w * x_data + b
            # (We know that w should be 0.1 and b 0.3, but TensorFlow will
            # figure that out for us.)
            w = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
            b = tf.Variable(tf.zeros([1]))
            y = w * x_data + b

            # Minimize the mean squared errors.
            loss = tf.reduce_mean(tf.square(y - y_data))

            # Set up optimizers.
            step = tf.get_variable("step",
                                   shape=[],
                                   initializer=tf.zeros_initializer(),
                                   trainable=False,
                                   dtype=tf.int32)
            optimizer1 = MockAdamOptimizer(0.05)
            optimizer2 = MockMomentumOptimizer(0.05, 0.5)
            switch = tf.less(step, 100)
            optimizer = composite_optimizer.CompositeOptimizer(
                optimizer1, optimizer2, switch)
            train_op = optimizer.minimize(loss)

            sess.run(tf.global_variables_initializer())

            # Fit the line.
            for iteration in range(201):
                self.assertEqual(sess.run(switch), iteration < 100)
                sess.run(train_op)
                sess.run(tf.assign_add(step, 1))
                slot_names = optimizer.get_slot_names()
                adam_slots = ["c1-m", "c1-v", "c1-adam_counter"]
                momentum_slots = ["c2-momentum", "c2-momentum_counter"]
                self.assertItemsEqual(slot_names, adam_slots + momentum_slots)
                adam_counter = sess.run(
                    optimizer.get_slot(w, "c1-adam_counter"))
                momentum_counter = sess.run(
                    optimizer.get_slot(w, "c2-momentum_counter"))
                self.assertEqual(adam_counter, min(iteration + 1, 100))
                self.assertEqual(momentum_counter, max(iteration - 99, 0))
                if iteration % 20 == 0:
                    logging.info("%d %s %d %d", iteration,
                                 sess.run([switch, step, w, b]), adam_counter,
                                 momentum_counter)
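
The test above exercises the switching behaviour of CompositeOptimizer: a boolean switch tensor selects the first optimizer for the first 100 steps and the second afterwards, which the test verifies through the per-optimizer counter slots (c1-adam_counter, c2-momentum_counter). The following stand-alone sketch isolates just the switch predicate built from a non-trainable step variable; it is illustrative only and assumes TF1-style graph mode, outside any test harness.

import tensorflow as tf

# Non-trainable integer step counter, as in the test above.
step = tf.get_variable("step", shape=[], dtype=tf.int32,
                       initializer=tf.zeros_initializer(), trainable=False)
switch = tf.less(step, 100)          # True while step < 100, False afterwards
increment = tf.assign_add(step, 1)   # advances the counter by one

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for i in range(201):
        # The predicate flips from True to False at step 100.
        assert sess.run(switch) == (i < 100)
        sess.run(increment)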
Example #2
def _create_optimizer(hyperparams, learning_rate_var, step_var=None):
    """Creates an optimizer object for a given spec, learning rate and step var.

  Args:
    hyperparams: a GridPoint proto containing optimizer spec, particularly
      learning_method to determine optimizer class to use.
    learning_rate_var: a `tf.Tensor`, the learning rate.
    step_var: a `tf.Variable`, global training step.

  Returns:
    a `tf.train.Optimizer` object that was built.
  """
    if hyperparams.learning_method == 'gradient_descent':
        return tf.train.GradientDescentOptimizer(learning_rate_var,
                                                 use_locking=True)
    elif hyperparams.learning_method == 'adam':
        return tf.train.AdamOptimizer(learning_rate_var,
                                      beta1=hyperparams.adam_beta1,
                                      beta2=hyperparams.adam_beta2,
                                      epsilon=hyperparams.adam_eps,
                                      use_locking=True)
    elif hyperparams.learning_method == 'lazyadam':
        return tf.contrib.opt.LazyAdamOptimizer(learning_rate_var,
                                                beta1=hyperparams.adam_beta1,
                                                beta2=hyperparams.adam_beta2,
                                                epsilon=hyperparams.adam_eps,
                                                use_locking=True)
    elif hyperparams.learning_method == 'momentum':
        return tf.train.MomentumOptimizer(learning_rate_var,
                                          hyperparams.momentum,
                                          use_locking=True)
    elif hyperparams.learning_method == 'composite':
        spec = hyperparams.composite_optimizer_spec
        optimizer1 = _create_optimizer(spec.method1, learning_rate_var,
                                       step_var)
        optimizer2 = _create_optimizer(spec.method2, learning_rate_var,
                                       step_var)
        if step_var is None:
            logging.fatal('step_var is required for CompositeOptimizer')
        switch = tf.less(step_var, spec.switch_after_steps)
        return composite_optimizer.CompositeOptimizer(optimizer1,
                                                      optimizer2,
                                                      switch,
                                                      use_locking=True)
    else:
        logging.fatal('Unknown learning method (optimizer)')
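
A hypothetical call site for _create_optimizer is sketched below. The real hyperparams argument is a GridPoint proto from the surrounding codebase, so the sketch substitutes a plain SimpleNamespace carrying only the fields that the 'momentum' branch reads; every name not appearing in the example above is an assumption for illustration.

import types
import tensorflow as tf

# Stand-in for the GridPoint proto; only the fields read by the 'momentum' branch.
hyperparams = types.SimpleNamespace(learning_method='momentum', momentum=0.9)

learning_rate_var = tf.constant(0.05)
step_var = tf.get_variable("global_step", shape=[], dtype=tf.int32,
                           initializer=tf.zeros_initializer(), trainable=False)

# Returns a tf.train.MomentumOptimizer for this spec; for a 'composite' spec,
# step_var would drive the switch between the two nested optimizers.
optimizer = _create_optimizer(hyperparams, learning_rate_var, step_var)
# train_op = optimizer.minimize(loss)  # used like any other tf.train.Optimizer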