Example #1
    def test_incorrect_inputs(self):
        var1 = tf.Variable([0.1, 0.2, 1.0])
        var2 = tf.Variable([-5.1, 0.1, 0.0])
        var3 = tf.Variable([-2.1, 1.3, 0.0])

        grads1 = tf.constant([0.1, 0.2, 1.0])
        grads2 = tf.constant([0.5, 0.0, -2.0])
        grads3 = tf.constant([-0.2, 0.0, -1.0])

        # Test the same variable (var1) appearing in two optimizers.
        composite_optimizer = CompositeOptimizer([
            (tf.keras.optimizers.Adam(), lambda: [var1]),
            (tf.keras.optimizers.Adagrad(), lambda: [var1, var2]),
        ])

        grads_and_vars = list(zip([grads1, grads2], [var1, var2]))

        with self.assertRaises(ValueError):
            composite_optimizer.apply_gradients(grads_and_vars)

        # Test a variable (var3) that is missing from every optimizer.
        composite_optimizer = CompositeOptimizer([
            (tf.keras.optimizers.Adam(), lambda: [var1]),
            (tf.keras.optimizers.Adagrad(), lambda: [var2]),
        ])

        grads_and_vars = list(zip([grads1, grads2, grads3],
                                  [var1, var2, var3]))

        with self.assertRaises(ValueError):
            composite_optimizer.apply_gradients(grads_and_vars)
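The snippets on this page omit their imports. A minimal preamble might look like the following; the exact CompositeOptimizer import path is an assumption based on the TensorFlow Recommenders package layout and may differ between versions:

import tensorflow as tf
# Assumption: CompositeOptimizer comes from TensorFlow Recommenders; the
# exact module path may vary across tfrs versions.
from tensorflow_recommenders.experimental.optimizers import CompositeOptimizer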
Example #2
    def test_composite_optimizer(self, optimizer1_type, optimizer2_type):
        values1 = [1.0, 2.0, 3.0]
        values2 = [0.5, 0.0, -2.0]
        values3 = [0.1, 0.0, -1.0]

        grad1_values = [0.1, 0.2, 1.0]
        grad2_values = [-0.1, 0.05, 2.0]
        grad3_values = [2.1, 0.0, 0.3]

        var1 = tf.Variable(values1)
        var2 = tf.Variable(values2)
        var3 = tf.Variable(values3)

        grads1 = tf.constant(grad1_values)
        grads2 = tf.constant(grad2_values)
        grads3 = tf.constant(grad3_values)

        comp_optimizer1 = tf.keras.optimizers.get(optimizer1_type)
        comp_optimizer2 = tf.keras.optimizers.get(optimizer2_type)

        composite_optimizer = CompositeOptimizer([
            (comp_optimizer1, lambda: [var1]),
            (comp_optimizer2, lambda: [var2, var3]),
        ])

        self.assertSequenceEqual(composite_optimizer.optimizers,
                                 [comp_optimizer1, comp_optimizer2])

        optimizer1 = tf.keras.optimizers.get(optimizer1_type)
        optimizer2 = tf.keras.optimizers.get(optimizer2_type)

        grads_and_vars_1 = [(tf.constant(grad1_values), tf.Variable(values1))]
        grads_and_vars_2 = [(tf.constant(grad2_values), tf.Variable(values2)),
                            (tf.constant(grad3_values), tf.Variable(values3))]
        grads_and_vars = list(zip([grads1, grads2, grads3],
                                  [var1, var2, var3]))

        for _ in range(10):
            # Applying the composite optimizer should have the same effect as
            # applying optimizer1 and optimizer2 separately, each on its own
            # subset of gradient/variable pairs.
            composite_optimizer.apply_gradients(grads_and_vars)
            optimizer1.apply_gradients(grads_and_vars_1)
            optimizer2.apply_gradients(grads_and_vars_2)

            self.assertAllClose(grads_and_vars[:1], grads_and_vars_1)
            self.assertAllClose(grads_and_vars[1:], grads_and_vars_2)
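Example #2 is parameterized over optimizer identifiers and relies on tf.keras.optimizers.get to resolve them into instances. A quick illustration of that step (the identifier strings below are just examples):

import tensorflow as tf

# String identifiers resolve to optimizer instances with default
# hyperparameters; passing an optimizer instance returns it unchanged.
adam = tf.keras.optimizers.get("adam")
adagrad = tf.keras.optimizers.get("adagrad")
print(type(adam).__name__, type(adagrad).__name__)  # Adam Adagrad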
Example #3
  def test_checkpoint_save_restore(self):
    # Use a simple LinearModel to exercise checkpoint save/restore.
    model = tf.keras.experimental.LinearModel(units=10)

    composite_optimizer = CompositeOptimizer([
        (tf.keras.optimizers.Adam(), lambda: model.trainable_variables[:1]),
        (tf.keras.optimizers.Adagrad(), lambda: model.trainable_variables[1:]),
    ])

    checkpoint = tf.train.Checkpoint(model=model,
                                     optimizer=composite_optimizer)
    model.compile(optimizer=composite_optimizer,
                  loss=tf.keras.losses.MSE)

    batch_size = 16
    num_of_batches = 8

    x = tf.ones((num_of_batches * batch_size, 5))
    y = tf.zeros((num_of_batches * batch_size, 1))
    training_dataset = tf.data.Dataset.from_tensor_slices((x, y))
    training_dataset = training_dataset.batch(batch_size)

    model.fit(training_dataset, epochs=1)

    # Check that the optimizer's iteration count matches the number of batches.
    self.assertEqual(composite_optimizer.iterations.numpy(), num_of_batches)

    # Save the checkpoint.
    checkpoint_path = self.get_temp_dir()
    checkpoint.write(checkpoint_path)

    # Load the checkpoint after reinitializing the optimizer and checkpoint.
    composite_optimizer = CompositeOptimizer([
        (tf.keras.optimizers.Adam(), lambda: model.trainable_variables),
        (tf.keras.optimizers.Adagrad(), lambda: []),
    ])

    checkpoint = tf.train.Checkpoint(model=model,
                                     optimizer=composite_optimizer)

    checkpoint.read(checkpoint_path).assert_consumed()

    # After restoring the checkpoint, the optimizer's iteration count should
    # also be restored to its original value. Right now this assertion fails.
    self.assertEqual(composite_optimizer.iterations.numpy(), num_of_batches)
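The write/read pattern above works the same for any trackable object. A minimal, self-contained sketch of the round trip (the path and values are illustrative):

import tensorflow as tf

step = tf.Variable(0, dtype=tf.int64)
ckpt = tf.train.Checkpoint(step=step)

step.assign(8)
prefix = "/tmp/ckpt_demo"            # illustrative checkpoint prefix
ckpt.write(prefix)                   # write a single checkpoint at this prefix

step.assign(0)                       # simulate reinitialization
ckpt.read(prefix).assert_consumed()  # restore and verify a full match
assert step.numpy() == 8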
Example #4
        def get_model() -> tf.keras.Model:
            model = tf.keras.experimental.LinearModel(units=10)

            composite_optimizer = CompositeOptimizer([
                (tf.keras.optimizers.Adam(),
                 lambda: model.trainable_variables[:1]),
                (tf.keras.optimizers.Adagrad(),
                 lambda: model.trainable_variables[1:]),
            ])
            model.compile(optimizer=composite_optimizer,
                          loss=tf.keras.losses.MSE)
            return model
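A hypothetical way to exercise get_model() from Example #4 on synthetic data (the shapes follow from LinearModel(units=10) with 5 input features; the data itself is illustrative):

model = get_model()
x = tf.ones((32, 5))    # 32 rows of 5 features
y = tf.zeros((32, 10))  # targets matching the model's 10 output units
model.fit(x, y, epochs=1)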