def test_config():
    """Verify MovingAverage serializes its config and restores it via from_config.

    Checks that the wrapper-specific fields (average_decay, num_updates,
    start_step, dynamic_decay) round-trip, and that the wrapped SGD
    optimizer's config is reproduced exactly.
    """
    sgd_opt = tf.keras.optimizers.SGD(lr=2.0, nesterov=True, momentum=0.3, decay=0.1)
    opt = MovingAverage(
        sgd_opt,
        average_decay=0.5,
        num_updates=None,
        start_step=5,
        dynamic_decay=True,
    )
    config = opt.get_config()
    assert config["average_decay"] == 0.5
    assert config["num_updates"] is None
    assert config["start_step"] == 5
    assert config["dynamic_decay"] is True

    new_opt = MovingAverage.from_config(config)
    old_sgd_config = opt._optimizer.get_config()
    new_sgd_config = new_opt._optimizer.get_config()
    # Compare the full dicts. The previous zip-based loop paired keys by
    # position and silently truncated when one config had extra or missing
    # keys, so a key that failed to round-trip could go unnoticed.
    assert old_sgd_config == new_sgd_config
def test_config(self):
    """Verify MovingAverage round-trips its config through from_config.

    Checks the wrapper-specific fields (average_decay, num_updates,
    sequential_update) and that the wrapped SGD optimizer's config is
    reproduced exactly after reconstruction.
    """
    sgd_opt = tf.keras.optimizers.SGD(
        lr=2.0, nesterov=True, momentum=0.3, decay=0.1)
    opt = MovingAverage(
        sgd_opt,
        average_decay=0.5,
        num_updates=None,
        sequential_update=False)
    config = opt.get_config()
    self.assertEqual(config['average_decay'], 0.5)
    # Idiomatic unittest assertions for None/False checks.
    self.assertIsNone(config['num_updates'])
    self.assertFalse(config['sequential_update'])

    new_opt = MovingAverage.from_config(config)
    old_sgd_config = opt._optimizer.get_config()
    new_sgd_config = new_opt._optimizer.get_config()
    # Compare the full dicts. The previous zip-based loop paired keys by
    # position and silently truncated when one config had extra or missing
    # keys, so a key that failed to round-trip could go unnoticed.
    self.assertEqual(old_sgd_config, new_sgd_config)