import tensorflow as tf

from tensorflow_addons.optimizers import MovingAverage


def test_config():
    sgd_opt = tf.keras.optimizers.SGD(lr=2.0, nesterov=True, momentum=0.3, decay=0.1)
    opt = MovingAverage(
        sgd_opt,
        average_decay=0.5,
        num_updates=None,
        start_step=5,
        dynamic_decay=True,
    )
    config = opt.get_config()

    assert config["average_decay"] == 0.5
    assert config["num_updates"] is None
    assert config["start_step"] == 5
    assert config["dynamic_decay"] is True

    new_opt = MovingAverage.from_config(config)
    old_sgd_config = opt._optimizer.get_config()
    new_sgd_config = new_opt._optimizer.get_config()

    # The wrapped SGD optimizer must survive the config round trip unchanged.
    assert old_sgd_config == new_sgd_config
def test_serialization():
    sgd_opt = tf.keras.optimizers.SGD(lr=2.0, nesterov=True, momentum=0.3, decay=0.1)
    optimizer = MovingAverage(
        sgd_opt,
        average_decay=0.5,
        num_updates=None,
        start_step=5,
        dynamic_decay=True,
    )
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
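# The ``start_step``/``dynamic_decay`` arguments configured in the tests above
# control how fast the moving average warms up. The sketch below is a hedged
# illustration, assuming the schedule mirrors the classic
# ``tf.train.ExponentialMovingAverage`` ramp-up; the exact formula is an
# assumption about the implementation, not a documented guarantee.
def _effective_decay_sketch(step, average_decay=0.5, start_step=5):
    # Hypothetical helper, not part of the library API.
    n = max(0, step - start_step)
    return min(average_decay, (1.0 + n) / (10.0 + n))


# Under this assumed schedule, the decay starts at min(0.5, 1/10) = 0.1 right
# at start_step=5 and reaches the average_decay=0.5 cap at step 13, since
# (1 + 8) / (10 + 8) = 0.5.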
class MovingAverageTest(tf.test.TestCase):
    # Legacy ``tf.test.TestCase`` variants that exercise the older
    # ``sequential_update`` argument of the MovingAverage wrapper.

    def test_config(self):
        sgd_opt = tf.keras.optimizers.SGD(
            lr=2.0, nesterov=True, momentum=0.3, decay=0.1
        )
        opt = MovingAverage(
            sgd_opt, average_decay=0.5, num_updates=100, sequential_update=False
        )
        config = opt.get_config()

        self.assertEqual(config["average_decay"], 0.5)
        self.assertEqual(config["decay"], 0.1)
        self.assertEqual(config["learning_rate"], 2.0)
        self.assertEqual(config["momentum"], 0.3)
        self.assertEqual(config["name"], "SGD")
        self.assertEqual(config["nesterov"], True)
        self.assertEqual(config["num_updates"], 100)
        self.assertEqual(config["sequential_update"], False)
    def test_config_round_trip(self):
        # Named distinctly from ``test_config`` above so that neither method
        # shadows the other within the test case.
        sgd_opt = tf.keras.optimizers.SGD(
            lr=2.0, nesterov=True, momentum=0.3, decay=0.1
        )
        opt = MovingAverage(
            sgd_opt, average_decay=0.5, num_updates=None, sequential_update=False
        )
        config = opt.get_config()

        self.assertEqual(config["average_decay"], 0.5)
        self.assertEqual(config["num_updates"], None)
        self.assertEqual(config["sequential_update"], False)

        new_opt = MovingAverage.from_config(config)
        old_sgd_config = opt._optimizer.get_config()
        new_sgd_config = new_opt._optimizer.get_config()

        # The wrapped SGD optimizer must survive the round trip unchanged.
        self.assertEqual(old_sgd_config, new_sgd_config)
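# A minimal usage sketch of the wrapper these tests configure: train through
# MovingAverage exactly as through the inner optimizer, then copy the averaged
# values back into the variables before evaluation via assign_average_vars.
# The tiny variable/gradient setup here is illustrative only.
def _usage_sketch():
    var = tf.Variable([2.0])
    opt = MovingAverage(
        tf.keras.optimizers.SGD(learning_rate=1.0), average_decay=0.5
    )
    grad = tf.constant([1.0])
    # Updates both the variable and its shadow moving-average slot.
    opt.apply_gradients([(grad, var)])
    # Overwrites the variable with its moving average for evaluation/export.
    opt.assign_average_vars([var])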