def test_add_normalizer():
    def dummy_normalization(grad):
        norm = K.mean(K.abs(grad)) + K.epsilon()
        return norm

    func_name = 'dummy'

    # add the function to the name list
    ClippedOptimizer.set_normalization_function(func_name, dummy_normalization)

    # check if it exists in the name list now
    name_list = ClippedOptimizer.get_normalization_functions()
    assert func_name in name_list

    # train a model on this new normalizer
    sgd = ClippedOptimizer('sgd', normalization=func_name)
    _test_optimizer(sgd)
    _test_no_grad(sgd)

def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()

    # if the input optimizer is not a ClippedOptimizer, wrap the optimizer
    # with a default ClippedOptimizer
    if optimizer.__class__.__name__ != ClippedOptimizer.__name__:
        optimizer = ClippedOptimizer(optimizer, normalization='l2')

    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target

    # Test optimizer serialization and deserialization.
    config = optimizers.serialize(optimizer)
    optim = optimizers.deserialize(config)
    new_config = optimizers.serialize(optim)
    assert config == new_config

    # Test weights saving and loading.
    original_weights = optimizer.weights
    model.save('temp.h5')
    temp_model = load_model('temp.h5')
    loaded_weights = temp_model.optimizer.weights
    assert len(original_weights) == len(loaded_weights)
    os.remove('temp.h5')

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])

    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)

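# NOTE: the tests in this module rely on two helpers that are not shown in
# this section: get_test_data(), which returns a small (x_train, y_train)
# classification set, and _test_no_grad(optimizer), which checks the wrapped
# optimizer's behaviour when no gradients can be computed. The definition
# below is only a minimal sketch of what get_test_data() could look like;
# the shapes, seed, and class count are assumptions, not the project's actual
# helper, which should be preferred if it is defined elsewhere in this file.
def get_test_data(num_train=1000, input_dim=10, num_classes=2):
    import numpy as np
    from keras.utils import to_categorical

    np.random.seed(1337)
    x_train = np.random.random((num_train, input_dim))
    y_train = np.random.randint(0, num_classes, size=(num_train,))
    return x_train, to_categorical(y_train, num_classes)
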
def test_wrong_normalization():
    with pytest.raises(ValueError):
        ClippedOptimizer('sgd', normalization=None)

def test_tf_optimizer():
    import tensorflow as tf

    tf_opt = optimizers.TFOptimizer(tf.train.GradientDescentOptimizer(0.1))

    # wrapping a native TensorFlow optimizer is not supported
    with pytest.raises(NotImplementedError):
        ClippedOptimizer(tf_opt, normalization='l2')

def test_clipvalue_clipped():
    sgd = optimizers.SGD(lr=0.01, momentum=0.9, clipvalue=0.5)
    sgd = ClippedOptimizer(sgd, normalization='l2')
    _test_optimizer(sgd)

def test_clipnorm_clipped():
    sgd = optimizers.SGD(lr=0.01, momentum=0.9)
    sgd = ClippedOptimizer(sgd, normalization='l2', clipnorm=0.5)
    _test_optimizer(sgd)

def test_sgd_clipped_average_l1_l2():
    sgd = optimizers.SGD(lr=0.01, momentum=0.9, nesterov=True)
    sgd = ClippedOptimizer(sgd, normalization='avg_l1_l2')
    _test_optimizer(sgd)
    _test_no_grad(sgd)

def test_sgd_clipped_std():
    sgd = optimizers.SGD(lr=0.01, momentum=0.9, nesterov=True)
    sgd = ClippedOptimizer(sgd, normalization='std')
    _test_optimizer(sgd)
    _test_no_grad(sgd)

def test_sgd_clipped_from_string():
    sgd = ClippedOptimizer('sgd', normalization='l2')
    _test_optimizer(sgd)
    _test_no_grad(sgd)
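

# Allow running this test module directly with `python <this file>`; this
# entry point is a common convention and an assumption here, not something
# taken from the original file.
if __name__ == '__main__':
    pytest.main([__file__])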