Example #1
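The snippets below are excerpted from larq's test suite and omit their imports. A preamble along the following lines is assumed throughout; the exact module paths are an assumption and vary across larq/TensorFlow versions. Note that Example #3 uses the name testing_utils for larq's own helper (get_small_bnn_model), while the other examples alias that helper as lq_testing_utils and use testing_utils for the Keras test-data helper.

import numpy as np
import pytest
import tensorflow as tf
from tensorflow import keras
from numpy.testing import assert_almost_equal

import larq as lq
from larq import testing_utils as lq_testing_utils
from larq.callbacks import HyperparameterScheduler

# Assumed: get_test_data is the Keras test-data helper; its import path
# depends on the TensorFlow version.
from tensorflow.python.keras import testing_utils

AssertLRCallback, used in Example #1, is a helper defined in the test module itself; a sketch of it follows that example.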
    def test_bop_lr_scheduler(self):
        (x_train, y_train), _ = testing_utils.get_test_data(
            train_samples=100, test_samples=0, input_shape=(10,), num_classes=2
        )
        y_train = keras.utils.to_categorical(y_train)

        model = lq_testing_utils.get_small_bnn_model(
            x_train.shape[1], 10, y_train.shape[1]
        )
        model.compile(
            loss="categorical_crossentropy",
            optimizer=lq.optimizers.Bop(fp_optimizer=tf.keras.optimizers.Adam(0.01)),
        )

        model.fit(
            x_train,
            y_train,
            epochs=4,
            callbacks=[
                tf.keras.callbacks.LearningRateScheduler(lambda epoch: 1 / (1 + epoch)),
                AssertLRCallback(lambda epoch: 1 / (1 + epoch)),
            ],
            batch_size=8,
            verbose=0,
        )
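AssertLRCallback is defined in the test module and is not part of this excerpt or of the larq API. A minimal sketch of what such a helper might look like, assuming it only verifies that the optimizer's learning rate follows the given schedule at the start of each epoch:

class AssertLRCallback(tf.keras.callbacks.Callback):
    """Hypothetical helper: check that the learning rate follows `schedule`."""

    def __init__(self, schedule):
        super().__init__()
        self.schedule = schedule

    def on_epoch_begin(self, epoch, logs=None):
        # LearningRateScheduler precedes this callback in the callbacks list,
        # so model.optimizer.lr has already been updated for this epoch.
        lr = tf.keras.backend.get_value(self.model.optimizer.lr)
        np.testing.assert_almost_equal(lr, self.schedule(epoch), decimal=8)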
Example #2
def _test_optimizer(
    optimizer, target=0.75, test_kernels_are_binary=True, trainable_bn=True
):
    np.random.seed(1337)
    (x_train, y_train), _ = testing_utils.get_test_data(
        train_samples=1000, test_samples=0, input_shape=(10,), num_classes=2
    )
    y_train = keras.utils.to_categorical(y_train)

    model = lq_testing_utils.get_small_bnn_model(
        x_train.shape[1], 20, y_train.shape[1], trainable_bn=trainable_bn
    )
    model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["acc"])

    initial_vars = [tf.keras.backend.get_value(w) for w in model.trainable_weights]

    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)

    trained_vars = [tf.keras.backend.get_value(w) for w in model.trainable_weights]

    # check all trainable variables have actually been updated
    for v0, v1 in zip(initial_vars, trained_vars):
        assert not np.all(v0 == v1)

    # Note that when kernels are treated as latent weights they need not be
    # binary (see https://arxiv.org/abs/1906.02107 for further discussion)
    if test_kernels_are_binary:
        for layer in model.layers:
            if "quant" in layer.name:
                for weight in layer.trainable_weights:
                    assert np.all(np.isin(tf.keras.backend.get_value(weight), [-1, 1]))

    assert history.history["acc"][-1] >= target
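For context, a helper like this is typically invoked with a concrete optimizer instance. Illustrative calls, not necessarily the exact parametrization used in the test suite, might look like:

_test_optimizer(lq.optimizers.Bop(fp_optimizer=tf.keras.optimizers.Adam(0.01)))
_test_optimizer(tf.keras.optimizers.Adam(0.01), test_kernels_are_binary=False)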
Example #3
def test_saved_model(tmp_path):
    model_path = str(tmp_path / "model")
    x = np.random.normal(size=(4, 32))
    model = testing_utils.get_small_bnn_model(x.shape[1], 16, 10)
    weights = model.get_weights()
    model.save(model_path, save_format="tf")
    reloaded_model = tf.keras.models.load_model(model_path)
    reloaded_weights = reloaded_model.get_weights()
    assert_almost_equal(reloaded_model.predict(x), model.predict(x))
    assert len(reloaded_weights) == len(weights)
    for reloaded_weight, weight in zip(reloaded_weights, weights):
        assert_almost_equal(reloaded_weight, weight)
Example #4
    def _create_data_and_model(self, train_samples=1000):
        np.random.seed(1337)
        (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
            train_samples=train_samples,
            test_samples=0,
            input_shape=(10,),
            num_classes=2,
        )
        y_train = tf.keras.utils.to_categorical(y_train)

        model = lq_testing_utils.get_small_bnn_model(
            x_train.shape[1], 20, y_train.shape[1]
        )

        return x_train, y_train, model
Example #5
    def test_normal_optimizer(self):
        np.random.seed(1337)
        (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
            train_samples=1000, test_samples=0, input_shape=(10,), num_classes=2
        )

        y_train = tf.keras.utils.to_categorical(y_train)

        model = lq_testing_utils.get_small_bnn_model(
            x_train.shape[1], 20, y_train.shape[1]
        )

        model.compile(
            loss="categorical_crossentropy",
            optimizer=tf.keras.optimizers.Adam(0.01),
            metrics=["accuracy"],
        )

        def scheduler(x):
            return 1.0 / (1.0 + x)

        # We shouldn't need to specify the optimizer explicitly here.
        test_scheduler = HyperparameterScheduler(
            schedule=scheduler,
            hyperparameter="lr",
            verbose=1,
        )

        num_epochs = 2
        model.fit(
            x_train,
            y_train,
            epochs=num_epochs,
            batch_size=16,
            callbacks=[test_scheduler],
            verbose=0,
        )

        np.testing.assert_almost_equal(
            tf.keras.backend.get_value(model.optimizer.lr),
            scheduler(num_epochs - 1),
            decimal=8,
        )
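As the comment indicates, the callback is expected to fall back to model.optimizer when no optimizer is passed. The explicit form, as used for the sub-optimizers in Example #7, would look like this (illustration only):

explicit_scheduler = HyperparameterScheduler(
    schedule=scheduler,
    optimizer=model.optimizer,
    hyperparameter="lr",
    verbose=1,
)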
Example #6
    def test_wrong_param(self):
        np.random.seed(1337)
        (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
            train_samples=1000, test_samples=0, input_shape=(10,), num_classes=2
        )

        y_train = tf.keras.utils.to_categorical(y_train)

        model = lq_testing_utils.get_small_bnn_model(
            x_train.shape[1], 20, y_train.shape[1]
        )

        model.compile(
            loss="categorical_crossentropy",
            optimizer=tf.keras.optimizers.Adam(0.01),
            metrics=["accuracy"],
        )

        def scheduler(x):
            return 1.0 / (1.0 + x)

        wrong_scheduler = HyperparameterScheduler(
            schedule=scheduler,
            hyperparameter="invalid_param",
            verbose=1,
        )

        with pytest.raises(ValueError):
            model.fit(
                x_train,
                y_train,
                epochs=1,
                batch_size=16,
                callbacks=[wrong_scheduler],
                verbose=0,
            )
Example #7
    def test_hyper_parameter_scheduler(self):
        np.random.seed(1337)
        (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
            train_samples=1000, test_samples=0, input_shape=(10,), num_classes=2
        )

        y_train = tf.keras.utils.to_categorical(y_train)

        model = lq_testing_utils.get_small_bnn_model(
            x_train.shape[1], 20, y_train.shape[1]
        )

        bop = lq.optimizers.Bop(threshold=1e-6, gamma=1e-3)
        adam = tf.keras.optimizers.Adam(0.01)
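        # CaseOptimizer applies Bop to every variable matching the predicate
        # (Bop.is_binary_variable) and the default Adam optimizer to the rest.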
        case_optimizer = lq.optimizers.CaseOptimizer(
            (lq.optimizers.Bop.is_binary_variable, bop),
            default_optimizer=adam,
        )

        model.compile(
            loss="categorical_crossentropy",
            optimizer=case_optimizer,
            metrics=["accuracy"],
        )

        def scheduler(x):
            return 1.0 / (1.0 + x)

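        # With a CaseOptimizer the schedulers are pointed at the relevant
        # sub-optimizer explicitly: optimizers[0] is Bop (gamma, threshold),
        # optimizers[1] is the default Adam optimizer (lr).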
        cbk_gamma_scheduler = HyperparameterScheduler(
            schedule=scheduler,
            optimizer=model.optimizer.optimizers[0],
            hyperparameter="gamma",
            verbose=1,
        )
        cbk_threshold_scheduler = HyperparameterScheduler(
            schedule=scheduler,
            optimizer=model.optimizer.optimizers[0],
            hyperparameter="threshold",
            verbose=1,
        )
        cbk_lr_scheduler = HyperparameterScheduler(
            schedule=scheduler,
            optimizer=model.optimizer.optimizers[1],
            hyperparameter="lr",
            verbose=1,
        )

        num_epochs = 10
        model.fit(
            x_train,
            y_train,
            epochs=num_epochs,
            batch_size=16,
            callbacks=[
                cbk_gamma_scheduler, cbk_lr_scheduler, cbk_threshold_scheduler
            ],
            verbose=0,
        )

        np.testing.assert_almost_equal(
            tf.keras.backend.get_value(model.optimizer.optimizers[0].gamma),
            scheduler(num_epochs - 1),
            decimal=8,
        )

        np.testing.assert_almost_equal(
            tf.keras.backend.get_value(model.optimizer.optimizers[0].threshold),
            scheduler(num_epochs - 1),
            decimal=8,
        )

        np.testing.assert_almost_equal(
            tf.keras.backend.get_value(model.optimizer.optimizers[1].lr),
            scheduler(num_epochs - 1),
            decimal=8,
        )