Example #1
0
    def testMonotonicity10dSinOfSum(self):
        """Trains a 10-d lattice on a sin-of-sum target with monotonicity.

        Checks the training loss both with all ten dimensions constrained to
        be increasing and with a mixed constrained/unconstrained pattern.
        """
        if self.disable_all:
            return
        monotonicities = [1] * 10
        kernel_initializer = kfll.RandomMonotonicInitializer(
            monotonicities=monotonicities, seed=self.seed)
        config = {
            "lattice_sizes": 2,
            "input_dims": 10,
            "num_training_records": 1000,
            "num_training_epoch": 100,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformly,
            "y_function": self._SinOfSum,
            # Reuse the same list that was given to the initializer so the
            # two cannot diverge (previously a second [1] * 10 literal was
            # hard-coded here).
            "monotonicities": monotonicities,
            "kernel_initializer": kernel_initializer,
        }  # pyformat: disable
        loss = self._TrainModel(config)
        self.assertAlmostEqual(loss, 0.179642, delta=self.loss_eps)

        # Second pass: only a subset of dimensions is monotonic; the config
        # dict is updated in place and the model retrained.
        monotonicities = [0, 1, 0, 1, 1, 0, 1, 1, 1, 0]
        kernel_initializer = kfll.RandomMonotonicInitializer(
            monotonicities=monotonicities, seed=self.seed)
        config["monotonicities"] = monotonicities
        config["kernel_initializer"] = kernel_initializer
        loss = self._TrainModel(config)
        self.assertAlmostEqual(loss, 0.181125, delta=self.loss_eps)
Example #2
0
    def testMonotonicityOneD(self):
        """Checks 1-d monotonic lattices against expected training losses.

        Covers an increasing constraint given as the integer ``1``, as the
        string ``'increasing'`` (on a negated target), and with a strictly
        positive target monotonicity diff.
        """
        if self.disable_all:
            return

        # Case 1: single increasing dimension on a size-20 lattice.
        mono = [1]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 20,
            "input_dims": 1,
            "num_training_records": 128,
            "num_training_epoch": 50,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformly,
            "y_function": self._SinPlusX,
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.123856, delta=self.loss_eps)
        self._TestEnsemble(config)

        # Case 2: constraint spelled as a string, fitting a negated target.
        mono = ["increasing"]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 20,
            "input_dims": 1,
            "num_training_records": 100,
            "num_training_epoch": 50,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformly,
            "y_function": lambda x: -self._SinPlusX(x),
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 2.841356, delta=self.loss_eps)
        self._TestEnsemble(config)

        # Case 3: strictly increasing target, so require a positive diff.
        mono = [1]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 5,
            "input_dims": 1,
            "num_training_records": 100,
            "num_training_epoch": 200,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformly,
            "y_function": self._SinPlusLargeX,
            "monotonicities": mono,
            "kernel_initializer": initializer,
            # Target function is strictly increasing.
            "target_monotonicity_diff": 1e-6,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.000780, delta=self.loss_eps)
Example #3
0
    def testMonotonicity5d(self):
        """Checks monotonic 5-d (and one 4-d) lattices against expected losses."""
        if self.disable_all:
            return

        # All five dimensions increasing, constant initializer, strict diff.
        config = {
            "lattice_sizes": 2,
            "input_dims": 5,
            "num_training_records": 100,
            "num_training_epoch": 200,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformly,
            "y_function": self._ScaledSum,
            "monotonicities": [1, 1, 1, 1, 1],
            "kernel_initializer": keras.initializers.Constant(value=0.5),
            # Function is strictly increasing everywhere, so request monotonicity
            # diff to be strictly positive.
            "target_monotonicity_diff": 0.08,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.000524, delta=self.loss_eps)

        # All-increasing constraints against a negated target.
        mono = [1, 1, 1, 1, 1]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 2,
            "input_dims": 5,
            "num_training_records": 100,
            "num_training_epoch": 40,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformly,
            "y_function": lambda x: -self._ScaledSum(x),
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.015019, delta=self.loss_eps)
        self._TestEnsemble(config)

        # Mixed integer/string constraint spelling on a 4-d lattice.
        mono = [1, "increasing", 1, 1]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 3,
            "input_dims": 4,
            "num_training_records": 100,
            "num_training_epoch": 100,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformly,
            "y_function": self._SinOfSum,
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.377306, delta=self.loss_eps)
        self._TestEnsemble(config)
        self._TestEnsemble(config)
Example #4
0
    def testInputOutOfBounds(self):
        """Trains on x values generated beyond the usual lattice range."""
        if self.disable_all:
            return

        # 1-d lattice, zero-initialized kernel.
        config = {
            "lattice_sizes": 6,
            "input_dims": 1,
            "num_training_records": 100,
            "num_training_epoch": 20,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._ScatterXUniformlyExtendedRange,
            "y_function": self._Sin,
            "kernel_initializer": keras.initializers.Zeros(),
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.018726, delta=self.loss_eps)
        self._TestEnsemble(config)

        # 2-d extended mesh grid with an unconstrained random initializer.
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=None, seed=self.seed)
        config = {
            "lattice_sizes": 2,
            "input_dims": 2,
            "num_training_records": 100,
            "num_training_epoch": 20,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._TwoDMeshGridExtendedRange,
            "y_function": self._SinOfSum,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.130816, delta=self.loss_eps)
        self._TestEnsemble(config)
Example #5
0
 def testInitializerType(self, initializer, expected_loss):
     """Verifies each supported initializer yields the expected initial loss.

     Zero training epochs are used, so the loss reflects the kernel
     initialization alone.
     """
     if self.disable_all:
         return
     # The string form is resolved to a concrete initializer object here.
     if initializer == "random_monotonic_initializer":
         initializer = kfll.RandomMonotonicInitializer(
             monotonicities=None, seed=self.seed)
     config = {
         "lattice_sizes": 3,
         "input_dims": 2,
         "num_training_records": 100,
         "num_training_epoch": 0,
         "optimizer": tf.keras.optimizers.Adagrad,
         "learning_rate": 1.0,
         "x_generator": self._TwoDMeshGrid,
         "y_function": self._Max,
         "kernel_initializer": initializer,
     }  # pyformat: disable
     self.assertAlmostEqual(
         self._TrainModel(config), expected_loss, delta=self.loss_eps)
     self._TestEnsemble(config)
Example #6
0
 def testHighDimensionsStressTest(self):
     """Stress test: 16-input lattice model with a few monotonic dimensions."""
     if self.disable_all:
         return
     # Only dimensions 3, 4 and 10 are constrained to be increasing.
     monotonicities = [0] * 16
     for dim in (3, 4, 10):
         monotonicities[dim] = 1
     initializer = kfll.RandomMonotonicInitializer(
         monotonicities=monotonicities, seed=self.seed)
     config = {
         "lattice_sizes": 2,
         "input_dims": 16,
         "num_terms": 128,
         "units": 2,
         "monotonicities": monotonicities,
         "num_training_records": 100,
         "num_training_epoch": 3,
         "optimizer": tf.keras.optimizers.Adagrad,
         "learning_rate": 1.0,
         "x_generator": self._ScatterXUniformly,
         "y_function": self._SinOfSum,
         "kernel_initializer": initializer,
         "target_monotonicity_diff": -1e-5,
     }  # pyformat: disable
     self.assertAlmostEqual(
         self._TrainModel(config), 0.224262, delta=self.loss_eps)
Example #7
0
    def testMonotonicityTwoD(self):
        """Checks 2-d monotonic lattices against expected training losses."""
        if self.disable_all:
            return

        # Both dimensions increasing.
        mono = [1, 1]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 21,
            "input_dims": 2,
            "num_training_records": 900,
            "num_training_epoch": 100,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._TwoDMeshGrid,
            "y_function": self._SinPlusXNd,
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.562003, delta=self.loss_eps)
        self._TestEnsemble(config)

        # String spelling: first dimension unconstrained, second increasing.
        mono = ["none", "increasing"]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 21,
            "input_dims": 2,
            "num_training_records": 900,
            "num_training_epoch": 100,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._TwoDMeshGrid,
            "y_function": self._SinPlusXNd,
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.222727, delta=self.loss_eps)
        self._TestEnsemble(config)

        # Integer spelling: first increasing, second unconstrained.
        mono = [1, 0]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 21,
            "input_dims": 2,
            "num_training_records": 900,
            "num_training_epoch": 100,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.5,
            "x_generator": self._TwoDMeshGrid,
            "y_function": self._SinPlusXNd,
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.498311, delta=self.loss_eps)
        self._TestEnsemble(config)

        # Both dimensions increasing while fitting a negated target.
        mono = [1, 1]
        initializer = kfll.RandomMonotonicInitializer(
            monotonicities=mono, seed=self.seed)
        config = {
            "lattice_sizes": 2,
            "input_dims": 2,
            "num_training_records": 100,
            "num_training_epoch": 20,
            "optimizer": tf.keras.optimizers.Adagrad,
            "learning_rate": 1.0,
            "x_generator": self._TwoDMeshGrid,
            "y_function": lambda x: -self._ScaledSum(x),
            "monotonicities": mono,
            "kernel_initializer": initializer,
        }  # pyformat: disable
        self.assertAlmostEqual(
            self._TrainModel(config), 0.050929, delta=self.loss_eps)
        self._TestEnsemble(config)