Example #1
0
    def testGraphSize(self, lattice_sizes, input_dims, num_terms,
                      expected_graph_size):
        """Guards against growth in the number of ops the layer adds to a graph."""
        # A failure here means either the core lattice interpolation logic or
        # Keras internals now emit more graph ops. Make sure any increase is
        # truly unavoidable and keep it as small as possible.
        if self.disable_all:
            return
        tf.compat.v1.disable_eager_execution()
        tf.compat.v1.reset_default_graph()

        kfl = kfll.KroneckerFactoredLattice(
            lattice_sizes=lattice_sizes, num_terms=num_terms)
        # Building/calling the layer once populates the default graph.
        kfl(tf.ones(shape=(1, input_dims)))

        num_nodes = len(tf.compat.v1.get_default_graph().as_graph_def().node)
        self.assertLessEqual(num_nodes, expected_graph_size)
 def testSavingLoadingScale(self):
   """Verifies the layer/constraint `scale` identity survives save + load.

   Bug fix: the `custom_objects` key for the kernel constraint was
   "KroneckerFactoredLatticeConstraint" (singular) while the actual class is
   `KroneckerFactoredLatticeConstraints` (plural) — the key must match the
   serialized class name for Keras to resolve the custom object on load.
   """
   # Create simple x --> x^2 dataset.
   train_data = [[[float(x)], float(x)**2] for x in range(100)]
   train_x, train_y = zip(*train_data)
   train_x, train_y = np.array(train_x), np.array(train_y)
   # Construct simple single lattice model. Must have monotonicities specified
   # or constraint will be None.
   keras_layer = kfll.KroneckerFactoredLattice(
       lattice_sizes=2, monotonicities=[1])
   model = keras.models.Sequential()
   model.add(keras_layer)
   # Compile and fit the model.
   model.compile(
       loss="mse", optimizer=keras.optimizers.Adam(learning_rate=0.1))
   model.fit(train_x, train_y)
   # Extract scale from layer and constraint before save; they must be the
   # same variable object.
   layer_scale = keras_layer.scale
   constraint_scale = keras_layer.kernel.constraint.scale
   self.assertIs(layer_scale, constraint_scale)
   # Save and load the model.
   with tempfile.NamedTemporaryFile(suffix=".h5") as f:
     keras.models.save_model(model, f.name)
     loaded_model = keras.models.load_model(
         f.name,
         custom_objects={
             "KroneckerFactoredLattice":
                 kfll.KroneckerFactoredLattice,
             "KroneckerFactoredLatticeConstraints":
                 kfll.KroneckerFactoredLatticeConstraints,
             "KFLRandomMonotonicInitializer":
                 kfll.KFLRandomMonotonicInitializer,
             "ScaleInitializer":
                 kfll.ScaleInitializer,
             "ScaleConstraints":
                 kfll.ScaleConstraints,
             "BiasInitializer":
                 kfll.BiasInitializer,
         })
   # Extract loaded layer.
   loaded_keras_layer = loaded_model.layers[0]
   # Extract scale from layer and constraint after load; the identity must
   # be re-established by deserialization.
   loaded_layer_scale = loaded_keras_layer.scale
   loaded_constraint_scale = loaded_keras_layer.kernel.constraint.scale
   self.assertIs(loaded_layer_scale, loaded_constraint_scale)
Example #3
0
 def testOutputShapeForDifferentInputTypes(self, batch_size, dims, units):
     """Checks output shape for both dense-tensor and list-of-tensors inputs."""
     target_shape = (batch_size, units)
     # Create KFL Layer instance.
     layer = kfll.KroneckerFactoredLattice(lattice_sizes=2, units=units)

     # Case 1: one dense tensor shaped (batch_size, dims) when units == 1,
     # or (batch_size, units, dims) otherwise.
     if units == 1:
         row = [float(i) for i in range(dims)]
     else:
         row = [[float(i) for i in range(dims)] for _ in range(units)]
     dense_input = tf.constant([row] * batch_size)
     self.assertEqual(layer(dense_input).shape, target_shape)

     # Case 2: a length-dims list of tensors, each shaped (batch_size, 1)
     # when units == 1, or (batch_size, units, 1) otherwise.
     column = tf.constant(
         [[float(i) if units == 1 else [float(i)] for i in range(units)]
          for _ in range(batch_size)])
     self.assertEqual(layer([column] * dims).shape, target_shape)
Example #4
0
    def _TrainModel(self, config, plot_path=None):
        """Builds and trains one KFL model from `config`; returns final loss."""
        logging.info("Testing config:")
        logging.info(config)
        config = self._SetDefaults(config)
        self._ResetAllBackends()

        training_inputs, training_labels, raw_training_inputs = (
            self._GetTrainingInputsAndLabels(config))

        units = config["units"]
        input_dims = config["input_dims"]
        if units > 1:
            # To test a multi-'units' lattice, replicate every input 'units'
            # times and later keep just one of the 'units' outputs, so that a
            # multi-'units' lattice must train exactly like a single-'units'
            # one.
            training_inputs = [
                np.tile(np.expand_dims(x, axis=0), reps=[units, 1])
                for x in training_inputs
            ]
            input_shape = (units, input_dims)
        else:
            input_shape = (input_dims,)

        keras_layer = kfll.KroneckerFactoredLattice(
            lattice_sizes=config["lattice_sizes"],
            units=units,
            num_terms=config["num_terms"],
            monotonicities=config["monotonicities"],
            satisfy_constraints_at_every_step=config[
                "satisfy_constraints_at_every_step"],
            kernel_initializer=config["kernel_initializer"],
            input_shape=input_shape,
            dtype=tf.float32)
        model = keras.models.Sequential()
        model.add(keras_layer)

        # When multi-unit lattices are used, only a single lattice's output is
        # extracted for testing.
        if units > 1:
            lattice_index = config["lattice_index"]
            model.add(
                keras.layers.Lambda(
                    lambda x: x[:, lattice_index:lattice_index + 1]))

        model.compile(
            loss=keras.losses.mean_squared_error,
            optimizer=config["optimizer"](
                learning_rate=config["learning_rate"]))

        loss = test_utils.run_training_loop(
            config=config,
            training_data=(training_inputs, training_labels,
                           raw_training_inputs),
            keras_model=model,
            plot_path=plot_path)

        if tf.executing_eagerly():
            tf.print("final weights: ", keras_layer.kernel)
        assertion_ops = keras_layer.assert_constraints(
            eps=-config["target_monotonicity_diff"])
        # In graph mode the constraint assertions are ops that must be run
        # explicitly through a session.
        if not tf.executing_eagerly() and assertion_ops:
            tf.compat.v1.keras.backend.get_session().run(assertion_ops)

        return loss