def _get_clustered_model(self, preserve_sparsity):
        """Cluster the (sparse) model and return clustered_model.

        Args:
            preserve_sparsity: when True, the first two rows of the first
                Dense layer's kernel are zeroed out first, so the clustering
                has actual sparsity to preserve.

        Returns:
            The model wrapped with sparsity-aware clustering wrappers.
        """
        # Fixed seed so the injected sparsity pattern is reproducible.
        tf.random.set_seed(1)
        original_model = tf.keras.Sequential([
            layers.Dense(5, activation='softmax', input_shape=(10, )),
            layers.Flatten(),
        ])

        # Manually set sparsity in the Dense layer if preserve_sparsity is on
        if preserve_sparsity:
            first_layer_weights = original_model.layers[0].get_weights()
            # `[0][:]` is a numpy view of the kernel, so this zeroes its
            # first two rows in place before writing the weights back.
            first_layer_weights[0][:][0:2] = 0.0
            original_model.layers[0].set_weights(first_layer_weights)

        # Start the sparsity-aware clustering
        clustering_params = {
            'number_of_clusters': 4,
            'cluster_centroids_init':
            cluster_config.CentroidInitialization.LINEAR,
            # NOTE(review): clustering always runs in sparsity-preserving
            # mode; the `preserve_sparsity` argument above only controls
            # whether the model is made sparse first. Confirm the hardcoded
            # True is intentional.
            'preserve_sparsity': True
        }

        clustered_model = experimental_cluster.cluster_weights(
            original_model, **clustering_params)

        return clustered_model
Ejemplo n.º 2
0
def _cluster_model(model, number_of_clusters, preserve_sparsity=False):
    """Cluster `model`, fine-tune it, and return the stripped result.

    Args:
        model: the Keras model to cluster.
        number_of_clusters: number of centroids per clusterable weight.
        preserve_sparsity: whether clustering should keep null weights null.

    Returns:
        The fine-tuned model with clustering wrappers stripped off.
    """
    (x_train, y_train), _ = _get_dataset()

    # Cluster the model with KMeans++ centroid initialization.
    clustered_model = experimental_cluster.cluster_weights(
        model,
        number_of_clusters=number_of_clusters,
        cluster_centroids_init=(
            cluster_config.CentroidInitialization.KMEANS_PLUS_PLUS),
        preserve_sparsity=preserve_sparsity)

    # A small learning rate keeps fine-tuning from disturbing the clusters.
    optimizer = tf.keras.optimizers.Adam(learning_rate=1e-5)
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)

    clustered_model.compile(loss=loss_fn,
                            optimizer=optimizer,
                            metrics=['accuracy'])

    # Fine-tune the clustered model before stripping the wrappers.
    clustered_model.fit(x_train, y_train, epochs=EPOCHS_FINE_TUNING)

    stripped_model = cluster.strip_clustering(clustered_model)
    stripped_model.compile(loss=loss_fn,
                           optimizer=optimizer,
                           metrics=['accuracy'])

    return stripped_model
Ejemplo n.º 3
0
  def testClusterKerasClusterableLayerWithSparsityPreservation(self):
    """Verifies that a built-in keras layer marked as clusterable is being clustered correctly when sparsity preservation is enabled."""
    # Merge the shared params with sparsity preservation switched on.
    sparsity_aware_params = dict(self.params, preserve_sparsity=True)
    clustered_layer = experimental_cluster.cluster_weights(
        self.keras_clusterable_layer, **sparsity_aware_params)

    self._validate_clustered_layer(self.keras_clusterable_layer,
                                   clustered_layer)
    def testSparsityIsPreservedDuringTraining(self):
        """Verifies that fine-tuning a clustered model keeps its null weights.

        A specific random seed is set so the manually injected null weights
        used to test sparsity preservation are reproducible.
        """
        tf.random.set_seed(1)
        # Verifies that training a clustered model with null weights in it
        # does not destroy the sparsity of the weights.
        original_model = keras.Sequential([
            layers.Dense(5, input_shape=(5, )),
            layers.Flatten(),
        ])
        # Reset the kernel weights to reflect potential zero drifting of
        # the cluster centroids
        first_layer_weights = original_model.layers[0].get_weights()
        # `[0][:]` is a numpy view of the kernel: zero the first two rows and
        # seed rows 3/4 with small values near zero before writing back.
        first_layer_weights[0][:][0:2] = 0.0
        first_layer_weights[0][:][3] = [-0.13, -0.08, -0.05, 0.005, 0.13]
        first_layer_weights[0][:][4] = [-0.13, -0.08, -0.05, 0.005, 0.13]
        original_model.layers[0].set_weights(first_layer_weights)
        clustering_params = {
            "number_of_clusters": 6,
            "cluster_centroids_init": CentroidInitialization.LINEAR,
            "preserve_sparsity": True
        }
        clustered_model = experimental_cluster.cluster_weights(
            original_model, **clustering_params)
        # Count the unique kernel weights before any fine-tuning happens.
        stripped_model_before_tuning = cluster.strip_clustering(
            clustered_model)
        nr_of_unique_weights_before = self._get_number_of_unique_weights(
            stripped_model_before_tuning, 0, "kernel")
        clustered_model.compile(
            loss=keras.losses.categorical_crossentropy,
            optimizer="adam",
            metrics=["accuracy"],
        )
        clustered_model.fit(x=self.dataset_generator(), steps_per_epoch=100)
        stripped_model_after_tuning = cluster.strip_clustering(clustered_model)
        weights_after_tuning = stripped_model_after_tuning.layers[0].kernel
        nr_of_unique_weights_after = self._get_number_of_unique_weights(
            stripped_model_after_tuning, 0, "kernel")
        # Check after sparsity-aware clustering, despite zero centroid can drift,
        # the final number of unique weights remains the same
        self.assertLessEqual(nr_of_unique_weights_after,
                             nr_of_unique_weights_before)
        # Check that the null weights stayed the same before and after tuning.
        # There might be new weights that become zeros but sparsity-aware
        # clustering preserves the original null weights in the original positions
        # of the weight array
        self.assertTrue(
            np.array_equal(first_layer_weights[0][:][0:2],
                           weights_after_tuning[:][0:2]))
        # Check that the number of unique weights matches the number of clusters.
        self.assertLessEqual(nr_of_unique_weights_after,
                             clustering_params["number_of_clusters"])
Ejemplo n.º 5
0
  def testClusterCustomClusterableLayerWithSparsityPreservation(self):
    """Verifies that a custom clusterable layer is being clustered correctly when sparsity preservation is enabled."""
    # Enable sparsity preservation on top of the shared clustering params.
    sparsity_aware_params = dict(self.params, preserve_sparsity=True)
    clustered_layer = experimental_cluster.cluster_weights(
        self.custom_clusterable_layer, **sparsity_aware_params)
    self.model.add(clustered_layer)
    self.model.build(input_shape=(10, 1))

    self._validate_clustered_layer(self.custom_clusterable_layer,
                                   clustered_layer)
    # The wrapped layer must still expose its kernel as clusterable.
    self.assertEqual([('kernel', clustered_layer.layer.kernel)],
                     clustered_layer.layer.get_clusterable_weights())
Ejemplo n.º 6
0
  def testClusterModelValidLayersSuccessfulWithSparsityPreservation(self):
    """Verifies that clustering a sequential model results in all clusterable layers within the model being clustered when sparsity preservation is enabled."""
    sparsity_aware_params = dict(self.params, preserve_sparsity=True)
    original_model = keras.Sequential([
        self.keras_clusterable_layer,
        self.keras_non_clusterable_layer,
        self.custom_clusterable_layer,
    ])
    clustered_model = experimental_cluster.cluster_weights(
        original_model, **sparsity_aware_params)
    clustered_model.build(input_shape=(1, 28, 28, 1))

    # Clustering must keep the layer count and wrap each layer validly.
    self.assertEqual(len(original_model.layers), len(clustered_model.layers))
    for original_layer, wrapped_layer in zip(original_model.layers,
                                             clustered_model.layers):
      self._validate_clustered_layer(original_layer, wrapped_layer)
    def testSparsityIsPreservedDuringTraining(self):
        """Verifies that fine-tuning a clustered model keeps its sparsity.

        A specific random seed is set so the initial kernels reliably contain
        some null weights to test sparsity preservation with.
        """
        tf.random.set_seed(1)

        # Verifies that training a clustered model does not destroy the sparsity of
        # the weights.
        original_model = keras.Sequential([
            layers.Dense(5, input_shape=(5, )),
            layers.Dense(5),
        ])

        # Using a minimum number of centroids to make it more likely that some
        # weights will be zero.
        clustering_params = {
            "number_of_clusters": 3,
            "cluster_centroids_init": CentroidInitialization.LINEAR,
            "preserve_sparsity": True
        }

        clustered_model = experimental_cluster.cluster_weights(
            original_model, **clustering_params)

        # Record which weights are non-zero before any fine-tuning.
        stripped_model_before_tuning = cluster.strip_clustering(
            clustered_model)
        weights_before_tuning = stripped_model_before_tuning.get_weights()[0]
        non_zero_weight_indices_before_tuning = np.nonzero(
            weights_before_tuning)

        clustered_model.compile(
            loss=keras.losses.categorical_crossentropy,
            optimizer="adam",
            metrics=["accuracy"],
        )
        clustered_model.fit(x=self.dataset_generator2(), steps_per_epoch=1)

        stripped_model_after_tuning = cluster.strip_clustering(clustered_model)
        weights_after_tuning = stripped_model_after_tuning.get_weights()[0]
        non_zero_weight_indices_after_tuning = np.nonzero(weights_after_tuning)
        weights_as_list_after_tuning = weights_after_tuning.reshape(
            -1, ).tolist()
        unique_weights_after_tuning = set(weights_as_list_after_tuning)

        # Check that the null weights stayed the same before and after tuning.
        self.assertTrue(
            np.array_equal(non_zero_weight_indices_before_tuning,
                           non_zero_weight_indices_after_tuning))

        # Check that the number of unique weights matches the number of
        # clusters this model was actually clustered with. Fix: the original
        # asserted against self.params["number_of_clusters"], which belongs to
        # the shared fixture and may differ from the 3 clusters requested in
        # clustering_params above.
        self.assertLessEqual(len(unique_weights_after_tuning),
                             clustering_params["number_of_clusters"])
Ejemplo n.º 8
0
  def testClusterFunctionalModelSelectivelyWithSparsityPreservation(self):
    """Verifies that layers within a functional model can be clustered selectively when sparsity preservation is enabled."""
    sparsity_aware_params = dict(self.params, preserve_sparsity=True)
    input_a = keras.Input(shape=(10,))
    input_b = keras.Input(shape=(10,))
    # Only the branch built from input_a goes through the clustering wrapper.
    clustered_branch = experimental_cluster.cluster_weights(
        layers.Dense(10), **sparsity_aware_params)(input_a)
    plain_branch = layers.Dense(10)(input_b)
    merged = layers.Add()([clustered_branch, plain_branch])
    clustered_model = keras.Model(inputs=[input_a, input_b], outputs=merged)

    # Layers 0/1 are the inputs; layer 2 is the wrapped Dense, layer 3 is not.
    self.assertIsInstance(clustered_model.layers[2],
                          cluster_wrapper.ClusterWeights)
    self.assertNotIsInstance(clustered_model.layers[3],
                             cluster_wrapper.ClusterWeights)
Ejemplo n.º 9
0
  def testClusterSequentialModelSelectivelyWithSparsityPreservation(self):
    """Verifies that layers within a sequential model can be clustered selectively when sparsity preservation is enabled."""
    sparsity_aware_params = dict(self.params, preserve_sparsity=True)
    model = keras.Sequential()
    # Only the first copy of the layer goes through the clustering wrapper.
    model.add(
        experimental_cluster.cluster_weights(self.keras_clusterable_layer,
                                             **sparsity_aware_params))
    model.add(self.keras_clusterable_layer)
    model.build(input_shape=(1, 10))

    self.assertIsInstance(model.layers[0], cluster_wrapper.ClusterWeights)
    self.assertNotIsInstance(model.layers[1], cluster_wrapper.ClusterWeights)
def _cluster_model(original_model, sparsity_flag):
    """Apply the clustering wrapper, compile and train the model.

    Args:
        original_model: the Keras model to cluster.
        sparsity_flag: whether clustering should preserve null weights.

    Returns:
        A (clustered model, stripped clustered model) tuple.
    """
    cluster_epoch = 1
    clustering_params = dict(
        number_of_clusters=8,
        cluster_centroids_init=(
            tfmot_cluster_config.CentroidInitialization.DENSITY_BASED),
        preserve_sparsity=sparsity_flag,
    )
    cluster_model = exp_tfmot_cluster.cluster_weights(original_model,
                                                      **clustering_params)

    # Fine-tune the wrapped model for one epoch, without any callbacks.
    cluster_model = _train_model(cluster_model, [], cluster_epoch)

    # The stripped copy keeps the clustered weights but drops the wrappers.
    clustered_model_stripped = tfmot_cluster.strip_clustering(cluster_model)

    return cluster_model, clustered_model_stripped
Ejemplo n.º 11
0
    def testClusterSimpleDenseModel(self, distribution, clustering):
        """End-to-end test."""
        # Build and compile the clustered model inside the distribution scope.
        with distribution.scope():
            model = experimental_cluster.cluster_weights(
                keras_test_utils.build_simple_dense_model(), **clustering)
            model.compile(loss='categorical_crossentropy',
                          optimizer='sgd',
                          metrics=['accuracy'])

        model.summary()
        # Train briefly on random data, then run inference once.
        features = np.random.rand(20, 10)
        labels = keras.utils.to_categorical(
            np.random.randint(5, size=(20, 1)), 5)
        model.fit(features, labels, epochs=1, batch_size=20)
        model.predict(np.random.rand(20, 10))

        # After stripping, the first kernel must contain at most
        # `number_of_clusters` distinct values.
        stripped_model = cluster.strip_clustering(model)
        kernel_values = stripped_model.layers[0].kernel.numpy().flatten()
        unique_weights = set(kernel_values.tolist())
        self.assertLessEqual(len(unique_weights),
                             clustering['number_of_clusters'])
    def testPassingModelWithUniformWeightsToPCQAT(self, uniform_weights):
        """If pruned_clustered_model has uniform weights, it won't break PCQAT.

        Args:
            uniform_weights: the single value every kernel weight is set to
                before clustering (parameterized by the test runner).
        """
        preserve_sparsity = True
        original_model = tf.keras.Sequential([
            layers.Dense(5, activation='softmax', input_shape=(10, )),
            layers.Flatten(),
        ])

        # Manually set all weights to the same value in the Dense layer
        first_layer_weights = original_model.layers[0].get_weights()
        first_layer_weights[0][:] = uniform_weights
        original_model.layers[0].set_weights(first_layer_weights)

        # Start the sparsity-aware clustering
        clustering_params = {
            'number_of_clusters': 4,
            'cluster_centroids_init':
            cluster_config.CentroidInitialization.LINEAR,
            'preserve_sparsity': True
        }

        clustered_model = experimental_cluster.cluster_weights(
            original_model, **clustering_params)
        clustered_model = cluster.strip_clustering(clustered_model)

        # Baseline stats of the clustered model before PCQAT runs.
        nr_of_unique_weights_after = self._get_number_of_unique_weights(
            clustered_model, 0, 'kernel')
        sparsity_pruning = self._get_sparsity(clustered_model)

        quant_aware_annotate_model = (
            quantize.quantize_annotate_model(clustered_model))

        # PCQAT must not drop sparsity below the pre-PCQAT level and must
        # leave the number of unique weights unchanged.
        sparsity_pcqat, unique_weights_pcqat = self._pcqat_training(
            preserve_sparsity, quant_aware_annotate_model)
        self.assertAllGreaterEqual(np.array(sparsity_pcqat),
                                   sparsity_pruning[0])
        self.assertAllEqual(nr_of_unique_weights_after, unique_weights_pcqat)
 def apply_clustering_to_conv2d(layer):
     """Return `layer` wrapped for clustering if it is a Conv2D, else as-is."""
     # Non-Conv2D layers pass through untouched; `clustering_params` comes
     # from the enclosing scope.
     if not isinstance(layer, tf.keras.layers.Conv2D):
         return layer
     return exp_tfmot_cluster.cluster_weights(layer, **clustering_params)