Example 1
    def testPruneStopAndRestartOnModel(self, save_restore_fn):
        params = {
            # Ramp sparsity from 0.2 to 0.6 over steps 0-4 (power=3, frequency=1).
            'pruning_schedule':
                pruning_schedule.PolynomialDecay(0.2, 0.6, 0, 4, 3, 1),
            'block_size': (1, 1),
            'block_pooling_type': 'AVG'
        }
        model = prune.prune_low_magnitude(
            keras_test_utils.build_simple_dense_model(), **params)
        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        # Model hasn't been trained yet. Sparsity 0.0
        test_utils.assert_model_sparsity(self, 0.0, model)

        model.fit(np.random.rand(20, 10),
                  np_utils.to_categorical(np.random.randint(5, size=(20, 1)),
                                          5),
                  batch_size=20,
                  callbacks=[pruning_callbacks.UpdatePruningStep()])
        # Training has run only 1 step. Sparsity 0.2 (initial_sparsity)
        test_utils.assert_model_sparsity(self, 0.2, model)

        model = save_restore_fn(model)
        model.fit(np.random.rand(20, 10),
                  np_utils.to_categorical(np.random.randint(5, size=(20, 1)),
                                          5),
                  batch_size=20,
                  epochs=3,
                  callbacks=[pruning_callbacks.UpdatePruningStep()])
        # Training has run all 4 steps. Sparsity 0.6 (final_sparsity)
        test_utils.assert_model_sparsity(self, 0.6, model)

        self._check_strip_pruning_matches_original(model, 0.6)
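
Example 2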
    def testPrunesModel_CustomTrainingLoop_ReachesTargetSparsity(self):
        pruned_model = prune.prune_low_magnitude(
            keras_test_utils.build_simple_dense_model())

        batch_size = 20
        x_train = np.random.rand(batch_size, 10)
        y_train = keras.utils.to_categorical(
            np.random.randint(5, size=(batch_size, 1)), 5)

        loss = keras.losses.categorical_crossentropy
        optimizer = keras.optimizers.SGD()

        unused_arg = -1

        step_callback = pruning_callbacks.UpdatePruningStep()
        step_callback.set_model(pruned_model)
        pruned_model.optimizer = optimizer

        step_callback.on_train_begin()
        # 2 epochs
        for _ in range(2):
            step_callback.on_train_batch_begin(batch=unused_arg)
            inp = np.reshape(x_train,
                             [batch_size, 10])  # model input shape is [10].
            with tf.GradientTape() as tape:
                logits = pruned_model(inp, training=True)
                loss_value = loss(y_train, logits)
                grads = tape.gradient(loss_value,
                                      pruned_model.trainable_variables)
                optimizer.apply_gradients(
                    zip(grads, pruned_model.trainable_variables))
            step_callback.on_epoch_end(batch=unused_arg)

        test_utils.assert_model_sparsity(self, 0.5, pruned_model)
Example 3
def train(model, x_train, y_train, x_test, y_test):
    model.compile(loss=tf.keras.losses.categorical_crossentropy,
                  optimizer='adam',
                  metrics=['accuracy'])

    # Print the model summary.
    model.summary()

    # Add a pruning step callback to peg the pruning step to the optimizer's
    # step. Also add a callback to log pruning summaries to TensorBoard.
    callbacks = [
        pruning_callbacks.UpdatePruningStep(),
        pruning_callbacks.PruningSummaries(log_dir='/tmp/logs')
    ]

    model.fit(x_train,
              y_train,
              batch_size=batch_size,
              epochs=epochs,
              verbose=1,
              callbacks=callbacks,
              validation_data=(x_test, y_test))
    score = model.evaluate(x_test, y_test, verbose=0)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])

    model = prune.strip_pruning(model)
    return model
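
Example 4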
    def testPrunesMnist_ReachesTargetSparsity(self, model_type):
        model = test_utils.build_mnist_model(model_type, self.params)
        if model_type == 'layer_list':
            model = keras.Sequential(
                prune.prune_low_magnitude(model, **self.params))
        elif model_type in ['sequential', 'functional']:
            model = prune.prune_low_magnitude(model, **self.params)

        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        test_utils.assert_model_sparsity(self,
                                         0.0,
                                         model,
                                         rtol=1e-4,
                                         atol=1e-4)
        model.fit(np.random.rand(32, 28, 28, 1),
                  keras.utils.to_categorical(
                      np.random.randint(10, size=(32, 1)), 10),
                  callbacks=[pruning_callbacks.UpdatePruningStep()])

        test_utils.assert_model_sparsity(self,
                                         0.5,
                                         model,
                                         rtol=1e-4,
                                         atol=1e-4)

        self._check_strip_pruning_matches_original(model, 0.5)
Example 5
  def testPruneCheckpoints_CheckpointsNotSparse(self):
    is_model_sparsity_not_list = []

    # Run multiple times since problem doesn't always happen.
    for _ in range(3):
      model = keras_test_utils.build_simple_dense_model()
      pruned_model = prune.prune_low_magnitude(model, **self.params)

      checkpoint_dir = tempfile.mkdtemp()
      checkpoint_path = checkpoint_dir + '/weights.{epoch:02d}.tf'

      callbacks = [
          pruning_callbacks.UpdatePruningStep(),
          tf.keras.callbacks.ModelCheckpoint(
              filepath=checkpoint_path, save_weights_only=True, save_freq=1)
      ]

      # Train one step. Sparsity reaches final sparsity.
      self._train_model(pruned_model, epochs=1, callbacks=callbacks)
      test_utils.assert_model_sparsity(self, 0.5, pruned_model)

      latest_checkpoint = tf.train.latest_checkpoint(checkpoint_dir)

      same_architecture_model = keras_test_utils.build_simple_dense_model()
      pruned_model = prune.prune_low_magnitude(same_architecture_model,
                                               **self.params)

      # Sanity check.
      test_utils.assert_model_sparsity(self, 0, pruned_model)

      pruned_model.load_weights(latest_checkpoint)
      is_model_sparsity_not_list.append(
          test_utils.is_model_sparsity_not(0.5, pruned_model))

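    # The bug is intermittent, so it is enough that at least one restored
    # checkpoint fails to keep 0.5 sparsity.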
    self.assertTrue(any(is_model_sparsity_not_list))
Example 6
  def testPrunesSimpleDenseModel(self, distribution):
    with distribution.scope():
      model = prune.prune_low_magnitude(
          keras_test_utils.build_simple_dense_model(), **self.params)
      model.compile(
          loss='categorical_crossentropy',
          optimizer='sgd',
          metrics=['accuracy'])

    # Model hasn't been trained yet. Sparsity 0.0
    test_utils.assert_model_sparsity(self, 0.0, model)

    # Train the pruned model; after two epochs sparsity reaches 0.5.
    model.fit(
        np.random.rand(20, 10),
        keras.utils.np_utils.to_categorical(
            np.random.randint(5, size=(20, 1)), 5),
        epochs=2,
        callbacks=[pruning_callbacks.UpdatePruningStep()],
        batch_size=20)
    model.predict(np.random.rand(20, 10))
    test_utils.assert_model_sparsity(self, 0.5, model)

    _, keras_file = tempfile.mkstemp('.h5')
    keras.models.save_model(model, keras_file)

    with prune.prune_scope():
      loaded_model = keras.models.load_model(keras_file)

    test_utils.assert_model_sparsity(self, 0.5, loaded_model)
Example 7
    def testRNNLayersWithRNNCellParams(self):
        model = keras.Sequential()
        model.add(
            prune.prune_low_magnitude(
                keras.layers.RNN([
                    layers.LSTMCell(10),
                    layers.GRUCell(10),
                    layers.PeepholeLSTMCell(10),
                    layers.SimpleRNNCell(10)
                ]),
                input_shape=(3, 4),
                **self.params))

        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        test_utils.assert_model_sparsity(self, 0.0, model)
        model.fit(np.random.randn(
            *self._batch(model.input.get_shape().as_list(), 32)),
                  np.random.randn(
                      *self._batch(model.output.get_shape().as_list(), 32)),
                  callbacks=[pruning_callbacks.UpdatePruningStep()])

        test_utils.assert_model_sparsity(self, 0.5, model)

        self._check_strip_pruning_matches_original(model, 0.5)
Example 8
    def testPrunesPreviouslyUnprunedModel(self):
        model = keras_test_utils.build_simple_dense_model()
        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        # Simple unpruned model. No sparsity.
        model.fit(np.random.rand(20, 10),
                  np_utils.to_categorical(np.random.randint(5, size=(20, 1)),
                                          5),
                  epochs=2,
                  batch_size=20)
        test_utils.assert_model_sparsity(self, 0.0, model)

        # Apply pruning to model.
        model = prune.prune_low_magnitude(model, **self.params)
        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        # Since the model was just recompiled, the optimizer's step count starts from 0.
        model.fit(np.random.rand(20, 10),
                  np_utils.to_categorical(np.random.randint(5, size=(20, 1)),
                                          5),
                  batch_size=20,
                  callbacks=[pruning_callbacks.UpdatePruningStep()])
        test_utils.assert_model_sparsity(self, 0.5, model)

        self._check_strip_pruning_matches_original(model, 0.5)
Example 9
def train_and_save(models, x_train, y_train, x_test, y_test):
    for model in models:
        model.compile(loss=tf.keras.losses.categorical_crossentropy,
                      optimizer='adam',
                      metrics=['accuracy'])
        # Print the model summary.
        model.summary()
        # Add a pruning step callback to peg the pruning step to the optimizer's
        # step. Also add a callback to log pruning summaries to TensorBoard.
        callbacks = [
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=FLAGS.output_dir)
        ]
        model.fit(x_train,
                  y_train,
                  batch_size=batch_size,
                  epochs=epochs,
                  verbose=1,
                  callbacks=callbacks,
                  validation_data=(x_test, y_test))
        score = model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])
        # Export and import the model. Check that accuracy persists.
        _, keras_file = tempfile.mkstemp('.h5')
        print('Saving model to: ', keras_file)
        keras.models.save_model(model, keras_file)
        with prune.prune_scope():
            loaded_model = keras.models.load_model(keras_file)
        score = loaded_model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])
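
Example 10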
    def testMbyNSparsityPruning_SupportedLayers(self,
                                                layer_type,
                                                layer_arg,
                                                input_shape,
                                                m_by_n=(2, 4),
                                                sparsity_ratio=0.50):
        """Check that we prune supported layers with m by n sparsity."""
        self.params.update({'sparsity_m_by_n': m_by_n})

        model = keras.Sequential()
        model.add(
            prune.prune_low_magnitude(layer_type(*layer_arg),
                                      input_shape=input_shape,
                                      **self.params))
        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])

        test_utils.assert_model_sparsity(self, 0.0, model)
        model.fit(np.random.randn(
            *self._batch(model.input.get_shape().as_list(), 32)),
                  np.random.randn(
                      *self._batch(model.output.get_shape().as_list(), 32)),
                  callbacks=[pruning_callbacks.UpdatePruningStep()])

        test_utils.assert_model_sparsity_m_by_n(self, model, m_by_n)
        self._check_strip_pruning_matches_original(model, sparsity_ratio)
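
Example 11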
    @staticmethod
    def _train_model(model,
                     epochs=1,
                     x_train=None,
                     y_train=None,
                     callbacks=None):
        if x_train is None:
            x_train = np.random.rand(20, 10)
        if y_train is None:
            y_train = keras.utils.to_categorical(
                np.random.randint(5, size=(20, 1)), 5)

        if model.optimizer is None:
            model.compile(loss='categorical_crossentropy',
                          optimizer='sgd',
                          metrics=['accuracy'])

        if callbacks is None:
            callbacks = []
            if PruneIntegrationTest._is_pruned(model):
                callbacks = [pruning_callbacks.UpdatePruningStep()]

        model.fit(x_train,
                  y_train,
                  epochs=epochs,
                  batch_size=20,
                  callbacks=callbacks)
Example 12
def train_and_save(models, x_train, y_train, x_test, y_test):
    for model in models:
        model.compile(loss=tf.keras.losses.categorical_crossentropy,
                      optimizer='adam',
                      metrics=['accuracy'])

        # Print the model summary.
        model.summary()

        # Add a pruning step callback to peg the pruning step to the optimizer's
        # step. Also add a callback to log pruning summaries to TensorBoard.
        if not os.path.exists(FLAGS.output_dir):
            os.makedirs(FLAGS.output_dir)
        callbacks = [
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=FLAGS.output_dir)
        ]

        model.fit(x_train,
                  y_train,
                  batch_size=batch_size,
                  epochs=epochs,
                  verbose=1,
                  callbacks=callbacks,
                  validation_data=(x_test, y_test))
        score = model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])

        # Export and import the model. Check that accuracy persists.
        saved_model_dir = '/tmp/saved_model'
        if not os.path.exists(saved_model_dir):
            model.fit(x_train,
                      y_train,
                      batch_size=batch_size,
                      epochs=epochs,
                      verbose=1,
                      callbacks=callbacks,
                      validation_data=(x_test, y_test))
            score = model.evaluate(x_test, y_test, verbose=0)
            print('Test loss:', score[0])
            print('Test accuracy:', score[1])
            os.makedirs(saved_model_dir)
            print('Saving model to: ', saved_model_dir)
            tf.keras.models.save_model(model,
                                       saved_model_dir,
                                       save_format='tf')
        print('Loading model from: ', saved_model_dir)
        loaded_model = tf.keras.models.load_model(saved_model_dir)

        score = loaded_model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])
Example 13
  def testDeepLayerUpdatePruningSteps(self):
    pruned_model, x_train, y_train = self._pruned_model_setup(
        has_deep_layer=True)
    pruned_model.fit(
        x_train,
        y_train,
        batch_size=self._BATCH_SIZE,
        epochs=3,
        callbacks=[
            pruning_callbacks.UpdatePruningStep()
        ])

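    # The pruned layer sits inside the deep layer, hence layers[-1]._layers[0];
    # after 3 one-batch epochs the zero-based pruning_step reads 2.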
    self.assertEqual(2, pruned_model.layers[-1]._layers[0].pruning_step)
Example 14
    def fit(self, X_train, y_train):
        with self.graph.as_default(), self.session.as_default():
            callbacks = None
            if self.prune_params:
                # A pruned model has to be trained with UpdatePruningStep.
                callbacks = [
                    pruning_callbacks.UpdatePruningStep(),
                ]
            self.model.fit(X_train,
                           y_train,
                           epochs=self.train_epochs,
                           batch_size=self.batch_size,
                           callbacks=callbacks)
Example 15
def fitModel(model, train_data, val_data, test_data, stepsPerEpoch,
             evalStepsPerEpoch):
    """Runs Keras fit and saves model.
    Arguments:
      STRATEGY: Mirrored strategy
      models: list of models to train
      train_data: training data
      val_data: validation data  
    Returns:
      None
    """

    if not os.path.exists(FLAGS.outdir + '/%s/' % model.name):
        os.makedirs(FLAGS.outdir + '/%s/' % model.name)

    callbacks = getCallbacks(FLAGS.outdir + '/%s/' % model.name)
    if FLAGS.prune:
        callbacks.append(pruning_callbacks.UpdatePruningStep())

    start = time.time()
    LOSS = tf.keras.losses.CategoricalCrossentropy()
    OPTIMIZER = Adam(learning_rate=FLAGS.lr,
                     beta_1=FLAGS.beta_1,
                     beta_2=FLAGS.beta_2,
                     epsilon=FLAGS.epsilon,
                     amsgrad=True)
    model.compile(loss=LOSS, optimizer=OPTIMIZER, metrics=["accuracy"])
    model.summary()

    history = model.fit(train_data,
                        epochs=FLAGS.epochs,
                        validation_data=val_data,
                        callbacks=callbacks,
                        verbose=1)
    model.load_weights(FLAGS.outdir + '/%s/weights_best.h5' % model.name)
    pd.DataFrame.from_dict(history.history).to_csv(
        FLAGS.outdir + '/%s/history_dict.csv' % model.name, index=False)
    test_score = model.evaluate(test_data)
    print("Done training model {}".format(model.name))
    print('\n Test loss:', test_score[0])
    print('\n Test accuracy:', test_score[1])
    np.savez(FLAGS.outdir + '/%s/scores' % model.name, test_score)

    if FLAGS.prune:
        model_stripped = strip_pruning(model)
        model_stripped.save(FLAGS.outdir + '/%s/%s.h5' %
                            (model.name, model.name))
    else:
        model.save(FLAGS.outdir + '/%s/%s.h5' % (model.name, model.name))
    end = time.time()
    print('\n It took {} minutes to train!\n'.format((end - start) / 60.))
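
Example 16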
def _prune_model(original_model):
    """Apply the pruning wrapper, compile and train the model."""
    prune_epoch = 1
    pruning_params = {
        'pruning_schedule':
        pruning_schedule.ConstantSparsity(0.50, begin_step=0, frequency=10)
    }
    pruning_model = prune.prune_low_magnitude(original_model, **pruning_params)
    callbacks = [pruning_callbacks.UpdatePruningStep()]
    pruning_model = _train_model(pruning_model, callbacks, prune_epoch)
    pruning_model_stripped = prune.strip_pruning(pruning_model)

    return pruning_model, pruning_model_stripped
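
Example 17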
    def testUpdatesPruningStep(self):
        model = prune.prune_low_magnitude(
            keras_test_utils.build_simple_dense_model())
        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        model.fit(np.random.rand(20, 10),
                  keras.utils.np_utils.to_categorical(
                      np.random.randint(5, size=(20, 1)), 5),
                  batch_size=20,
                  epochs=3,
                  callbacks=[pruning_callbacks.UpdatePruningStep()])

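        # 20 samples at batch_size=20 give one step per epoch, so after 3
        # epochs the zero-based pruning_step is 2.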
        self.assertEqual(2, K.get_value(model.layers[0].pruning_step))
        self.assertEqual(2, K.get_value(model.layers[1].pruning_step))
Example 18
  def testRNNLayersSingleCell_ReachesTargetSparsity(self, layer_type):
    model = keras.Sequential()
    model.add(
        prune.prune_low_magnitude(
            layer_type(10), input_shape=(3, 4), **self.params))

    model.compile(
        loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
    test_utils.assert_model_sparsity(self, 0.0, model)
    model.fit(
        np.random.randn(*self._batch(model.input.get_shape().as_list(), 32)),
        np.random.randn(*self._batch(model.output.get_shape().as_list(), 32)),
        callbacks=[pruning_callbacks.UpdatePruningStep()])

    test_utils.assert_model_sparsity(self, 0.5, model)

    self._check_strip_pruning_matches_original(model, 0.5)
Example 19
  def testUpdatePruningStepsAndLogsSummaries(self):
    log_dir = tempfile.mkdtemp()
    pruned_model, x_train, y_train = self._pruned_model_setup()
    pruned_model.fit(
        x_train,
        y_train,
        batch_size=self._BATCH_SIZE,
        epochs=3,
        callbacks=[
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=log_dir)
        ])

    self.assertEqual(
        2, tf.keras.backend.get_value(pruned_model.layers[0].pruning_step))
    self.assertEqual(
        2, tf.keras.backend.get_value(pruned_model.layers[1].pruning_step))

    self._assertLogsExist(log_dir)
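
Example 20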
    def testSparsityPruningMbyN_SupportedSubclassLayers(self):
        """Check subclass layer that is supported for m by n sparsity."""
        m_by_n = (2, 4)
        self.params.update({'sparsity_m_by_n': m_by_n})

        class SubclassLayer(tf.keras.layers.Layer):
            def __init__(self):
                super(SubclassLayer, self).__init__()
                self.conv1 = tf.keras.layers.Conv2D(2,
                                                    3,
                                                    activation='relu',
                                                    padding='same',
                                                    input_shape=[7, 7, 3])
                self.conv2 = tf.keras.layers.DepthwiseConv2D(3)
                self.flatten = keras.layers.Flatten()
                self.dense = layers.Dense(10, activation='sigmoid')

            def call(self, inputs):
                x = self.conv1(inputs)
                x = self.conv2(x)
                x = self.flatten(x)
                x = self.dense(x)
                return x

        inputs = keras.Input(shape=(7, 7, 3))
        outputs = SubclassLayer()(inputs)
        model = keras.Model(inputs, outputs)
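        # prune_low_magnitude cannot reach the weights inside a custom
        # subclassed layer, so wrapping this model raises ValueError.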
        with self.assertRaises(ValueError):
            model = prune.prune_low_magnitude(model, **self.params)

        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])

        test_utils.assert_model_sparsity(self, 0.0, model)
        model.fit(np.random.randn(
            *self._batch(model.input.get_shape().as_list(), 32)),
                  np.random.randn(
                      *self._batch(model.output.get_shape().as_list(), 32)),
                  callbacks=[pruning_callbacks.UpdatePruningStep()])

        test_utils.assert_model_sparsity_m_by_n(self, model, m_by_n)
        self._check_strip_pruning_matches_original(model, 0.5)
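
Example 21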
  def testPrunesSingleLayer_ReachesTargetSparsity(self, layer_type):
    model = keras.Sequential()
    args, input_shape = self._get_params_for_layer(layer_type)
    if args is None:
      return  # Test for layer not supported yet.
    model.add(prune.prune_low_magnitude(
        layer_type(*args), input_shape=input_shape, **self.params))

    model.compile(
        loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
    test_utils.assert_model_sparsity(self, 0.0, model)
    model.fit(
        np.random.randn(*self._batch(model.input.get_shape().as_list(), 32)),
        np.random.randn(*self._batch(model.output.get_shape().as_list(), 32)),
        callbacks=[pruning_callbacks.UpdatePruningStep()])

    test_utils.assert_model_sparsity(self, 0.5, model)

    self._check_strip_pruning_matches_original(model, 0.5)
Example 22
def train_and_save(model, x_train, y_train, x_test, y_test):
    model.compile(
        loss="categorical_crossentropy",
        optimizer="adam",
        metrics=["accuracy"])

    # Print the model summary.
    model.summary()

    # Add a pruning step callback to peg the pruning step to the optimizer's
    # step. Also add a callback to log pruning summaries to TensorBoard.
    callbacks = [
        pruning_callbacks.UpdatePruningStep(),
        pruning_callbacks.PruningSummaries(log_dir="/tmp/mnist_prune")
    ]

    model.fit(
        x_train,
        y_train,
        batch_size=batch_size,
        epochs=epochs,
        verbose=1,
        callbacks=callbacks,
        validation_data=(x_test, y_test))
    score = model.evaluate(x_test, y_test, verbose=0)
    print("Test loss:", score[0])
    print("Test accuracy:", score[1])

    print_model_sparsity(model)

    # Export and import the model. Check that accuracy persists.
    _, keras_file = tempfile.mkstemp(".h5")
    print("Saving model to: ", keras_file)
    save_model(model, keras_file)
    
    print("Reloading model")
    with prune.prune_scope():
        loaded_model = load_qmodel(keras_file)
    score = loaded_model.evaluate(x_test, y_test, verbose=0)
    print("Test loss:", score[0])
    print("Test accuracy:", score[1])
    def testPruneRecursivelyReachesTargetSparsity(self):
        internal_model = keras.Sequential(
            [keras.layers.Dense(10, input_shape=(10, ))])
        model = keras.Sequential([
            internal_model,
            layers.Flatten(),
            layers.Dense(1),
        ])
        model.compile(loss='binary_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        test_utils.assert_model_sparsity(self, 0.0, model)
        model.fit(np.random.randint(10, size=(32, 10)),
                  np.random.randint(2, size=(32, 1)),
                  callbacks=[pruning_callbacks.UpdatePruningStep()])

        test_utils.assert_model_sparsity(self, 0.5, model)

        input_data = np.random.randint(10, size=(32, 10))
        self._check_strip_pruning_matches_original(model, 0.5, input_data)
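
Example 24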
    def testPrunesEmbedding(self):
        model = keras.Sequential()
        model.add(
            prune.prune_low_magnitude(keras.layers.Embedding(input_dim=10,
                                                             output_dim=3),
                                      input_shape=(5, ),
                                      **self.params))
        model.add(keras.layers.Flatten())
        model.add(keras.layers.Dense(1, activation='sigmoid'))

        model.compile(loss='binary_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])
        test_utils.assert_model_sparsity(self, 0.0, model)
        model.fit(np.random.randint(10, size=(32, 5)),
                  np.random.randint(2, size=(32, 1)),
                  callbacks=[pruning_callbacks.UpdatePruningStep()])

        test_utils.assert_model_sparsity(self, 0.5, model)

        self._check_strip_pruning_matches_original(model, 0.5)
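
Example 25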
    def testUpdatePruningStepsAndLogsSummaries_CustomTrainingLoop(self):
        log_dir = tempfile.mkdtemp()
        pruned_model, loss, optimizer, x_train, y_train = self._pruned_model_setup(
            custom_training_loop=True)

        unused_arg = -1

        step_callback = pruning_callbacks.UpdatePruningStep()
        log_callback = pruning_callbacks.PruningSummaries(log_dir=log_dir)
        # TODO(tfmot): we need a separate API for custom training loops
        # that doesn't rely on users setting the model and optimizer.
        #
        # Example is currently based on callbacks.py configure_callbacks
        # and model.compile internals.
        step_callback.set_model(pruned_model)
        log_callback.set_model(pruned_model)
        pruned_model.optimizer = optimizer

        step_callback.on_train_begin()
        for _ in range(3):
            log_callback.on_epoch_begin(epoch=unused_arg)
            # Only one batch per epoch, given batch_size = 20 and the input shape.
            step_callback.on_train_batch_begin(batch=unused_arg)
            inp = np.reshape(
                x_train, [self._BATCH_SIZE, 10])  # model input shape is [10].
            with tf.GradientTape() as tape:
                logits = pruned_model(inp, training=True)
                loss_value = loss(y_train, logits)
                grads = tape.gradient(loss_value,
                                      pruned_model.trainable_variables)
                optimizer.apply_gradients(
                    zip(grads, pruned_model.trainable_variables))
            step_callback.on_epoch_end(batch=unused_arg)

        self.assertEqual(
            3, tf.keras.backend.get_value(pruned_model.layers[0].pruning_step))
        self.assertEqual(
            3, tf.keras.backend.get_value(pruned_model.layers[1].pruning_step))
        self._assertLogsExist(log_dir)
Example 26
  def testUpdatePruningStepsAndLogsSummaries(self):
    log_dir = tempfile.mkdtemp()
    model = prune.prune_low_magnitude(
        keras_test_utils.build_simple_dense_model())
    model.compile(
        loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
    model.fit(
        np.random.rand(20, 10),
        tf.keras.utils.to_categorical(np.random.randint(5, size=(20, 1)), 5),
        batch_size=20,
        epochs=3,
        callbacks=[
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=log_dir)
        ])

    self.assertEqual(2,
                     tf.keras.backend.get_value(model.layers[0].pruning_step))
    self.assertEqual(2,
                     tf.keras.backend.get_value(model.layers[1].pruning_step))

    self._assertLogsExist(log_dir)
Example 27
    def testPruneTrainingRaisesError_PruningStepCallbackMissing(self):
        model = prune.prune_low_magnitude(
            keras.Sequential([
                layers.Dense(10, activation='relu', input_shape=(100, )),
                layers.Dense(2, activation='sigmoid')
            ]), **self.params)

        model.compile(loss=keras.losses.categorical_crossentropy,
                      optimizer=keras.optimizers.SGD(),
                      metrics=['accuracy'])

        # Throws an error since UpdatePruningStep is missing.
        with self.assertRaises(errors_impl.InvalidArgumentError):
            model.fit(
                np.random.rand(1000, 100),
                keras.utils.to_categorical(np.random.randint(2,
                                                             size=(1000, 1))))

        model.fit(
            np.random.rand(1000, 100),
            keras.utils.to_categorical(np.random.randint(2, size=(1000, 1))),
            # Works when callback is provided.
            callbacks=[pruning_callbacks.UpdatePruningStep()])
Example 28
def prune_model(original_model, train_images, train_labels):
  batch_size = 256
  epochs = 5

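  # Hold sparsity at 75% from step 0, updating the pruning masks every 100 steps.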
  pruning_params = {
      'pruning_schedule':
      pruning_schedule.ConstantSparsity(0.75, begin_step=0, frequency=100)
  }
  pruning_model = prune.prune_low_magnitude(original_model, **pruning_params)
  pruning_model.summary()

  callbacks = [pruning_callbacks.UpdatePruningStep()]
  fit_kwargs = {
      'batch_size': batch_size,
      'epochs': epochs,
      'callbacks': callbacks,
  }
  compile_and_fit(pruning_model,
                  train_images,
                  train_labels,
                  compile_kwargs={},
                  fit_kwargs=fit_kwargs)

  return pruning_model
Example 29
    callbacks = all_callbacks(stop_patience=yamlparameters["Training_early_stopping"],
                              initial_lr=yamlparameters["Training_learning_rate"],
                              lr_factor=yamlparameters["Training_lr_factor"],
                              lr_patience=yamlparameters["Training_lr_patience"],
                              lr_epsilon=yamlparameters["Training_lr_min_delta"],
                              lr_cooldown=yamlparameters["Training_lr_cooldown"],
                              lr_minimum=yamlparameters["Training_lr_minimum"],
                              Prune_begin=experiment.get_parameter("pruning_begin_epoch"),
                              Prune_end=experiment.get_parameter("pruning_end_epoch"),
                              prune_lrs=[experiment.get_parameter("pruning_lr_factor_1"),
                                         experiment.get_parameter("pruning_lr_factor_2"),
                                         experiment.get_parameter("pruning_lr_factor_3")],
                              outputDir=yamlparameters["TrainDir"])

    callbacks.callbacks.append(pruning_callbacks.UpdatePruningStep())

    with experiment.train():
        keras_model.fit(X_train, y_train,
                        batch_size=yamlparameters["Training_batch_size"],
                        epochs=yamlparameters["Training_epochs"],
                        callbacks=callbacks.callbacks,
                        verbose=1,
                        validation_split=yamlparameters["Training_validation_split"],
                        shuffle=True)

    keras_model = strip_pruning(keras_model)
    keras_model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['binary_accuracy'])
    keras_model.save(yamlparameters["TrainDir"]+"/Best_model.h5")
Example 30
model = keras.Sequential()
model.add(keras.layers.Embedding(max_features, 128, input_length=maxlen))
model.add(keras.layers.LSTM(128))  # try using a GRU instead, for fun
model.add(keras.layers.Dropout(0.5))
model.add(keras.layers.Dense(1))
model.add(keras.layers.Activation("sigmoid"))

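# PolynomialDecay ramps sparsity from 30% to 70% between steps 1000 and 3000.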
model = prune.prune_low_magnitude(
    model,
    pruning_schedule.PolynomialDecay(initial_sparsity=0.3,
                                     final_sparsity=0.7,
                                     begin_step=1000,
                                     end_step=3000))

# try using different optimizers and different optimizer configs
model.compile(loss="binary_crossentropy",
              optimizer="adam",
              metrics=["accuracy"])
print_model_sparsity(model)

print("Train...")
model.fit(x_train,
          y_train,
          batch_size=batch_size,
          epochs=3,
          callbacks=[pruning_callbacks.UpdatePruningStep()],
          validation_data=(x_test, y_test))
score, acc = model.evaluate(x_test, y_test, batch_size=batch_size)
print_model_sparsity(model)
print("Test score:", score)
print("Test accuracy:", acc)