Example #1
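A unit test verifying that PruningSummaries raises ValueError when log_dir is not a non-empty string: an empty string, None, and a non-string object are each rejected.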
  def testPruningSummariesRaisesError_LogDirNotNonEmptyString(self):
    with self.assertRaises(ValueError):
      pruning_callbacks.PruningSummaries(log_dir='')

    with self.assertRaises(ValueError):
      pruning_callbacks.PruningSummaries(log_dir=None)

    with self.assertRaises(ValueError):
      pruning_callbacks.PruningSummaries(log_dir=object())
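
For contrast, a minimal sketch of a construction the callback accepts, using a fresh temporary directory as in Example #5:

import tempfile

from tensorflow_model_optimization.python.core.sparsity.keras import pruning_callbacks

# Any non-empty string path works; a temporary directory keeps runs isolated.
summaries = pruning_callbacks.PruningSummaries(log_dir=tempfile.mkdtemp())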
Example #2
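A training helper that compiles a prunable model, attaches UpdatePruningStep and PruningSummaries during fit, and strips the pruning wrappers before returning the trained model.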
def train(model, x_train, y_train, x_test, y_test):
    model.compile(loss=tf.keras.losses.categorical_crossentropy,
                  optimizer='adam',
                  metrics=['accuracy'])

    # Print the model summary.
    model.summary()

    # Add a pruning step callback to peg the pruning step to the optimizer's
    # step. Also add a callback to log pruning summaries to TensorBoard.
    callbacks = [
        pruning_callbacks.UpdatePruningStep(),
        pruning_callbacks.PruningSummaries(log_dir='/tmp/logs')
    ]

    model.fit(x_train,
              y_train,
              batch_size=batch_size,
              epochs=epochs,
              verbose=1,
              callbacks=callbacks,
              validation_data=(x_test, y_test))
    score = model.evaluate(x_test, y_test, verbose=0)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])

    model = prune.strip_pruning(model)
    return model
Example #3
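The same training flow applied to a list of models; each model is then saved to an HDF5 file and reloaded inside prune.prune_scope() to check that accuracy survives the round trip.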
def train_and_save(models, x_train, y_train, x_test, y_test):
    for model in models:
        model.compile(loss=tf.keras.losses.categorical_crossentropy,
                      optimizer='adam',
                      metrics=['accuracy'])
        # Print the model summary.
        model.summary()
        # Add a pruning step callback to peg the pruning step to the optimizer's
        # step. Also add a callback to log pruning summaries to TensorBoard.
        callbacks = [
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=FLAGS.output_dir)
        ]
        model.fit(x_train,
                  y_train,
                  batch_size=batch_size,
                  epochs=epochs,
                  verbose=1,
                  callbacks=callbacks,
                  validation_data=(x_test, y_test))
        score = model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])
        # Export and import the model. Check that accuracy persists.
        _, keras_file = tempfile.mkstemp('.h5')
        print('Saving model to: ', keras_file)
        keras.models.save_model(model, keras_file)
        with prune.prune_scope():
            loaded_model = keras.models.load_model(keras_file)
        score = loaded_model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])
Example #4
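A variant that logs summaries to FLAGS.output_dir (creating the directory if needed) and exports and reloads the model in the TensorFlow SavedModel format.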
def train_and_save(models, x_train, y_train, x_test, y_test):
    for model in models:
        model.compile(loss=tf.keras.losses.categorical_crossentropy,
                      optimizer='adam',
                      metrics=['accuracy'])

        # Print the model summary.
        model.summary()

        # Add a pruning step callback to peg the pruning step to the optimizer's
        # step. Also add a callback to log pruning summaries to TensorBoard.
        if not os.path.exists(FLAGS.output_dir):
            os.makedirs(FLAGS.output_dir)
        # print(FLAGS.output_dir)
        callbacks = [
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=FLAGS.output_dir)
        ]

        model.fit(x_train,
                  y_train,
                  batch_size=batch_size,
                  epochs=epochs,
                  verbose=1,
                  callbacks=callbacks,
                  validation_data=(x_test, y_test))
        score = model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])

        # Export and import the model. Check that accuracy persists.
        saved_model_dir = '/tmp/saved_model'
        if not os.path.exists(saved_model_dir):
            model.fit(x_train,
                      y_train,
                      batch_size=batch_size,
                      epochs=epochs,
                      verbose=1,
                      callbacks=callbacks,
                      validation_data=(x_test, y_test))
            score = model.evaluate(x_test, y_test, verbose=0)
            print('Test loss:', score[0])
            print('Test accuracy:', score[1])
            os.makedirs(saved_model_dir)
            print('Saving model to: ', saved_model_dir)
            tf.keras.models.save_model(model,
                                       saved_model_dir,
                                       save_format='tf')
        print('Loading model from: ', saved_model_dir)
        loaded_model = tf.keras.models.load_model(saved_model_dir)

        score = loaded_model.evaluate(x_test, y_test, verbose=0)
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])
Example #5
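A test asserting that three epochs of fit with both callbacks advance each pruned layer's pruning_step to 2 and write summary logs to log_dir.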
  def testUpdatePruningStepsAndLogsSummaries(self):
    log_dir = tempfile.mkdtemp()
    pruned_model, x_train, y_train = self._pruned_model_setup()
    pruned_model.fit(
        x_train,
        y_train,
        batch_size=self._BATCH_SIZE,
        epochs=3,
        callbacks=[
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=log_dir)
        ])

    self.assertEqual(
        2, tf.keras.backend.get_value(pruned_model.layers[0].pruning_step))
    self.assertEqual(
        2, tf.keras.backend.get_value(pruned_model.layers[1].pruning_step))

    self._assertLogsExist(log_dir)
Example #6
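An end-to-end example that trains, reports layer sparsity via print_model_sparsity, saves the model to .h5, and reloads it (here via load_qmodel) under prune.prune_scope().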
def train_and_save(model, x_train, y_train, x_test, y_test):
    model.compile(
        loss="categorical_crossentropy",
        optimizer="adam",
        metrics=["accuracy"])

    # Print the model summary.
    model.summary()

    # Add a pruning step callback to peg the pruning step to the optimizer's
    # step. Also add a callback to log pruning summaries to TensorBoard.
    callbacks = [
        pruning_callbacks.UpdatePruningStep(),
        #pruning_callbacks.PruningSummaries(log_dir=tempfile.mkdtemp())
        pruning_callbacks.PruningSummaries(log_dir="/tmp/mnist_prune")
    ]

    model.fit(
        x_train,
        y_train,
        batch_size=batch_size,
        epochs=epochs,
        verbose=1,
        callbacks=callbacks,
        validation_data=(x_test, y_test))
    score = model.evaluate(x_test, y_test, verbose=0)
    print("Test loss:", score[0])
    print("Test accuracy:", score[1])

    print_model_sparsity(model)

    # Export and import the model. Check that accuracy persists.
    _, keras_file = tempfile.mkstemp(".h5")
    print("Saving model to: ", keras_file)
    save_model(model, keras_file)
    
    print("Reloading model")
    with prune.prune_scope():
        loaded_model = load_qmodel(keras_file)
    score = loaded_model.evaluate(x_test, y_test, verbose=0)
    print("Test loss:", score[0])
    print("Test accuracy:", score[1])
    def testUpdatePruningStepsAndLogsSummaries_CustomTrainingLoop(self):
        log_dir = tempfile.mkdtemp()
        pruned_model, loss, optimizer, x_train, y_train = self._pruned_model_setup(
            custom_training_loop=True)

        unused_arg = -1

        step_callback = pruning_callbacks.UpdatePruningStep()
        log_callback = pruning_callbacks.PruningSummaries(log_dir=log_dir)
        # TODO(tfmot): we need a separate API for custom training loops
        # that doesn't rely on users setting the model and optimizer.
        #
        # Example is currently based on callbacks.py configure_callbacks
        # and model.compile internals.
        step_callback.set_model(pruned_model)
        log_callback.set_model(pruned_model)
        pruned_model.optimizer = optimizer

        step_callback.on_train_begin()
        for _ in range(3):
            log_callback.on_epoch_begin(epoch=unused_arg)
            # only one batch given batch_size = 20 and input shape.
            step_callback.on_train_batch_begin(batch=unused_arg)
            inp = np.reshape(
                x_train, [self._BATCH_SIZE, 10])  # original shape: from [10].
            with tf.GradientTape() as tape:
                logits = pruned_model(inp, training=True)
                loss_value = loss(y_train, logits)
                grads = tape.gradient(loss_value,
                                      pruned_model.trainable_variables)
                optimizer.apply_gradients(
                    zip(grads, pruned_model.trainable_variables))
            step_callback.on_epoch_end(batch=unused_arg)

        self.assertEqual(
            3, tf.keras.backend.get_value(pruned_model.layers[0].pruning_step))
        self.assertEqual(
            3, tf.keras.backend.get_value(pruned_model.layers[1].pruning_step))
        self._assertLogsExist(log_dir)
Example #8
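The same assertion as Example #5 with the pruned model built inline via prune.prune_low_magnitude and random data; 20 samples at batch size 20 give one step per epoch, so three epochs leave pruning_step at 2.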
  def testUpdatePruningStepsAndLogsSummaries(self):
    log_dir = tempfile.mkdtemp()
    model = prune.prune_low_magnitude(
        keras_test_utils.build_simple_dense_model())
    model.compile(
        loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
    model.fit(
        np.random.rand(20, 10),
        tf.keras.utils.to_categorical(np.random.randint(5, size=(20, 1)), 5),
        batch_size=20,
        epochs=3,
        callbacks=[
            pruning_callbacks.UpdatePruningStep(),
            pruning_callbacks.PruningSummaries(log_dir=log_dir)
        ])

    self.assertEqual(2,
                     tf.keras.backend.get_value(model.layers[0].pruning_step))
    self.assertEqual(2,
                     tf.keras.backend.get_value(model.layers[1].pruning_step))

    self._assertLogsExist(log_dir)
Example #9
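A config-driven training script, truncated at both ends, that wraps the model with prune.prune_low_magnitude and appends the pruning callbacks only when config['pruning'] is set.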
                         float(x_train.shape[0]) *
                         config['pruning_begin_epoch'] / config['batch_size']),
                     frequency=config['pruning_freq'])
}

if config['pruning']:
    model = prune.prune_low_magnitude(model, **pruning_params)
model.compile(loss='categorical_crossentropy',
              optimizer=opt,
              metrics=['accuracy'])

csv_logger = CSVLogger('training.csv')
callbacks = [csv_logger]
if config['pruning']:
    callbacks.append(pruning_callbacks.UpdatePruningStep())
    callbacks.append(pruning_callbacks.PruningSummaries(log_dir='log'))

if not config['data_augmentation']:
    print('Not using data augmentation.')
    model.fit(x_train,
              y_train,
              batch_size=config['batch_size'],
              epochs=config['epochs'],
              validation_data=(x_test, y_test),
              shuffle=True)
else:
    print('Using real-time data augmentation.')
    # This will do preprocessing and realtime data augmentation:
    datagen = ImageDataGenerator(
        featurewise_center=False,  # set input mean to 0 over the dataset
        samplewise_center=False,  # set each sample mean to 0
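
The opening lines of this snippet were cut off. A hypothetical reconstruction of the truncated pruning_params block is sketched below, assuming a ConstantSparsity schedule; the schedule class and the 'target_sparsity' key are guesses, and only the visible arguments come from the source.

# Hypothetical reconstruction -- the original opening lines are not shown above.
from tensorflow_model_optimization.python.core.sparsity.keras import pruning_schedule

pruning_params = {
    'pruning_schedule': pruning_schedule.ConstantSparsity(
        target_sparsity=config['target_sparsity'],  # assumed key
        begin_step=int(float(x_train.shape[0]) *
                       config['pruning_begin_epoch'] / config['batch_size']),
        frequency=config['pruning_freq'])
}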
Example #10
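A regression-style run that combines EarlyStopping and a PrintDot progress callback (defined elsewhere) with the pruning callbacks, then saves the model in the SavedModel format.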
model.summary()

import tensorflow as tf
from tensorflow import keras

from tensorflow.keras.callbacks import EarlyStopping
# The patience parameter is the amount of epochs to check for improvement
early_stop = EarlyStopping(monitor='val_loss', patience=10)
EPOCHS = 1000

from tensorflow_model_optimization.python.core.sparsity.keras import pruning_callbacks


callbacks = [
  early_stop, 
  PrintDot(),
  # Update the pruning step
  pruning_callbacks.UpdatePruningStep(),
  # Add summaries to keep track of the sparsity in different layers during training
  pruning_callbacks.PruningSummaries(log_dir='./')]
early_history = model.fit(normed_train_X, train_y,
                          epochs=EPOCHS, validation_split=0.2, verbose=0,
                          callbacks=callbacks)

# model_name = 'DeepCOOH.h5'
# model.save(model_name) 
# print(f"{model_name} saved")
model_name = 'DeepCOOH'
tf.keras.models.save_model(model, model_name)
print(f"{model_name} saved")
Example #11
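This snippet, truncated at the top, adds a custom callback that logs macro precision on the test set after every epoch alongside the two pruning callbacks.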
              metrics=['accuracy'])


class PredictionCallback(tensorflow.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        y_pred = model.predict(x_test)
        y_pred_bool = np.argmax(y_pred, axis=1)
        y_test_bool = np.argmax(y_test, axis=1)

        a = precision_score(y_test_bool, y_pred_bool, average="macro")
        f.write(str(a) + " ,")


callbacks = [
    pruning_callbacks.UpdatePruningStep(),
    pruning_callbacks.PruningSummaries(log_dir="saved_models"),
    PredictionCallback()
]

x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
"""model.fit(x_train, y_train,
              batch_size=batch_size,
              epochs=epochs,
              validation_data=(x_test, y_test), callbacks = callbacks)"""

steps_per_epoch = ceil(50000 / 32)
if not data_augmentation:
    print('Not using data augmentation.')
Example #12
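A larger training driver that builds a full model plus pruned variants and attaches the pruning callbacks only to models whose name contains "pruning".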
def buildAndTrain(yamlConfig,
                  input_shape,
                  train_data,
                  val_data,
                  steps_per_epoch,
                  eval_steps_per_epoch,
                  outdir,
                  prune=True):

    #Get full model
    model = getModel(yamlConfig['KerasModel'], yamlConfig, input_shape)
    model._name = "full"

    #Get pruned models (honor the prune argument instead of shadowing it)
    if prune:
        #Prune dense layers only
        pruning_schedule = tfmot.sparsity.keras.PolynomialDecay(
            initial_sparsity=0.0,
            final_sparsity=0.5,
            begin_step=2000,
            end_step=4000)
        model_for_layerwise_pruning = getModel("float_cnn_densePrune",
                                               yamlConfig, input_shape)
        model_for_layerwise_pruning._name = "layerwise_pruning"

        #Prune full model
        model_for_full_pruning = tfmot.sparsity.keras.prune_low_magnitude(
            model, pruning_schedule=pruning_schedule)
        model_for_full_pruning._name = "full_pruning"

        models = [model, model_for_layerwise_pruning, model_for_full_pruning]
    else:
        models = [model]

    histories, scores = list(), list()
    for model in models:
        print("Training model: {} ".format(model.name))
        model.summary()
        callbacks = getCallbacks()
        if model.name.find("pruning") != -1:
            print("Model sparsity: {} ".format(model.name))
            print_model_sparsity(model)
            callbacks = [
                pruning_callbacks.UpdatePruningStep(),
                pruning_callbacks.PruningSummaries(log_dir=outdir +
                                                   '/logs_%s/' % model.name,
                                                   profile_batch=0)
            ]
        print("Start training loop:\n\n")
        toJSON(model, outdir + '/model_%s.json' % model.name)
        model.compile(loss=LOSS, optimizer=OPTIMIZER, metrics=["accuracy"])

        history = model.fit(train_data,
                            epochs=epochs,
                            validation_data=val_data,
                            steps_per_epoch=steps_per_epoch,
                            validation_steps=eval_steps_per_epoch,
                            callbacks=callbacks,
                            verbose=1)

        val_score = model.evaluate(val_data)
        print('\n Test loss:', val_score[0])
        print('\n Test accuracy:', val_score[1])
        histories.append(history)
        scores.append(val_score)
    return histories, scores