Example no. 1
0
  def test_save_load_trackable(self, distribution, optimizer):
    """Round-trips model weights through the TF (trackable) checkpoint format.

    Trains briefly under `distribution`, saves weights, restores them into a
    fresh model, and verifies the restored model can both predict and keep
    training.
    """
    # TODO(b/123533246): Enable the test for TPU once bug is fixed
    if (isinstance(distribution,
                   (tpu_strategy.TPUStrategy, tpu_strategy.TPUStrategyV1)) and
        distribution.extended.steps_per_run > 1):
      self.skipTest('MultiStep TPU Strategy deadlocks with optimizer restore.')
    with self.cached_session():
      dataset = keras_test_lib.get_dataset(distribution)
      with distribution.scope():
        model = keras_test_lib.get_model()
        model.compile(
            optimizer(),
            'mse')
        model.fit(dataset, epochs=1, steps_per_epoch=1)

        # NOTE(review): tempfile.mktemp() is racy in general, but here the
        # path is used as a checkpoint *prefix* that presumably must not
        # already exist on disk, so a NamedTemporaryFile replacement would
        # not be a drop-in fix — confirm before changing.
        weights_file = tempfile.mktemp()
        model.save_weights(weights_file)

        model_2 = keras_test_lib.get_model()
        model_2.compile(
            optimizer(),
            'mse')
        model_2.load_weights(weights_file)
        # The restored model must be usable for both inference and training.
        model_2.predict(
            keras_test_lib.get_predict_dataset(distribution), steps=2)
        model_2.fit(dataset, epochs=1, steps_per_epoch=1)
Example no. 2
0
  def test_calling_with_unsupported_predefined_callbacks(
      self, distribution, cloning):
    """LR-scheduling callbacks must reject a TF-v1 optimizer under a strategy.

    `LearningRateScheduler` and `ReduceLROnPlateau` mutate the optimizer's
    learning rate, which is only supported for Keras (V2) optimizers.
    """
    with self.cached_session():
      with distribution.scope():
        model = keras_test_lib.get_model()
        optimizer = gradient_descent.GradientDescentOptimizer(0.001)
        loss = 'mse'
        metrics = ['mae']
        model.compile(optimizer, loss, metrics=metrics, cloning=cloning)

      dataset = keras_test_lib.get_dataset(distribution)

      def schedule(_):
        return 0.001

      # assertRaisesRegexp is a deprecated alias; use assertRaisesRegex.
      with self.assertRaisesRegex(
          ValueError, 'You must specify a Keras Optimizer V2 when '
          'using'):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            callbacks=[keras.callbacks.LearningRateScheduler(schedule)])

      with self.assertRaisesRegex(
          ValueError, 'You must specify a Keras Optimizer V2 when '
          'using'):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            callbacks=[keras.callbacks.ReduceLROnPlateau()])
  def test_calling_with_unsupported_predefined_callbacks(
      self, distribution, cloning):
    """LR-scheduling callbacks must reject a TF-v1 optimizer under a strategy.

    `LearningRateScheduler` and `ReduceLROnPlateau` mutate the optimizer's
    learning rate, which is only supported for Keras (V2) optimizers.
    """
    with self.cached_session():
      with distribution.scope():
        model = keras_test_lib.get_model()
        optimizer = gradient_descent.GradientDescentOptimizer(0.001)
        loss = 'mse'
        metrics = ['mae']
        model.compile(optimizer, loss, metrics=metrics, cloning=cloning)

      dataset = keras_test_lib.get_dataset(distribution)

      def schedule(_):
        return 0.001

      # assertRaisesRegexp is a deprecated alias; use assertRaisesRegex.
      with self.assertRaisesRegex(
          ValueError, 'You must specify a Keras Optimizer V2 when '
          'using'):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            callbacks=[keras.callbacks.LearningRateScheduler(schedule)])

      with self.assertRaisesRegex(
          ValueError, 'You must specify a Keras Optimizer V2 when '
          'using'):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            callbacks=[keras.callbacks.ReduceLROnPlateau()])
    def testSummaryWithCustomTrainingLoop(self):
        """Writes scalar summaries from a custom tf.function step on TPU."""
        resolver = tpu_cluster_resolver.TPUClusterResolver('')
        tpu_strategy_util.initialize_tpu_system(resolver)
        strategy = tpu_strategy_lib.TPUStrategy(resolver)

        with strategy.scope():
            model = distribute_strategy_test.get_model()
            model.compile('sgd', 'mse')
            writer = summary_ops_v2.create_file_writer_v2(self.summary_dir)

            @def_function.function
            def custom_function(dataset):
                def _custom_step(features, labels):
                    del labels
                    logits = model(features)
                    # Force recording regardless of the global summary
                    # recording condition.
                    with summary_ops_v2.always_record_summaries(
                    ), writer.as_default():
                        summary_ops_v2.scalar('logits',
                                              logits,
                                              step=model.optimizer.iterations)
                    return logits

                iterator = iter(dataset)
                # NOTE(review): `args=(next(iterator))` is NOT a one-tuple —
                # it relies on the dataset yielding (features, labels) pairs
                # that splat into _custom_step's two parameters. Confirm that
                # before "fixing" it with a trailing comma.
                output = strategy.unwrap(
                    strategy.run(_custom_step, args=(next(iterator))))
                return output

            dataset = strategy.experimental_distribute_dataset(
                distribute_strategy_test.get_dataset(strategy))

            custom_function(dataset)
    def testSummaryWithCustomTrainingLoop(self):
        """Writes a scalar summary from a custom tf.function step on TPU."""
        strategy = get_tpu_strategy()

        with strategy.scope():
            model = distribute_strategy_test.get_model()
            model.compile('sgd', 'mse')
            writer = summary_ops_v2.create_file_writer_v2(self.summary_dir)

            @def_function.function
            def custom_function(dataset):
                def _custom_step(features, labels):
                    del labels
                    logits = model(features)
                    # Force recording regardless of the global summary
                    # recording condition.
                    with summary_ops_v2.always_record_summaries(
                    ), writer.as_default():
                        scalar_summary_v2.scalar(
                            'logits',
                            math_ops.reduce_sum(logits),
                            step=model.optimizer.iterations)
                    return logits

                iterator = iter(dataset)
                # NOTE(review): `args=(next(iterator))` is NOT a one-tuple —
                # it relies on the dataset yielding (features, labels) pairs
                # that splat into _custom_step's two parameters. Confirm that
                # before "fixing" it with a trailing comma.
                output = strategy.unwrap(
                    strategy.run(_custom_step, args=(next(iterator))))
                return output

            dataset = strategy.experimental_distribute_dataset(
                distribute_strategy_test.get_dataset(strategy))

            custom_function(dataset)
Example no. 6
0
  def test_callbacks_in_predict(self, distribution,
                                experimental_run_tf_function):
    """Counts the callback hooks fired by a 5-step `predict` call."""
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(
          optimizer='sgd',
          loss='mse',
          metrics=['mae'],
          experimental_run_tf_function=experimental_run_tf_function)

    dataset = keras_test_lib.get_dataset(distribution)
    cb_counter = Counter()

    # NOTE(review): `get_predict_dataset` receives the fitted dataset here,
    # while sibling tests pass the distribution object — confirm intended.
    predict_steps = 5
    model.predict(
        keras_test_lib.get_predict_dataset(dataset),
        steps=predict_steps,
        callbacks=[cb_counter])

    expected_counts = {
        'on_predict_batch_begin': predict_steps,
        'on_predict_batch_end': predict_steps,
        'on_predict_begin': 1,
        'on_predict_end': 1,
    }
    self.assertDictEqual(cb_counter.method_counts, expected_counts)
Example no. 7
0
    def test_unsupported_features(self, distribution,
                                  experimental_run_tf_function, mode):
        """Dataset inputs must reject `validation_split`, `sample_weight`,
        and missing step counts for infinitely repeating datasets."""
        with self.cached_session():
            with distribution.scope():
                model = keras_test_lib.get_model()
                optimizer = gradient_descent.GradientDescentOptimizer(0.001)
                loss = 'mse'
                metrics = ['mae']
                model.compile(
                    optimizer,
                    loss,
                    metrics=metrics,
                    experimental_run_tf_function=experimental_run_tf_function)

            dataset = keras_test_lib.get_dataset(distribution)
            exception_error_message = (
                '`validation_split` argument is not supported when input `x`'
                ' is a dataset or a dataset iterator.+')

            # Test with validation split.
            # (assertRaisesRegexp is a deprecated alias; use
            # assertRaisesRegex.)
            with self.assertRaisesRegex(ValueError, exception_error_message):
                model.fit(dataset,
                          epochs=1,
                          steps_per_epoch=2,
                          verbose=0,
                          validation_split=0.5,
                          validation_steps=2)

            # Test with sample weight.
            sample_weight = np.random.random((10, ))
            with self.assertRaisesRegex(
                    ValueError,
                    '`sample_weight` argument is not supported when input '
                    '`x` is a dataset or a dataset iterator.'):
                model.fit(dataset,
                          epochs=1,
                          steps_per_epoch=2,
                          verbose=0,
                          sample_weight=sample_weight)

            # Test with not specifying the `steps` argument for dataset with
            # infinite cardinality.
            dataset = dataset.repeat()
            with self.assertRaisesRegex(
                    ValueError, 'When passing an infinitely '
                    'repeating dataset, you must specify the '
                    '`steps_per_epoch` argument'):
                model.fit(dataset, epochs=1, verbose=0)
            with self.assertRaisesRegex(
                    ValueError, 'When passing an infinitely '
                    'repeating dataset, you must specify the '
                    '`steps` argument'):
                model.evaluate(dataset, verbose=0)

            with self.assertRaisesRegex(
                    ValueError, 'When passing an infinitely '
                    'repeating dataset, you must specify the '
                    '`steps` argument'):
                model.predict(dataset, verbose=0)
Example no. 8
0
  def test_save_load_h5(self, distribution, optimizer, cloning):
    """Round-trips model weights through the HDF5 format under a strategy."""
    with self.cached_session():
      dataset = keras_test_lib.get_dataset(distribution)
      with distribution.scope():
        model = keras_test_lib.get_model()
        model.compile(optimizer(), 'mse', cloning=cloning)
        model.fit(dataset, epochs=1, steps_per_epoch=1)

        # tempfile.mktemp() is deprecated and racy; create the file safely.
        # save_weights detects the '.h5' suffix and overwrites the file.
        with tempfile.NamedTemporaryFile(suffix='.h5', delete=False) as f:
          weights_file = f.name
        model.save_weights(weights_file)

        model_2 = keras_test_lib.get_model()
        model_2.compile(optimizer(), 'mse', cloning=cloning)
        model_2.load_weights(weights_file)
        # The restored model must support both inference and training.
        model_2.predict(
            keras_test_lib.get_predict_dataset(distribution), steps=2)
        model_2.fit(dataset, epochs=1, steps_per_epoch=1)
Example no. 9
0
  def test_save_load_h5(self, distribution):
    """Round-trips weights through HDF5 with the Keras RMSprop optimizer."""
    with self.cached_session():
      dataset = keras_test_lib.get_dataset(distribution)
      with distribution.scope():
        model = keras_test_lib.get_model()
        model.compile(rms_prop_keras.RMSprop(learning_rate=0.01), 'mse')
        model.fit(dataset, epochs=1, steps_per_epoch=1)

        # tempfile.mktemp() is deprecated and racy; create the file safely.
        # save_weights detects the '.h5' suffix and overwrites the file.
        with tempfile.NamedTemporaryFile(suffix='.h5', delete=False) as f:
          weights_file = f.name
        model.save_weights(weights_file)

        model_2 = keras_test_lib.get_model()
        model_2.compile(rms_prop_keras.RMSprop(learning_rate=0.01), 'mse')
        model_2.load_weights(weights_file)
        # The restored model must support both inference and training.
        model_2.predict(
            keras_test_lib.get_predict_dataset(distribution), steps=2)
        model_2.fit(dataset, epochs=1, steps_per_epoch=1)
  def test_save_load_h5(self, distribution, optimizer, cloning):
    """Round-trips model weights through the HDF5 format under a strategy."""
    with self.cached_session():
      dataset = keras_test_lib.get_dataset(distribution)
      with distribution.scope():
        model = keras_test_lib.get_model()
        model.compile(optimizer(), 'mse', cloning=cloning)
        model.fit(dataset, epochs=1, steps_per_epoch=1)

        # tempfile.mktemp() is deprecated and racy; create the file safely.
        # save_weights detects the '.h5' suffix and overwrites the file.
        with tempfile.NamedTemporaryFile(suffix='.h5', delete=False) as f:
          weights_file = f.name
        model.save_weights(weights_file)

        model_2 = keras_test_lib.get_model()
        model_2.compile(optimizer(), 'mse', cloning=cloning)
        model_2.load_weights(weights_file)
        # The restored model must support both inference and training.
        model_2.predict(
            keras_test_lib.get_predict_dataset(distribution), steps=2)
        model_2.fit(dataset, epochs=1, steps_per_epoch=1)
Example no. 11
0
    def test_callbacks_in_fit(self, distribution, cloning):
        """Checks the exact number of times each training/validation callback
        hook fires during `fit`, accounting for TPU multi-step execution."""
        # These tests pass in Google's internal build, but certain combinations
        # fail in some of our open source builds. This next line is automatically
        # rewritten by our conversion script.
        in_tf_open_source = True
        if (not context.executing_eagerly() and in_tf_open_source
                and distribution.num_replicas_in_sync > 1):
            self.skipTest('Test broken; see b/129793413 and b/117920141')
        with distribution.scope():
            model = keras_test_lib.get_model()
            model.compile(optimizer='sgd',
                          loss='mse',
                          metrics=['mae'],
                          cloning=cloning)

        dataset = keras_test_lib.get_dataset(distribution)
        counter = Counter()

        epochs = 2
        steps_per_epoch = 5
        validation_steps = 3

        model.fit(dataset,
                  epochs=epochs,
                  steps_per_epoch=steps_per_epoch,
                  verbose=0,
                  validation_data=dataset,
                  validation_steps=validation_steps,
                  callbacks=[counter])

        if isinstance(distribution,
                      (tpu_strategy.TPUStrategy, tpu_strategy.TPUStrategyV1)):
            # TPU Strategy can have multi step training, from extended.steps_per_run
            # if steps_per_run = 1, then num_batch_call_per_epoch = steps_per_epoch
            steps_per_run = distribution.extended.steps_per_run
            num_batch_call_per_epoch = steps_per_epoch // steps_per_run
            # Ceiling division: a partial trailing chunk still fires the
            # per-batch callbacks once.
            if steps_per_epoch % steps_per_run:
                num_batch_call_per_epoch += 1
        else:
            num_batch_call_per_epoch = steps_per_epoch

        self.assertDictEqual(
            counter.method_counts, {
                'on_batch_begin': epochs * num_batch_call_per_epoch,
                'on_batch_end': epochs * num_batch_call_per_epoch,
                'on_epoch_begin': epochs,
                'on_epoch_end': epochs,
                'on_test_batch_begin': epochs * validation_steps,
                'on_test_batch_end': epochs * validation_steps,
                'on_test_begin': epochs,
                'on_test_end': epochs,
                'on_train_batch_begin': epochs * num_batch_call_per_epoch,
                'on_train_batch_end': epochs * num_batch_call_per_epoch,
                'on_train_begin': 1,
                'on_train_end': 1
            })
Example no. 12
0
  def test_unsupported_features(self, distribution, cloning):
    """Dataset inputs must reject `validation_split`, `sample_weight`, and
    missing step counts for infinitely repeating datasets."""
    with self.cached_session():
      with distribution.scope():
        model = keras_test_lib.get_model()
        optimizer = gradient_descent.GradientDescentOptimizer(0.001)
        loss = 'mse'
        metrics = ['mae']
        model.compile(optimizer, loss, metrics=metrics, cloning=cloning)

      dataset = keras_test_lib.get_dataset(distribution)

      # Test with validation split.
      # (assertRaisesRegexp is a deprecated alias; use assertRaisesRegex.)
      with self.assertRaisesRegex(
          ValueError, '`validation_split` argument is not '
          'supported when input `x` is a dataset or a '
          'dataset iterator.+'):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            validation_split=0.5,
            validation_steps=2)

      # Test with sample weight.
      sample_weight = np.random.random((10,))
      with self.assertRaisesRegex(
          ValueError, '`sample_weight` argument is not supported when input '
          '`x` is a dataset or a dataset iterator.'):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            sample_weight=sample_weight)

      # Test with not specifying the `steps` argument for dataset with infinite
      # cardinality.
      dataset = dataset.repeat()
      with self.assertRaisesRegex(
          ValueError, 'When passing an infinitely '
          'repeating dataset, you must specify the '
          '`steps_per_epoch` argument'):
        model.fit(dataset, epochs=1, verbose=0)
      with self.assertRaisesRegex(
          ValueError, 'When passing an infinitely '
          'repeating dataset, you must specify the '
          '`steps` argument'):
        model.evaluate(dataset, verbose=0)

      with self.assertRaisesRegex(
          ValueError, 'When passing an infinitely '
          'repeating dataset, you must specify the '
          '`steps` argument'):
        model.predict(dataset, verbose=0)
Example no. 13
0
  def test_save_load_trackable(self, distribution, optimizer, cloning):
    """Round-trips model weights through the TF (trackable) checkpoint format.

    Trains briefly under `distribution`, saves weights, restores them into a
    fresh model, and verifies the restored model can both predict and keep
    training.
    """
    # TODO(b/123533246): Enable the test for TPU once bug is fixed
    if (isinstance(distribution, (tpu_strategy.TPUStrategy,
                                  tpu_strategy.TPUStrategyV1)) and
        distribution.extended.steps_per_run > 1):
      self.skipTest('MultiStep TPU Strategy deadlocks with optimizer restore.')
    with self.cached_session():
      dataset = keras_test_lib.get_dataset(distribution)
      with distribution.scope():
        model = keras_test_lib.get_model()
        model.compile(optimizer(), 'mse', cloning=cloning)
        model.fit(dataset, epochs=1, steps_per_epoch=1)

        # NOTE(review): tempfile.mktemp() is racy in general, but here the
        # path is used as a checkpoint *prefix* that presumably must not
        # already exist on disk, so a NamedTemporaryFile replacement would
        # not be a drop-in fix — confirm before changing.
        weights_file = tempfile.mktemp()
        model.save_weights(weights_file)

        model_2 = keras_test_lib.get_model()
        model_2.compile(optimizer(), 'mse', cloning=cloning)
        model_2.load_weights(weights_file)
        # The restored model must be usable for both inference and training.
        model_2.predict(
            keras_test_lib.get_predict_dataset(distribution), steps=2)
        model_2.fit(dataset, epochs=1, steps_per_epoch=1)
    def test_callbacks_in_fit(self, distribution,
                              experimental_run_tf_function):
        """Checks the exact number of times each training/validation callback
        hook fires during `fit`, accounting for TPU multi-step execution."""
        with distribution.scope():
            model = keras_test_lib.get_model()
            model.compile(
                optimizer='sgd',
                loss='mse',
                metrics=['mae'],
                experimental_run_tf_function=experimental_run_tf_function)

        dataset = keras_test_lib.get_dataset(distribution)
        counter = Counter()

        epochs = 2
        steps_per_epoch = 5
        validation_steps = 3

        model.fit(dataset,
                  epochs=epochs,
                  steps_per_epoch=steps_per_epoch,
                  verbose=0,
                  validation_data=dataset,
                  validation_steps=validation_steps,
                  callbacks=[counter])

        # Multi-step per-host calls only apply to TPUStrategyV1 in graph
        # mode; in eager mode each batch triggers its own callback.
        if (isinstance(distribution, tpu_strategy.TPUStrategyV1)
                and not context.executing_eagerly()):
            # TPU Strategy can have multi step training, from extended.steps_per_run
            # if steps_per_run = 1, then num_batch_call_per_epoch = steps_per_epoch
            steps_per_run = distribution.extended.steps_per_run
            num_batch_call_per_epoch = steps_per_epoch // steps_per_run
            # Ceiling division: a partial trailing chunk still fires the
            # per-batch callbacks once.
            if steps_per_epoch % steps_per_run:
                num_batch_call_per_epoch += 1
        else:
            num_batch_call_per_epoch = steps_per_epoch

        self.assertDictEqual(
            counter.method_counts, {
                'on_batch_begin': epochs * num_batch_call_per_epoch,
                'on_batch_end': epochs * num_batch_call_per_epoch,
                'on_epoch_begin': epochs,
                'on_epoch_end': epochs,
                'on_test_batch_begin': epochs * validation_steps,
                'on_test_batch_end': epochs * validation_steps,
                'on_test_begin': epochs,
                'on_test_end': epochs,
                'on_train_batch_begin': epochs * num_batch_call_per_epoch,
                'on_train_batch_end': epochs * num_batch_call_per_epoch,
                'on_train_begin': 1,
                'on_train_end': 1
            })
Example no. 15
0
  def test_callbacks_in_fit(self, distribution, cloning):
    """Checks every training/validation callback hook count during `fit`."""
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(
          optimizer='sgd', loss='mse', metrics=['mae'], cloning=cloning)

    dataset = keras_test_lib.get_dataset(distribution)
    cb_counter = Counter()

    epochs = 2
    steps_per_epoch = 5
    validation_steps = 3

    model.fit(
        dataset,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        verbose=0,
        validation_data=dataset,
        validation_steps=validation_steps,
        callbacks=[cb_counter])

    # TPU strategies can execute several steps per host call (via
    # `extended.steps_per_run`), so the per-batch callbacks fire once per
    # chunk: ceil(steps_per_epoch / steps_per_run) times per epoch.
    num_batch_call_per_epoch = steps_per_epoch
    if isinstance(distribution, (tpu_strategy.TPUStrategy,
                                 tpu_strategy.TPUStrategyV1)):
      steps_per_run = distribution.extended.steps_per_run
      num_batch_call_per_epoch = -(-steps_per_epoch // steps_per_run)

    expected_counts = {
        'on_batch_begin': epochs * num_batch_call_per_epoch,
        'on_batch_end': epochs * num_batch_call_per_epoch,
        'on_epoch_begin': epochs,
        'on_epoch_end': epochs,
        'on_test_batch_begin': epochs * validation_steps,
        'on_test_batch_end': epochs * validation_steps,
        'on_test_begin': epochs,
        'on_test_end': epochs,
        'on_train_batch_begin': epochs * num_batch_call_per_epoch,
        'on_train_batch_end': epochs * num_batch_call_per_epoch,
        'on_train_begin': 1,
        'on_train_end': 1,
    }
    self.assertDictEqual(cb_counter.method_counts, expected_counts)
Example no. 16
0
  def test_callbacks_in_eval(self, distribution):
    """Counts the callback hooks fired by a 5-step `evaluate` call."""
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(optimizer='sgd', loss='mse', metrics=['mae'])

    dataset = keras_test_lib.get_dataset(distribution)
    cb_counter = Counter()

    eval_steps = 5
    model.evaluate(dataset, steps=eval_steps, callbacks=[cb_counter])

    expected_counts = {
        'on_test_batch_begin': eval_steps,
        'on_test_batch_end': eval_steps,
        'on_test_begin': 1,
        'on_test_end': 1,
    }
    self.assertDictEqual(cb_counter.method_counts, expected_counts)
Example no. 17
0
    def test_callbacks_in_eval(self, distribution):
        """Counts the callback hooks fired by a 5-step `evaluate` call."""
        with distribution.scope():
            model = keras_test_lib.get_model()
            model.compile(optimizer='sgd', loss='mse', metrics=['mae'])

        dataset = keras_test_lib.get_dataset(distribution)
        cb_counter = Counter()

        eval_steps = 5
        model.evaluate(dataset, steps=eval_steps, callbacks=[cb_counter])

        expected_counts = {
            'on_test_batch_begin': eval_steps,
            'on_test_batch_end': eval_steps,
            'on_test_begin': 1,
            'on_test_end': 1,
        }
        self.assertDictEqual(cb_counter.method_counts, expected_counts)
Example no. 18
0
  def test_unsupported_features(self, distribution, mode):
    """fit/evaluate/predict must reject unsupported args for dataset inputs."""
    with self.cached_session():
      with distribution.scope():
        model = keras_test_lib.get_model()
        sgd_optimizer = gradient_descent.GradientDescentOptimizer(0.001)
        model.compile(sgd_optimizer, 'mse', metrics=['mae'])

      dataset = keras_test_lib.get_dataset(distribution)

      # `validation_split` cannot be honoured when `x` is a dataset.
      with self.assertRaises(ValueError):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            validation_split=0.5,
            validation_steps=2)

      # Per-sample weights are likewise unsupported for dataset inputs.
      weights = np.random.random((10,))
      with self.assertRaises(ValueError):
        model.fit(
            dataset,
            epochs=1,
            steps_per_epoch=2,
            verbose=0,
            sample_weight=weights)

      # An infinitely repeating dataset requires explicit step counts for
      # fit, evaluate, and predict alike.
      dataset = dataset.repeat()
      with self.assertRaises(ValueError):
        model.fit(dataset, epochs=1, verbose=0)
      with self.assertRaises(ValueError):
        model.evaluate(dataset, verbose=0)
      with self.assertRaises(ValueError):
        model.predict(dataset, verbose=0)
Example no. 19
0
  def testSummaryWithCustomTrainingLoop(self):
    """Writes a scalar summary from a custom tf.function step on TPU and
    verifies the recorded event files via a test helper."""
    strategy = get_tpu_strategy()

    writer = summary_ops_v2.create_file_writer_v2(self.summary_dir)
    with strategy.scope():
      model = distribute_strategy_test.get_model()
      model.compile('sgd', 'mse')

    @def_function.function
    def custom_function(dataset):

      def _custom_step(features, labels):
        del labels
        logits = model(features)
        # record_if(True) forces the summary to be written for this step.
        with summary_ops_v2.record_if(True), writer.as_default():
          scalar_summary_v2.scalar(
              'logits',
              math_ops.reduce_sum(logits),
              step=model.optimizer.iterations)
        return logits

      iterator = iter(dataset)
      # NOTE(review): `args=(next(iterator))` is NOT a one-tuple — it relies
      # on the dataset yielding (features, labels) pairs that splat into
      # _custom_step's two parameters. Confirm before adding a trailing comma.
      output = strategy.unwrap(
          strategy.run(_custom_step, args=(next(iterator))))
      return output

    dataset = strategy.experimental_distribute_dataset(
        distribute_strategy_test.get_dataset(strategy))

    custom_function(dataset)
    # Close the writer so events are flushed to disk before matching files.
    writer.close()

    event_files = file_io.get_matching_files_v2(
        os.path.join(self.summary_dir, 'event*'))
    events_count_dictionary = {
        ('logits'): 0,
    }
    # NOTE(review): 'sumary' is a typo in the helper's name, defined
    # elsewhere — it cannot be renamed from this call site alone.
    self.validate_recorded_sumary_file(event_files, events_count_dictionary,
                                       1)
Example no. 20
0
def process_sample(sample_path):
    """Load the image at `sample_path` and prepare it for the network.

    Resizes to (image_size, image_size) — a module-level setting — and
    applies `preprocess_data`.
    """
    raw_image = cv2.imread(sample_path)
    resized = cv2.resize(raw_image, (image_size, image_size))
    return preprocess_data(resized)


# loads existing network for further use
def load_network_model(network_model):
    """Load the persisted Keras model from 'models/model.hdf5'.

    The `network_model` argument is ignored — the loaded model replaces it.
    It is kept only for backward compatibility with existing callers.
    (Original code reassigned the parameter, obscuring that it was unused.)
    """
    print("Loading model...")
    loaded = load_model('models/model.hdf5')
    print("Model loaded!")
    return loaded


if __name__ == "__main__":
    model = get_model()
    model = load_network_model(model)

    while True:
        input1 = input()
        print(input1)
        if input1 == "q":
            break

        try:
            sample = process_sample(input1)
        finally:
            prediction_class = predict(model, sample).tolist()[0]

            if prediction_class == 0:
                print("Sample predicted class: NORMAL")