def _setup(self):
  """Sets up and resets flags before each test."""
  tf.logging.set_verbosity(tf.logging.DEBUG)
  if KerasImagenetBenchmarkTests.local_flags is None:
    keras_common.define_keras_flags()
    imagenet_main.define_imagenet_flags()
    # Parse flags once to get defaults, which tests then override. The argv
    # list passed to FLAGS cannot be empty.
    flags.FLAGS(['foo'])
    saved_flag_values = flagsaver.save_flag_values()
    KerasImagenetBenchmarkTests.local_flags = saved_flag_values
    return
  flagsaver.restore_flag_values(KerasImagenetBenchmarkTests.local_flags)
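The save-once/restore-every-test pattern above relies on absl's flagsaver. A minimal self-contained sketch of the same idea (the batch_size flag here is hypothetical):

from absl import flags
from absl.testing import flagsaver

flags.DEFINE_integer('batch_size', 32, 'Per-replica batch size.')

# Parse once so every flag holds its default; argv must be non-empty.
flags.FLAGS(['prog'])
saved = flagsaver.save_flag_values()   # snapshot all current flag values

flags.FLAGS.batch_size = 256           # a test mutates flags freely...
flagsaver.restore_flag_values(saved)   # ...and the snapshot resets them
assert flags.FLAGS.batch_size == 32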
Example #2
  def __init__(self, output_dir=None, default_flags=None):
    flag_methods = [
        lambda: imagenet_main.define_imagenet_flags(dynamic_loss_scale=True,
                                                    fp16_implementation=True)
    ]

    super(Resnet50EstimatorBenchmark, self).__init__(
        output_dir=output_dir,
        default_flags=default_flags,
        flag_methods=flag_methods)
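These benchmark constructors all forward a list of flag-defining callables to a shared base class. A hedged sketch of how such a base might consume flag_methods (this BenchmarkBase is illustrative, not the actual PerfZero base class):

class BenchmarkBase(object):
  """Illustrative base: registers flags and stores run configuration."""

  def __init__(self, output_dir=None, default_flags=None, flag_methods=None):
    self.output_dir = output_dir
    self.default_flags = default_flags or {}
    for define_flags in flag_methods or []:
      define_flags()  # each callable registers its absl flags exactly once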
Example #3
    def __init__(self, output_dir=None, default_flags=None):
        flag_methods = [
            ctl_common.define_ctl_flags,
            lambda: imagenet_main.define_imagenet_flags()
        ]

        super(Resnet50CtlBenchmarkBase,
              self).__init__(output_dir=output_dir,
                             flag_methods=flag_methods,
                             default_flags=default_flags)
Example #4
  def __init__(self, output_dir=None, default_flags=None):
    flag_methods = [
        keras_common.define_keras_flags,
        lambda: imagenet_main.define_imagenet_flags(dynamic_loss_scale=True)
    ]

    super(Resnet50KerasBenchmarkBase, self).__init__(
        output_dir=output_dir,
        flag_methods=flag_methods,
        default_flags=default_flags)
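The dynamic_loss_scale=True argument makes define_imagenet_flags register a flag for dynamic loss scaling, which guards fp16 training against gradient underflow. For reference, this is the mechanism such a flag enables, sketched with the TF2 Keras mixed-precision API (the benchmarks themselves wire it up through flags rather than calling this directly):

import tensorflow as tf

opt = tf.keras.optimizers.SGD(learning_rate=0.1)
# Dynamic loss scaling is the default behavior of LossScaleOptimizer:
# the scale grows while gradients stay finite and shrinks on overflow.
opt = tf.keras.mixed_precision.LossScaleOptimizer(opt)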
Example #7
  def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
    flag_methods = [
        keras_common.define_keras_flags,
        lambda: imagenet_main.define_imagenet_flags(dynamic_loss_scale=True)
    ]
    def_flags = {}
    def_flags['skip_eval'] = True
    def_flags['dtype'] = 'fp16'
    def_flags['enable_xla'] = True
    def_flags['data_dir'] = os.path.join(root_data_dir, 'imagenet')
    def_flags['train_steps'] = 600
    def_flags['log_steps'] = 100
    def_flags['distribution_strategy'] = 'default'

    super(TrivialKerasBenchmarkReal, self).__init__(
        output_dir=output_dir,
        flag_methods=flag_methods,
        default_flags=def_flags)
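def_flags is a plain dict of flag-name to value overrides handed to the base class as default_flags. A minimal sketch of how such defaults could be applied before each run (apply_defaults is a hypothetical helper, not the real base-class method):

from absl import flags

def apply_defaults(default_flags):
  """Sets each default on the already-parsed absl FLAGS object."""
  for name, value in default_flags.items():
    setattr(flags.FLAGS, name, value)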
Example #9
  def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
    """Benchmark accuracy tests for ResNet50 w/ Estimator.

    Args:
      output_dir: directory where to output e.g. log files
      root_data_dir: directory under which to look for dataset
      **kwargs: arbitrary named arguments. This is needed to make the
                constructor forward compatible in case PerfZero provides more
                named arguments before updating the constructor.
    """
    flag_methods = [
        lambda: imagenet_main.define_imagenet_flags(dynamic_loss_scale=True,
                                                    fp16_implementation=True)
    ]

    self.data_dir = os.path.join(root_data_dir, IMAGENET_DATA_DIR_NAME)
    super(Resnet50EstimatorAccuracy, self).__init__(
        output_dir=output_dir, flag_methods=flag_methods)
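Accuracy subclasses like this one typically store data_dir and inject it into FLAGS inside individual benchmark methods. A hedged sketch of such a method (the method name, flag values, and _run_and_report_benchmark helper are illustrative):

  def benchmark_graph_8_gpu(self):
    """Illustrative accuracy run on 8 GPUs with real ImageNet data."""
    self._setup()                      # reset flags to their saved defaults
    flags.FLAGS.data_dir = self.data_dir
    flags.FLAGS.num_gpus = 8
    flags.FLAGS.batch_size = 128 * 8
    self._run_and_report_benchmark()   # hypothetical helper in the base class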
Example #11
  def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
    """A benchmark class.

    Args:
      output_dir: directory where to output e.g. log files
      root_data_dir: directory under which to look for dataset
      **kwargs: arbitrary named arguments. This is needed to make the
                constructor forward compatible in case PerfZero provides more
                named arguments before updating the constructor.
    """

    flag_methods = [
        keras_common.define_keras_flags,
        lambda: imagenet_main.define_imagenet_flags(dynamic_loss_scale=True)
    ]

    self.data_dir = os.path.join(root_data_dir, 'imagenet')
    super(Resnet50KerasAccuracy, self).__init__(
        output_dir=output_dir, flag_methods=flag_methods)
Example #13
  @classmethod
  def setUpClass(cls):  # pylint: disable=invalid-name
    super(CtlImagenetTest, cls).setUpClass()
    imagenet_main.define_imagenet_flags()
    ctl_common.define_ctl_flags()
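Defining flags in setUpClass guarantees the definitions run exactly once per test class, before any test parses them. A minimal self-contained version of the pattern (the class and flag names are illustrative):

import unittest
from absl import flags

class ExampleTest(unittest.TestCase):

  @classmethod
  def setUpClass(cls):  # pylint: disable=invalid-name
    super(ExampleTest, cls).setUpClass()
    if 'train_epochs' not in flags.FLAGS:
      flags.DEFINE_integer('train_epochs', 90, 'Epochs to train.')
    flags.FLAGS(['test'])  # parse once so defaults are readable in tests

  def test_default(self):
    self.assertEqual(flags.FLAGS.train_epochs, 90)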
Example #14
  history = model.fit(train_input_dataset,
                      epochs=train_epochs,
                      steps_per_epoch=train_steps,
                      callbacks=[
                          time_callback,
                          lr_callback,
                          tensorboard_callback
                      ],
                      validation_steps=num_eval_steps,
                      validation_data=validation_data,
                      validation_freq=flags_obj.epochs_between_evals,
                      verbose=2)

  eval_output = None
  if not flags_obj.skip_eval:
    eval_output = model.evaluate(eval_input_dataset,
                                 steps=num_eval_steps,
                                 verbose=2)
  stats = keras_common.build_stats(history, eval_output, time_callback)
  return stats


def main(_):
  with logger.benchmark_context(flags.FLAGS):
    return run(flags.FLAGS)


if __name__ == '__main__':
  tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)
  imagenet_main.define_imagenet_flags()
  keras_common.define_keras_flags()
  absl_app.run(main)
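The __main__ block follows the standard absl pattern: define all flags, then hand main to absl_app.run, which parses argv before invoking it. The skeleton in isolation (data_dir here is a stand-in flag):

from absl import app as absl_app
from absl import flags

flags.DEFINE_string('data_dir', None, 'Directory with input data.')

def main(_):
  print('data_dir = %s' % flags.FLAGS.data_dir)

if __name__ == '__main__':
  absl_app.run(main)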
Example #15
  @classmethod
  def setUpClass(cls):  # pylint: disable=invalid-name
    super(BaseTest, cls).setUpClass()
    imagenet_main.define_imagenet_flags()
Example #16
def define_imagenet_keras_flags():
    imagenet_main.define_imagenet_flags(dynamic_loss_scale=True,
                                        enable_xla=True)
    keras_common.define_keras_flags()
Example #18
  @classmethod
  def setUpClass(cls):  # pylint: disable=invalid-name
    super(KerasImagenetTest, cls).setUpClass()
    imagenet_main.define_imagenet_flags()
    keras_common.define_keras_flags()
Example #20
  history = model.fit(train_input_dataset,
                      epochs=train_epochs,
                      steps_per_epoch=train_steps,
                      callbacks=callbacks,
                      validation_steps=num_eval_steps,
                      validation_data=validation_data,
                      validation_freq=flags_obj.epochs_between_evals,
                      verbose=2)

  eval_output = None
  if not flags_obj.skip_eval:
    eval_output = model.evaluate(eval_input_dataset,
                                 steps=num_eval_steps,
                                 verbose=2)
  stats = keras_common.build_stats(history, eval_output, callbacks)
  return stats


def main(_):
  model_helpers.apply_clean(flags.FLAGS)
  with logger.benchmark_context(flags.FLAGS):
    return run(flags.FLAGS)


if __name__ == '__main__':
  tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)
  imagenet_main.define_imagenet_flags(dynamic_loss_scale=True)
  keras_common.define_keras_flags()
  absl_app.run(main)
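build_stats condenses the Keras History object and the evaluate() output into a stats dict for the benchmark harness. A hedged sketch of what that aggregation might look like (the dict keys and the shape of eval_output are assumptions, not the real keras_common implementation):

def build_stats_sketch(history, eval_output):
  stats = {}
  if eval_output is not None:
    stats['eval_loss'] = eval_output[0]   # evaluate() returns [loss, metric]
    stats['eval_acc'] = eval_output[1]
  if history is not None and history.history:
    stats['train_loss'] = history.history['loss'][-1]
  return stats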
Example #21
    num_eval_steps = (imagenet_main.NUM_IMAGES['validation'] //
                      flags_obj.batch_size)

    validation_data = eval_input_dataset
    if flags_obj.skip_eval:
        num_eval_steps = None
        validation_data = None

    model.fit(train_input_dataset,
              epochs=train_epochs,
              steps_per_epoch=train_steps,
              callbacks=[time_callback, lr_callback, tensorboard_callback],
              validation_steps=num_eval_steps,
              validation_data=validation_data,
              verbose=1)

    if not flags_obj.skip_eval:
        model.evaluate(eval_input_dataset, steps=num_eval_steps, verbose=1)


def main(_):
    with logger.benchmark_context(flags.FLAGS):
        run(flags.FLAGS)


if __name__ == '__main__':
    tf.logging.set_verbosity(tf.logging.INFO)
    imagenet_main.define_imagenet_flags()
    keras_common.define_keras_flags()
    absl_app.run(main)
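The eval-step count is simply the validation-set size divided by the batch size; imagenet_main.NUM_IMAGES['validation'] is 50,000 for ImageNet. For example:

NUM_VALIDATION_IMAGES = 50000   # imagenet_main.NUM_IMAGES['validation']
batch_size = 256
num_eval_steps = NUM_VALIDATION_IMAGES // batch_size   # 195 full batches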