def test_create_serving_input_receiver_numpy(self):
        (model_dir, mock_t2r_model,
         prediction_ref) = self._train_and_eval_reference_model('numpy')
        exporter = default_export_generator.DefaultExportGenerator()
        exporter.set_specification_from_model(mock_t2r_model)

        # Export trained serving estimator.
        estimator_exporter = tf.estimator.Estimator(
            model_fn=mock_t2r_model.model_fn,
            config=tf.estimator.RunConfig(model_dir=model_dir))

        serving_input_receiver_fn = (
            exporter.create_serving_input_receiver_numpy_fn())
        exported_savedmodel_path = estimator_exporter.export_saved_model(
            export_dir_base=model_dir,
            serving_input_receiver_fn=serving_input_receiver_fn,
            checkpoint_path=tf.train.latest_checkpoint(model_dir))

        # Load trained and exported serving estimator, run prediction and assert
        # it is the same as before exporting.
        feed_predictor_fn = tf.contrib.predictor.from_saved_model(
            exported_savedmodel_path)
        mock_input_generator = mocks.MockInputGenerator(batch_size=BATCH_SIZE)
        features, labels = mock_input_generator.create_numpy_data()
        for pos, value in enumerate(prediction_ref):
            actual = feed_predictor_fn(
                {'x': features[pos, :].reshape(1, -1)})['logit'].flatten()
            predicted = value['logit'].flatten()
            np.testing.assert_almost_equal(actual=actual,
                                           desired=predicted,
                                           decimal=4)
            if labels[pos] > 0:
                self.assertGreater(predicted[0], 0)
            else:
                self.assertLess(predicted[0], 0)
Example #2
def create_default_exporters(
    t2r_model,
    export_generator,
    compare_fn=create_valid_result_smaller):
  """Creates a list of Exporter to export saved models during evaluation.

  Args:
    t2r_model: The model to be exported.
    export_generator: An export_generator.AbstractExportGenerator.
    compare_fn: The function used to deterimne the best model to export.

  Returns:
    A list containing two exporters, one for numpy and another one for
      tf_example interface.
  """
  if export_generator is None:
    export_generator = default_export_generator.DefaultExportGenerator()
  # Create a T2RAssets proto of the input specs to save alongside the
  # exported models.
  tmpdir = tempfile.mkdtemp()
  in_feature_spec = t2r_model.get_feature_specification_for_packing(
      mode=tf.estimator.ModeKeys.PREDICT)
  in_label_spec = t2r_model.get_label_specification_for_packing(
      mode=tf.estimator.ModeKeys.PREDICT)
  t2r_assets = t2r_pb2.T2RAssets()
  t2r_assets.feature_spec.CopyFrom(in_feature_spec.to_proto())
  t2r_assets.label_spec.CopyFrom(in_label_spec.to_proto())
  t2r_assets_filename = os.path.join(tmpdir,
                                     tensorspec_utils.T2R_ASSETS_FILENAME)
  tensorspec_utils.write_t2r_assets_to_file(t2r_assets, t2r_assets_filename)
  assets = {tensorspec_utils.T2R_ASSETS_FILENAME: t2r_assets_filename}
  export_generator.set_specification_from_model(t2r_model)

  exporters = []
  exporters.append(
      tf.estimator.BestExporter(
          name='best_exporter_numpy',
          compare_fn=compare_fn(),
          serving_input_receiver_fn=export_generator
          .create_serving_input_receiver_numpy_fn(),
          assets_extra=assets))
  exporters.append(
      tf.estimator.BestExporter(
          name='best_exporter_tf_example',
          compare_fn=compare_fn(),
          serving_input_receiver_fn=export_generator
          .create_serving_input_receiver_tf_example_fn(),
          assets_extra=assets))
  exporters.append(
      tf.estimator.LatestExporter(
          name='latest_exporter_numpy',
          serving_input_receiver_fn=export_generator
          .create_serving_input_receiver_numpy_fn(),
          assets_extra=assets))
  exporters.append(
      tf.estimator.LatestExporter(
          name='latest_exporter_tf_example',
          serving_input_receiver_fn=export_generator
          .create_serving_input_receiver_tf_example_fn(),
          assets_extra=assets))
  return exporters
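
For context, a minimal usage sketch of the function above; `my_model` and
`eval_input_fn` are hypothetical placeholders rather than names from the source:

# Hypothetical usage sketch: wire the default exporters into an EvalSpec.
# `my_model` and `eval_input_fn` are placeholders, not from the source.
exporters = create_default_exporters(
    t2r_model=my_model,
    export_generator=None)  # None falls back to DefaultExportGenerator.
eval_spec = tf.estimator.EvalSpec(
    input_fn=eval_input_fn,
    steps=100,
    exporters=exporters)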
    def test_create_serving_input_receiver_tf_example(self, multi_dataset):
        (model_dir, mock_t2r_model,
         prediction_ref) = self._train_and_eval_reference_model(
             'tf_example', multi_dataset=multi_dataset)

        # Now we can actually export our serving estimator.
        estimator_exporter = tf.estimator.Estimator(
            model_fn=mock_t2r_model.model_fn,
            config=tf.estimator.RunConfig(model_dir=model_dir))

        exporter = default_export_generator.DefaultExportGenerator()
        exporter.set_specification_from_model(mock_t2r_model)
        serving_input_receiver_fn = (
            exporter.create_serving_input_receiver_tf_example_fn())
        exported_savedmodel_path = estimator_exporter.export_saved_model(
            export_dir_base=model_dir,
            serving_input_receiver_fn=serving_input_receiver_fn,
            checkpoint_path=tf.train.latest_checkpoint(model_dir))

        # Now we can load our exported estimator graph; there are no longer
        # any dependencies on the model_fn or preprocessor.
        feed_predictor_fn = tf.contrib.predictor.from_saved_model(
            exported_savedmodel_path)
        mock_input_generator = mocks.MockInputGenerator(batch_size=BATCH_SIZE)
        features, labels = mock_input_generator.create_numpy_data()
        for pos, value in enumerate(prediction_ref):
            # We have to create our serialized tf.Example proto.
            example = tf.train.Example()
            example.features.feature[
                'measured_position'].float_list.value.extend(features[pos])
            serialized_example = np.array(
                example.SerializeToString()).reshape(1,)
            if multi_dataset:
                feed_dict = {
                    'input_example_dataset1': serialized_example,
                    'input_example_dataset2': serialized_example
                }
            else:
                feed_dict = {'input_example_tensor': serialized_example}
            actual = feed_predictor_fn(feed_dict)['logit'].flatten()
            predicted = value['logit'].flatten()
            np.testing.assert_almost_equal(actual=actual,
                                           desired=predicted,
                                           decimal=4)
            if labels[pos] > 0:
                self.assertGreater(predicted[0], 0)
            else:
                self.assertLess(predicted[0], 0)
 def __init__(
     self,
     export_dir,
     save_secs=90,
     num_versions=3,
     create_export_fn=default_create_export_fn,
     export_generator=None,
 ):
   super(AsyncExportHookBuilder, self).__init__()
   self._save_secs = save_secs
   self._num_versions = num_versions
   self._export_dir = export_dir
   self._create_export_fn = create_export_fn
   if export_generator is None:
     self._export_generator = default_export_generator.DefaultExportGenerator()
   else:
     self._export_generator = export_generator
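
A minimal instantiation sketch for the hook builder above, assuming the
defaults shown in its signature; the export path is a placeholder:

# Hypothetical instantiation; '/tmp/exports' is a placeholder path and all
# other arguments fall back to the defaults shown above.
hook_builder = AsyncExportHookBuilder(export_dir='/tmp/exports')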
Example #5
 def __init__(
     self,
     export_dir,
     lagged_export_dir,
     batch_sizes_for_export,
     save_secs=90,
     num_versions=3,
     use_preprocessed_features=False,
     export_generator=None,
 ):
     super(TD3Hooks, self).__init__()
     self._save_secs = save_secs
     self._num_versions = num_versions
     self._export_dir = export_dir
     self._lagged_export_dir = lagged_export_dir
     self._batch_sizes_for_export = batch_sizes_for_export
     if export_generator is None:
          self._export_generator = (
              default_export_generator.DefaultExportGenerator())
     else:
         self._export_generator = export_generator
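
Similarly, a hedged instantiation sketch for TD3Hooks; the paths and batch
sizes are placeholder values, not from the source:

# Hypothetical instantiation; paths and batch sizes are placeholder values.
hooks_builder = TD3Hooks(
    export_dir='/tmp/exports/model',
    lagged_export_dir='/tmp/exports/lagged',
    batch_sizes_for_export=[1, 64])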
Example #6
def train_eval_model(
    t2r_model,
    input_generator_train=None,
    input_generator_eval=None,
    max_train_steps=1000,
    model_dir='/tmp/estimator_models_log_dir/',
    eval_steps=100,
    eval_throttle_secs=600,
    create_exporters_fn=None,
    export_generator=None,
    use_continuous_eval=True,
    train_hook_builders=None,
    eval_hook_builders=None,
):
  """Train and evaluate a T2RModel.

  We will either train, evaluate, or train and evaluate an estimator model,
  depending on the provided input generators.

  Args:
    t2r_model: An instance of the model we will train or evaluate.
    input_generator_train: An optional instance of an input generator. If
      provided then we will optimize the model until max_train_steps.
    input_generator_eval: An optional instance of an input generator. If
      provided then we will evaluate the model for at most eval_steps.
    max_train_steps: An optional maximum number of steps. For TPU training,
      it is a mandatory flag.
    model_dir: An optional location where we want to store or load our model
      from.
    eval_steps: An optional maximum number of evaluation steps.
    eval_throttle_secs: An optional number of seconds to wait before evaluating
      the next checkpoint.
    create_exporters_fn: An optional function which creates exporters for saved
      models during eval.
    export_generator: An export_generator.AbstractExportGenerator.
    use_continuous_eval: If True, the evaluation job waits for new checkpoints
      and continuously evaluates every checkpoint. If False, only the latest
      checkpoint is evaluated or, if none exists, a model is initialized,
      evaluated, and the job exits. Note, this parameter is only used if no
      input generator for training is provided.
    train_hook_builders: An optional list of HookBuilders to build training
      hooks to pass to the estimator.
    eval_hook_builders: An optional list of HookBuilders to build eval hooks
      to pass to the estimator.

  Raises:
    ValueError: If neither an input generator for training nor evaluation is
      provided.
  """

  # TODO(b/128860448): Document behavior in T2R README.
  use_tpu_tf_wrapper = t2r_model.is_device_tpu

  if use_tpu_tf_wrapper:
    t2r_model = tpu_model_wrapper.TPUT2RModelWrapper(t2r_model=t2r_model)

  print_specification(t2r_model)

  params = {}
  # Train Input Generator.
  train_batch_size = None
  train_spec = None
  if input_generator_train is not None:
    input_generator_train = provide_input_generator_with_model_information(
        input_generator_train,
        t2r_model,
        mode=tf.estimator.ModeKeys.TRAIN,
    )
    train_batch_size = input_generator_train.batch_size

  # Eval Input Generator.
  eval_batch_size = None
  eval_spec = None
  if input_generator_eval is not None:
    input_generator_eval = provide_input_generator_with_model_information(
        input_generator_eval, t2r_model, mode=tf.estimator.ModeKeys.EVAL)
    eval_batch_size = input_generator_eval.batch_size

  create_estimator_fn = create_estimator
  if t2r_model.is_device_tpu:
    create_estimator_fn = create_tpu_estimator

  estimator = create_estimator_fn(
      t2r_model=t2r_model,
      model_dir=model_dir,
      train_batch_size=train_batch_size,
      eval_batch_size=eval_batch_size,
      params=params)

  if export_generator is None:
    export_generator = default_export_generator.DefaultExportGenerator()

  # Inline helper function for building hooks.
  def _build_hooks(hook_builders):
    hooks = []
    if hook_builders:
      for builder in hook_builders:
        hooks.extend(
            builder.create_hooks(t2r_model, estimator, export_generator))
    return hooks

  # TrainSpec and Hooks.
  if input_generator_train is not None:
    train_hooks = _build_hooks(train_hook_builders)
    train_spec = tf.estimator.TrainSpec(
        input_fn=input_generator_train.create_dataset_input_fn(
            mode=tf.estimator.ModeKeys.TRAIN),
        max_steps=max_train_steps,
        hooks=train_hooks)

  # EvalSpec, Exporters, and Hooks.
  if input_generator_eval is not None:
    exporters = None
    if create_exporters_fn is not None:
      exporters = create_exporters_fn(t2r_model, export_generator)
    eval_hooks = _build_hooks(eval_hook_builders)
    eval_spec = gin_configurable_eval_spec(
        input_fn=input_generator_eval.create_dataset_input_fn(
            mode=tf.estimator.ModeKeys.EVAL),
        steps=eval_steps,
        throttle_secs=eval_throttle_secs,
        exporters=exporters,
        hooks=eval_hooks)
    # If the eval spec has a name, we create a custom output dir so that the
    # metrics coincide with the summaries. Note, this is useful when
    # launching several separate evaluation processes.
    if eval_spec.name is not None:
      params['eval_name'] = 'eval_{}'.format(eval_spec.name)

  logging.info('gin operative configuration:')
  logging.info(gin.operative_config_str())

  if train_spec is not None and eval_spec is not None:
    tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
  elif train_spec is not None:
    estimator.train(
        input_fn=train_spec.input_fn,
        hooks=train_spec.hooks,
        max_steps=train_spec.max_steps)
  elif eval_spec is not None:
    if not use_continuous_eval:
      estimator.evaluate(
          input_fn=eval_spec.input_fn, steps=eval_steps, name=eval_spec.name)
      return

    # This will start with the latest checkpoint and wait afterwards for a new
    # checkpoint for the next evaluation.
    for checkpoint_path in tf.contrib.training.checkpoints_iterator(
        estimator.model_dir):
      eval_result = estimator.evaluate(
          input_fn=eval_spec.input_fn,
          checkpoint_path=checkpoint_path,
          steps=eval_steps,
          name=eval_spec.name)
      if eval_spec.exporters:
        for exporter in eval_spec.exporters:
          export_path = os.path.join(estimator.model_dir, exporter.name)
          if eval_spec.name is not None:
            export_path = os.path.join(estimator.model_dir, 'eval_{}'.format(
                eval_spec.name), exporter.name)
          exporter.export(
              estimator=estimator,
              export_path=export_path,
              checkpoint_path=checkpoint_path,
              eval_result=eval_result,
              is_the_final_export=True)
  else:
    raise ValueError('Neither train nor eval was provided.')
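
To tie the examples together, a hedged end-to-end sketch that reuses the mock
utilities from the tests above; `mocks.MockT2RModel` is an assumption,
inferred from the `mock_t2r_model` used there:

# Hypothetical end-to-end call; MockT2RModel is assumed to live in the same
# mocks module as the MockInputGenerator used in the tests above.
train_eval_model(
    t2r_model=mocks.MockT2RModel(),
    input_generator_train=mocks.MockInputGenerator(batch_size=8),
    input_generator_eval=mocks.MockInputGenerator(batch_size=8),
    max_train_steps=100,
    eval_steps=10,
    create_exporters_fn=create_default_exporters)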