def export(self, checkpoint_path=None, export_dir_base=None):
    """Exports a model as a SavedModel.

    Args:
      checkpoint_path: The checkpoint path to export. If ``None``, the latest
        checkpoint in the model directory is used. If it is a directory, the
        latest checkpoint inside it is resolved.
      export_dir_base: The base directory in which a timestamped subdirectory
        containing the exported model will be created. Defaults to
        ``$MODEL_DIR/export/manual``.

    Returns:
      The string path to the exported directory.
    """
    estimator = self._make_estimator()
    # A directory argument means "export the latest checkpoint in there".
    if checkpoint_path is not None and tf.gfile.IsDirectory(checkpoint_path):
        checkpoint_path = tf.train.latest_checkpoint(checkpoint_path)
    if export_dir_base is None:
        export_dir_base = os.path.join(estimator.model_dir, "export", "manual")
    # export_saved_model is the non-deprecated name in newer TensorFlow
    # releases; fall back to export_savedmodel on older versions.
    if hasattr(estimator, "export_saved_model"):
        export_fn = estimator.export_saved_model
    else:
        export_fn = estimator.export_savedmodel
    kwargs = {}
    # Inspect the *selected* export function: export_saved_model does not
    # accept strip_default_attrs, so probing export_savedmodel here could
    # lead to passing an unsupported keyword argument.
    if "strip_default_attrs" in fn_args(export_fn):
        # Set strip_default_attrs to True for TensorFlow 1.6+ to stay
        # consistent with the behavior of tf.estimator.Exporter.
        kwargs["strip_default_attrs"] = True
    return export_fn(
        export_dir_base,
        estimator_util.make_serving_input_fn(
            self._model, metadata=self._config["data"]),
        assets_extra=self._get_model_assets(),
        checkpoint_path=checkpoint_path,
        **kwargs)
def serving_input_fn(self, metadata=None):
    """Returns the serving input function of this model.

    Args:
      metadata: A dictionary containing additional metadata set by the user.
        Required if ``Model.initialize()`` has not been called.

    Returns:
      A callable that returns a ``tf.estimator.export.ServingInputReceiver``.
    """
    needs_initialization = metadata is not None
    if needs_initialization:
        self.initialize(metadata)
    return estimator.make_serving_input_fn(self)
def _build_eval_spec(self):
    """Builds the ``tf.estimator.EvalSpec`` from the run configuration."""
    eval_config = self._config["eval"]
    data_config = self._config["data"]
    # Input pipeline for the evaluation data.
    input_fn = estimator_util.make_input_fn(
        self._model,
        tf.estimator.ModeKeys.EVAL,
        eval_config["batch_size"],
        features_file=data_config["eval_features_file"],
        labels_file=data_config.get("eval_labels_file"),
        num_threads=eval_config.get("num_threads"),
        prefetch_buffer_size=eval_config.get("prefetch_buffer_size"))
    # Exporters invoked after each evaluation.
    exporters = _make_exporters(
        eval_config["exporters"],
        estimator_util.make_serving_input_fn(self._model),
        assets_extra=self._get_model_assets())
    return tf.estimator.EvalSpec(
        input_fn=input_fn,
        steps=None,
        exporters=exporters,
        throttle_secs=eval_config["eval_delay"])
def export(self, checkpoint_path=None, export_dir_base=None):
    """Exports a model as a SavedModel.

    Args:
      checkpoint_path: The checkpoint path to export. If ``None``, the latest
        checkpoint in the model directory is used. If it is a directory, the
        latest checkpoint inside it is resolved.
      export_dir_base: The base directory in which a timestamped subdirectory
        containing the exported model will be created. Defaults to
        ``$MODEL_DIR/export/manual``.

    Returns:
      The string path to the exported directory.
    """
    estimator = self._make_estimator()
    # A directory argument means "export the latest checkpoint in there".
    if checkpoint_path is not None and tf.gfile.IsDirectory(checkpoint_path):
        checkpoint_path = tf.train.latest_checkpoint(checkpoint_path)
    if export_dir_base is None:
        export_dir_base = os.path.join(estimator.model_dir, "export", "manual")
    # export_saved_model is the non-deprecated name in newer TensorFlow
    # releases; fall back to export_savedmodel on older versions.
    if hasattr(estimator, "export_saved_model"):
        export_fn = estimator.export_saved_model
    else:
        export_fn = estimator.export_savedmodel
    kwargs = {}
    # Inspect the *selected* export function: export_saved_model does not
    # accept strip_default_attrs, so probing export_savedmodel here could
    # lead to passing an unsupported keyword argument.
    if "strip_default_attrs" in fn_args(export_fn):
        # Set strip_default_attrs to True for TensorFlow 1.6+ to stay
        # consistent with the behavior of tf.estimator.Exporter.
        kwargs["strip_default_attrs"] = True
    # This is a hack for SequenceRecordInputter that currently infers the
    # input depth from the data files.
    # TODO: This method should not require the training data.
    data_config = self._config["data"]
    if "train_features_file" in data_config:
        # Bug fix: `model` was an undefined name here; use the runner's
        # own model instance.
        _ = self._model.features_inputter.make_dataset(
            data_config["train_features_file"])
    return export_fn(
        export_dir_base,
        estimator_util.make_serving_input_fn(self._model),
        assets_extra=self._get_model_assets(),
        checkpoint_path=checkpoint_path,
        **kwargs)