def save_model_with_metadata(self,
                               file_path: Text,
                               drop_duplicate_features: bool = False,
                               group_duplicate_features: bool = False):
    """Saves the model and the generated metadata to the given file path.

    Args:
      file_path: Path to save the model and the metadata. It can be a GCS bucket
        or a local folder. The folder needs to be empty.
      drop_duplicate_features: If True and there are multiple inputs for the
        same feature column, all but one of those inputs are dropped. If
        False, all inputs are kept, with a unique suffix appended to each
        input name to disambiguate them.
      group_duplicate_features: If True and there are multiple inputs for the
        same feature column, those inputs are grouped together as a single
        feature group.

    Returns:
      Full file path where the model and the metadata are written.
    """
    with self._monkey_patcher.exporting_context(self._output_key):
      model_path = self._estimator.export_saved_model(
          file_path, self._serving_input_fn, **self._save_args)

    metadata = self._create_metadata_dict(
        self._monkey_patcher.feature_tensors_dict,
        self._monkey_patcher.crossed_columns,
        [fc.name for fc in self._feature_columns],
        self._monkey_patcher.output_tensors_dict,
        drop_duplicate_features=drop_duplicate_features,
        group_duplicate_features=group_duplicate_features)
    utils.write_metadata_to_file(metadata, model_path)
    return model_path
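
For orientation, a hedged usage sketch of the method above: it is defined on a
metadata builder that wraps a tf.estimator.Estimator. The import path, class
name, and constructor arguments below are assumptions made for illustration
and are not confirmed by the snippet; only save_model_with_metadata and its
two duplicate-feature flags come from the code shown.

# Hypothetical usage; the builder construction below is an assumption.
from explainable_ai_sdk.metadata.tf.v1 import estimator_metadata_builder

builder = estimator_metadata_builder.EstimatorMetadataBuilder(  # assumed class/signature
    my_estimator,          # a trained tf.estimator.Estimator (placeholder)
    my_feature_columns,    # feature columns used by the model (placeholder)
    my_serving_input_fn)   # serving input receiver fn (placeholder)

# Export the SavedModel and write the explanation metadata next to it,
# keeping only one input per duplicated feature column.
model_path = builder.save_model_with_metadata(
    'gs://my-bucket/model_dir', drop_duplicate_features=True)
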
Example #2
    def save_model_with_metadata(self, file_path: str) -> str:
        """Saves the model and the generated metadata to the given file path.

        New metadata is not generated on each call to this function, since an
        Estimator is static. Calling this function with different paths saves
        the model and the same metadata to each of those paths.

        Args:
          file_path: Path to save the model and the metadata. It can be a GCS
            bucket or a local folder. The folder needs to be empty.

        Returns:
          Full file path where the model and the metadata are written.
        """
        monkey_patcher = monkey_patch_utils.EstimatorMonkeyPatchHelper()
        with monkey_patcher.exporting_context(self._output_key):
            model_path = self._estimator.export_saved_model(
                file_path, self._serving_input_fn, **self._save_args)

        if not self._metadata:
            self._metadata = self._create_metadata_from_tensors(
                monkey_patcher.feature_tensors_dict,
                monkey_patcher.crossed_columns,
                [fc.name for fc in self._feature_columns],
                monkey_patcher.output_tensors_dict)
        utils.write_metadata_to_file(self._metadata.to_dict(), model_path)
        return model_path
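
The caching described in the docstring can be illustrated with a short sketch;
builder below stands for an instance of the class this method belongs to (its
construction is omitted), and the export paths are placeholders:

# The first call builds the metadata and caches it on the builder; the second
# call exports the model again but re-writes the same cached metadata.
path_a = builder.save_model_with_metadata('/tmp/export_a')
path_b = builder.save_model_with_metadata('/tmp/export_b')
# path_a and path_b each contain a SavedModel plus an identical
# explanation_metadata.json file.
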
Example #3
    def save_model_with_metadata(self, file_path):
        """Saves the model and the generated metadata to the given file path.

        Args:
          file_path: Path to save the model and the metadata. It can be a GCS
            bucket or a local folder. The folder needs to be empty.

        Returns:
          Full file path where the model and the metadata are written.
        """
        md_dict = self.get_metadata()

        if not self._serving_inputs:
            self._serving_inputs = self._build_input_signature(
                self._inputs, self._session.graph)
        if not self._serving_outputs:
            self._serving_outputs = self._build_output_signature(
                self._outputs, self._session.graph)

        utils.save_graph_model(self._session, file_path, self._serving_inputs,
                               self._serving_outputs, self._tags,
                               **self._saved_model_args)

        common_utils.write_metadata_to_file(md_dict, file_path)
        return file_path
Example #4
    def test_write_metadata_to_file(self):
        sample_dict = {'a': 123, 'b': '123', 'c': {'d': 0.12}}
        temp_folder = self.create_tempdir().full_path
        utils.write_metadata_to_file(sample_dict, temp_folder)
        metadata_file_path = os.path.join(
            temp_folder, 'explanation_metadata.json')
        self.assertTrue(os.path.isfile(metadata_file_path))
        with tf.io.gfile.GFile(metadata_file_path, 'r') as f:
            written_md = json.load(f)
        self.assertEqual(sample_dict, written_md)
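
For reference, a minimal sketch of what write_metadata_to_file has to do to
satisfy this test; the real implementation may differ in details such as
validation, but the file name and JSON contents are pinned down by the
assertions above:

import json
import os

import tensorflow as tf


def write_metadata_to_file(md_dict, file_path):
    # Serialize the metadata dict as JSON into
    # <file_path>/explanation_metadata.json, which is what the test reads back.
    metadata_path = os.path.join(file_path, 'explanation_metadata.json')
    with tf.io.gfile.GFile(metadata_path, 'w') as f:
        f.write(json.dumps(md_dict))
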
Example #5
    def save_metadata(self, file_path):
        """Saves model metadata to the given folder.

        Args:
          file_path: Path to save the metadata. It can be a GCS bucket or a
            local folder. The folder needs to be empty.

        Raises:
          ValueError: If the number of outputs is greater than 1.
        """
        if len(self._outputs) > 1:
            raise ValueError("Number of outputs is more than 1.")
        utils.write_metadata_to_file(self.get_metadata(), file_path)
Example #6
    def save_model_with_metadata(self, file_path: Text):
        """Saves the model and the generated metadata to the given file path.

        Args:
          file_path: Path to save the model and the metadata. It can be a GCS
            bucket or a local folder. The folder needs to be empty.

        Returns:
          Full file path where the model and the metadata are written.
        """
        with self._monkey_patcher.exporting_context(self._output_key):
            model_path = self._estimator.export_saved_model(
                file_path, self._serving_input_fn, **self._save_args)

        metadata = self._create_metadata_dict(
            self._monkey_patcher.feature_tensors_dict,
            self._monkey_patcher.crossed_columns,
            [fc.name for fc in self._feature_columns],
            self._monkey_patcher.output_tensors_dict)
        utils.write_metadata_to_file(metadata, model_path)
        return model_path
Example #7
    def save_metadata(self, file_path: str) -> None:
        """Saves the metadata to the given folder."""
        utils.write_metadata_to_file(self.get_metadata(), file_path)