def persist(self, file_name: Text, model_dir: Text) -> Dict[Text, Any]:
    """Persist this model into the passed directory.

    Args:
        file_name: Prefix used for every persisted artifact.
        model_dir: Directory the artifacts are written to.

    Returns:
        The metadata necessary to load the model again.
    """
    # Nothing was trained — record that so loading can short-circuit.
    if self.model is None:
        return {"file": None}

    directory = Path(model_dir)
    tf_model_file = directory / f"{file_name}.tf_model"
    io_utils.create_directory_for_file(tf_model_file)
    self.model.save(str(tf_model_file))

    io_utils.pickle_dump(
        directory / f"{file_name}.data_example.pkl", self.data_example
    )
    io_utils.pickle_dump(
        directory / f"{file_name}.label_data.pkl", self._label_data
    )
    io_utils.json_pickle(
        directory / f"{file_name}.index_label_id_mapping.pkl",
        self.index_label_id_mapping,
    )
    io_utils.json_pickle(
        directory / f"{file_name}.index_tag_id_mapping.pkl",
        self.index_tag_id_mapping,
    )

    return {"file": file_name}
def persist(self, path: Text) -> None:
    """Persists the policy to a storage."""
    if self.model is None:
        logger.debug(
            "Method `persist(...)` was called "
            "without a trained model present. "
            "Nothing to persist then!"
        )
        return

    model_path = Path(path)
    tf_model_file = model_path / f"{SAVE_MODEL_FILE_NAME}.tf_model"
    io_utils.create_directory_for_file(tf_model_file)

    # Persist featurizer state alongside the TF model itself.
    self.featurizer.persist(path)
    self.model.save(str(tf_model_file))

    io_utils.json_pickle(
        model_path / f"{SAVE_MODEL_FILE_NAME}.priority.pkl", self.priority
    )
    io_utils.pickle_dump(
        model_path / f"{SAVE_MODEL_FILE_NAME}.meta.pkl", self.config
    )
    io_utils.json_pickle(
        model_path / f"{SAVE_MODEL_FILE_NAME}.data_example.pkl", self.data_example
    )
    io_utils.json_pickle(
        model_path / f"{SAVE_MODEL_FILE_NAME}.label_data.pkl", self._label_data
    )
def persist(self, file_name: Text, model_dir: Text) -> Dict[Text, Any]:
    """Save the gensim model and its hash-embedding table into ``model_dir``.

    Returns:
        Metadata (the file-name prefix) needed to load the model again.
    """
    gensim_model_path = os.path.join(model_dir, f'{file_name}.model')
    hash_embedding_path = os.path.join(
        model_dir, f'{file_name}.hash_embedding.pkl'
    )

    self.model.save(gensim_model_path)
    io_utils.pickle_dump(hash_embedding_path, self.hash_embedding)

    return {"file": file_name}
def persist(self, file_name: Text, model_dir: Text) -> Dict[Text, Any]:
    """Save the Keras model and vocabulary into ``model_dir``.

    Args:
        file_name: Prefix used for the persisted artifact names.
        model_dir: Directory the artifacts are written to.

    Returns:
        Metadata (the file-name prefix) needed to load the model again.
    """
    tf_model_path = os.path.join(model_dir, f'{file_name}.tf_model')
    vocab_path = os.path.join(model_dir, f'{file_name}.vocab.pkl')

    # exist_ok avoids a FileExistsError when persisting again to the
    # same directory (the original bare makedirs would crash).
    os.makedirs(tf_model_path, exist_ok=True)
    tf.keras.models.save_model(self.model, tf_model_path)
    io_utils.pickle_dump(vocab_path, self.vocab)

    return {"file": file_name}
def persist_model_utilities(self, model_path: Path) -> None:
    """Persists model's utility attributes like model weights, etc.

    Args:
        model_path: Path where model is to be persisted
    """
    # Let the base class write its own utilities first.
    super().persist_model_utilities(model_path)

    quantiles_file = model_path / f"{self._metadata_filename()}.label_quantiles.pkl"
    io_utils.pickle_dump(quantiles_file, self.label_quantiles)
def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
    """Pickle the canonical forms into ``model_dir`` and return load metadata."""
    from rasa.utils.io import pickle_dump

    pickled_file_name = file_name + ".pickle"
    target_path = os.path.join(model_dir, pickled_file_name)
    pickle_dump(target_path, {"canonicals": self.canonicals})

    return {"file": pickled_file_name}
def persist(self, path: Union[Text, Path]) -> None:
    """Persists the policy to a storage."""
    if self.model is None:
        logger.debug(
            "Method `persist(...)` was called without a trained model present. "
            "Nothing to persist then!"
        )
        return

    model_path = Path(path)
    tf_model_file = model_path / f"{SAVE_MODEL_FILE_NAME}.tf_model"
    rasa.shared.utils.io.create_directory_for_file(tf_model_file)

    self.featurizer.persist(path)

    # When checkpointing was enabled, persist the best checkpoint instead of
    # the final weights.
    if self.model.checkpoint_model:
        self.model.copy_best(str(tf_model_file))
    else:
        self.model.save(str(tf_model_file))

    io_utils.json_pickle(
        model_path / f"{SAVE_MODEL_FILE_NAME}.priority.pkl", self.priority
    )
    io_utils.pickle_dump(
        model_path / f"{SAVE_MODEL_FILE_NAME}.meta.pkl", self.config
    )
    io_utils.pickle_dump(
        model_path / f"{SAVE_MODEL_FILE_NAME}.data_example.pkl", self.data_example
    )
    io_utils.pickle_dump(
        model_path / f"{SAVE_MODEL_FILE_NAME}.zero_state_features.pkl",
        self.zero_state_features,
    )
    io_utils.pickle_dump(
        model_path / f"{SAVE_MODEL_FILE_NAME}.label_data.pkl",
        dict(self._label_data.data),
    )
def persist_model_utilities(self, model_path: Path) -> None:
    """Persists model's utility attributes like model weights, etc.

    Args:
        model_path: Path where model is to be persisted
    """
    prefix = self._metadata_filename()

    io_utils.json_pickle(model_path / f"{prefix}.priority.pkl", self.priority)
    io_utils.pickle_dump(model_path / f"{prefix}.meta.pkl", self.config)
    io_utils.pickle_dump(
        model_path / f"{prefix}.data_example.pkl", self.data_example
    )
    io_utils.pickle_dump(
        model_path / f"{prefix}.fake_features.pkl", self.fake_features
    )
    io_utils.pickle_dump(
        model_path / f"{prefix}.label_data.pkl", dict(self._label_data.data)
    )

    # Entity tag specs are NamedTuples; serialize them as plain dicts so the
    # JSON dump stays readable and loadable without the original class.
    if self._entity_tag_specs:
        entity_tag_specs = [spec._asdict() for spec in self._entity_tag_specs]
    else:
        entity_tag_specs = []
    rasa.shared.utils.io.dump_obj_as_json_to_file(
        model_path / f"{prefix}.entity_tag_specs.json", entity_tag_specs
    )
def persist(self, path: Text) -> None:
    """Persist featurizer, metadata, model state and zero-state features."""
    if not self.model:
        rasa.shared.utils.io.raise_warning(
            "Persist called without a trained model present. "
            "Nothing to persist then!"
        )
        return

    self.featurizer.persist(path)

    io_utils.dump_obj_as_json_to_file(
        os.path.join(path, "sklearn_policy.json"), {"priority": self.priority}
    )
    io_utils.pickle_dump(os.path.join(path, "sklearn_model.pkl"), self._state)
    io_utils.pickle_dump(
        os.path.join(path, "zero_state_features.pkl"), self.zero_state_features
    )
def persist(self, path: Union[Text, Path]) -> None:
    """Persists the policy properties (see parent class for more information).

    Args:
        path: Directory the policy artifacts are written to.
    """
    if self.model:
        self.featurizer.persist(path)

        path = Path(path)
        meta = {"priority": self.priority}
        meta_file = path / "sklearn_policy.json"
        rasa.shared.utils.io.dump_obj_as_json_to_file(meta_file, meta)

        # Consistency fix: the original mixed `rasa.utils.io.pickle_dump` and
        # `io_utils.pickle_dump` in the same body; use the `io_utils` alias
        # (already used below) for both pickle calls.
        filename = path / "sklearn_model.pkl"
        io_utils.pickle_dump(filename, self._state)

        zero_features_filename = path / "zero_state_features.pkl"
        io_utils.pickle_dump(zero_features_filename, self.zero_state_features)
    else:
        rasa.shared.utils.io.raise_warning(
            "Persist called without a trained model present. "
            "Nothing to persist then!"
        )
def persist(self, file_name: Text, model_dir: Text) -> Dict[Text, Any]:
    """Persist the Keras model and its lookup tables into ``model_dir``.

    Args:
        file_name: Prefix used for every persisted artifact name.
        model_dir: Directory the artifacts are written to.

    Returns:
        Metadata (the file-name prefix) needed to load the model again.
    """
    tf_model_path = os.path.join(model_dir, f'{file_name}.tf_model')
    # exist_ok avoids a FileExistsError when persisting again to the
    # same directory (the original bare makedirs would crash).
    os.makedirs(tf_model_path, exist_ok=True)
    tf.keras.models.save_model(self.model, tf_model_path)

    io_utils.pickle_dump(
        os.path.join(model_dir, f'{file_name}.char_to_idx.pkl'),
        self.char_to_idx,
    )
    io_utils.pickle_dump(
        os.path.join(model_dir, f'{file_name}.index_label_id_mapping.pkl'),
        self.index_label_id_mapping,
    )
    # Fixed: the original name contained a stray space
    # ("index_tag_id_mapping .pkl") — almost certainly a typo that would
    # break any loader expecting the standard name.
    io_utils.pickle_dump(
        os.path.join(model_dir, f'{file_name}.index_tag_id_mapping.pkl'),
        self.index_tag_id_mapping,
    )

    return {"file": file_name}
def persist(self, file_name, model_dir):
    """Persist this model into the passed directory.

    Note: the whole component (``self``) is pickled under a fixed name;
    the ``file_name`` argument is not used here.
    """
    target = os.path.join(model_dir, SENTIMENT_MODEL_FILE_NAME)
    utils_io.pickle_dump(target, self)
    return {"classifier_file": SENTIMENT_MODEL_FILE_NAME}