Example no. 1
    def persist(self, file_name: Text,
                model_dir: Text) -> Optional[Dict[Text, Any]]:
        """Persist this model into the passed directory.

        Returns the metadata necessary to load the model again.
        """

        file_name = file_name + ".pkl"

        if self.vectorizers:
            # vectorizer instance was not None, some models could have been trained
            attribute_vocabularies = self._collect_vectorizer_vocabularies()
            if self._is_any_model_trained(attribute_vocabularies):
                # Definitely need to persist some vocabularies
                featurizer_file = os.path.join(model_dir, file_name)

                if self.use_shared_vocab:
                    # Only persist vocabulary from one attribute. Can be loaded and
                    # distributed to all attributes.
                    vocab = attribute_vocabularies[TEXT]
                else:
                    vocab = attribute_vocabularies

                io_utils.json_pickle(featurizer_file, vocab)

        return {"file": file_name}
Example no. 2
    def persist(self) -> None:
        """Persist this model into the passed directory.

        Returns the metadata necessary to load the model again.
        """
        if not self.vectorizers:
            return

        with self._model_storage.write_to(self._resource) as model_dir:
            # vectorizer instance was not None, some models could have been trained
            attribute_vocabularies = self._collect_vectorizer_vocabularies()
            if self._is_any_model_trained(attribute_vocabularies):
                # Definitely need to persist some vocabularies
                featurizer_file = model_dir / "vocabularies.pkl"

                if self.use_shared_vocab:
                    # Only persist vocabulary from one attribute. Can be loaded and
                    # distributed to all attributes.
                    vocab = attribute_vocabularies[TEXT]
                else:
                    vocab = attribute_vocabularies

                io_utils.json_pickle(featurizer_file, vocab)

                # Dump OOV words separately as they might have been modified during
                # training
                rasa.shared.utils.io.dump_obj_as_json_to_file(
                    model_dir / "oov_words.json", self.OOV_words)
Example no. 3
    def persist(self, file_name: Text, model_dir: Text) -> Dict[Text, Any]:
        """Persist this model into the passed directory.

        Return the metadata necessary to load the model again.
        """

        if self.model is None:
            return {"file": None}

        model_dir = Path(model_dir)
        tf_model_file = model_dir / f"{file_name}.tf_model"

        io_utils.create_directory_for_file(tf_model_file)

        self.model.save(str(tf_model_file))

        io_utils.pickle_dump(model_dir / f"{file_name}.data_example.pkl",
                             self.data_example)
        io_utils.pickle_dump(model_dir / f"{file_name}.label_data.pkl",
                             self._label_data)
        io_utils.json_pickle(
            model_dir / f"{file_name}.index_label_id_mapping.pkl",
            self.index_label_id_mapping,
        )
        io_utils.json_pickle(
            model_dir / f"{file_name}.index_tag_id_mapping.pkl",
            self.index_tag_id_mapping,
        )

        return {"file": file_name}
Example no. 4
    def persist(self, path: Union[Text, Path]) -> None:
        """Persists the policy to a storage."""
        if self.model is None:
            logger.debug(
                "Method `persist(...)` was called without a trained model present. "
                "Nothing to persist then!")
            return

        model_path = Path(path)
        tf_model_file = model_path / f"{SAVE_MODEL_FILE_NAME}.tf_model"

        rasa.shared.utils.io.create_directory_for_file(tf_model_file)

        self.featurizer.persist(path)

        if self.model.checkpoint_model:
            self.model.copy_best(str(tf_model_file))
        else:
            self.model.save(str(tf_model_file))

        io_utils.json_pickle(
            model_path / f"{SAVE_MODEL_FILE_NAME}.priority.pkl", self.priority)
        io_utils.pickle_dump(model_path / f"{SAVE_MODEL_FILE_NAME}.meta.pkl",
                             self.config)
        io_utils.pickle_dump(
            model_path / f"{SAVE_MODEL_FILE_NAME}.data_example.pkl",
            self.data_example)
        io_utils.pickle_dump(
            model_path / f"{SAVE_MODEL_FILE_NAME}.zero_state_features.pkl",
            self.zero_state_features,
        )
        io_utils.pickle_dump(
            model_path / f"{SAVE_MODEL_FILE_NAME}.label_data.pkl",
            dict(self._label_data.data),
        )
Example no. 5
    def persist_model_utilities(self, model_path: Path) -> None:
        """Persists model's utility attributes like model weights, etc.

        Args:
            model_path: Path where model is to be persisted
        """
        model_filename = self._metadata_filename()
        io_utils.json_pickle(model_path / f"{model_filename}.priority.pkl",
                             self.priority)
        io_utils.pickle_dump(model_path / f"{model_filename}.meta.pkl",
                             self.config)
        io_utils.pickle_dump(
            model_path / f"{model_filename}.data_example.pkl",
            self.data_example,
        )
        io_utils.pickle_dump(
            model_path / f"{model_filename}.fake_features.pkl",
            self.fake_features,
        )
        io_utils.pickle_dump(
            model_path / f"{model_filename}.label_data.pkl",
            dict(self._label_data.data),
        )
        entity_tag_specs = (
            [tag_spec._asdict() for tag_spec in self._entity_tag_specs]
            if self._entity_tag_specs
            else []
        )
        rasa.shared.utils.io.dump_obj_as_json_to_file(
            model_path / f"{model_filename}.entity_tag_specs.json",
            entity_tag_specs,
        )
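Since the tag specs are dumped as plain JSON rather than pickled, reading them back is a single JSON read; a small sketch, assuming rasa.shared.utils.io.read_json_file is available, that returns the raw dicts for the loader to turn back into tag-spec objects:

    # Hypothetical read-back of the entity tag specs dumped above
    # (illustration only; returns plain dicts, not reconstructed objects).
    def _load_entity_tag_specs(
        model_path: Path, model_filename: Text
    ) -> List[Dict[Text, Any]]:
        return rasa.shared.utils.io.read_json_file(
            model_path / f"{model_filename}.entity_tag_specs.json")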
Example no. 6
    def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
        """Persist this model into the passed directory."""

        class_encoder_file_name = file_name + "_class_encoder.pkl"
        if self.class_encoder:
            io_utils.json_pickle(
                os.path.join(model_dir, class_encoder_file_name), self.class_encoder
            )
        return {"class_encoder": class_encoder_file_name}
    def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
        """Persist this model into the passed directory.
        Return the metadata necessary to load the model again."""

        feature_to_idx_file = Path(model_dir) / f"{file_name}.feature_to_idx_dict.pkl"
        io_utils.json_pickle(feature_to_idx_file, self.feature_to_idx_dict)

        return {"file": file_name}
Example no. 8
    def persist(self) -> None:
        """Persist this model into the passed directory."""
        with self._model_storage.write_to(self._resource) as model_dir:
            file_name = self.__class__.__name__
            classifier_file_name = model_dir / f"{file_name}_classifier.pkl"
            encoder_file_name = model_dir / f"{file_name}_encoder.pkl"

            if self.clf and self.le:
                io_utils.json_pickle(encoder_file_name, self.le.classes_)
                io_utils.json_pickle(classifier_file_name, self.clf.best_estimator_)
Example no. 9
    def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
        """Persist this model into the passed directory."""

        classifier_file_name = file_name + "_classifier.pkl"
        encoder_file_name = file_name + "_encoder.pkl"
        if self.clf and self.le:
            io_utils.json_pickle(
                os.path.join(model_dir, encoder_file_name), self.le.classes_
            )
            io_utils.json_pickle(
                os.path.join(model_dir, classifier_file_name), self.clf.best_estimator_
            )
        return {"classifier": classifier_file_name, "encoder": encoder_file_name}
Example no. 10
    def persist(self, file_name: Text, model_dir: Text) -> Dict[Text, Any]:
        """Persist this model into the passed directory.

        Return the metadata necessary to load the model again.
        """
        if self.model is None:
            return {"file": None}

        super().persist(file_name, model_dir)

        model_dir = Path(model_dir)

        io_utils.json_pickle(
            model_dir / f"{file_name}.retrieval_intent_mapping.pkl",
            self.retrieval_intent_mapping,
        )

        return {"file": file_name}
Example no. 11
    def persist(self, path: Text) -> None:
        """Persists the policy to a storage."""

        if self.model is None:
            logger.debug("Method `persist(...)` was called "
                         "without a trained model present. "
                         "Nothing to persist then!")
            return

        model_path = Path(path)
        tf_model_file = model_path / f"{SAVE_MODEL_FILE_NAME}.tf_model"

        io_utils.create_directory_for_file(tf_model_file)

        self.featurizer.persist(path)

        self.model.save(str(tf_model_file))

        io_utils.json_pickle(
            model_path / f"{SAVE_MODEL_FILE_NAME}.priority.pkl", self.priority)
        io_utils.pickle_dump(model_path / f"{SAVE_MODEL_FILE_NAME}.meta.pkl",
                             self.config)
        io_utils.json_pickle(
            model_path / f"{SAVE_MODEL_FILE_NAME}.data_example.pkl",
            self.data_example)
        io_utils.json_pickle(
            model_path / f"{SAVE_MODEL_FILE_NAME}.label_data.pkl",
            self._label_data)
Example no. 12
    def persist(self, file_name, model_dir):
        """Persist this model into the passed directory."""
        classifier_file = os.path.join(model_dir, SENTIMENT_MODEL_FILE_NAME)
        json_pickle(classifier_file, self)
        return {"classifier_file": SENTIMENT_MODEL_FILE_NAME}