def persist(self, path: Text) -> None:
    """Persists the trained Keras model, its metadata and the TF config.

    Warns and does nothing if no model has been trained yet.
    """
    if not self.model:
        # nothing trained yet — persisting would write an empty shell
        warnings.warn("Method `persist(...)` was called "
                      "without a trained model present. "
                      "Nothing to persist then!")
        return

    self.featurizer.persist(path)

    meta = {
        "priority": self.priority,
        "model": "keras_model.h5",
        "epochs": self.current_epoch,
    }
    utils.dump_obj_as_json_to_file(
        os.path.join(path, "keras_policy.json"), meta
    )

    model_file = os.path.join(path, meta["model"])
    # makes sure the model directory exists
    utils.create_dir_for_file(model_file)
    # the model must be saved under the graph/session it was trained in
    with self.graph.as_default(), self.session.as_default():
        self.model.save(model_file, overwrite=True)

    tf_config_file = os.path.join(path, "keras_policy.tf_config.pkl")
    with open(tf_config_file, "wb") as f:
        pickle.dump(self._tf_config, f)
def _persist_metadata(
    self, path: Text, dump_flattened_stories: bool = False
) -> None:
    """Persists the domain specification to storage."""

    # make sure the directory we persist exists
    spec_file = os.path.join(path, "metadata.json")
    stories_file = os.path.join(path, "stories.md")
    utils.create_dir_for_file(spec_file)

    events = self._training_events_from_trackers(self.training_trackers)

    metadata = {
        "action_fingerprints": self._create_action_fingerprints(events),
        "python": ".".join(str(part) for part in sys.version_info[:3]),
        "max_histories": self._max_histories(),
        "ensemble_name": self.__module__ + "." + self.__class__.__name__,
        "policy_names": [
            utils.module_path_from_instance(policy)
            for policy in self.policies
        ],
        "trained_at": self.date_trained,
    }
    self._add_package_version_info(metadata)

    utils.dump_obj_as_json_to_file(spec_file, metadata)

    # if there are lots of stories, saving flattened stories takes a long
    # time, so this is turned off by default
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, stories_file)
def persist(self, path: Text) -> None:
    """Only persists the priority."""

    config_file = os.path.join(path, "mapping_policy.json")
    utils.create_dir_for_file(config_file)
    utils.dump_obj_as_json_to_file(config_file, {"priority": self.priority})
def persist_specification(self, model_path: Text) -> None:
    """Persists the domain specification to storage."""

    spec_file = os.path.join(model_path, "domain.json")
    utils.create_dir_for_file(spec_file)
    utils.dump_obj_as_json_to_file(spec_file, {"states": self.input_states})
def persist(self, path: Text) -> None:
    """Persists the policy to storage."""

    config_file = os.path.join(path, "fallback_policy.json")
    utils.create_dir_for_file(config_file)
    utils.dump_obj_as_json_to_file(config_file, {
        "priority": self.priority,
        "nlu_threshold": self.nlu_threshold,
        "core_threshold": self.core_threshold,
        "fallback_action_name": self.fallback_action_name,
    })
def persist(self, path: Text) -> None:
    """Persists the featurizer and the memorized lookup table to storage."""

    self.featurizer.persist(path)

    memorized_file = os.path.join(path, "memorized_turns.json")
    utils.create_dir_for_file(memorized_file)
    utils.dump_obj_as_json_to_file(memorized_file, {
        "priority": self.priority,
        "max_history": self.max_history,
        "lookup": self.lookup,
    })
def persist(self, path: Text) -> None:
    """Persists the policy to storage."""

    config_file = os.path.join(path, "two_stage_fallback_policy.json")
    utils.create_dir_for_file(config_file)
    utils.dump_obj_as_json_to_file(config_file, {
        "priority": self.priority,
        "nlu_threshold": self.nlu_threshold,
        "core_threshold": self.core_threshold,
        "fallback_core_action_name": self.fallback_action_name,
        "fallback_nlu_action_name": self.fallback_nlu_action_name,
        "deny_suggestion_intent_name": self.deny_suggestion_intent_name,
    })
def persist(self, path: Text) -> None:
    """Persists the featurizer to ``featurizer.json`` under `path`.

    Serializes the whole instance with jsonpickle so it can be restored
    later. Annotations added for consistency with the other ``persist``
    methods in this file; behavior is unchanged.
    """
    featurizer_file = os.path.join(path, "featurizer.json")
    utils.create_dir_for_file(featurizer_file)
    with open(featurizer_file, "w", encoding="utf-8") as f:
        # str() guards against jsonpickle returning a non-str type
        # noinspection PyTypeChecker
        f.write(str(jsonpickle.encode(self)))