def _persist_metadata(self, path, dump_flattened_stories=False):
    # type: (Text, bool) -> None
    """Persists the domain specification to storage."""

    # make sure the directory we persist to exists
    domain_spec_path = os.path.join(path, 'policy_metadata.json')
    training_data_path = os.path.join(path, 'stories.md')
    utils.create_dir_for_file(domain_spec_path)

    policy_names = [utils.module_path_from_instance(p)
                    for p in self.policies]
    training_events = self._training_events_from_trackers(
        self.training_trackers)
    action_fingerprints = self._create_action_fingerprints(training_events)

    metadata = {
        "action_fingerprints": action_fingerprints,
        "rasa_core": rasa_core.__version__,
        "max_histories": self._max_histories(),
        "ensemble_name": self.__module__ + "." + self.__class__.__name__,
        "policy_names": policy_names
    }

    utils.dump_obj_as_json_to_file(domain_spec_path, metadata)

    # if there are lots of stories, saving flattened stories takes a long
    # time, so this is turned off by default
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, training_data_path)
def _persist_metadata(self,
                      path: Text,
                      dump_flattened_stories: bool = False) -> None:
    """Persists the domain specification to storage."""

    # make sure the directory we persist to exists
    domain_spec_path = os.path.join(path, 'metadata.json')
    training_data_path = os.path.join(path, 'stories.md')
    utils.create_dir_for_file(domain_spec_path)

    policy_names = [utils.module_path_from_instance(p)
                    for p in self.policies]
    training_events = self._training_events_from_trackers(
        self.training_trackers)
    action_fingerprints = self._create_action_fingerprints(training_events)

    metadata = {
        "action_fingerprints": action_fingerprints,
        "python": ".".join([str(s) for s in sys.version_info[:3]]),
        "max_histories": self._max_histories(),
        "ensemble_name": self.__module__ + "." + self.__class__.__name__,
        "policy_names": policy_names,
        "trained_at": self.date_trained
    }

    self._add_package_version_info(metadata)

    utils.dump_obj_as_json_to_file(domain_spec_path, metadata)

    # if there are lots of stories, saving flattened stories takes a long
    # time, so this is turned off by default
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, training_data_path)
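# Hypothetical sketch of what a helper like _add_package_version_info could
# do: record the versions of a few key dependencies in the metadata dict,
# skipping packages that are not installed. The package list and key names
# here are assumptions for illustration, not the library's actual
# implementation.
def _add_package_version_info(self, metadata):
    # type: (Dict[Text, Any]) -> None
    import importlib

    for package_name in ["rasa_core", "tensorflow", "sklearn"]:  # assumed list
        try:
            package = importlib.import_module(package_name)
            metadata[package_name] = getattr(package, "__version__", "unknown")
        except ImportError:
            # optional dependency not installed, nothing to record
            pass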
def _persist_metadata(self, path, dump_flattened_stories=False):
    # type: (Text, bool) -> None
    """Persists the domain specification to storage."""

    # make sure the directory we persist to exists
    domain_spec_path = os.path.join(path, 'policy_metadata.json')
    training_data_path = os.path.join(path, 'stories.md')
    utils.create_dir_for_file(domain_spec_path)

    policy_names = [
        utils.module_path_from_instance(p) for p in self.policies
    ]
    training_events = self._training_events_from_trackers(
        self.training_trackers)
    action_fingerprints = self._create_action_fingerprints(training_events)

    metadata = {
        "action_fingerprints": action_fingerprints,
        "rasa_core": rasa_core.__version__,
        "max_histories": self._max_histories(),
        "ensemble_name": self.__module__ + "." + self.__class__.__name__,
        "policy_names": policy_names
    }

    utils.dump_obj_as_json_to_file(domain_spec_path, metadata)

    # if there are lots of stories, saving flattened stories takes a long
    # time, so this is turned off by default
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, training_data_path)
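# Minimal illustrative sketch (not the ensemble's actual load logic) of
# reading the persisted metadata back; the function name is made up for this
# example.
def read_policy_metadata(path):
    # type: (Text) -> Dict[Text, Any]
    domain_spec_path = os.path.join(path, 'policy_metadata.json')
    with io.open(domain_spec_path, 'r', encoding='utf-8') as f:
        metadata = json.load(f)
    # metadata["policy_names"] holds the module paths recorded above
    return metadata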
def persist(self, path: Text) -> None:
    if self.model:
        self.featurizer.persist(path)

        meta = {
            "priority": self.priority,
            "model": "keras_model.h5",
            "epochs": self.current_epoch
        }

        meta_file = os.path.join(path, 'keras_policy.json')
        utils.dump_obj_as_json_to_file(meta_file, meta)

        model_file = os.path.join(path, meta['model'])
        # makes sure the model directory exists
        utils.create_dir_for_file(model_file)
        with self.graph.as_default(), self.session.as_default():
            self.model.save(model_file, overwrite=True)

        tf_config_file = os.path.join(path, "keras_policy.tf_config.pkl")
        with open(tf_config_file, 'wb') as f:
            pickle.dump(self._tf_config, f)
    else:
        warnings.warn("Persist called without a trained model present. "
                      "Nothing to persist then!")
def persist_specification(self, model_path):
    # type: (Text) -> None
    """Persists the domain specification to storage."""

    domain_spec_path = os.path.join(model_path, 'domain.json')
    utils.create_dir_for_file(domain_spec_path)
    metadata = {"features": self.input_features}
    utils.dump_obj_as_json_to_file(domain_spec_path, metadata)
def persist(self, path: Text) -> None:
    self.featurizer.persist(path)

    memorized_file = os.path.join(path, 'memorized_turns.json')
    data = {"max_history": self.max_history, "lookup": self.lookup}
    utils.create_dir_for_file(memorized_file)
    utils.dump_obj_as_json_to_file(memorized_file, data)
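# Companion sketch (not the policy's actual load method) showing how the
# JSON written above could be read back; it assumes the featurizer is
# restored separately.
def load_memorized_turns(path):
    # type: (Text) -> Dict[Text, Any]
    memorized_file = os.path.join(path, 'memorized_turns.json')
    with io.open(memorized_file, 'r', encoding='utf-8') as f:
        data = json.load(f)
    # data contains the persisted "max_history" and "lookup" entries
    return data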
def persist_specification(self, model_path: Text) -> None:
    """Persists the domain specification to storage."""

    domain_spec_path = os.path.join(model_path, 'domain.json')
    utils.create_dir_for_file(domain_spec_path)
    metadata = {"states": self.input_states}
    utils.dump_obj_as_json_to_file(domain_spec_path, metadata)
def persist_specification(self, model_path):
    # type: (Text) -> None
    """Persists the domain specification to storage."""

    domain_spec_path = os.path.join(model_path, 'domain.json')
    utils.create_dir_for_file(domain_spec_path)
    metadata = {"features": self.input_features}
    with io.open(domain_spec_path, 'w') as f:
        f.write(str(json.dumps(metadata, indent=2)))
def persist(self, path: Text) -> None: """Persists the policy to storage.""" config_file = os.path.join(path, 'fallback_policy.json') meta = { "nlu_threshold": self.nlu_threshold, "core_threshold": self.core_threshold, "fallback_action_name": self.fallback_action_name } utils.create_dir_for_file(config_file) utils.dump_obj_as_json_to_file(config_file, meta)
def persist(self, path): # type: (Text) -> None """Persists the policy to storage.""" config_file = os.path.join(path, 'confirmation_policy.json') meta = { "nlu_threshold": self.nlu_threshold, "confirmation_action_name": self.confirmation_action_name } utils.create_dir_for_file(config_file) utils.dump_obj_as_json_to_file(config_file, meta)
def persist(self, path): # type: (Text) -> None """Persists the policy to storage.""" config_file = os.path.join(path, 'fallback_policy.json') meta = { "nlu_threshold": self.nlu_threshold, "core_threshold": self.core_threshold, "fallback_action_name": self.fallback_action_name } utils.create_dir_for_file(config_file) utils.dump_obj_as_json_to_file(config_file, meta)
def persist_specification(self, model_path):
    # type: (Text) -> None
    """Persists the domain specification to storage."""

    domain_spec_path = os.path.join(model_path, 'domain.json')
    utils.create_dir_for_file(domain_spec_path)
    metadata = {
        "states": self.input_states
    }
    utils.dump_obj_as_json_to_file(domain_spec_path, metadata)
def persist(self, path: Text) -> None: """Persists the policy to storage.""" config_file = os.path.join(path, "bottis_policy.json") meta = { "priority": self.priority, "nlu_threshold": self.nlu_threshold, "core_threshold": self.core_threshold, "custom_response_action_name": self.custom_response_action_name, } utils.create_dir_for_file(config_file) utils.dump_obj_as_json_to_file(config_file, meta)
def persist(self, path):
    # type: (Text) -> None
    self.featurizer.persist(path)

    memorized_file = os.path.join(path, 'memorized_turns.json')
    data = {
        "max_history": self.max_history,
        "lookup": self.lookup
    }
    utils.create_dir_for_file(memorized_file)
    utils.dump_obj_as_json_to_file(memorized_file, data)
def persist(self, path: Text) -> None: """Persists the policy to storage.""" config_file = os.path.join(path, 'two_stage_fallback_policy.json') meta = { "priority": self.priority, "nlu_threshold": self.nlu_threshold, "core_threshold": self.core_threshold, "fallback_core_action_name": self.fallback_action_name, "fallback_nlu_action_name": self.fallback_nlu_action_name, "deny_suggestion_intent_name": self.deny_suggestion_intent_name, } utils.create_dir_for_file(config_file) utils.dump_obj_as_json_to_file(config_file, meta)
def persist(self, path):
    # type: (Text) -> None
    if self.model:
        self.featurizer.persist(path)
        meta = {"model": "torch_model.h5", "epochs": self.current_epoch}

        config_file = os.path.join(path, "torch_policy.json")
        utils.dump_obj_as_json_to_file(config_file, meta)

        model_file = os.path.join(path, meta["model"])
        utils.create_dir_for_file(model_file)
        torch.save(self.model, model_file)
    else:
        warnings.warn("Persist called without a trained model present. "
                      "Nothing to persist.")
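# Hedged counterpart sketch: torch.save above stores the whole model object,
# so torch.load can restore it directly. The function name is made up for
# this example; it is not the policy's actual load method.
def load_torch_model(path):
    # type: (Text) -> Any
    config_file = os.path.join(path, "torch_policy.json")
    with io.open(config_file, 'r', encoding="utf-8") as f:
        meta = json.load(f)
    return torch.load(os.path.join(path, meta["model"]))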
def persist(self, path):
    if self.model:
        arch_file = os.path.join(path, 'keras_arch.json')
        weights_file = os.path.join(path, 'keras_weights.h5')
        config_file = os.path.join(path, 'keras_policy.json')

        # makes sure the model directory exists
        utils.create_dir_for_file(weights_file)

        utils.dump_obj_as_str_to_file(arch_file, self.model.to_json())
        self._persist_configuration(config_file)
        self.model.save_weights(weights_file, overwrite=True)
    else:
        warnings.warn("Persist called without a trained model present. "
                      "Nothing to persist then!")
def persist(self, path):
    if self.model:
        arch_file = os.path.join(path, 'keras_arch.json')
        weights_file = os.path.join(path, 'keras_weights.h5')
        utils.create_dir_for_file(weights_file)

        with io.open(arch_file, 'w') as f:
            f.write(str(self.model.to_json()))
        with io.open(os.path.join(path, 'keras_policy.json'), 'w') as f:
            f.write(str(json.dumps({
                "arch": "keras_arch.json",
                "weights": "keras_weights.h5",
                "epochs": self.current_epoch})))
        self.model.save_weights(weights_file, overwrite=True)
    else:
        warnings.warn("Persist called without a trained model present. "
                      "Nothing to persist then!")
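# Hedged counterpart sketch for the architecture/weights split used above:
# keras.models.model_from_json rebuilds the architecture and load_weights
# restores the parameters. Illustrative only, not the policy's actual load.
def load_keras_arch_and_weights(path):
    # type: (Text) -> Any
    from keras.models import model_from_json

    with io.open(os.path.join(path, 'keras_policy.json'), 'r') as f:
        meta = json.loads(f.read())
    with io.open(os.path.join(path, meta["arch"]), 'r') as f:
        model = model_from_json(f.read())
    model.load_weights(os.path.join(path, meta["weights"]))
    return model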
def _persist_metadata(self, path, max_history): # type: (Text, Optional[int]) -> None """Persists the domain specification to storage.""" domain_spec_path = os.path.join(path, 'policy_metadata.json') utils.create_dir_for_file(domain_spec_path) policy_names = [ p.__module__ + "." + p.__class__.__name__ for p in self.policies ] metadata = { "rasa_core": rasa_core.__version__, "max_history": max_history, "ensemble_name": self.__module__ + "." + self.__class__.__name__, "policy_names": policy_names } with io.open(domain_spec_path, 'w') as f: f.write(str(json.dumps(metadata, indent=2)))
def persist(self, path):
    # type: (Text) -> None
    if self.model:
        self.featurizer.persist(path)
        arch_file = os.path.join(path, 'keras_arch.json')
        weights_file = os.path.join(path, 'keras_weights.h5')
        config_file = os.path.join(path, 'keras_policy.json')

        # makes sure the model directory exists
        utils.create_dir_for_file(weights_file)

        utils.dump_obj_as_str_to_file(arch_file, self.model.to_json())
        self._persist_configuration(config_file)
        self.model.save_weights(weights_file, overwrite=True)
    else:
        warnings.warn("Persist called without a trained model present. "
                      "Nothing to persist then!")
def persist(self, path):
    # type: (Text) -> None
    if self.model:
        self.featurizer.persist(path)
        meta = {"model": "keras_model.h5", "epochs": self.current_epoch}

        config_file = os.path.join(path, 'keras_policy.json')
        utils.dump_obj_as_json_to_file(config_file, meta)

        model_file = os.path.join(path, meta['model'])
        # makes sure the model directory exists
        utils.create_dir_for_file(model_file)
        with self.graph.as_default(), self.session.as_default():
            self.model.save(model_file, overwrite=True)
    else:
        warnings.warn("Persist called without a trained model present. "
                      "Nothing to persist then!")
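# Hedged sketch of the reverse operation, assuming the TF 1.x style
# graph/session handling used by the persist method above. Illustrative only,
# not the library's actual load implementation.
def load_keras_model(path):
    # type: (Text) -> Tuple[Any, Any, Any]
    import tensorflow as tf
    from keras.models import load_model

    meta_file = os.path.join(path, 'keras_policy.json')
    with io.open(meta_file, 'r', encoding='utf-8') as f:
        meta = json.load(f)

    graph = tf.Graph()
    with graph.as_default():
        session = tf.Session()
        with session.as_default():
            model = load_model(os.path.join(path, meta['model']))
    return model, graph, session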
def _persist_metadata(self, path, max_history): # type: (Text, Optional[int]) -> None """Persists the domain specification to storage.""" # make sure the directory we persist to exists domain_spec_path = os.path.join(path, 'policy_metadata.json') utils.create_dir_for_file(domain_spec_path) policy_names = [utils.module_path_from_instance(p) for p in self.policies] training_events = self.training_metadata.get("events", {}) action_fingerprints = self._create_action_fingerprints(training_events) metadata = { "action_fingerprints": action_fingerprints, "rasa_core": rasa_core.__version__, "max_history": max_history, "ensemble_name": self.__module__ + "." + self.__class__.__name__, "policy_names": policy_names } utils.dump_obj_as_json_to_file(domain_spec_path, metadata)
def persist(self, path): featurizer_file = os.path.join(path, "featurizer.json") utils.create_dir_for_file(featurizer_file) with io.open(featurizer_file, 'w', encoding="utf-8") as f: # noinspection PyTypeChecker f.write(str(jsonpickle.encode(self)))
def persist(self, path): memorized_file = os.path.join(path, 'memorized_turns.json') data = {"lookup": self.lookup} utils.create_dir_for_file(memorized_file) utils.dump_obj_as_json_to_file(memorized_file, data)
def persist(self, path): featurizer_file = os.path.join(path, "featurizer.json") utils.create_dir_for_file(featurizer_file) with io.open(featurizer_file, 'w') as f: f.write(str(jsonpickle.encode(self)))
def persist(self, path): memorized_file = os.path.join(path, 'memorized_turns.json') data = {"lookup": self.lookup} utils.create_dir_for_file(memorized_file) with io.open(memorized_file, 'w') as f: f.write(str(json.dumps(data, indent=2)))