def _persist_metadata(self, path, dump_flattened_stories=False):
    # type: (Text) -> None
    """Write the ensemble's domain specification metadata to storage."""

    spec_file = os.path.join(path, 'policy_metadata.json')
    stories_file = os.path.join(path, 'stories.md')

    # ensure the target directory exists before any file is written
    utils.create_dir_for_file(spec_file)

    events = self._training_events_from_trackers(self.training_trackers)
    fingerprints = self._create_action_fingerprints(events)

    metadata = {
        "action_fingerprints": fingerprints,
        "rasa_core": rasa_core.__version__,
        "max_histories": self._max_histories(),
        "ensemble_name": self.__module__ + "." + self.__class__.__name__,
        "policy_names": [utils.module_path_from_instance(p)
                         for p in self.policies]
    }

    utils.dump_obj_as_json_to_file(spec_file, metadata)

    # flattening many stories is slow, so persisting them is opt-in
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, stories_file)
def _persist_metadata(self, path, dump_flattened_stories=False):
    # type: (Text) -> None
    """Persist the domain specification of this ensemble to storage."""

    metadata_file = os.path.join(path, 'policy_metadata.json')
    flattened_stories_file = os.path.join(path, 'stories.md')

    # the output directory may not exist yet - create it first
    utils.create_dir_for_file(metadata_file)

    trackers = self.training_trackers
    action_fingerprints = self._create_action_fingerprints(
        self._training_events_from_trackers(trackers))

    ensemble_name = "{}.{}".format(self.__module__, self.__class__.__name__)
    policy_names = [utils.module_path_from_instance(policy)
                    for policy in self.policies]

    utils.dump_obj_as_json_to_file(metadata_file, {
        "action_fingerprints": action_fingerprints,
        "rasa_core": rasa_core.__version__,
        "max_histories": self._max_histories(),
        "ensemble_name": ensemble_name,
        "policy_names": policy_names
    })

    # with large story sets, flattening takes a long time, hence the
    # flattened dump is disabled unless explicitly requested
    if dump_flattened_stories:
        training.persist_data(trackers, flattened_stories_file)
def _persist_metadata(self,
                      path: Text,
                      dump_flattened_stories: bool = False) -> None:
    """Persist the domain specification metadata for this ensemble.

    Writes ``metadata.json`` (and optionally a flattened ``stories.md``)
    into ``path``.
    """

    spec_file = os.path.join(path, 'metadata.json')
    stories_file = os.path.join(path, 'stories.md')

    # create the destination directory before writing
    utils.create_dir_for_file(spec_file)

    events = self._training_events_from_trackers(self.training_trackers)
    fingerprints = self._create_action_fingerprints(events)

    metadata = {
        "action_fingerprints": fingerprints,
        "python": ".".join(map(str, sys.version_info[:3])),
        "max_histories": self._max_histories(),
        "ensemble_name": self.__module__ + "." + self.__class__.__name__,
        "policy_names": [utils.module_path_from_instance(policy)
                         for policy in self.policies],
        "trained_at": self.date_trained
    }
    # record versions of the relevant installed packages alongside
    self._add_package_version_info(metadata)

    utils.dump_obj_as_json_to_file(spec_file, metadata)

    # dumping flattened stories is slow for large training sets, so it
    # stays disabled unless the caller asks for it
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, stories_file)