def _persist_metadata(self, path, dump_flattened_stories=False):
    # type: (Text, bool) -> None
    """Write the ensemble's metadata (and optionally the flattened
    training stories) to ``path``."""

    spec_file = os.path.join(path, 'policy_metadata.json')
    stories_file = os.path.join(path, 'stories.md')

    # the target directory may not exist yet
    utils.create_dir_for_file(spec_file)

    events = self._training_events_from_trackers(self.training_trackers)

    meta = {
        "action_fingerprints": self._create_action_fingerprints(events),
        "rasa_core": rasa_core.__version__,
        "max_histories": self._max_histories(),
        "ensemble_name": "{}.{}".format(self.__module__,
                                        self.__class__.__name__),
        "policy_names": [utils.module_path_from_instance(p)
                         for p in self.policies]
    }

    utils.dump_obj_as_json_to_file(spec_file, meta)

    # flattening every story is slow for large story sets, so callers
    # must opt in explicitly
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, stories_file)
def _persist_metadata(self, path, dump_flattened_stories=False):
    # type: (Text, bool) -> None
    """Persists the domain specification to storage.

    Writes the ensemble metadata (action fingerprints, rasa_core version,
    max histories, ensemble class path, policy module paths) to
    ``policy_metadata.json`` under ``path``.  If ``dump_flattened_stories``
    is true, also writes the training trackers to ``stories.md``.
    """

    # make sure the directory we persist to exists
    domain_spec_path = os.path.join(path, 'policy_metadata.json')
    training_data_path = os.path.join(path, 'stories.md')
    utils.create_dir_for_file(domain_spec_path)

    # fully qualified module paths of the policies in this ensemble
    policy_names = [
        utils.module_path_from_instance(p)
        for p in self.policies
    ]
    training_events = self._training_events_from_trackers(
        self.training_trackers)
    action_fingerprints = self._create_action_fingerprints(training_events)

    metadata = {
        "action_fingerprints": action_fingerprints,
        "rasa_core": rasa_core.__version__,
        "max_histories": self._max_histories(),
        "ensemble_name": self.__module__ + "." + self.__class__.__name__,
        "policy_names": policy_names
    }

    utils.dump_obj_as_json_to_file(domain_spec_path, metadata)

    # if there are lots of stories, saving flattened stories takes a long
    # time, so this is turned off by default
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, training_data_path)
def test_agent_with_model_server(tmpdir, zipped_moodbot_model,
                                 moodbot_domain, moodbot_metadata):
    """Loading an agent from a remote model server uses the served model.

    The HTTP model endpoint is mocked to return the zipped moodbot model;
    the loaded agent's fingerprint, domain and policy set are then checked
    against the fixtures.
    """
    fingerprint = 'somehash'
    model_endpoint_config = EndpointConfig.from_dict(
        {"url": 'http://server.com/model/default_core@latest'}
    )

    # mock a response that returns a zipped model
    with io.open(zipped_moodbot_model, 'rb') as f:
        responses.add(responses.GET,
                      model_endpoint_config.url,
                      # the ETag header doubles as the model fingerprint
                      headers={"ETag": fingerprint},
                      body=f.read(),
                      content_type='application/zip',
                      stream=True)
    agent = rasa_core.agent.load_from_server(
        model_server=model_endpoint_config)

    # the agent should have adopted the fingerprint sent by the server
    assert agent.fingerprint == fingerprint

    assert agent.domain.as_dict() == moodbot_domain.as_dict()

    # the loaded ensemble must contain exactly the persisted policies
    agent_policies = {utils.module_path_from_instance(p)
                      for p in agent.policy_ensemble.policies}
    moodbot_policies = set(moodbot_metadata["policy_names"])
    assert agent_policies == moodbot_policies
def _persist_metadata(self,
                      path: Text,
                      dump_flattened_stories: bool = False) -> None:
    """Write the ensemble's metadata (and optionally the flattened
    training stories) to ``path``."""

    spec_file = os.path.join(path, 'metadata.json')
    stories_file = os.path.join(path, 'stories.md')

    # the target directory may not exist yet
    utils.create_dir_for_file(spec_file)

    events = self._training_events_from_trackers(self.training_trackers)

    metadata = {
        "action_fingerprints": self._create_action_fingerprints(events),
        "python": ".".join(str(part) for part in sys.version_info[:3]),
        "max_histories": self._max_histories(),
        "ensemble_name": "{}.{}".format(self.__module__,
                                        self.__class__.__name__),
        "policy_names": [utils.module_path_from_instance(p)
                         for p in self.policies],
        "trained_at": self.date_trained
    }

    # mutates the dict in place — presumably records installed package
    # versions; see _add_package_version_info
    self._add_package_version_info(metadata)

    utils.dump_obj_as_json_to_file(spec_file, metadata)

    # flattening every story is slow for large story sets, so callers
    # must opt in explicitly
    if dump_flattened_stories:
        training.persist_data(self.training_trackers, stories_file)
async def test_agent_with_model_server_in_thread(model_server, tmpdir,
                                                 zipped_moodbot_model,
                                                 moodbot_domain,
                                                 moodbot_metadata):
    """A background model-pull keeps the agent in sync with the server.

    Configures the agent to poll ``model_server`` every 2 seconds, waits
    long enough for at least one pull, then verifies fingerprint, domain,
    policy set and that only a single model request was made.
    """
    model_endpoint_config = EndpointConfig.from_dict({
        "url": model_server.make_url('/model'),
        "wait_time_between_pulls": 2
    })

    agent = Agent()
    agent = await rasa_core.agent.load_from_server(
        agent, model_server=model_endpoint_config)

    # give the periodic pull (every 2s) time to fetch the model once
    await asyncio.sleep(3)

    assert agent.fingerprint == "somehash"
    assert agent.domain.as_dict() == moodbot_domain.as_dict()

    # the loaded ensemble must contain exactly the persisted policies
    agent_policies = {
        utils.module_path_from_instance(p)
        for p in agent.policy_ensemble.policies
    }
    moodbot_policies = set(moodbot_metadata["policy_names"])
    assert agent_policies == moodbot_policies
    # the unchanged ETag should prevent a second download
    assert model_server.app.number_of_model_requests == 1

    # stop the background pull scheduler so it doesn't leak into other tests
    jobs.kill_scheduler()
def _persist_metadata(self, path, max_history):
    # type: (Text, Optional[int]) -> None
    """Write the ensemble's policy metadata to ``path``."""

    spec_file = os.path.join(path, 'policy_metadata.json')

    # the target directory may not exist yet
    utils.create_dir_for_file(spec_file)

    # events recorded during training, if any
    events = self.training_metadata.get("events", {})

    utils.dump_obj_as_json_to_file(spec_file, {
        "action_fingerprints": self._create_action_fingerprints(events),
        "rasa_core": rasa_core.__version__,
        "max_history": max_history,
        "ensemble_name": "{}.{}".format(self.__module__,
                                        self.__class__.__name__),
        "policy_names": [utils.module_path_from_instance(p)
                         for p in self.policies]
    })
def persistence_info(self):
    """Return the data needed to restore this slot from storage."""
    info = {}
    info["type"] = utils.module_path_from_instance(self)
    info["initial_value"] = self.initial_value
    return info
def persistence_info(self):
    """Return the data needed to restore this slot from storage.

    The "type" entry is the slot class's full module path, so the right
    class can be re-instantiated on load.
    """
    return {"type": utils.module_path_from_instance(self),
            "initial_value": self.initial_value}
def persistence_info(self):
    """Return the data needed to restore this slot from storage,
    including any extra attributes listed in ``self._additional``."""
    info = {"type": utils.module_path_from_instance(self),
            "initial_value": self.initial_value}
    # later keys win on collision, matching ``{**a, **b}`` merge semantics
    info.update({name: getattr(self, name) for name in self._additional})
    return info