async def test_persist_and_read_test_story(tmpdir, default_domain):
    graph = await training.extract_story_graph(
        "data/test_stories/stories.md", default_domain
    )
    out_path = tmpdir.join("persisted_story.md")

    # dump the story graph to disk and read it back in
    Story(graph.story_steps).dump_to_file(out_path.strpath)

    recovered_trackers = await training.load_data(
        out_path.strpath,
        default_domain,
        use_story_concatenation=False,
        tracker_limit=1000,
        remove_duplicates=False,
    )
    existing_trackers = await training.load_data(
        "data/test_stories/stories.md",
        default_domain,
        use_story_concatenation=False,
        tracker_limit=1000,
        remove_duplicates=False,
    )

    # every recovered story must match one of the original stories exactly
    existing_stories = {t.export_stories() for t in existing_trackers}
    for t in recovered_trackers:
        story_str = t.export_stories()
        assert story_str in existing_stories
        existing_stories.discard(story_str)
def export_stories(self, e2e=False) -> Text:
    """Dump the tracker as a story in the Rasa Core story format.

    Returns the dumped tracker as a string.
    """
    from rasa.core.training.structures import Story

    story = Story.from_events(self.applied_events(), self.sender_id)
    return story.as_story_string(flat=True, e2e=e2e)
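# Usage sketch (illustrative, not part of the snippets above): assuming the
# Rasa 1.x module layout used here, a hypothetical tracker can be dumped as a
# flat Markdown story via export_stories(). The domain file, sender id, and
# events below are made up for the example.
from rasa.core.domain import Domain
from rasa.core.events import ActionExecuted, UserUttered
from rasa.core.trackers import DialogueStateTracker

domain = Domain.load("domain.yml")  # hypothetical domain file
tracker = DialogueStateTracker("example-sender", domain.slots)
tracker.update(UserUttered("hello", intent={"name": "greet", "confidence": 1.0}))
tracker.update(ActionExecuted("utter_greet"))

# prints a flat story: a "## example-sender" header, the user turn ("* greet"),
# and the bot turn as an indented action line
print(tracker.export_stories())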
async def get_conversation_tracker_impl(
    request: Request, conversation_id: Text, user: Dict[Text, Any] = None
):
    """Return the tracker of a conversation, as JSON or as a Markdown story
    depending on the Accept header."""
    event_service = _event_service(request)

    if not _has_access_to_conversation(event_service, conversation_id, user):
        return rasa_x_utils.error(
            HTTPStatus.UNAUTHORIZED, "NoPermission", "Access denied"
        )

    # optional query parameters that narrow down the returned tracker
    until_time = rasa_x_utils.float_arg(request, "until", None)
    since_time = rasa_x_utils.float_arg(request, "since", None)
    rasa_environment_query = rasa_x_utils.default_arg(
        request, "rasa_environment", DEFAULT_RASA_ENVIRONMENT
    )
    event_verbosity = _event_verbosity_from_request(request)
    exclude_leading_action_session_start = rasa_x_utils.bool_arg(
        request, "exclude_leading_action_session_start", False
    )

    tracker = event_service.get_tracker_with_message_flags(
        conversation_id,
        until_time,
        since_time,
        event_verbosity,
        rasa_environment_query,
        exclude_leading_action_session_start,
    )

    if not tracker:
        return rasa_x_utils.error(
            HTTPStatus.NOT_FOUND,
            "ClientNotFound",
            f"Client for conversation_id '{conversation_id}' could not be found",
        )

    requested_format = request.headers.get("Accept")

    if requested_format == "application/json":
        # downloadable JSON dump of the tracker
        dispo = f"attachment;filename={conversation_id}-dump.json"
        return response.json(
            tracker,
            content_type="application/json",
            headers={"Content-Disposition": dispo},
        )
    elif requested_format == "text/markdown":
        # convert the tracker events into a flat Markdown story
        _events = events.deserialise_events(tracker["events"])
        story = Story.from_events(_events)
        exported = story.as_story_string(flat=True)
        return response.text(
            exported,
            content_type="text/markdown",
            headers={
                "Content-Disposition": f"attachment;filename={conversation_id}-story.md"
            },
        )
    else:
        return response.json(tracker, headers={"Content-Disposition": "inline"})
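# Client-side sketch for the handler above (the base URL and route are
# hypothetical; only the Accept header values come from the handler):
import requests

BASE = "http://localhost:5002/api"  # hypothetical Rasa X base URL
conversation_id = "example-conversation"

# JSON dump of the tracker (served as an "...-dump.json" attachment)
tracker_json = requests.get(
    f"{BASE}/conversations/{conversation_id}",  # hypothetical route
    headers={"Accept": "application/json"},
).json()

# Markdown story export (served as an "...-story.md" attachment)
story_markdown = requests.get(
    f"{BASE}/conversations/{conversation_id}",
    headers={"Accept": "text/markdown"},
).text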
def as_story(self, include_source: bool = False) -> "Story":
    """Dump the tracker as a story in the Rasa Core story format.

    Returns the dumped tracker as a story object.
    """
    from rasa.core.training.structures import Story

    story_name = (
        f"{self.sender_id} ({self.sender_source})"
        if include_source
        else self.sender_id
    )
    return Story.from_events(self.applied_events(), story_name)
async def _write_stories_to_file(
    export_story_path: Text, evts: List[Dict[Text, Any]]
) -> None:
    """Write the conversation of the sender_id to the story file."""
    sub_conversations = _split_conversation_at_restarts(evts)

    with open(export_story_path, "a", encoding="utf-8") as f:
        for conversation in sub_conversations:
            parsed_events = events.deserialise_events(conversation)
            s = Story.from_events(parsed_events)
            f.write(s.as_story_string(flat=True) + "\n")
def export_stories(self, e2e: bool = False, include_source: bool = False) -> Text:
    """Dump the tracker as a story in the Rasa Core story format.

    Returns the dumped tracker as a string.
    """
    from rasa.core.training.structures import Story

    story_name = (
        f"{self.sender_id} ({self.sender_source})"
        if include_source
        else self.sender_id
    )
    story = Story.from_events(self.applied_events(), story_name)
    return story.as_story_string(flat=True, e2e=e2e)
async def _write_stories_to_file(
    export_story_path: Text, events: List[Dict[Text, Any]]
) -> None:
    """Write the conversation of the sender_id to the story file."""
    sub_conversations = _split_conversation_at_restarts(events)
    create_path(export_story_path)

    if os.path.exists(export_story_path):
        append_write = "a"  # append if the file already exists
    else:
        append_write = "w"  # otherwise create a new file

    with open(export_story_path, append_write, encoding="utf-8") as f:
        for conversation in sub_conversations:
            parsed_events = rasa.core.events.deserialise_events(conversation)
            s = Story.from_events(parsed_events)
            f.write("\n" + s.as_story_string(flat=True))
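# Usage sketch for the helper above (calling the private coroutine directly is
# for illustration only; the event dictionaries are an assumption based on
# Rasa's standard event serialisation with an "event" type key):
import asyncio

serialised_events = [
    {"event": "action", "name": "action_listen"},
    {
        "event": "user",
        "text": "hello",
        "parse_data": {"intent": {"name": "greet", "confidence": 1.0}, "entities": []},
    },
    {"event": "action", "name": "utter_greet"},
]

asyncio.run(_write_stories_to_file("exported_stories.md", serialised_events))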
def test_session_start_is_not_serialised(default_domain: Domain):
    tracker = DialogueStateTracker("default", default_domain.slots)

    # the new tracker should be empty
    assert len(tracker.events) == 0

    # add a SlotSet event
    tracker.update(SlotSet("slot", "value"))

    # add the session start sequence (action_session_start + session_started)
    # and a user event
    tracker.update(ActionExecuted(ACTION_SESSION_START_NAME))
    tracker.update(SessionStarted())
    tracker.update(UserUttered("say something"))

    # make sure the session start events are not serialised
    story = Story.from_events(tracker.events, "some-story01")

    expected = """## some-story01
    - slot{"slot": "value"}
* say something
"""

    assert story.as_story_string(flat=True) == expected