def test_slot_mapping_intent_is_desired(domain: Domain):
    """Checks `SlotMapping.intent_is_desired` against the formbot example domain.

    A mapping should accept a message only when the message's intent is
    compatible with the mapping's intent restrictions.
    """
    # NOTE(review): the `domain` fixture argument is immediately shadowed here;
    # the test deliberately loads the formbot example domain instead.
    domain = Domain.from_file("examples/formbot/domain.yml")
    tracker = DialogueStateTracker("sender_id_test", slots=domain.slots)

    # A `request_restaurant` message carrying a `number` entity is accepted
    # by the first `num_people` mapping.
    tracker.update(
        UserUttered(
            text="I'd like to book a restaurant for 2 people.",
            intent={"name": "request_restaurant", "confidence": 0.9604260921478271},
            entities=[{"entity": "number", "value": 2}],
        ),
        domain,
    )
    num_people_mappings = (
        domain.as_dict().get("slots").get("num_people").get("mappings")
    )
    assert SlotMapping.intent_is_desired(num_people_mappings[0], tracker, domain)

    # The same entity under an `affirm` intent must not satisfy that mapping.
    tracker.update(
        UserUttered(
            text="Yes, 2 please",
            intent={"name": "affirm", "confidence": 0.9604260921478271},
            entities=[{"entity": "number", "value": 2}],
        ),
        domain,
    )
    assert (
        SlotMapping.intent_is_desired(num_people_mappings[0], tracker, domain)
        is False
    )

    # An entity-less `affirm` message must not satisfy the `preferences` mapping.
    tracker.update(
        UserUttered(
            text="Yes, please",
            intent={"name": "affirm", "confidence": 0.9604260921478271},
            entities=[],
        ),
        domain,
    )
    preferences_mappings = (
        domain.as_dict().get("slots").get("preferences").get("mappings")
    )
    assert (
        SlotMapping.intent_is_desired(preferences_mappings[0], tracker, domain)
        is False
    )
async def test_agent_with_model_server_in_thread(
    model_server: TestClient,
    default_domain: Domain,
    unpacked_trained_rasa_model: Text,
):
    """An agent kept in sync with a model server picks up the served model."""
    endpoint = EndpointConfig.from_dict(
        {"url": model_server.make_url("/model"), "wait_time_between_pulls": 2}
    )
    agent = await rasa.core.agent.load_from_server(Agent(), model_server=endpoint)

    # Give the background puller a chance to fetch the model once.
    await asyncio.sleep(5)

    assert agent.fingerprint == "somehash"
    assert agent.domain.as_dict() == default_domain.as_dict()

    expected_policies = PolicyEnsemble.load_metadata(
        str(Path(unpacked_trained_rasa_model, "core"))
    )["policy_names"]
    loaded_policies = {
        rasa.shared.utils.common.module_path_from_instance(p)
        for p in agent.policy_ensemble.policies
    }
    assert loaded_policies == set(expected_policies)

    assert model_server.app.number_of_model_requests == 1

    jobs.kill_scheduler()
async def test_remote_action_runs(
    default_channel: OutputChannel,
    default_nlg: NaturalLanguageGenerator,
    default_tracker: DialogueStateTracker,
    domain: Domain,
):
    """A remote action posts the expected request body to the action server."""
    action_server_url = "https://example.com/webhooks/actions"
    remote_action = action.RemoteAction("my_action", EndpointConfig(action_server_url))

    with aioresponses() as mocked:
        mocked.post(action_server_url, payload={"events": [], "responses": []})

        await remote_action.run(default_channel, default_nlg, default_tracker, domain)

        request = latest_request(mocked, "post", action_server_url)
        assert request

        expected_body = {
            "domain": domain.as_dict(),
            "next_action": "my_action",
            "sender_id": "my-sender",
            "version": rasa.__version__,
            "tracker": {
                "latest_message": {
                    "entities": [],
                    "intent": {},
                    "text": None,
                    "message_id": None,
                    "metadata": {},
                },
                ACTIVE_LOOP: {},
                "latest_action": {},
                "latest_action_name": None,
                "sender_id": "my-sender",
                "paused": False,
                "latest_event_time": None,
                FOLLOWUP_ACTION: "action_listen",
                "slots": {
                    "name": None,
                    REQUESTED_SLOT: None,
                    SESSION_START_METADATA_SLOT: None,
                },
                "events": [],
                "latest_input_channel": None,
            },
        }
        assert json_of_latest_request(request) == expected_body
def provide(self, domain: Domain) -> Domain:
    """Recreates the given domain but acts as if responses have not been specified.

    Args:
        domain: A domain.

    Returns:
        Domain that has been created from the same parameters as the given
        domain but with an empty set of responses.
    """
    domain_as_dict = domain.as_dict()
    # `pop` without a default raises `KeyError` — same as `del` — if the
    # responses key were unexpectedly absent.
    domain_as_dict.pop(KEY_RESPONSES)
    return Domain.from_dict(domain_as_dict)
def get_mappings_for_slot(
    self, slot_to_fill: Text, domain: Domain
) -> List[Dict[Text, Any]]:
    """Fetches and validates the mappings of the requested slot.

    Args:
        slot_to_fill: Name of the slot whose mappings should be returned.
        domain: The current domain.

    Returns:
        The slot's mappings as declared in the domain.

    Raises:
        TypeError: If any mapping is not a dict or lacks a `type` key.
    """
    slot_mappings = (
        domain.as_dict().get(KEY_SLOTS).get(slot_to_fill).get("mappings")
    )

    # Reject malformed mappings up front.
    for mapping in slot_mappings:
        if not isinstance(mapping, dict) or mapping.get("type") is None:
            raise TypeError("Provided incompatible slot mapping")

    return slot_mappings
def _create_unique_entity_mappings(self, domain: Domain) -> Set[Text]:
    """Finds mappings of type `from_entity` that uniquely set a slot.

    For example in the following form:
    some_form:
      departure_city:
        - type: from_entity
          entity: city
          role: from
        - type: from_entity
          entity: city
      arrival_city:
        - type: from_entity
          entity: city
          role: to
        - type: from_entity
          entity: city

    An entity `city` with a role `from` uniquely sets the slot `departure_city`
    and an entity `city` with a role `to` uniquely sets the slot `arrival_city`,
    so corresponding mappings are unique. But an entity `city` without a role
    can fill both `departure_city` and `arrival_city`, so corresponding mapping
    is not unique.

    Args:
        domain: The domain.

    Returns:
        A set of json dumps of unique mappings of type `from_entity`.
    """
    unique_mappings: Set[Text] = set()
    duplicated_mappings: Set[Text] = set()

    domain_slots = domain.as_dict().get(KEY_SLOTS)
    from_entity_type = str(SlotMappingType.FROM_ENTITY)

    for slot_name in domain.required_slots_for_form(self.name()):
        for mapping in domain_slots.get(slot_name).get(SLOT_MAPPINGS):
            if mapping.get(MAPPING_TYPE) != from_entity_type:
                continue
            serialized = json.dumps(mapping, sort_keys=True)
            if serialized in unique_mappings:
                # Second occurrence: the mapping is shared by several slots.
                unique_mappings.remove(serialized)
                duplicated_mappings.add(serialized)
            elif serialized not in duplicated_mappings:
                unique_mappings.add(serialized)

    return unique_mappings
def create_pruned_version(domain: Domain) -> Domain:
    """Recreates the given domain but drops information that is irrelevant for core.

    Args:
        domain: A domain.

    Returns:
        A similar domain without information that is irrelevant for core training.
    """
    pruned = copy.deepcopy(domain.as_dict())

    pruned.pop("config", None)  # `store_entities_as_slots`
    pruned.pop(SESSION_CONFIG_KEY, None)

    # Keep only the response names and form names; their contents are pruned.
    pruned[KEY_RESPONSES] = {name: [] for name in pruned[KEY_RESPONSES]}
    pruned[KEY_FORMS] = {
        name: {REQUIRED_SLOTS_KEY: []} for name in pruned[KEY_FORMS]
    }

    return Domain.from_dict(pruned)
async def test_agent_with_model_server_in_thread(
    model_server: TestClient, domain: Domain
):
    """An agent pulled from a model server exposes the served model's state."""
    endpoint = EndpointConfig.from_dict(
        {"url": model_server.make_url("/model"), "wait_time_between_pulls": 2}
    )
    agent = await rasa.core.agent.load_from_server(Agent(), model_server=endpoint)

    # Allow one pull cycle to complete before asserting.
    await asyncio.sleep(5)

    assert agent.fingerprint == "somehash"
    assert agent.domain.as_dict() == domain.as_dict()
    assert agent.processor.graph_runner
    assert model_server.app.number_of_model_requests == 1

    jobs.kill_scheduler()
def test_create_model_package(
    tmp_path_factory: TempPathFactory,
    domain: Domain,
):
    """Packaging a model and unpacking it again round-trips storage and metadata."""
    train_model_storage = LocalModelStorage(
        tmp_path_factory.mktemp("train model storage")
    )

    train_schema = GraphSchema(
        {
            "train": SchemaNode(
                needs={},
                uses=PersistableTestComponent,
                fn="train",
                constructor_name="create",
                config={
                    "some_config": 123455,
                    "some more config": [{"nested": "hi"}],
                },
            ),
            "load": SchemaNode(
                needs={"resource": "train"},
                uses=PersistableTestComponent,
                fn="run_inference",
                constructor_name="load",
                config={},
                is_target=True,
            ),
        }
    )
    predict_schema = GraphSchema(
        {
            "run": SchemaNode(
                needs={},
                uses=PersistableTestComponent,
                fn="run",
                constructor_name="load",
                config={
                    "some_config": 123455,
                    "some more config": [{"nested": "hi"}],
                },
            ),
        }
    )

    # Fill model storage.
    with train_model_storage.write_to(Resource("resource1")) as directory:
        (directory / "file.txt").write_text("test")

    # Package the model with a frozen clock so `trained_at` is predictable.
    persisted_model_dir = tmp_path_factory.mktemp("persisted models")
    archive_path = persisted_model_dir / "my-model.tar.gz"
    trained_at = datetime.utcnow()
    with freezegun.freeze_time(trained_at):
        train_model_storage.create_model_package(
            archive_path, train_schema, predict_schema, domain
        )

    # Unpack and inspect the packaged model.
    load_model_storage_dir = tmp_path_factory.mktemp("load model storage")
    load_model_storage, packaged_metadata = LocalModelStorage.from_model_archive(
        load_model_storage_dir, archive_path
    )

    assert packaged_metadata.train_schema == train_schema
    assert packaged_metadata.predict_schema == predict_schema
    assert packaged_metadata.domain.as_dict() == domain.as_dict()
    assert packaged_metadata.rasa_open_source_version == rasa.__version__
    assert packaged_metadata.trained_at == trained_at
    assert packaged_metadata.model_id

    persisted_resources = list(load_model_storage_dir.glob("*"))
    assert persisted_resources == [Path(load_model_storage_dir, "resource1")]
def test_metadata_serialization(domain: Domain, tmp_path: Path):
    """`ModelMetadata` survives a round trip through its JSON representation."""
    train_schema = GraphSchema(
        {
            "train": SchemaNode(
                needs={},
                uses=PersistableTestComponent,
                fn="train",
                constructor_name="create",
                config={
                    "some_config": 123455,
                    "some more config": [{"nested": "hi"}],
                },
            ),
            "load": SchemaNode(
                needs={"resource": "train"},
                uses=PersistableTestComponent,
                fn="run_inference",
                constructor_name="load",
                config={},
                is_target=True,
            ),
        }
    )
    predict_schema = GraphSchema(
        {
            "run": SchemaNode(
                needs={},
                uses=PersistableTestComponent,
                fn="run",
                constructor_name="load",
                config={
                    "some_config": 123455,
                    "some more config": [{"nested": "hi"}],
                },
            ),
        }
    )

    trained_at = datetime.utcnow()
    rasa_version = rasa.__version__
    model_id = "some unique model id"
    metadata = ModelMetadata(
        trained_at,
        rasa_version,
        model_id,
        domain,
        train_schema,
        predict_schema,
        project_fingerprint="some_fingerprint",
        training_type=TrainingType.NLU,
        core_target="core",
        nlu_target="nlu",
        language="zh",
    )

    # Dump and load to make sure the metadata is actually JSON-serializable.
    dump_path = tmp_path / "metadata.json"
    rasa.shared.utils.io.dump_obj_as_json_to_file(dump_path, metadata.as_dict())
    restored = ModelMetadata.from_dict(
        rasa.shared.utils.io.read_json_file(dump_path)
    )

    assert restored.domain.as_dict() == domain.as_dict()
    assert restored.model_id == model_id
    assert restored.rasa_open_source_version == rasa_version
    assert restored.trained_at == trained_at
    assert restored.train_schema == train_schema
    assert restored.predict_schema == predict_schema
    assert restored.project_fingerprint == "some_fingerprint"
    assert restored.training_type == TrainingType.NLU
    assert restored.core_target == "core"
    assert restored.nlu_target == "nlu"
    assert restored.language == "zh"
async def test_remote_action_logs_events(
    default_channel: OutputChannel,
    default_nlg: NaturalLanguageGenerator,
    default_tracker: DialogueStateTracker,
    domain: Domain,
):
    """Events and responses from the action server are converted to tracker events."""
    action_server_url = "https://example.com/webhooks/actions"
    remote_action = action.RemoteAction("my_action", EndpointConfig(action_server_url))

    server_response = {
        "events": [{"event": "slot", "value": "rasa", "name": "name"}],
        "responses": [
            {
                "text": "test text",
                "response": None,
                "buttons": [{"title": "cheap", "payload": "cheap"}],
            },
            {"response": "utter_greet"},
        ],
    }

    with aioresponses() as mocked:
        mocked.post(action_server_url, payload=server_response)

        events = await remote_action.run(
            default_channel, default_nlg, default_tracker, domain
        )

        request = latest_request(mocked, "post", action_server_url)
        assert request
        assert json_of_latest_request(request) == {
            "domain": domain.as_dict(),
            "next_action": "my_action",
            "sender_id": "my-sender",
            "version": rasa.__version__,
            "tracker": {
                "latest_message": {
                    "entities": [],
                    "intent": {},
                    "text": None,
                    "message_id": None,
                    "metadata": {},
                },
                ACTIVE_LOOP: {},
                "latest_action": {},
                "latest_action_name": None,
                "sender_id": "my-sender",
                "paused": False,
                FOLLOWUP_ACTION: ACTION_LISTEN_NAME,
                "latest_event_time": None,
                "slots": {
                    "name": None,
                    REQUESTED_SLOT: None,
                    SESSION_START_METADATA_SLOT: None,
                },
                "events": [],
                "latest_input_channel": None,
            },
        }

    assert len(events) == 3
    # The first two events are the bot utterances from `responses`.
    assert events[0] == BotUttered(
        "test text", {"buttons": [{"title": "cheap", "payload": "cheap"}]}
    )
    assert events[1] == BotUttered(
        "hey there None!", metadata={"utter_action": "utter_greet"}
    )
    # The last one is the `SlotSet` from the server's `events` list.
    assert events[2] == SlotSet("name", "rasa")