Example #1
        async def deactivate(self, *args) -> List[Event]:
            events_so_far = args[-1]
            assert events_so_far == [
                ActiveLoop(form_name),
                *expected_activation_events,
                *expected_do_events,
                ActiveLoop(None),
            ]

            return expected_deactivation_events
Example #2
    async def run(
        self,
        output_channel: "OutputChannel",
        nlg: "NaturalLanguageGenerator",
        tracker: "DialogueStateTracker",
        domain: "Domain",
    ) -> List[Event]:
        # attempt retrieving spec
        if not len(self.form_spec):
            self.form_spec = clean_none_values(domain.slot_mapping_for_form(self.name()))
            if not len(self.form_spec):
                logger.debug(
                    f"Could not retrieve form '{tracker.active_loop}', there is something wrong with your domain."
                )
                return [ActiveLoop(None)]

        # activate the form
        events = await self._activate_if_required(output_channel, nlg, tracker, domain)
        # validate user input
        events.extend(
            await self._validate_if_required(output_channel, nlg, tracker, domain)
        )
        # check that the form wasn't deactivated in validation
        if ActiveLoop(None) not in events:

            # create temp tracker with populated slots from `validate` method
            temp_tracker = tracker.copy()
            temp_tracker.sender_id = (
                tracker.sender_id
            )  # copy() doesn't necessarily copy sender_id
            for e in events:
                if isinstance(e, SlotSet):
                    temp_tracker.slots[e.key].value = e.value

            next_slot_events = await self.request_next_slot(
                output_channel, nlg, temp_tracker, domain
            )

            if next_slot_events is not None:
                # request next slot
                events.extend(next_slot_events)
            else:
                # there is nothing more to request, so we can submit
                self._log_form_slots(temp_tracker)
                logger.debug(f"Submitting the form '{self.name()}'")
                events.extend(
                    await self.submit(output_channel, nlg, temp_tracker, domain)
                )
                # deactivate the form after submission
                events.extend(self.deactivate())

        return events
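
The flow above is: activate the form, validate the user input, then either request the next slot or submit and deactivate. Below is a minimal stand-alone sketch of that control flow; the event classes and the `run_form` helper are simplified stand-ins for illustration, not the real Rasa types.

from dataclasses import dataclass
from typing import List, Optional


# Simplified stand-ins for the real Rasa event classes.
@dataclass
class ActiveLoop:
    name: Optional[str]


@dataclass
class SlotSet:
    key: str
    value: Optional[str] = None


REQUESTED_SLOT = "requested_slot"


def run_form(activation_events: List[object], validation_events: List[object],
             next_slot: Optional[str]) -> List[object]:
    events = [*activation_events, *validation_events]
    # stop early if validation already deactivated the loop
    if ActiveLoop(None) in events:
        return events
    if next_slot is not None:
        # keep the loop alive and ask for the next slot
        events.append(SlotSet(REQUESTED_SLOT, next_slot))
    else:
        # nothing left to request: "submit" and deactivate
        events += [SlotSet(REQUESTED_SLOT, None), ActiveLoop(None)]
    return events


assert run_form([ActiveLoop("my_form")], [SlotSet("cuisine", "greek")], None) == [
    ActiveLoop("my_form"),
    SlotSet("cuisine", "greek"),
    SlotSet(REQUESTED_SLOT, None),
    ActiveLoop(None),
]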
Example #3
async def test_validate_slots_on_activation_with_other_action_after_user_utterance():
    form_name = "my form"
    slot_name = "num_people"
    slot_value = "hi"
    events = [
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered(slot_value, entities=[{"entity": "num_tables", "value": 5}]),
        ActionExecuted("action_in_between"),
    ]
    tracker = DialogueStateTracker.from_events(sender_id="bla", evts=events)

    domain = f"""
    slots:
      {slot_name}:
        type: unfeaturized
    forms:
      {form_name}:
        {slot_name}:
        - type: from_text
    actions:
    - validate_{form_name}
    """
    domain = Domain.from_yaml(domain)
    action_server_url = "http:/my-action-server:5055/webhook"

    expected_slot_value = "✅"
    with aioresponses() as mocked:
        mocked.post(
            action_server_url,
            payload={
                "events": [
                    {"event": "slot", "name": slot_name, "value": expected_slot_value}
                ]
            },
        )

        action_server = EndpointConfig(action_server_url)
        action = FormAction(form_name, action_server)

        events = await action.run(
            CollectingOutputChannel(),
            TemplatedNaturalLanguageGenerator(domain.templates),
            tracker,
            domain,
        )

    assert events == [
        ActiveLoop(form_name),
        SlotSet(slot_name, expected_slot_value),
        SlotSet(REQUESTED_SLOT, None),
        ActiveLoop(None),
    ]
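
Tests like this one stub the custom action server over HTTP with aioresponses. The following is a self-contained sketch of that mocking pattern, assuming aiohttp and aioresponses are installed; the URL and payload are illustrative, not taken from the test suite.

import asyncio

import aiohttp
from aioresponses import aioresponses


async def call_action_server(url: str) -> dict:
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json={"next_action": "validate_my_form"}) as resp:
            return await resp.json()


async def main() -> None:
    url = "http://my-action-server:5055/webhook"
    with aioresponses() as mocked:
        # every POST to this URL now returns the canned payload
        # instead of hitting the network
        mocked.post(
            url,
            payload={"events": [{"event": "slot", "name": "num_people", "value": "✅"}]},
        )
        response = await call_action_server(url)
    assert response["events"][0]["value"] == "✅"


asyncio.run(main())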
Example #4
def test_form_submit_rule():
    form_name = "some_form"
    submit_action_name = "utter_submit"
    domain = Domain.from_yaml(
        f"""
        intents:
        - {GREET_INTENT_NAME}
        actions:
        - {UTTER_GREET_ACTION}
        - some-action
        - {submit_action_name}
        slots:
          {REQUESTED_SLOT}:
            type: unfeaturized
        forms:
        - {form_name}
    """
    )

    form_submit_rule = _form_submit_rule(domain, submit_action_name, form_name)

    policy = RulePolicy()
    policy.train([GREET_RULE, form_submit_rule], domain, RegexInterpreter())

    form_conversation = DialogueStateTracker.from_events(
        "in a form",
        evts=[
            # Form was activated
            ActionExecuted(ACTION_LISTEN_NAME),
            UserUttered("haha", {"name": GREET_INTENT_NAME}),
            ActionExecuted(form_name),
            ActiveLoop(form_name),
            SlotSet(REQUESTED_SLOT, "some value"),
            ActionExecuted(ACTION_LISTEN_NAME),
            # User responds and fills requested slot
            UserUttered("haha", {"name": GREET_INTENT_NAME}),
            ActionExecuted(form_name),
            # Form gets deactivated
            ActiveLoop(None),
            SlotSet(REQUESTED_SLOT, None),
        ],
        slots=domain.slots,
    )

    # RulePolicy predicts action which handles submit
    prediction = policy.predict_action_probabilities(
        form_conversation, domain, RegexInterpreter()
    )
    assert_predicted_action(prediction, domain, submit_action_name)
Example #5
async def test_dont_predict_form_if_already_finished():
    form_name = "some_form"

    domain = Domain.from_yaml(
        f"""
    intents:
    - {GREET_INTENT_NAME}
    actions:
    - {UTTER_GREET_ACTION}
    - some-action
    slots:
      {REQUESTED_SLOT}:
        type: unfeaturized
    forms:
    - {form_name}
"""
    )

    policy = RulePolicy()
    policy.train([GREET_RULE], domain, RegexInterpreter())

    form_conversation = DialogueStateTracker.from_events(
        "in a form",
        evts=[
            # We are in an active form
            ActionExecuted(form_name),
            ActiveLoop(form_name),
            SlotSet(REQUESTED_SLOT, "some value"),
            ActionExecuted(ACTION_LISTEN_NAME),
            # User sends message as response to a requested slot
            UserUttered("haha", {"name": GREET_INTENT_NAME}),
            # Form is happy and deactivates itself
            ActionExecuted(form_name),
            ActiveLoop(None),
            SlotSet(REQUESTED_SLOT, None),
            # User sends another message. Form is already done. Shouldn't get triggered
            # again
            ActionExecuted(ACTION_LISTEN_NAME),
            UserUttered("haha", {"name": GREET_INTENT_NAME}),
        ],
        slots=domain.slots,
    )

    # RulePolicy does not trigger the form again; the general greet rule applies
    prediction = policy.predict_action_probabilities(
        form_conversation, domain, RegexInterpreter()
    )
    assert_predicted_action(prediction, domain, UTTER_GREET_ACTION)
Example #6
def test_slot_mappings_ignored_intents_during_active_loop():
    domain = Domain.from_yaml("""
    version: "{LATEST_TRAINING_DATA_FORMAT_VERSION}"
    intents:
    - greet
    - chitchat
    slots:
      cuisine:
        type: text
        mappings:
        - type: from_text
          conditions:
          - active_loop: restaurant_form
    forms:
      restaurant_form:
        ignored_intents:
        - chitchat
        required_slots:
        - cuisine
    """)
    tracker = DialogueStateTracker("sender_id", slots=domain.slots)
    event1 = ActiveLoop("restaurant_form")
    event2 = UserUttered(
        text="The weather is sunny today",
        intent={
            "name": "chitchat",
            "confidence": 0.9604260921478271
        },
        entities=[],
    )
    tracker.update_with_events([event1, event2], domain)
    mappings_for_cuisine = domain.as_dict().get("slots").get("cuisine").get(
        "mappings")
    assert (SlotMapping.intent_is_desired(mappings_for_cuisine[0], tracker,
                                          domain) is False)
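
The point of the test is that an intent listed under ignored_intents does not fill slots while its form is active. Below is a simplified stand-alone sketch of that check; the helper is an illustrative stand-in, not the real SlotMapping implementation.

from typing import Dict, List


def intent_is_desired(intent_name: str, active_loop: str,
                      forms: Dict[str, dict]) -> bool:
    # a slot mapping is skipped when the latest intent is listed under the
    # active form's `ignored_intents`
    ignored: List[str] = forms.get(active_loop, {}).get("ignored_intents", [])
    return intent_name not in ignored


forms = {"restaurant_form": {"ignored_intents": ["chitchat"],
                             "required_slots": ["cuisine"]}}
assert intent_is_desired("greet", "restaurant_form", forms) is True
assert intent_is_desired("chitchat", "restaurant_form", forms) is False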
Example #7
async def test_read_rules_without_stories(domain: Domain):
    story_steps = await loading.load_data_from_files(
        ["data/test_stories/rules_without_stories.md"], domain
    )

    # this file contains three rules and no ML stories
    assert len(story_steps) == 3

    ml_steps = [s for s in story_steps if not isinstance(s, RuleStep)]
    rule_steps = [s for s in story_steps if isinstance(s, RuleStep)]

    assert len(ml_steps) == 0
    assert len(rule_steps) == 3

    assert rule_steps[0].block_name == "rule 1"
    assert rule_steps[1].block_name == "rule 2"
    assert rule_steps[2].block_name == "rule 3"

    # inspect the first rule and make sure all events were picked up correctly
    events = rule_steps[0].events

    assert len(events) == 5

    assert events[0] == ActiveLoop("loop_q_form")
    assert events[1] == SlotSet("requested_slot", "some_slot")
    assert events[2] == ActionExecuted("...")
    assert events[3] == UserUttered(
        intent={"name": "inform", "confidence": 1.0},
        entities=[{"entity": "some_slot", "start": 6, "end": 25, "value": "bla"}],
    )
    assert events[4] == ActionExecuted("loop_q_form")
Example #8
        async def do(self, *args: Any) -> List[Event]:
            events_so_far = args[-1]
            assert events_so_far == [
                ActiveLoop(form_name), *expected_activation_events
            ]

            return expected_do_events
Example #9
    async def is_done(
        self,
        output_channel: "OutputChannel",
        nlg: "NaturalLanguageGenerator",
        tracker: "DialogueStateTracker",
        domain: "Domain",
        events_so_far: List[Event],
    ) -> bool:
        if any(
                isinstance(event, ActionExecutionRejected)
                for event in events_so_far):
            return False

        # Custom validation actions can decide to terminate the loop early by
        # setting the requested slot to `None` or setting `ActiveLoop(None)`.
        # We explicitly check only the last occurrences for each possible termination
        # event instead of doing `return event in events_so_far` to make it possible
        # to override termination events which were returned earlier.
        return next(
            (event for event in reversed(events_so_far)
             if isinstance(event, SlotSet) and event.key == REQUESTED_SLOT),
            None,
        ) == SlotSet(REQUESTED_SLOT, None) or next(
            (event for event in reversed(events_so_far)
             if isinstance(event, ActiveLoop)),
            None,
        ) == ActiveLoop(None)
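
The termination check above only looks at the most recent SlotSet(REQUESTED_SLOT, ...) and ActiveLoop(...) events, so a later event can override an earlier one. Here is a stand-alone sketch of that pattern, using simplified stand-in event classes rather than the real Rasa types.

from dataclasses import dataclass
from typing import List, Optional


# Simplified stand-ins for the real Rasa event classes.
@dataclass
class SlotSet:
    key: str
    value: Optional[str]


@dataclass
class ActiveLoop:
    name: Optional[str]


REQUESTED_SLOT = "requested_slot"


def loop_is_done(events_so_far: List[object]) -> bool:
    last_requested_slot = next(
        (e for e in reversed(events_so_far)
         if isinstance(e, SlotSet) and e.key == REQUESTED_SLOT),
        None,
    )
    last_active_loop = next(
        (e for e in reversed(events_so_far) if isinstance(e, ActiveLoop)), None
    )
    # only the most recent occurrence of each termination event counts,
    # so a later event can override an earlier one
    return (last_requested_slot == SlotSet(REQUESTED_SLOT, None)
            or last_active_loop == ActiveLoop(None))


assert loop_is_done([ActiveLoop("form"), SlotSet(REQUESTED_SLOT, None)]) is True
assert loop_is_done([SlotSet(REQUESTED_SLOT, None),
                     SlotSet(REQUESTED_SLOT, "cuisine")]) is False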
Example #10
def test_reading_of_trackers_with_legacy_form_events():
    loop_name1 = "my loop"
    loop_name2 = "my form"
    tracker = DialogueStateTracker.from_dict(
        "sender",
        events_as_dict=[
            {
                "event": ActiveLoop.type_name,
                LOOP_NAME: loop_name1
            },
            {
                "event": LegacyForm.type_name,
                LOOP_NAME: None
            },
            {
                "event": LegacyForm.type_name,
                LOOP_NAME: loop_name2
            },
        ],
    )

    expected_events = [
        ActiveLoop(loop_name1),
        LegacyForm(None),
        LegacyForm(loop_name2)
    ]
    assert list(tracker.events) == expected_events
    assert tracker.active_loop[LOOP_NAME] == loop_name2
Example #11
def test_form_wins_over_everything_else(ensemble: SimplePolicyEnsemble):
    form_name = "test-form"
    domain = f"""
    forms:
    - {form_name}
    """
    domain = Domain.from_yaml(domain)

    events = [
        ActiveLoop("test-form"),
        ActionExecuted(ACTION_LISTEN_NAME),
        utilities.user_uttered("test", 1),
    ]
    tracker = DialogueStateTracker.from_events("test", events, [])
    prediction = ensemble.probabilities_using_best_policy(
        tracker, domain, RegexInterpreter()
    )

    next_action = rasa.core.actions.action.action_for_index(
        prediction.max_confidence_index, domain, None
    )

    index_of_form_policy = 0
    assert (
        prediction.policy_name == f"policy_{index_of_form_policy}_{FormPolicy.__name__}"
    )
    assert next_action.name() == form_name
Example #12
    async def is_done(self, *args) -> bool:
        events_so_far = args[-1]
        return events_so_far == [
            ActiveLoop(form_name),
            *expected_activation_events,
            *expected_do_events,
        ]
Example #13
def test_extract_requested_slot_when_mapping_applies(
    slot_mapping: Dict, expected_value: Text
):
    form_name = "some_form"
    entity_name = "some_slot"
    form = FormAction(form_name, None)

    domain = Domain.from_dict({"forms": {form_name: {entity_name: [slot_mapping]}}})

    tracker = DialogueStateTracker.from_events(
        "default",
        [
            ActiveLoop(form_name),
            SlotSet(REQUESTED_SLOT, "some_slot"),
            UserUttered(
                "bla",
                intent={"name": "greet", "confidence": 1.0},
                entities=[{"entity": entity_name, "value": "some_value"}],
            ),
            ActionExecuted(ACTION_LISTEN_NAME),
        ],
    )

    slot_values = form.extract_requested_slot(tracker, domain, "some_slot")
    # check that the value was extracted for correct intent
    assert slot_values == {"some_slot": expected_value}
Example #14
async def test_action_rejection():
    form_name = "my form"
    slot_to_fill = "some slot"
    tracker = DialogueStateTracker.from_events(
        sender_id="bla",
        evts=[
            ActiveLoop(form_name),
            SlotSet(REQUESTED_SLOT, slot_to_fill),
            ActionExecuted(ACTION_LISTEN_NAME),
            UserUttered("haha", {"name": "greet"}),
        ],
    )
    form_name = "my form"
    action = FormAction(form_name, None)
    domain = f"""
    forms:
      {form_name}:
        {slot_to_fill}:
        - type: from_entity
          entity: some_entity
    slots:
      {slot_to_fill}:
        type: unfeaturized
    """
    domain = Domain.from_yaml(domain)

    with pytest.raises(ActionExecutionRejection):
        await action.run(
            CollectingOutputChannel(),
            TemplatedNaturalLanguageGenerator(domain.templates),
            tracker,
            domain,
        )
Example #15
async def test_activate():
    tracker = DialogueStateTracker.from_events(sender_id="bla", evts=[])
    form_name = "my form"
    action = FormAction(form_name, None)
    slot_name = "num_people"
    domain = f"""
forms:
  {form_name}:
    {slot_name}:
    - type: from_entity
      entity: number
responses:
    utter_ask_num_people:
    - text: "How many people?"
"""
    domain = Domain.from_yaml(domain)

    events = await action.run(
        CollectingOutputChannel(),
        TemplatedNaturalLanguageGenerator(domain.templates),
        tracker,
        domain,
    )
    assert events[:-1] == [
        ActiveLoop(form_name),
        SlotSet(REQUESTED_SLOT, slot_name)
    ]
    assert isinstance(events[-1], BotUttered)
Example #16
async def test_activate_with_prefilled_slot():
    slot_name = "num_people"
    slot_value = 5

    tracker = DialogueStateTracker.from_events(
        sender_id="bla", evts=[SlotSet(slot_name, slot_value)])
    form_name = "my form"
    action = FormAction(form_name, None)

    next_slot_to_request = "next slot to request"
    domain = f"""
    forms:
      {form_name}:
        {slot_name}:
        - type: from_entity
          entity: {slot_name}
        {next_slot_to_request}:
        - type: from_text
    slots:
      {slot_name}:
        type: unfeaturized
    """
    domain = Domain.from_yaml(domain)
    events = await action.run(
        CollectingOutputChannel(),
        TemplatedNaturalLanguageGenerator(domain.templates),
        tracker,
        domain,
    )
    assert events == [
        ActiveLoop(form_name),
        SlotSet(slot_name, slot_value),
        SlotSet(REQUESTED_SLOT, next_slot_to_request),
    ]
Example #17
async def test_ask_affirm_after_rephrasing():
    tracker = DialogueStateTracker.from_events(
        "some-sender",
        evts=[
            # User sends message with low NLU confidence
            *_message_requiring_fallback(),
            ActiveLoop(ACTION_TWO_STAGE_FALLBACK_NAME),
            # Action asks user to affirm
            *_two_stage_clarification_request(),
            ActionExecuted(ACTION_LISTEN_NAME),
            # User denies suggested intents
            UserUttered("hi", {"name": USER_INTENT_OUT_OF_SCOPE}),
            # Action asks user to rephrase
            ActionExecuted(ACTION_TWO_STAGE_FALLBACK_NAME),
            BotUttered("please rephrase"),
            # User rephrased with low confidence
            *_message_requiring_fallback(),
        ],
    )
    domain = Domain.empty()
    action = TwoStageFallbackAction()

    events = await action.run(
        CollectingOutputChannel(),
        TemplatedNaturalLanguageGenerator(domain.responses),
        tracker,
        domain,
    )

    assert len(events) == 1
    assert isinstance(events[0], BotUttered)
Example #18
    async def run(
        self,
        output_channel: "OutputChannel",
        nlg: "NaturalLanguageGenerator",
        tracker: "DialogueStateTracker",
        domain: "Domain",
    ) -> List[Event]:
        return [ActiveLoop(None), SlotSet(REQUESTED_SLOT, None)]
Example #19
def test_immediate_submit():
    form_name = "some_form"
    submit_action_name = "utter_submit"
    entity = "some_entity"
    slot = "some_slot"
    domain = Domain.from_yaml(f"""
        intents:
        - {GREET_INTENT_NAME}
        actions:
        - {UTTER_GREET_ACTION}
        - some-action
        - {submit_action_name}
        slots:
          {REQUESTED_SLOT}:
            type: unfeaturized
          {slot}:
            type: unfeaturized
        forms:
        - {form_name}
        entities:
        - {entity}
    """)

    form_activation_rule = _form_activation_rule(domain, form_name,
                                                 GREET_INTENT_NAME)
    form_submit_rule = _form_submit_rule(domain, submit_action_name, form_name)

    policy = RulePolicy()
    policy.train([form_activation_rule, form_submit_rule], domain,
                 RegexInterpreter())

    form_conversation = DialogueStateTracker.from_events(
        "in a form",
        evts=[
            # Form was activated
            ActionExecuted(ACTION_LISTEN_NAME),
            # The same intent which activates the form also deactivates it
            UserUttered(
                "haha",
                {"name": GREET_INTENT_NAME},
                entities=[{
                    "entity": entity,
                    "value": "Bruce Wayne"
                }],
            ),
            SlotSet(slot, "Bruce"),
            ActionExecuted(form_name),
            SlotSet("bla", "bla"),
            ActiveLoop(None),
            SlotSet(REQUESTED_SLOT, None),
        ],
        slots=domain.slots,
    )

    # RulePolicy predicts action which handles submit
    action_probabilities = policy.predict_action_probabilities(
        form_conversation, domain, RegexInterpreter())
    assert_predicted_action(action_probabilities, domain, submit_action_name)
Example #20
    async def run(
        self,
        output_channel: "OutputChannel",
        nlg: "NaturalLanguageGenerator",
        tracker: "DialogueStateTracker",
        domain: "Domain",
    ) -> List[Event]:
        """Runs action. Please see parent class for the full docstring."""
        return [ActiveLoop(None), SlotSet(REQUESTED_SLOT, None)]
Example #21
def test_writing_trackers_with_legacy_form_events():
    loop_name = "my loop"
    tracker = DialogueStateTracker.from_events(
        "sender", evts=[ActiveLoop(loop_name), LegacyForm(None), LegacyForm("some")]
    )

    events_as_dict = [event.as_dict() for event in tracker.events]

    for event in events_as_dict:
        assert event["event"] == ActiveLoop.type_name
Example #22
def test_rule_without_condition(rule_steps_without_stories: List[StoryStep]):
    rule = rule_steps_without_stories[1]
    assert rule.block_name == "Rule without condition"
    assert rule.events == [
        ActionExecuted(RULE_SNIPPET_ACTION_NAME),
        UserUttered(intent={"name": "explain", "confidence": 1.0}),
        ActionExecuted("utter_explain_some_slot"),
        ActionExecuted("loop_q_form"),
        ActiveLoop("loop_q_form"),
    ]
Example #23
async def test_activate_and_immediate_deactivate():
    slot_name = "num_people"
    slot_value = 5

    tracker = DialogueStateTracker.from_events(
        sender_id="bla",
        evts=[
            ActionExecuted(ACTION_LISTEN_NAME),
            UserUttered(
                "haha",
                {"name": "greet"},
                entities=[{
                    "entity": slot_name,
                    "value": slot_value
                }],
            ),
        ],
    )
    form_name = "my form"
    action = FormAction(form_name, None)
    domain = f"""
    forms:
      {form_name}:
        {slot_name}:
        - type: from_entity
          entity: {slot_name}
    slots:
      {slot_name}:
        type: unfeaturized
    """
    domain = Domain.from_yaml(domain)
    events = await action.run(
        CollectingOutputChannel(),
        TemplatedNaturalLanguageGenerator(domain.templates),
        tracker,
        domain,
    )
    assert events == [
        ActiveLoop(form_name),
        SlotSet(slot_name, slot_value),
        SlotSet(REQUESTED_SLOT, None),
        ActiveLoop(None),
    ]
Example #24
def _form_submit_rule(domain: Domain, submit_action_name: Text,
                      form_name: Text) -> TrackerWithCachedStates:
    return TrackerWithCachedStates.from_events(
        "form submit rule",
        domain=domain,
        slots=domain.slots,
        evts=[
            ActiveLoop(form_name),
            # Any events in between
            ActionExecuted(RULE_SNIPPET_ACTION_NAME),
            # Form runs and deactivates itself
            ActionExecuted(form_name),
            ActiveLoop(None),
            SlotSet(REQUESTED_SLOT, None),
            ActionExecuted(submit_action_name),
            ActionExecuted(ACTION_LISTEN_NAME),
        ],
        is_rule_tracker=True,
    )
Example #25
async def test_form_unhappy_path_without_rule():
    form_name = "some_form"
    other_intent = "bye"
    domain = Domain.from_yaml(
        f"""
        intents:
        - {GREET_INTENT_NAME}
        - {other_intent}
        actions:
        - {UTTER_GREET_ACTION}
        - some-action
        slots:
          {REQUESTED_SLOT}:
            type: unfeaturized
        forms:
        - {form_name}
    """
    )

    policy = RulePolicy()
    policy.train([GREET_RULE], domain, RegexInterpreter())

    conversation_events = [
        ActionExecuted(form_name),
        ActiveLoop(form_name),
        SlotSet(REQUESTED_SLOT, "some value"),
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered("haha", {"name": other_intent}),
        ActiveLoop(form_name),
        ActionExecutionRejected(form_name),
    ]

    # Unhappy path is not handled. No rule matches. Let's hope ML fixes our problems 🤞
    prediction = policy.predict_action_probabilities(
        DialogueStateTracker.from_events(
            "casd", evts=conversation_events, slots=domain.slots
        ),
        domain,
        RegexInterpreter(),
    )

    assert prediction.max_confidence == policy._core_fallback_threshold
Example #26
async def test_form_unhappy_path_from_general_rule():
    form_name = "some_form"

    domain = Domain.from_yaml(
        f"""
        intents:
        - {GREET_INTENT_NAME}
        actions:
        - {UTTER_GREET_ACTION}
        - some-action
        slots:
          {REQUESTED_SLOT}:
            type: unfeaturized
        forms:
        - {form_name}
    """
    )

    policy = RulePolicy()
    # Train only the general GREET_RULE; it should also cover the form's unhappy path
    policy.train([GREET_RULE], domain, RegexInterpreter())

    # Check that RulePolicy predicts action to handle unhappy path
    conversation_events = [
        ActionExecuted(form_name),
        ActiveLoop(form_name),
        SlotSet(REQUESTED_SLOT, "some value"),
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered("haha", {"name": GREET_INTENT_NAME}),
        ActionExecutionRejected(form_name),
    ]

    prediction = policy.predict_action_probabilities(
        DialogueStateTracker.from_events(
            "casd", evts=conversation_events, slots=domain.slots
        ),
        domain,
        RegexInterpreter(),
    )
    # check that general rule action is predicted
    assert_predicted_action(prediction, domain, UTTER_GREET_ACTION)

    # Check that RulePolicy triggers form again after handling unhappy path
    conversation_events.append(ActionExecuted(UTTER_GREET_ACTION))
    prediction = policy.predict_action_probabilities(
        DialogueStateTracker.from_events(
            "casd", evts=conversation_events, slots=domain.slots
        ),
        domain,
        RegexInterpreter(),
    )
    # check that action_listen from general rule is overwritten by form action
    assert_predicted_action(prediction, domain, form_name)
Example #27
async def test_2nd_affirm_successful(default_processor: MessageProcessor):
    tracker = DialogueStateTracker.from_events(
        "some-sender",
        evts=[
            ActionExecuted(ACTION_LISTEN_NAME),
            UserUttered("my name is John", {
                "name": "say_name",
                "confidence": 1.0
            }),
            SlotSet("some_slot", "example_value"),
            # User sends message with low NLU confidence
            *_message_requiring_fallback(),
            ActiveLoop(ACTION_TWO_STAGE_FALLBACK_NAME),
            # Action asks user to affirm
            *_two_stage_clarification_request(),
            ActionExecuted(ACTION_LISTEN_NAME),
            # User denies suggested intents
            UserUttered("hi", {"name": USER_INTENT_OUT_OF_SCOPE}),
            # Action asks user to rephrase
            *_two_stage_clarification_request(),
            # User rephrased with low confidence
            *_message_requiring_fallback(),
            *_two_stage_clarification_request(),
            # Action asks user to affirm for the last time
            ActionExecuted(ACTION_LISTEN_NAME),
            # User affirms successfully
            UserUttered("hi", {"name": "greet"}),
        ],
    )
    domain = Domain.empty()
    action = TwoStageFallbackAction()

    await default_processor._run_action(
        action,
        tracker,
        CollectingOutputChannel(),
        TemplatedNaturalLanguageGenerator(domain.responses),
        PolicyPrediction([], "some policy"),
    )

    applied_events = tracker.applied_events()

    assert applied_events == [
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered("my name is John", {
            "name": "say_name",
            "confidence": 1.0
        }),
        SlotSet("some_slot", "example_value"),
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered("hi", {"name": "greet"}),
    ]
Example #28
def test_rule_with_condition(rule_steps_without_stories: List[StoryStep]):
    rule = rule_steps_without_stories[0]
    assert rule.block_name == "Rule with condition"
    assert rule.events == [
        ActiveLoop("loop_q_form"),
        SlotSet("requested_slot", "some_slot"),
        ActionExecuted(RULE_SNIPPET_ACTION_NAME),
        UserUttered(
            intent={"name": "inform", "confidence": 1.0},
            entities=[{"entity": "some_slot", "value": "bla"}],
        ),
        ActionExecuted("loop_q_form"),
    ]
Example #29
async def test_validate_slots(validate_return_events: List[Dict],
                              expected_events: List[Event]):
    form_name = "my form"
    slot_name = "num_people"
    slot_value = "hi"
    events = [
        ActiveLoop(form_name),
        SlotSet(REQUESTED_SLOT, slot_name),
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered(slot_value,
                    entities=[{
                        "entity": "num_tables",
                        "value": 5
                    }]),
    ]
    tracker = DialogueStateTracker.from_events(sender_id="bla", evts=events)

    domain = f"""
    slots:
      {slot_name}:
        type: any
      num_tables:
        type: any
    forms:
      {form_name}:
        {slot_name}:
        - type: from_text
        num_tables:
        - type: from_entity
          entity: num_tables
    actions:
    - validate_{form_name}
    """
    domain = Domain.from_yaml(domain)
    action_server_url = "http:/my-action-server:5055/webhook"

    with aioresponses() as mocked:
        mocked.post(action_server_url,
                    payload={"events": validate_return_events})

        action_server = EndpointConfig(action_server_url)
        action = FormAction(form_name, action_server)

        events = await action.run(
            CollectingOutputChannel(),
            TemplatedNaturalLanguageGenerator(domain.templates),
            tracker,
            domain,
        )
        assert events == expected_events
Example #30
async def test_remote_action_with_template_param(
    default_channel: OutputChannel,
    default_tracker: DialogueStateTracker,
    domain: Domain,
):
    endpoint = EndpointConfig("https://example.com/webhooks/actions")
    remote_action = action.RemoteAction("my_action", endpoint)

    response = {
        "events": [
            {
                "event": "form",
                "name": "restaurant_form",
                "timestamp": None
            },
            {
                "event": "slot",
                "timestamp": None,
                "name": "requested_slot",
                "value": "cuisine",
            },
        ],
        "responses": [{
            "text": None,
            "buttons": [],
            "elements": [],
            "custom": {},
            "template": "utter_ask_cuisine",
            "image": None,
            "attachment": None,
        }],
    }

    nlg = TemplatedNaturalLanguageGenerator(
        {"utter_ask_cuisine": [{
            "text": "what dou want to eat?"
        }]})
    with aioresponses() as mocked:
        mocked.post("https://example.com/webhooks/actions", payload=response)

        with pytest.warns(FutureWarning):
            events = await remote_action.run(default_channel, nlg,
                                             default_tracker, domain)

    assert events == [
        BotUttered("what dou want to eat?",
                   metadata={"utter_action": "utter_ask_cuisine"}),
        ActiveLoop("restaurant_form"),
        SlotSet("requested_slot", "cuisine"),
    ]
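
The mocked action-server response above uses plain JSON event dicts ("event": "form", "event": "slot"). Below is a small sketch of turning such a payload back into event objects, using simplified stand-in classes rather than the real Rasa events.

from dataclasses import dataclass
from typing import List, Optional


# Simplified stand-ins for the real Rasa event classes.
@dataclass
class ActiveLoop:
    name: Optional[str]


@dataclass
class SlotSet:
    key: str
    value: Optional[str]


def events_from_payload(payload: dict) -> List[object]:
    events: List[object] = []
    for event in payload.get("events", []):
        # "form" is the legacy name for what is now the active_loop event
        if event["event"] in ("form", "active_loop"):
            events.append(ActiveLoop(event["name"]))
        elif event["event"] == "slot":
            events.append(SlotSet(event["name"], event["value"]))
    return events


payload = {
    "events": [
        {"event": "form", "name": "restaurant_form", "timestamp": None},
        {"event": "slot", "name": "requested_slot", "value": "cuisine"},
    ]
}
assert events_from_payload(payload) == [
    ActiveLoop("restaurant_form"),
    SlotSet("requested_slot", "cuisine"),
]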