Example 1
def test_facebook_channel():
    # START DOC INCLUDE
    from rasa.core.channels.facebook import FacebookInput
    from rasa.core.agent import Agent
    from rasa.core.interpreter import RegexInterpreter

    # load your trained agent
    agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

    input_channel = FacebookInput(
        fb_verify="YOUR_FB_VERIFY",
        # you need to tell facebook this token, to confirm your URL
        fb_secret="YOUR_FB_SECRET",  # your app secret
        fb_access_token="YOUR_FB_PAGE_ACCESS_TOKEN"
        # token for the page you subscribed to
    )

    s = agent.handle_channels([input_channel], 5004)
    # END DOC INCLUDE
    # the above marker marks the end of the code snippet included
    # in the docs
    routes_list = utils.list_routes(s)

    assert routes_list.get("fb_webhook.health").startswith(
        "/webhooks/facebook")
    assert routes_list.get("fb_webhook.webhook").startswith(
        "/webhooks/facebook/webhook")
Example 2
def test_webexteams_channel():
    # START DOC INCLUDE
    from rasa.core.channels.webexteams import WebexTeamsInput
    from rasa.core.agent import Agent
    from rasa.core.interpreter import RegexInterpreter

    # load your trained agent
    agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

    input_channel = WebexTeamsInput(
        access_token="YOUR_ACCESS_TOKEN",
        # this is the `bot access token`
        room="YOUR_WEBEX_ROOM"
        # the name of your channel to which the bot posts (optional)
    )

    s = agent.handle_channels([input_channel], 5004)
    # END DOC INCLUDE
    # the above marker marks the end of the code snippet included
    # in the docs
    routes_list = utils.list_routes(s)
    assert routes_list.get("webexteams_webhook.health").startswith(
        "/webhooks/webexteams")
    assert routes_list.get("webexteams_webhook.webhook").startswith(
        "/webhooks/webexteams/webhook")
Example 3
def test_socketio_channel():
    with mock.patch.object(sanic.Sanic, "run", fake_sanic_run):
        # START DOC INCLUDE
        from rasa.core.channels.socketio import SocketIOInput
        from rasa.core.agent import Agent
        from rasa.core.interpreter import RegexInterpreter

        # load your trained agent
        agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

        input_channel = SocketIOInput(
            # event name for messages sent from the user
            user_message_evt="user_uttered",
            # event name for messages sent from the bot
            bot_message_evt="bot_uttered",
            # socket.io namespace to use for the messages
            namespace=None,
        )

        s = agent.handle_channels([input_channel], 5004)
        # END DOC INCLUDE
        # the above marker marks the end of the code snippet included
        # in the docs
        routes_list = utils.list_routes(s)
        assert routes_list.get("socketio_webhook.health").startswith(
            "/webhooks/socketio"
        )
        assert routes_list.get("handle_request").startswith("/socket.io")
Example 4
async def _core_model_for_finetuning(
    model_to_finetune: Text,
    file_importer: TrainingDataImporter,
    finetuning_epoch_fraction: float = 1.0,
) -> Optional[Agent]:
    path_to_archive = model.get_model_for_finetuning(model_to_finetune)
    if not path_to_archive:
        return None

    rasa.shared.utils.cli.print_info(
        f"Loading Core model from {path_to_archive} for finetuning...",
    )

    with model.unpack_model(path_to_archive) as unpacked:
        new_fingerprint = await model.model_fingerprint(file_importer)
        old_fingerprint = model.fingerprint_from_path(unpacked)
        if not model.can_finetune(old_fingerprint, new_fingerprint, core=True):
            rasa.shared.utils.cli.print_error_and_exit(
                "Core model can not be finetuned."
            )

        config = await file_importer.get_config()
        agent = Agent.load(
            unpacked,
            new_config=config,
            finetuning_epoch_fraction=finetuning_epoch_fraction,
        )
        # Agent might be empty if no underlying Core model was found.
        if agent.domain is not None and agent.policy_ensemble is not None:
            return agent

        return None
Example 5
def test_twilio_channel():
    with mock.patch.object(sanic.Sanic, "run", fake_sanic_run):
        # START DOC INCLUDE
        from rasa.core.channels.twilio import TwilioInput
        from rasa.core.agent import Agent
        from rasa.core.interpreter import RegexInterpreter

        # load your trained agent
        agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

        input_channel = TwilioInput(
            # you get this from your twilio account
            account_sid="YOUR_ACCOUNT_SID",
            # also from your twilio account
            auth_token="YOUR_AUTH_TOKEN",
            # a number associated with your twilio account
            twilio_number="YOUR_TWILIO_NUMBER",
        )

        s = agent.handle_channels([input_channel], 5004)
        # END DOC INCLUDE
        # the above marker marks the end of the code snippet included
        # in the docs
        routes_list = utils.list_routes(s)
        assert routes_list.get("twilio_webhook.health").startswith("/webhooks/twilio")
        assert routes_list.get("twilio_webhook.message").startswith(
            "/webhooks/twilio/webhook"
        )
Example 6
def test_callback_channel():
    with mock.patch.object(sanic.Sanic, "run", fake_sanic_run):
        # START DOC INCLUDE
        from rasa.core.channels.callback import CallbackInput
        from rasa.core.agent import Agent
        from rasa.core.interpreter import RegexInterpreter

        # load your trained agent
        agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

        input_channel = CallbackInput(
            # URL Core will call to send the bot responses
            endpoint=EndpointConfig("http://localhost:5004")
        )

        s = agent.handle_channels([input_channel], 5004)
        # END DOC INCLUDE
        # the above marker marks the end of the code snippet included
        # in the docs
        routes_list = utils.list_routes(s)
        assert routes_list.get("callback_webhook.health").startswith(
            "/webhooks/callback"
        )
        assert routes_list.get("callback_webhook.webhook").startswith(
            "/webhooks/callback/webhook"
        )
Example 7
def test_botframework_channel():
    with mock.patch.object(sanic.Sanic, "run", fake_sanic_run):
        # START DOC INCLUDE
        from rasa.core.channels.botframework import BotFrameworkInput
        from rasa.core.agent import Agent
        from rasa.core.interpreter import RegexInterpreter

        # load your trained agent
        agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

        input_channel = BotFrameworkInput(
            # you get this from your Bot Framework account
            app_id="MICROSOFT_APP_ID",
            # also from your Bot Framework account
            app_password="******",
        )

        s = agent.handle_channels([input_channel], 5004)
        # END DOC INCLUDE
        # the above marker marks the end of the code snippet included
        # in the docs
        routes_list = utils.list_routes(s)
        assert routes_list.get("botframework_webhook.health").startswith(
            "/webhooks/botframework"
        )
        assert routes_list.get("botframework_webhook.webhook").startswith(
            "/webhooks/botframework/webhook"
        )
Example 8
def test_rocketchat_channel():
    with mock.patch.object(sanic.Sanic, "run", fake_sanic_run):
        # START DOC INCLUDE
        from rasa.core.channels.rocketchat import RocketChatInput
        from rasa.core.agent import Agent
        from rasa.core.interpreter import RegexInterpreter

        # load your trained agent
        agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

        input_channel = RocketChatInput(
            # your bot's Rocket.Chat username
            user="******",
            # the password for your bot's Rocket.Chat account
            password="******",
            # URL where your Rocket.Chat instance is running
            server_url="https://demo.rocket.chat",
        )

        s = agent.handle_channels([input_channel], 5004)
        # END DOC INCLUDE
        # the above marker marks the end of the code snippet included
        # in the docs
        routes_list = utils.list_routes(s)
        assert routes_list.get("rocketchat_webhook.health").startswith(
            "/webhooks/rocketchat"
        )
        assert routes_list.get("rocketchat_webhook.webhook").startswith(
            "/webhooks/rocketchat/webhook"
        )
Example 9
async def compare(models: Text, stories_file: Text, output: Text) -> None:
    """Evaluates multiple trained models on a test set."""
    from rasa.core.agent import Agent
    import rasa.nlu.utils as nlu_utils
    from rasa.core import utils

    num_correct = defaultdict(list)

    for run in nlu_utils.list_subdirectories(models):
        num_correct_run = defaultdict(list)

        for model in sorted(nlu_utils.list_subdirectories(run)):
            logger.info("Evaluating model {}".format(model))

            agent = Agent.load(model)

            completed_trackers = await _generate_trackers(stories_file, agent)

            story_eval_store, no_of_stories = collect_story_predictions(
                completed_trackers, agent)

            failed_stories = story_eval_store.failed_stories
            policy_name = "".join(
                [i for i in os.path.basename(model) if not i.isdigit()])
            num_correct_run[policy_name].append(no_of_stories -
                                                len(failed_stories))

        for k, v in num_correct_run.items():
            num_correct[k].append(v)

    utils.dump_obj_as_json_to_file(os.path.join(output, "results.json"),
                                   num_correct)
Example 10
def test_mattermost_channel():
    with mock.patch.object(sanic.Sanic, "run", fake_sanic_run):
        # START DOC INCLUDE
        from rasa.core.channels.mattermost import MattermostInput
        from rasa.core.agent import Agent
        from rasa.core.interpreter import RegexInterpreter

        # load your trained agent
        agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

        input_channel = MattermostInput(
            # this is the url of the api for your mattermost instance
            url="http://chat.example.com/api/v4",
            # the name of your team for mattermost
            team="community",
            # the username of your bot user that will post messages
            user="******",
            # the password of your bot user that will post messages
            pw="password",
        )

        s = agent.handle_channels([input_channel], 5004)
        # END DOC INCLUDE
        # the above marker marks the end of the code snippet included
        # in the docs
        routes_list = utils.list_routes(s)
        assert routes_list.get("mattermost_webhook.health").startswith(
            "/webhooks/mattermost"
        )
        assert routes_list.get("mattermost_webhook.webhook").startswith(
            "/webhooks/mattermost/webhook"
        )
Example 11
def load_mod(lang):
    from rasa.core.agent import Agent, load_agent
    # from rasa.nlu.model import Interpreter
    path = f'{prefix}/models/{lang}_current.tar.gz'
    # interpreter = Interpreter.load(path)
    agent = Agent.load(path)
    return agent
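For illustration only, a minimal sketch of how `load_mod` might be used, assuming `prefix` is defined at module level and a model archive exists at the expected path; the language code and message are hypothetical:

import asyncio

# hypothetical usage: requires a model at f'{prefix}/models/en_current.tar.gz'
agent = load_mod("en")

# Agent.handle_text is a coroutine, so drive it with an event loop
responses = asyncio.run(agent.handle_text("hello"))
print(responses)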
Example 12
def test_telegram_channel():
    # telegram channel will try to set a webhook, so we need to mock the api
    with mock.patch.object(sanic.Sanic, 'run', fake_sanic_run):
        httpretty.register_uri(
            httpretty.POST,
            'https://api.telegram.org/bot123:YOUR_ACCESS_TOKEN/setWebhook',
            body='{"ok": true, "result": {}}')

        httpretty.enable()
        # START DOC INCLUDE
        from rasa.core.channels.telegram import TelegramInput
        from rasa.core.agent import Agent
        from rasa.core.interpreter import RegexInterpreter

        # load your trained agent
        agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

        input_channel = TelegramInput(
            # you get this when setting up a bot
            access_token="123:YOUR_ACCESS_TOKEN",
            # this is your bot's username
            verify="YOUR_TELEGRAM_BOT",
            # the url your bot should listen on for messages
            webhook_url="YOUR_WEBHOOK_URL")

        s = agent.handle_channels([input_channel], 5004)
        # END DOC INCLUDE
        # the above marker marks the end of the code snippet included
        # in the docs
        routes_list = utils.list_routes(s)
        assert routes_list.get("telegram_webhook.health").startswith(
            "/webhooks/telegram")
        assert routes_list.get("telegram_webhook.message").startswith(
            "/webhooks/telegram/webhook")
        httpretty.disable()
Example 13
async def load_agent_on_start(core_model, endpoints, nlu_model, app, loop):
    """Load an agent.

    Used to be scheduled on server start
    (hence the `app` and `loop` arguments)."""
    from rasa.core import broker
    from rasa.core.agent import Agent

    _interpreter = NaturalLanguageInterpreter.create(nlu_model, endpoints.nlu)
    _broker = broker.from_endpoint_config(endpoints.event_broker)

    _tracker_store = TrackerStore.find_tracker_store(None,
                                                     endpoints.tracker_store,
                                                     _broker)

    if endpoints and endpoints.model:
        from rasa.core import agent

        app.agent = Agent(interpreter=_interpreter,
                          generator=endpoints.nlg,
                          tracker_store=_tracker_store,
                          action_endpoint=endpoints.action)

        await agent.load_from_server(app.agent, model_server=endpoints.model)
    else:
        app.agent = Agent.load(core_model,
                               interpreter=_interpreter,
                               generator=endpoints.nlg,
                               tracker_store=_tracker_store,
                               action_endpoint=endpoints.action)

    return app.agent
Example 14
async def test_interpreter_passed_to_agent(
    monkeypatch: MonkeyPatch, trained_rasa_model: Text
):
    from rasa.core.interpreter import RasaNLUInterpreter

    agent = Agent.load(trained_rasa_model)
    assert isinstance(agent.interpreter, RasaNLUInterpreter)
Example 15
def test_slack_channel():
    # START DOC INCLUDE
    from rasa.core.channels.slack import SlackInput
    from rasa.core.agent import Agent
    from rasa.core.interpreter import RegexInterpreter

    # load your trained agent
    agent = Agent.load(MODEL_PATH, interpreter=RegexInterpreter())

    input_channel = SlackInput(
        slack_token="YOUR_SLACK_TOKEN",
        # this is the `bot_user_oauth_access_token`
        slack_channel="YOUR_SLACK_CHANNEL"
        # the name of your channel to which the bot posts (optional)
    )

    s = agent.handle_channels([input_channel], 5004)
    # END DOC INCLUDE
    # the above marker marks the end of the code snippet included
    # in the docs
    routes_list = utils.list_routes(s)
    assert routes_list.get("slack_webhook.health").startswith(
        "/webhooks/slack")
    assert routes_list.get("slack_webhook.webhook").startswith(
        "/webhooks/slack/webhook")
Example 16
def test_single_state_featurizer_with_interpreter_state_with_no_action_name(
    unpacked_trained_moodbot_path: Text,
):
    # check that action name features are not added by the featurizer when not
    # present in the state, that user input is ignored when the action is not
    # action_listen, and that action_name features are not added
    from rasa.core.agent import Agent

    interpreter = Agent.load(unpacked_trained_moodbot_path).interpreter
    f = SingleStateFeaturizer()
    f._default_feature_states[INTENT] = {"a": 0, "b": 1}
    f._default_feature_states[ENTITIES] = {"c": 0}
    f._default_feature_states[ACTION_NAME] = {"e": 0, "d": 1, "action_listen": 2}
    f._default_feature_states[SLOTS] = {"e_0": 0, "f_0": 1, "g_0": 2}
    f._default_feature_states[ACTIVE_LOOP] = {"h": 0, "i": 1, "j": 2, "k": 3}
    encoded = f.encode_state(
        {
            "user": {"text": "a ball", "intent": "b", "entities": ["c"]},
            "prev_action": {"action_text": "throw a ball"},
            "active_loop": {"name": "k"},
            "slots": {"e": (1.0,)},
        },
        interpreter=interpreter,
    )
    assert list(encoded.keys()) == [ACTION_TEXT, ACTIVE_LOOP, SLOTS]
    assert encoded[ACTION_TEXT][0].features.shape[-1] == 300
    assert (encoded[SLOTS][0].features != scipy.sparse.coo_matrix([[1, 0, 0]])).nnz == 0
    assert (
        encoded[ACTIVE_LOOP][0].features != scipy.sparse.coo_matrix([[0, 0, 0, 1]])
    ).nnz == 0
Example 17
def test_single_state_featurizer_with_interpreter_state_with_action_listen(
    unpacked_trained_spacybot_path: Text,
):
    interpreter = Agent.load(unpacked_trained_spacybot_path).interpreter

    f = SingleStateFeaturizer()
    f._default_feature_states[INTENT] = {"greet": 0, "inform": 1}
    f._default_feature_states[ENTITIES] = {
        "city": 0,
        "name": 1,
        f"city{ENTITY_LABEL_SEPARATOR}to": 2,
        f"city{ENTITY_LABEL_SEPARATOR}from": 3,
    }
    f._default_feature_states[ACTION_NAME] = {
        "utter_ask_where_to": 0,
        "utter_greet": 1,
        "action_listen": 2,
    }
    # `_0` in slots represent feature dimension
    f._default_feature_states[SLOTS] = {"slot_1_0": 0, "slot_2_0": 1, "slot_3_0": 2}
    f._default_feature_states[ACTIVE_LOOP] = {
        "active_loop_1": 0,
        "active_loop_2": 1,
        "active_loop_3": 2,
        "active_loop_4": 3,
    }
    encoded = f.encode_state(
        {
            "user": {
                "text": "I am flying from London to Paris",
                "intent": "inform",
                "entities": ["city", f"city{ENTITY_LABEL_SEPARATOR}to"],
            },
            "prev_action": {
                "action_name": "action_listen",
                "action_text": "throw a ball",
            },
            "active_loop": {"name": "active_loop_4"},
            "slots": {"slot_1": (1.0,)},
        },
        interpreter=interpreter,
    )

    # check all the features are encoded and *_text features are encoded by a
    # dense featurizer
    assert sorted(list(encoded.keys())) == sorted(
        [TEXT, ENTITIES, ACTION_NAME, SLOTS, ACTIVE_LOOP, INTENT, ACTION_TEXT]
    )
    assert encoded[TEXT][0].features.shape[-1] == 300
    assert encoded[ACTION_TEXT][0].features.shape[-1] == 300
    assert (encoded[INTENT][0].features != scipy.sparse.coo_matrix([[0, 1]])).nnz == 0
    assert (
        encoded[ACTION_NAME][0].features != scipy.sparse.coo_matrix([[0, 0, 1]])
    ).nnz == 0
    assert encoded[ENTITIES][0].features.shape[-1] == 4
    assert (encoded[SLOTS][0].features != scipy.sparse.coo_matrix([[1, 0, 0]])).nnz == 0
    assert (
        encoded[ACTIVE_LOOP][0].features != scipy.sparse.coo_matrix([[0, 0, 0, 1]])
    ).nnz == 0
Example 18
async def test_end_to_end_evaluation_script(restaurantbot: Text):
    restaurantbot = Agent.load(restaurantbot)
    completed_trackers = await _generate_trackers(
        END_TO_END_STORY_FILE, restaurantbot, use_e2e=True
    )

    story_evaluation, num_stories = collect_story_predictions(
        completed_trackers, restaurantbot, use_e2e=True
    )

    serialised_store = [
        "utter_ask_howcanhelp",
        "action_listen",
        "utter_ask_howcanhelp",
        "action_listen",
        "utter_on_it",
        "utter_ask_cuisine",
        "action_listen",
        "utter_ask_numpeople",
        "action_listen",
        "utter_ask_howcanhelp",
        "action_listen",
        "utter_on_it",
        "utter_ask_numpeople",
        "action_listen",
        "utter_ask_moreupdates",
        "action_listen",
        "utter_ask_moreupdates",
        "action_listen",
        "utter_ack_dosearch",
        "action_search_restaurants",
        "action_suggest",
        "action_listen",
        "utter_ask_howcanhelp",
        "action_listen",
        "greet",
        "greet",
        "inform",
        "inform",
        "greet",
        "inform",
        "inform",
        "inform",
        "deny",
        "greet",
        "[moderately](price:moderate)",
        "[east](location)",
        "[french](cuisine)",
        "[cheap](price:lo)",
        "[french](cuisine)",
        "[bombay](location)",
        "[six](people:6)",
        "[moderately](price:moderate)",
    ]

    assert story_evaluation.evaluation_store.serialise()[0] == serialised_store
    assert not story_evaluation.evaluation_store.has_prediction_target_mismatch()
    assert len(story_evaluation.failed_stories) == 0
    assert num_stories == 4
Example 19
def main():
    from rasa.core.agent import Agent
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.utils import AvailableEndpoints, set_default_subparser
    import rasa.nlu.utils as nlu_utils
    import rasa.core.cli
    from rasa.core import utils

    loop = asyncio.get_event_loop()

    # Running as standalone python application
    arg_parser = create_argument_parser()
    set_default_subparser(arg_parser, "default")
    cmdline_arguments = arg_parser.parse_args()

    logging.basicConfig(level=cmdline_arguments.loglevel)
    _endpoints = AvailableEndpoints.read_endpoints(cmdline_arguments.endpoints)

    if cmdline_arguments.output:
        nlu_utils.create_dir(cmdline_arguments.output)

    if not cmdline_arguments.core:
        raise ValueError(
            "you must provide a core model directory to evaluate using -d / --core"
        )
    if cmdline_arguments.mode == "default":

        _interpreter = NaturalLanguageInterpreter.create(
            cmdline_arguments.nlu, _endpoints.nlu
        )

        _agent = Agent.load(cmdline_arguments.core, interpreter=_interpreter)

        stories = loop.run_until_complete(
            rasa.core.cli.train.stories_from_cli_args(cmdline_arguments)
        )

        loop.run_until_complete(
            test(
                stories,
                _agent,
                cmdline_arguments.max_stories,
                cmdline_arguments.output,
                cmdline_arguments.fail_on_prediction_errors,
                cmdline_arguments.e2e,
            )
        )

    elif cmdline_arguments.mode == "compare":
        compare(
            cmdline_arguments.core, cmdline_arguments.stories, cmdline_arguments.output
        )

        story_n_path = os.path.join(cmdline_arguments.core, "num_stories.json")

        number_of_stories = utils.read_json_file(story_n_path)
        plot_curve(cmdline_arguments.output, number_of_stories)

    logger.info("Finished evaluation")
Example 20
async def load_agent(bot: Text, conf: BotsConf) -> Agent:
    # train it
    await train_agent(bot, conf)
    # load it
    bot_loc = get_latest_model(f"{conf.get_loc(bot)}/models")
    print(f'.. load bot model {bot_loc}')
    agent = Agent.load(bot_loc, action_endpoint=conf.get_endpoint(bot))
    return agent
Example 21
def Load_model():
    from rasa.utils.endpoints import EndpointConfig
    # load the Rasa NLU model and the Rasa Core model
    # agent = Agent.load("models/core",
    #                    interpreter=RasaNLUInterpreter("models/nlu"))
    agent = Agent.load(model_path="./models",
                       action_endpoint=EndpointConfig(url="http://localhost:5055/webhook"))
    return agent
Example 22
def run_weather_bot(serve_forever=True):
    interpreter = RasaNLUInterpreter('./models/nlu/default/weathernlu')
    agent = Agent.load('./models/dialogue', interpreter=interpreter)

    if serve_forever:
        agent.handle_channel(ConsoleInputChannel())

    return agent
Example 23
def run(serve_forever=True):
    interpreter = RasaNLUInterpreter("models/nlu")
    action_endpoint = EndpointConfig(url="http://localhost:5055/webhook")
    agent = Agent.load("models/dialogue", interpreter=interpreter,
                       action_endpoint=action_endpoint)

    if serve_forever:
        agent.handle_channels([CmdlineInput()])
    return agent
Example 24
def run_dialogue(serve_forever=True):
    interpreter = RasaNLUInterpreter('./models/nlu/chatter')
    action_endpoint = EndpointConfig(url="http://localhost:5055/webhook")
    agent = Agent.load('./models/dialogue',
                       interpreter=interpreter,
                       action_endpoint=action_endpoint)
    rasa.core.run.serve_application(agent, channel='cmdline')
    return agent
Example 25
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    additional_arguments: Optional[Dict] = None,
    use_conversation_test_files: bool = False,
) -> None:
    """Tests a trained Core model against a set of test stories."""
    import rasa.model
    from rasa.shared.nlu.interpreter import RegexInterpreter
    from rasa.core.agent import Agent

    if additional_arguments is None:
        additional_arguments = {}

    if output:
        rasa.shared.utils.io.create_directory(output)

    try:
        unpacked_model = rasa.model.get_model(model)
    except ModelNotFound:
        rasa.shared.utils.cli.print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
        return

    _agent = Agent.load(unpacked_model)

    if _agent.policy_ensemble is None:
        rasa.shared.utils.cli.print_error(
            "Unable to test: could not find a Core model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )

    if isinstance(_agent.interpreter, RegexInterpreter):
        rasa.shared.utils.cli.print_warning(
            "No NLU model found. Using default 'RegexInterpreter' for end-to-end "
            "evaluation. If you added actual user messages to your test stories "
            "this will likely lead to the tests failing. In that case, you need "
            "to train a NLU model first, e.g. using `rasa train`."
        )

    from rasa.core.test import test as core_test

    kwargs = rasa.shared.utils.common.minimal_kwargs(
        additional_arguments, core_test, ["stories", "agent", "e2e"]
    )

    rasa.utils.common.run_in_loop(
        core_test(
            stories,
            _agent,
            e2e=use_conversation_test_files,
            out_directory=output,
            **kwargs,
        )
    )
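A minimal sketch of calling this helper, assuming a trained model archive and a conversation test file exist at the (hypothetical) paths below:

# hypothetical paths; adjust to your project layout
test_core(
    model="models/20210301-000000.tar.gz",
    stories="tests/test_stories.yml",
    output="results",
    use_conversation_test_files=True,
)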
Example 26
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
):
    import rasa.core.test
    import rasa.core.utils as core_utils
    import rasa.model
    from rasa.core.interpreter import RegexInterpreter, NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        io_utils.create_directory(output)

    try:
        unpacked_model = rasa.model.get_model(model)
    except ModelNotFound:
        print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
        return

    core_path, nlu_path = rasa.model.get_model_subdirectories(unpacked_model)

    if not core_path:
        print_error(
            "Unable to test: could not find a Core model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )

    use_e2e = kwargs["e2e"] if "e2e" in kwargs else False

    _interpreter = RegexInterpreter()
    if use_e2e:
        if nlu_path:
            _interpreter = NaturalLanguageInterpreter.create(_endpoints.nlu or nlu_path)
        else:
            print_warning(
                "No NLU model found. Using default 'RegexInterpreter' for end-to-end "
                "evaluation."
            )

    _agent = Agent.load(unpacked_model, interpreter=_interpreter)

    kwargs = utils.minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])

    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        rasa.core.test(stories, _agent, out_directory=output, **kwargs)
    )
Example 27
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
):
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa.nlu import utils as nlu_utils
    from rasa.model import get_model, get_model_subdirectories
    from rasa.core.interpreter import RegexInterpreter, NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        nlu_utils.create_dir(output)

    unpacked_model = get_model(model)
    if unpacked_model is None:
        print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model."
        )
        return

    core_path, nlu_path = get_model_subdirectories(unpacked_model)

    if not os.path.exists(core_path):
        print_error(
            "Unable to test: could not find a Core model. Use 'rasa train' to "
            "train a model."
        )

    use_e2e = kwargs["e2e"] if "e2e" in kwargs else False

    _interpreter = RegexInterpreter()
    if use_e2e:
        if os.path.exists(nlu_path):
            _interpreter = NaturalLanguageInterpreter.create(nlu_path, _endpoints.nlu)
        else:
            print_warning(
                "No NLU model found. Using default 'RegexInterpreter' for end-to-end "
                "evaluation."
            )

    _agent = Agent.load(unpacked_model, interpreter=_interpreter)

    kwargs = minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])

    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        rasa.core.test(stories, _agent, out_directory=output, **kwargs)
    )
Example 28
async def test_nlg(http_nlg, trained_rasa_model):
    sender = str(uuid.uuid1())

    nlg_endpoint = EndpointConfig.from_dict({"url": http_nlg.make_url("/")})
    agent = Agent.load(trained_rasa_model, None, generator=nlg_endpoint)

    response = await agent.handle_text("/greet", sender_id=sender)
    assert len(response) == 1
    assert response[0] == {"text": "Hey there!", "recipient_id": sender}
Example 29
async def conversation_loop(model_path: Text):
    agent = Agent.load(model_path)
    print("Press 'q' to exit the conversation")
    while True:
        val = input("input: ")
        if val == "q":
            break
        response = await agent.handle_text(val)
        print("Bot: {}".format(response[0]["text"]))
Example 30
async def parse(text: Text, model_path: Text):
    agent = Agent.load(model_path)

    response = await agent.handle_text(text)
    logger.info(f"Text: '{text}'")
    logger.info("Response:")
    logger.info(response)
    print(response)
    return response