def _get_tracker_store_from_endpoints_config(
        endpoints_file: Text) -> TrackerStore:
    if not endpoints_file or not os.path.isfile(endpoints_file):
        print_error(
            "File '{}' was not found. Please specify a valid file with "
            "'--endpoints <file>'.".format(endpoints_file))
        exit(1)

    endpoints = AvailableEndpoints.read_endpoints(endpoints_file)

    tracker_store = TrackerStore.find_tracker_store(Domain.empty(),
                                                    endpoints.tracker_store)

    if not tracker_store or isinstance(tracker_store, InMemoryTrackerStore):
        print_error(
            "No valid tracker store config given. Please provide a valid "
            "tracker store configuration as it is described here: "
            "https://rasa.com/docs/core/0.14.4/tracker_stores/")
        exit(1)

    return tracker_store
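A minimal usage sketch for the helper above, assuming an endpoints file that configures a SQLite-backed SQL tracker store (the file content and paths below are illustrative, not part of the original snippet):

ENDPOINTS_WITH_SQL_TRACKER_STORE = """
tracker_store:
  type: SQL
  dialect: sqlite
  db: rasa.db
"""

with open("endpoints.yml", "w") as f:
    f.write(ENDPOINTS_WITH_SQL_TRACKER_STORE)

# A SQL tracker store is not an InMemoryTrackerStore, so the check above passes.
tracker_store = _get_tracker_store_from_endpoints_config("endpoints.yml")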
Example #2
def create_agent(model: Text, endpoints: Text = None) -> "Agent":
    from rasa.core.tracker_store import TrackerStore
    from rasa.core.lock_store import LockStore
    import rasa.core.brokers.utils as broker_utils
    from rasa.core.utils import AvailableEndpoints
    from rasa.core.agent import Agent

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    _broker = broker_utils.from_endpoint_config(_endpoints.event_broker)

    _tracker_store = TrackerStore.find_tracker_store(None,
                                                     _endpoints.tracker_store,
                                                     _broker)
    _lock_store = LockStore.find_lock_store(_endpoints.lock_store)

    return Agent.load(
        model,
        generator=_endpoints.nlg,
        tracker_store=_tracker_store,
        lock_store=_lock_store,
        action_endpoint=_endpoints.action,
    )
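A hypothetical call for the factory above; the model archive and endpoints paths are placeholders:

agent = create_agent(
    "models/20190512-120000.tar.gz",  # placeholder path to a trained model
    endpoints="endpoints.yml",        # placeholder endpoints file
)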
Example #3
def create_agent(model: Text, endpoints: Text = None) -> "Agent":
    from rasa.core.tracker_store import TrackerStore
    from rasa.core.lock_store import LockStore
    from rasa.core.utils import AvailableEndpoints
    from rasa.core.agent import Agent
    from rasa.core.brokers.broker import EventBroker
    import rasa.utils.common

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    _broker = rasa.utils.common.run_in_loop(
        EventBroker.create(_endpoints.event_broker))
    _tracker_store = TrackerStore.create(_endpoints.tracker_store,
                                         event_broker=_broker)
    _lock_store = LockStore.create(_endpoints.lock_store)

    return Agent.load(
        model,
        generator=_endpoints.nlg,
        tracker_store=_tracker_store,
        lock_store=_lock_store,
        action_endpoint=_endpoints.action,
    )
Example #4
def run_interactive_learning(stories: Text = None,
                             finetune: bool = False,
                             skip_visualization: bool = False,
                             server_args: Dict[Text, Any] = None,
                             additional_arguments: Dict[Text, Any] = None
                             ):
    """Start the interactive learning with the model of the agent."""

    server_args = server_args or {}

    if not skip_visualization:
        p = Process(target=start_visualization, args=("story_graph.dot",))
        p.daemon = True
        p.start()
    else:
        p = None

    app = run.configure_app(enable_api=True)
    endpoints = AvailableEndpoints.read_endpoints(server_args.get("endpoints"))

    # before_server_start handlers make sure the agent is loaded before the
    # interactive learning IO starts
    if server_args.get("core"):
        app.register_listener(
            partial(run.load_agent_on_start, server_args.get("core"),
                    endpoints, server_args.get("nlu")),
            'before_server_start')
    else:
        app.register_listener(
            partial(train_agent_on_start, server_args, endpoints,
                    additional_arguments),
            'before_server_start')

    _serve_application(app, stories, finetune, skip_visualization)

    if not skip_visualization:
        p.terminate()
        p.join()
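As a sketch of how this entry point might be invoked (all paths are placeholders; with no "core" key in server_args, the agent is trained on startup via train_agent_on_start):

run_interactive_learning(
    stories="data/stories.md",
    skip_visualization=True,
    server_args={"endpoints": "endpoints.yml"},
)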
Example #5
def run(model: Text,
        endpoints: Text,
        connector: Text = None,
        credentials: Text = None,
        **kwargs: Dict):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be use (overwrites `credentials`
        field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
        `rasa.core.run.serve_application`.

    """
    import rasa.core.run
    import rasa.nlu.run
    from rasa.core.utils import AvailableEndpoints
    import rasa.utils.common as utils

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        connector = "rest"
        print_warning(
            "No chat connector configured, falling back to the "
            "REST input channel. To connect your bot to another channel, "
            "read the docs here: {}/user-guide/"
            "messaging-and-voice-channels".format(DOCS_BASE_URL))

    kwargs = utils.minimal_kwargs(kwargs, rasa.core.run.serve_application)
    rasa.core.run.serve_application(model,
                                    channel=connector,
                                    credentials=credentials,
                                    endpoints=_endpoints,
                                    **kwargs)
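A hypothetical invocation of the wrapper above; paths are placeholders, and leaving connector and credentials unset triggers the REST-channel fallback shown in the function:

run(
    "models/20190512-120000.tar.gz",  # placeholder model archive
    "endpoints.yml",                  # placeholder endpoints file
)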
Example #6
async def test_load_agent_on_start_with_bad_model_file(
    tmp_path: Path,
    rasa_server: Sanic,
    loop: AbstractEventLoop,
):
    fake_model = tmp_path / "fake_model.tar.gz"
    fake_model.touch()
    fake_model_path = str(fake_model)

    with pytest.warns(UserWarning) as warnings:
        agent = await run.load_agent_on_start(
            fake_model_path,
            AvailableEndpoints(),
            None,
            rasa_server,
            loop,
        )
        assert any("fake_model.tar.gz' could not be loaded" in str(w.message)
                   for w in warnings)

    # Fallback agent was loaded even if model was unusable
    assert isinstance(agent.interpreter, interpreter.RegexInterpreter)
    assert agent.policy_ensemble is None
    assert isinstance(agent.domain, domain.Domain)
Example #7
def track_server_start(
    input_channels: List["InputChannel"],
    endpoints: Optional["AvailableEndpoints"],
    model_directory: Optional[Text],
    number_of_workers: int,
    is_api_enabled: bool,
) -> None:
    """Tracks when a user starts a rasa server.

    Args:
        input_channels: Used input channels
        endpoints: Endpoint configuration for the server
        model_directory: directory of the running model
        number_of_workers: number of used Sanic workers
        is_api_enabled: whether the rasa API server is enabled
    """
    from rasa.core.utils import AvailableEndpoints

    def project_fingerprint_from_model(
        _model_directory: Optional[Text],
    ) -> Optional[Text]:
        """Gets project fingerprint from an app's loaded model."""
        if not _model_directory:
            return None

        try:
            model_archive = model.get_local_model(_model_directory)
            metadata = LocalModelStorage.metadata_from_archive(model_archive)

            return metadata.project_fingerprint
        except Exception:
            return None

    if not endpoints:
        endpoints = AvailableEndpoints()

    _track(
        TELEMETRY_SERVER_STARTED_EVENT,
        {
            "input_channels": [i.name() for i in input_channels],
            "api_enabled": is_api_enabled,
            "number_of_workers": number_of_workers,
            "endpoints_nlg": endpoints.nlg.type if endpoints.nlg else None,
            "endpoints_nlu": endpoints.nlu.type if endpoints.nlu else None,
            "endpoints_action_server": (
                endpoints.action.type if endpoints.action else None
            ),
            "endpoints_model_server": (
                endpoints.model.type if endpoints.model else None
            ),
            "endpoints_tracker_store": (
                endpoints.tracker_store.type if endpoints.tracker_store else None
            ),
            "endpoints_lock_store": (
                endpoints.lock_store.type if endpoints.lock_store else None
            ),
            "endpoints_event_broker": (
                endpoints.event_broker.type if endpoints.event_broker else None
            ),
            "project": project_fingerprint_from_model(model_directory),
        },
    )
Example #8
def test_default_wait_time_between_pulls():
    endpoint_config = EndpointConfig(url="http://*****:*****@latest")
    endpoints = AvailableEndpoints(model=endpoint_config)
    x._overwrite_endpoints_for_local_x(endpoints, "test", "http://localhost")
    assert endpoints.model.kwargs["wait_time_between_pulls"] == 2
Example #9
def _overwrite_endpoints_for_local_x(endpoints: AvailableEndpoints,
                                     rasa_x_token: Text, rasa_x_url: Text):
    endpoints.model = _get_model_endpoint(endpoints.model, rasa_x_token,
                                          rasa_x_url)
    endpoints.event_broker = _get_event_broker_endpoint(endpoints.event_broker)
Example #10
def test_default_model_server_url():
    endpoint_config = EndpointConfig()
    endpoints = AvailableEndpoints(model=endpoint_config)
    x._overwrite_endpoints_for_local_x(endpoints, "test", "http://localhost")
    assert (endpoints.model.url ==
            "http://localhost/projects/default/models/tag/production")
Example #11
def test_overwrite_model_server_url_with_no_model_endpoint():
    endpoints = AvailableEndpoints()
    x._overwrite_endpoints_for_local_x(endpoints, "test", "http://localhost")
    assert (endpoints.model.url ==
            "http://localhost/projects/default/models/tag/production")
Example #12
def run(args: argparse.Namespace) -> NoReturn:
    """Entrypoint for `rasa run`.

    Args:
        args: The CLI arguments.
    """
    import rasa

    args.endpoints = rasa.cli.utils.get_validated_path(args.endpoints,
                                                       "endpoints",
                                                       DEFAULT_ENDPOINTS_PATH,
                                                       True)
    args.credentials = rasa.cli.utils.get_validated_path(
        args.credentials, "credentials", DEFAULT_CREDENTIALS_PATH, True)

    if args.enable_api:
        if not args.remote_storage:
            args.model = _validate_model_path(args.model, "model",
                                              DEFAULT_MODELS_PATH)
        rasa.run(**vars(args))
        return

    # if the API is not enabled, you cannot start the server without a model;
    # make sure either a model server, a remote storage, or a local model is
    # configured

    from rasa.model import get_model
    from rasa.core.utils import AvailableEndpoints

    # start server if remote storage is configured
    if args.remote_storage is not None:
        rasa.run(**vars(args))
        return

    # start server if model server is configured
    endpoints = AvailableEndpoints.read_endpoints(args.endpoints)
    model_server = endpoints.model if endpoints and endpoints.model else None
    if model_server is not None:
        rasa.run(**vars(args))
        return

    # start server if local model found
    args.model = _validate_model_path(args.model, "model", DEFAULT_MODELS_PATH)
    local_model_set = True
    try:
        get_model(args.model)
    except ModelNotFound:
        local_model_set = False

    if local_model_set:
        rasa.run(**vars(args))
        return

    rasa.shared.utils.cli.print_error(
        f"No model found. You have three options to provide a model:\n"
        f"1. Configure a model server in the endpoint configuration and provide "
        f"the configuration via '--endpoints'.\n"
        f"2. Specify a remote storage via '--remote-storage' to load the model "
        f"from.\n"
        f"3. Train a model before running the server using `rasa train` and "
        f"use '--model' to provide the model path.\n"
        f"For more information check {DOCS_BASE_URL}/model-storage.")
Example #13
from sqlalchemy.ext.declarative import declarative_base
import sqlalchemy.exc
from sqlalchemy.orm import relationship
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
import psycopg2

from rasa.core.utils import AvailableEndpoints
import rasa.utils.io
import requests
import json
import logging
import random
import time
import datetime
logger = logging.getLogger(__name__)
_endpoints = AvailableEndpoints.read_endpoints("endpoints.yml")
_credentials = rasa.utils.io.read_config_file("credentials.yml")

# To get FB User Profile, use something like:
# See: https://developers.facebook.com/tools/explorer
# and https://developers.facebook.com/docs/graph-api/explorer/


Base = declarative_base()

class Human(Base):
    __tablename__ = 'human'
    id = Column(Integer, primary_key=True)
    fb_id = Column(String(250))
    first_name = Column(String(250))
    last_name = Column(String(250))
Example #14
                                              nlu_model, app, loop)

        tracker = load_tracker_from_json(tracker_dump,
                                         agent.domain)
        await replay_events(tracker, agent)

    app.register_listener(load_agent_and_tracker, 'before_server_start')
    app.run(host='0.0.0.0', port=port,
            access_log=logger.isEnabledFor(logging.DEBUG))


if __name__ == '__main__':
    # Running as standalone python application
    arg_parser = create_argument_parser()
    cmdline_args = arg_parser.parse_args()

    rasa.utils.configure_colored_logging(cmdline_args.loglevel)
    _endpoints = AvailableEndpoints.read_endpoints(cmdline_args.endpoints)

    print(cliutils.wrap_with_color(
        "We'll recreate the dialogue state. After that you can chat "
        "with the bot, continuing the input conversation.",
        rasa.cli.utils.bcolors.OKGREEN + rasa.cli.utils.bcolors.UNDERLINE))

    _loop = asyncio.get_event_loop()
    _loop.run_until_complete(serve_application(cmdline_args.core,
                                               cmdline_args.nlu,
                                               cmdline_args.port,
                                               cmdline_args.enable_api,
                                               _endpoints))
Example #15
async def test_formbot_example():
    sys.path.append("examples/formbot/")

    p = "examples/formbot/"
    stories = os.path.join(p, "data", "stories.md")
    endpoint = EndpointConfig("https://example.com/webhooks/actions")
    endpoints = AvailableEndpoints(action=endpoint)
    agent = await train(
        os.path.join(p, "domain.yml"),
        stories,
        os.path.join(p, "models", "dialogue"),
        endpoints=endpoints,
        policy_config="examples/formbot/config.yml",
    )

    async def mock_form_happy_path(input_text, output_text, slot=None):
        if slot:
            form = "restaurant_form"
            template = f"utter_ask_{slot}"
        else:
            form = None
            template = "utter_submit"
        response = {
            "events": [
                {"event": "form", "name": form, "timestamp": None},
                {
                    "event": "slot",
                    "timestamp": None,
                    "name": "requested_slot",
                    "value": slot,
                },
            ],
            "responses": [{"template": template}],
        }
        with aioresponses() as mocked:
            mocked.post("https://example.com/webhooks/actions",
                        payload=response,
                        repeat=True)
            responses = await agent.handle_text(input_text)
            assert responses[0]["text"] == output_text

    async def mock_form_unhappy_path(input_text, output_text, slot):
        response_error = {
            "error": f"Failed to extract slot {slot} with action restaurant_form",
            "action_name": "restaurant_form",
        }
        with aioresponses() as mocked:
            # noinspection PyTypeChecker
            mocked.post(
                "https://example.com/webhooks/actions",
                repeat=True,
                exception=ClientResponseError(400, "",
                                              json.dumps(response_error)),
            )
            responses = await agent.handle_text(input_text)
            assert responses[0]["text"] == output_text

    await mock_form_happy_path("/request_restaurant",
                               "what cuisine?",
                               slot="cuisine")
    await mock_form_unhappy_path("/chitchat", "chitchat", slot="cuisine")
    await mock_form_happy_path('/inform{"cuisine": "mexican"}',
                               "how many people?",
                               slot="num_people")
    await mock_form_happy_path('/inform{"number": "2"}',
                               "do you want to seat outside?",
                               slot="outdoor_seating")
    await mock_form_happy_path("/affirm",
                               "please provide additional preferences",
                               slot="preferences")

    responses = await agent.handle_text("/restart")
    assert responses[0]["text"] == "restarted"

    responses = await agent.handle_text("/greet")
    assert (responses[0]["text"] ==
            "Hello! I am restaurant search assistant! How can I help?")

    await mock_form_happy_path("/request_restaurant",
                               "what cuisine?",
                               slot="cuisine")
    await mock_form_happy_path('/inform{"cuisine": "mexican"}',
                               "how many people?",
                               slot="num_people")
    await mock_form_happy_path('/inform{"number": "2"}',
                               "do you want to seat outside?",
                               slot="outdoor_seating")
    await mock_form_unhappy_path("/stop",
                                 "do you want to continue?",
                                 slot="outdoor_seating")
    await mock_form_happy_path("/affirm",
                               "do you want to seat outside?",
                               slot="outdoor_seating")
    await mock_form_happy_path("/affirm",
                               "please provide additional preferences",
                               slot="preferences")
    await mock_form_happy_path(
        "/deny",
        "please give your feedback on your experience so far",
        slot="feedback")
    await mock_form_happy_path('/inform{"feedback": "great"}', "All done!")

    responses = await agent.handle_text("/thankyou")
    assert responses[0]["text"] == "you are welcome :)"
Example #16
# app.blueprint(html_bp)
# app.blueprint(json_bp)

# TODO: read the configuration from the config file
CONFIG = load_config()
BASE_CORE_PATH = "core/"
enable_api = True
cors = None
auth_token = None
jwt_secret = None
jwt_method = None
# aws,gcs,azure
remote_storage = None
endpoints = get_validated_path(BASE_CORE_PATH + "endpoints.yml", "endpoints",
                               DEFAULT_ENDPOINTS_PATH, True)
_endpoints = AvailableEndpoints.read_endpoints(endpoints)
port = DEFAULT_RASA_PORT

# model = get_validated_path(BASE_CORE_PATH+"models/", "model", DEFAULT_MODELS_PATH)
# model_path = BASE_CORE_PATH+"models/"
model_path = CONFIG.BASE_MODEL_DIR

if __name__ == "__main__":

    if not model_path:
        print("No model found. Train a model before running the server")
    app = server.create_app(cors_origins=cors,
                            auth_token=auth_token,
                            jwt_secret=jwt_secret,
                            jwt_method=jwt_method,
                            endpoints=endpoints,
Example #17
def test_overwrite_model_server_url():
    endpoint_config = EndpointConfig(
        url="http://*****:*****@latest")
    endpoints = AvailableEndpoints(model=endpoint_config)
    x._overwrite_endpoints_for_local_x(endpoints, "test", "http://localhost")
    assert endpoints.model.url == "http://localhost/models/tags/production"