Example #1
def run(
    model: Text,
    endpoints: Text,
    connector: Text = None,
    credentials: Text = None,
    **kwargs: Dict,
):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be used (overwrites the
            `credentials` field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
            `rasa.core.run.serve_application`.

    """
    import rasa.core.run
    import rasa.nlu.run
    import rasa.shared.utils.common
    from rasa.core.utils import AvailableEndpoints
    from rasa.shared.constants import DOCS_BASE_URL
    from rasa.shared.utils.cli import print_warning

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        connector = "rest"
        print_warning(
            f"No chat connector configured, falling back to the "
            f"REST input channel. To connect your bot to another channel, "
            f"read the docs here: {DOCS_BASE_URL}/messaging-and-voice-channels"
        )

    kwargs = rasa.shared.utils.common.minimal_kwargs(
        kwargs, rasa.core.run.serve_application)
    rasa.core.run.serve_application(
        model,
        channel=connector,
        credentials=credentials,
        endpoints=_endpoints,
        **kwargs,
    )
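A minimal sketch of how this helper might be invoked, assuming a trained model archive and an endpoints file exist at the placeholder paths below; extra keyword arguments are filtered by minimal_kwargs down to whatever serve_application actually accepts:

# Hypothetical invocation; the model archive and endpoints file are placeholders.
run(
    model="models/20210301-154112.tar.gz",
    endpoints="endpoints.yml",
    connector="rest",   # passing a connector explicitly skips the fallback warning
    port=5005,          # dropped by minimal_kwargs unless serve_application accepts it
)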
Example #2
def create_agent(model: Text, endpoints: Text = None) -> "Agent":
    from rasa.core.tracker_store import TrackerStore
    from rasa.core.utils import AvailableEndpoints
    from rasa.core.agent import Agent
    from rasa.core.brokers.broker import EventBroker
    from rasa.core.lock_store import LockStore

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    _broker = EventBroker.create(_endpoints.event_broker)
    _tracker_store = TrackerStore.create(_endpoints.tracker_store,
                                         event_broker=_broker)
    _lock_store = LockStore.create(_endpoints.lock_store)

    return Agent.load(
        model,
        generator=_endpoints.nlg,
        tracker_store=_tracker_store,
        lock_store=_lock_store,
        action_endpoint=_endpoints.action,
    )
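A usage sketch for this variant, assuming a trained model archive and an endpoints file at the placeholder paths below:

# Hypothetical call; the loaded Agent bundles the tracker store, lock store,
# event broker and action endpoint configured in endpoints.yml.
agent = create_agent("models/20210301-154112.tar.gz", endpoints="endpoints.yml")
print(agent.domain.intents)  # assumes the Agent exposes the loaded model's domain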
Example #3
def create_agent(model: Text, endpoints: Text = None) -> "Agent":
    from rasa.core.tracker_store import TrackerStore
    from rasa.core import broker
    from rasa.core.utils import AvailableEndpoints
    from rasa.core.agent import Agent

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    _broker = broker.from_endpoint_config(_endpoints.event_broker)

    _tracker_store = TrackerStore.find_tracker_store(None,
                                                     _endpoints.tracker_store,
                                                     _broker)

    return Agent.load(
        model,
        generator=_endpoints.nlg,
        tracker_store=_tracker_store,
        action_endpoint=_endpoints.action,
    )
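Both create_agent variants lean on the same AvailableEndpoints object; a small sketch of reading it directly and inspecting the sections the examples above consume (the file name is a placeholder):

from rasa.core.utils import AvailableEndpoints

# Read the endpoints file once and look at the sections used above.
_endpoints = AvailableEndpoints.read_endpoints("endpoints.yml")
print(_endpoints.tracker_store)  # passed to TrackerStore.create / find_tracker_store
print(_endpoints.event_broker)   # passed to the event broker factory
print(_endpoints.nlg)            # response generator endpoint handed to Agent.load
print(_endpoints.action)         # action server endpoint handed to Agent.load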
Example #4
def _get_old_tracker_store(endpoints_file: Text) -> TrackerStore:
    if not endpoints_file or not os.path.isfile(endpoints_file):
        print_error(
            "File '{}' was not found. Please specify a valid file with "
            "'--endpoints <file>'.".format(endpoints_file))
        exit(1)

    endpoints = AvailableEndpoints.read_endpoints(endpoints_file)

    tracker_store = TrackerStore.find_tracker_store(Domain.empty(),
                                                    endpoints.tracker_store)

    if not tracker_store or isinstance(tracker_store, InMemoryTrackerStore):
        print_error(
            "No valid tracker store config given. Please provide a valid "
            "tracker store configuration as it is described here: "
            "https://rasa.com/docs/core/0.14.4/tracker_stores/")
        exit(1)

    return tracker_store
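A possible call, assuming the endpoints file configures a persistent (non in-memory) tracker store; the file name is a placeholder:

# Hypothetical usage of the validation helper above.
tracker_store = _get_old_tracker_store("endpoints.yml")
print(type(tracker_store).__name__)  # e.g. SQLTrackerStore, depending on the config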
Example #5
def run(model: Text,
        endpoints: Text,
        connector: Text = None,
        credentials: Text = None,
        **kwargs: Dict):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be used (overwrites the
            `credentials` field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
            `rasa.core.run.serve_application`.

    """
    import shutil

    import rasa.core.run
    from rasa.core.utils import AvailableEndpoints
    from rasa.model import get_model, get_model_subdirectories

    model_path = get_model(model)
    core_path, nlu_path = get_model_subdirectories(model_path)
    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        channel = "cmdline"
        logger.info("No chat connector configured, falling back to the "
                    "command line. Use `rasa configure channel` to connect"
                    "the bot to e.g. facebook messenger.")
    else:
        channel = connector

    kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application)
    rasa.core.run.serve_application(core_path,
                                    nlu_path,
                                    channel=channel,
                                    credentials_file=credentials,
                                    endpoints=_endpoints,
                                    **kwargs)
    shutil.rmtree(model_path)
Example #6
def run_interactive_learning(stories: Text = None,
                             finetune: bool = False,
                             skip_visualization: bool = False,
                             server_args: Dict[Text, Any] = None,
                             additional_arguments: Dict[Text, Any] = None
                             ):
    """Start the interactive learning with the model of the agent."""

    server_args = server_args or {}

    if not skip_visualization:
        p = Process(target=start_visualization, args=("story_graph.dot",))
        p.daemon = True
        p.start()
    else:
        p = None

    app = run.configure_app(enable_api=True)
    endpoints = AvailableEndpoints.read_endpoints(server_args.get("endpoints"))

    # before_server_start handlers make sure the agent is loaded before the
    # interactive learning IO starts
    if server_args.get("core"):
        app.register_listener(
            partial(run.load_agent_on_start, server_args.get("core"),
                    endpoints, server_args.get("nlu")),
            'before_server_start')
    else:
        app.register_listener(
            partial(train_agent_on_start, server_args, endpoints,
                    additional_arguments),
            'before_server_start')

    _serve_application(app, stories, finetune, skip_visualization)

    if not skip_visualization:
        p.terminate()
        p.join()
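A sketch of how this entry point might be called; the paths are placeholders and only the keys actually read via server_args.get(...) above are passed:

# Hypothetical invocation of run_interactive_learning.
run_interactive_learning(
    stories="data/stories.md",
    skip_visualization=True,              # do not spawn the visualization process
    server_args={
        "endpoints": "endpoints.yml",     # read via AvailableEndpoints.read_endpoints
        "core": "models/dialogue",        # pre-trained core model; omit to train on start
        "nlu": "models/nlu",              # optional NLU model for load_agent_on_start
    },
)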
Example #7
def run(args: argparse.Namespace) -> NoReturn:
    """Entrypoint for `rasa run`.

    Args:
        args: The CLI arguments.
    """
    import rasa

    args.endpoints = rasa.cli.utils.get_validated_path(args.endpoints,
                                                       "endpoints",
                                                       DEFAULT_ENDPOINTS_PATH,
                                                       True)
    args.credentials = rasa.cli.utils.get_validated_path(
        args.credentials, "credentials", DEFAULT_CREDENTIALS_PATH, True)

    if args.enable_api:
        if not args.remote_storage:
            args.model = _validate_model_path(args.model, "model",
                                              DEFAULT_MODELS_PATH)
        rasa.run(**vars(args))
        return

    # if the API is not enabled you cannot start without a model
    # make sure either a model server, a remote storage, or a local model is
    # configured

    from rasa.model import get_model
    from rasa.core.utils import AvailableEndpoints

    # start server if remote storage is configured
    if args.remote_storage is not None:
        rasa.run(**vars(args))
        return

    # start server if model server is configured
    endpoints = AvailableEndpoints.read_endpoints(args.endpoints)
    model_server = endpoints.model if endpoints and endpoints.model else None
    if model_server is not None:
        rasa.run(**vars(args))
        return

    # start server if local model found
    args.model = _validate_model_path(args.model, "model", DEFAULT_MODELS_PATH)
    local_model_set = True
    try:
        get_model(args.model)
    except ModelNotFound:
        local_model_set = False

    if local_model_set:
        rasa.run(**vars(args))
        return

    rasa.shared.utils.cli.print_error(
        f"No model found. You have three options to provide a model:\n"
        f"1. Configure a model server in the endpoint configuration and provide "
        f"the configuration via '--endpoints'.\n"
        f"2. Specify a remote storage via '--remote-storage' to load the model "
        f"from.\n"
        f"3. Train a model before running the server using `rasa train` and "
        f"use '--model' to provide the model path.\n"
        f"For more information check {DOCS_BASE_URL}/model-storage.")
Example #8
from sqlalchemy.ext.declarative import declarative_base
import sqlalchemy.exc
from sqlalchemy.orm import relationship
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import sessionmaker
import psycopg2

import rasa.utils.io
from rasa.core.utils import AvailableEndpoints
import requests
import json
import logging
import random
import time
import datetime
logger = logging.getLogger(__name__)
_endpoints = AvailableEndpoints.read_endpoints("endpoints.yml")
_credentials = rasa.utils.io.read_config_file("credentials.yml")

# To get FB User Profile, use something like:
# See: https://developers.facebook.com/tools/explorer
# and https://developers.facebook.com/docs/graph-api/explorer/


Base = declarative_base()

class Human(Base):
    __tablename__ = 'human'
    id = Column(Integer, primary_key=True)
    fb_id = Column(String(250))
    first_name = Column(String(250))
    last_name = Column(String(250))
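To make the ORM snippet usable end to end, an engine and session are still needed; a purely illustrative sketch against an in-memory SQLite database (the project itself presumably targets PostgreSQL, given the psycopg2 import):

# Illustrative only: wire the declarative Base above to an in-memory SQLite DB.
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)     # creates the 'human' table
Session = sessionmaker(bind=engine)
session = Session()

session.add(Human(fb_id="12345", first_name="Ada", last_name="Lovelace"))
session.commit()
print(session.query(Human).count())  # -> 1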
Example #9
# app.blueprint(html_bp)
# app.blueprint(json_bp)

# TODO: load this configuration from the config file
CONFIG = load_config()
BASE_CORE_PATH = "core/"
enable_api = True
cors = None
auth_token = None
jwt_secret = None
jwt_method = None
# aws,gcs,azure
remote_storage = None
endpoints = get_validated_path(BASE_CORE_PATH + "endpoints.yml", "endpoints",
                               DEFAULT_ENDPOINTS_PATH, True)
_endpoints = AvailableEndpoints.read_endpoints(endpoints)
port = DEFAULT_RASA_PORT

# model = get_validated_path(BASE_CORE_PATH+"models/", "model", DEFAULT_MODELS_PATH)
# model_path = BASE_CORE_PATH+"models/"
model_path = CONFIG.BASE_MODEL_DIR

if __name__ == "__main__":

    if not model_path:
        print("No model found. Train a model before running the server")
    app = server.create_app(cors_origins=cors,
                            auth_token=auth_token,
                            jwt_secret=jwt_secret,
                            jwt_method=jwt_method,
                            endpoints=endpoints,
Example #10
                                              nlu_model, app, loop)

        tracker = load_tracker_from_json(tracker_dump,
                                         agent.domain)
        await replay_events(tracker, agent)

    app.register_listener(load_agent_and_tracker, 'before_server_start')
    app.run(host='0.0.0.0', port=port,
            access_log=logger.isEnabledFor(logging.DEBUG))


if __name__ == '__main__':
    # Running as standalone python application
    arg_parser = create_argument_parser()
    cmdline_args = arg_parser.parse_args()

    rasa.utils.configure_colored_logging(cmdline_args.loglevel)
    _endpoints = AvailableEndpoints.read_endpoints(cmdline_args.endpoints)

    print(cliutils.wrap_with_color(
        "We'll recreate the dialogue state. After that you can chat "
        "with the bot, continuing the input conversation.",
        rasa.cli.utils.bcolors.OKGREEN + rasa.cli.utils.bcolors.UNDERLINE))

    _loop = asyncio.get_event_loop()
    _loop.run_until_complete(serve_application(cmdline_args.core,
                                               cmdline_args.nlu,
                                               cmdline_args.port,
                                               cmdline_args.enable_api,
                                               _endpoints))