Example #1
0
async def load_agent_on_start(
    model_path: Text,
    endpoints: AvailableEndpoints,
    remote_storage: Optional[Text],
    app: Sanic,
    loop: Text,
):
    """Load an agent.

    Used to be scheduled on server start
    (hence the `app` and `loop` arguments)."""
    import rasa.core.brokers.utils as broker_utils

    # noinspection PyBroadException
    try:
        # Unpack the model archive and build an interpreter from its NLU
        # sub-directory (the configured NLU endpoint takes precedence).
        with model.get_model(model_path) as unpacked_model:
            _, nlu_model = model.get_model_subdirectories(unpacked_model)
            _interpreter = NaturalLanguageInterpreter.create(
                nlu_model, endpoints.nlu)
    except Exception:
        # Best effort: the server can still start without an interpreter.
        logger.debug(
            "Could not load interpreter from '{}'.".format(model_path))
        _interpreter = None

    # Wire up event broker, tracker store and lock store from endpoint config.
    _broker = broker_utils.from_endpoint_config(endpoints.event_broker)
    _tracker_store = TrackerStore.find_tracker_store(None,
                                                     endpoints.tracker_store,
                                                     _broker)
    _lock_store = LockStore.find_lock_store(endpoints.lock_store)

    model_server = endpoints.model if endpoints and endpoints.model else None

    app.agent = await agent.load_agent(
        model_path,
        model_server=model_server,
        remote_storage=remote_storage,
        interpreter=_interpreter,
        generator=endpoints.nlg,
        tracker_store=_tracker_store,
        lock_store=_lock_store,
        action_endpoint=endpoints.action,
    )

    if not app.agent:
        # Fall back to a model-less agent so the server can still come up.
        logger.warning(
            "Agent could not be loaded with the provided configuration. "
            "Load default agent without any model.")
        app.agent = Agent(
            interpreter=_interpreter,
            generator=endpoints.nlg,
            tracker_store=_tracker_store,
            action_endpoint=endpoints.action,
            model_server=model_server,
            remote_storage=remote_storage,
        )

    return app.agent
Example #2
0
def test_create_interpreter(parameters, trained_nlu_model):
    """`create_interpreter` should yield the interpreter type the fixture expects."""
    source = parameters["obj"]
    if source == "trained_nlu_model":
        # Resolve the fixture name to the NLU sub-directory of the unpacked model.
        _, source = get_model_subdirectories(get_model(trained_nlu_model))

    # A configured endpoint takes precedence over the model directory.
    interpreter = rasa.core.interpreter.create_interpreter(
        parameters["endpoint"] or source)

    assert isinstance(interpreter, parameters["type"])
Example #3
0
def test_nlu(model: Optional[Text], nlu_data: Optional[Text], kwargs: Optional[Dict]):
    """Run NLU evaluation on the model's 'nlu' sub-directory, if it exists."""
    from rasa.nlu.test import run_evaluation

    nlu_model = os.path.join(get_model(model), "nlu")

    if os.path.exists(nlu_model):
        # Keep only kwargs that run_evaluation accepts, minus the ones we pass.
        extra_kwargs = minimal_kwargs(kwargs, run_evaluation, ["data_path", "model"])
        run_evaluation(nlu_data, nlu_model, **extra_kwargs)
Example #4
0
async def load_agent_on_start(
    model_path: Text,
    endpoints: AvailableEndpoints,
    remote_storage: Optional[Text],
    app: Sanic,
    loop: Text,
):
    """Load an agent.

    Used to be scheduled on server start
    (hence the `app` and `loop` arguments)."""

    # noinspection PyBroadException
    # bf mod
    try:
        # Build one interpreter per language found in the unpacked model;
        # a configured NLU endpoint takes precedence over the local model.
        with model.get_model(model_path) as unpacked_model:
            _, nlu_models = model.get_model_subdirectories(unpacked_model)
            _interpreters = {}
            for lang, nlu_model in nlu_models.items():
                _interpreters[lang] = NaturalLanguageInterpreter.create(endpoints.nlu or nlu_model)
    except Exception:
        # Best effort: the agent can still be created without interpreters.
        logger.debug(f"Could not load interpreter from '{model_path}'.")
        _interpreters = {}
    # /bf mod

    # Wire up event broker, tracker store and lock store from endpoint config.
    _broker = EventBroker.create(endpoints.event_broker)
    _tracker_store = TrackerStore.create(endpoints.tracker_store, event_broker=_broker)
    _lock_store = LockStore.create(endpoints.lock_store)

    model_server = endpoints.model if endpoints and endpoints.model else None

    app.agent = await agent.load_agent(
        model_path,
        model_server=model_server,
        remote_storage=remote_storage,
        interpreters=_interpreters,
        generator=endpoints.nlg,
        tracker_store=_tracker_store,
        lock_store=_lock_store,
        action_endpoint=endpoints.action,
    )

    if not app.agent:
        # Fall back to a model-less agent so the server can still come up.
        raise_warning(
            "Agent could not be loaded with the provided configuration. "
            "Load default agent without any model."
        )
        app.agent = Agent(
            interpreters=_interpreters,
            generator=endpoints.nlg,
            tracker_store=_tracker_store,
            action_endpoint=endpoints.action,
            model_server=model_server,
            remote_storage=remote_storage,
        )

    return app.agent
Example #5
0
def test_create_interpreter(parameters, trained_nlu_model):
    """`NaturalLanguageInterpreter.create` should produce the expected type."""
    candidate = parameters["obj"]
    if candidate == "trained_nlu_model":
        # Resolve the fixture name to the NLU part of the unpacked model.
        unpacked = get_model(trained_nlu_model)
        _, candidate = get_model_subdirectories(unpacked)

    interpreter = NaturalLanguageInterpreter.create(candidate, parameters["endpoint"])

    assert isinstance(interpreter, parameters["type"])
Example #6
0
    def load(
        cls,
        model_path: Text,
        interpreter: Optional[NaturalLanguageInterpreter] = None,
        generator: Union[EndpointConfig, NaturalLanguageGenerator] = None,
        tracker_store: Optional[TrackerStore] = None,
        lock_store: Optional[LockStore] = None,
        action_endpoint: Optional[EndpointConfig] = None,
        model_server: Optional[EndpointConfig] = None,
        remote_storage: Optional[Text] = None,
        path_to_model_archive: Optional[Text] = None,
    ) -> "Agent":
        """Load a persisted model from the passed path.

        Args:
            model_path: Path to a model archive ('tar.gz') or an unpacked
                model directory.
            interpreter: Optional NLU interpreter; created from the model's
                NLU part when not given.
            generator: Response generator or its endpoint configuration.
            tracker_store: Store for conversation trackers.
            lock_store: Store for conversation locks.
            action_endpoint: Endpoint for custom actions.
            model_server: Endpoint a model can be pulled from.
            remote_storage: Name of a remote model storage.
            path_to_model_archive: Original archive the model came from.

        Returns:
            An `Agent` instance built from the persisted model.

        Raises:
            ValueError: If `model_path` does not point at a usable model.
        """
        try:
            if not model_path:
                raise ModelNotFound("No path specified.")
            elif not os.path.exists(model_path):
                raise ModelNotFound(f"No file or directory at '{model_path}'.")
            elif os.path.isfile(model_path):
                # Archives are unpacked to a (temporary) directory first.
                model_path = get_model(model_path)
        except ModelNotFound as e:
            # Chain the original error so the root cause stays visible.
            raise ValueError(
                f"You are trying to load a MODEL from '{model_path}', "
                "which is not possible. \n"
                "The model path should be a 'tar.gz' file or a directory "
                "containing the various model files in the sub-directories 'core' "
                "and 'nlu'. \n\nIf you want to load training data instead of "
                "a model, use `agent.load_data(...)` instead."
            ) from e

        core_model, nlu_model = get_model_subdirectories(model_path)

        if not interpreter and nlu_model:
            interpreter = NaturalLanguageInterpreter.create(nlu_model)

        domain = None
        ensemble = None

        if core_model:
            domain = Domain.load(os.path.join(core_model, DEFAULT_DOMAIN_PATH))
            # `core_model` is known to be truthy here; no extra guard needed.
            ensemble = PolicyEnsemble.load(core_model)

            # ensures the domain hasn't changed between test and train
            domain.compare_with_specification(core_model)

        return cls(
            domain=domain,
            policies=ensemble,
            interpreter=interpreter,
            generator=generator,
            tracker_store=tracker_store,
            lock_store=lock_store,
            action_endpoint=action_endpoint,
            model_directory=model_path,
            model_server=model_server,
            remote_storage=remote_storage,
            path_to_model_archive=path_to_model_archive,
        )
Example #7
0
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
):
    """Evaluate a trained Core model on test stories.

    Args:
        model: Path to a model archive or directory.
        stories: Path to the test stories.
        endpoints: Path to an endpoints file (used for the NLU endpoint).
        output: Directory the evaluation results are written to.
        kwargs: Additional arguments forwarded to `rasa.core.test`.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa.nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        nlu_utils.create_dir(output)

    try:
        unpacked_model = get_model(model)
    except ModelNotFound:
        print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model.")
        return

    core_path, nlu_path = get_model_subdirectories(unpacked_model)

    if not os.path.exists(core_path):
        print_error(
            "Unable to test: could not find a Core model. Use 'rasa train' to "
            "train a model.")
        # Bail out: evaluation cannot proceed without a Core model.
        return

    use_e2e = kwargs.get("e2e", False)

    _interpreter = RegexInterpreter()
    if use_e2e:
        if os.path.exists(nlu_path):
            _interpreter = NaturalLanguageInterpreter.create(
                nlu_path, _endpoints.nlu)
        else:
            print_warning(
                "No NLU model found. Using default 'RegexInterpreter' for end-to-end "
                "evaluation.")

    _agent = Agent.load(unpacked_model, interpreter=_interpreter)

    kwargs = minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])

    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        rasa.core.test(stories, _agent, out_directory=output, **kwargs))
Example #8
0
def test_get_model_from_directory_with_subdirectories(trained_rasa_model: Text,
                                                      tmp_path: Path):
    """An unpacked model has core and nlu parts; an empty directory raises."""
    core_dir, nlu_dir = get_model_subdirectories(get_model(trained_rasa_model))

    assert core_dir
    assert nlu_dir

    # A directory with no model content must be rejected.
    with pytest.raises(ModelNotFound):
        get_model_subdirectories(str(tmp_path))
Example #9
0
def test_core(model: Text, stories: Text, endpoints: Text = None,
              output: Text = DEFAULT_RESULTS_PATH, model_path: Text = None,
              **kwargs: Dict):
    """Evaluate a single Core model, or compare several trained models.

    Args:
        model: Path to a model archive, or a directory of models to compare.
        stories: Path to the test stories.
        endpoints: Path to an endpoints file (used for the NLU endpoint).
        output: Directory the evaluation results are written to.
        model_path: Pre-resolved model directory (used when `model` is not
            an archive file).
        **kwargs: Additional arguments forwarded to `rasa.core.test`.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa.nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if output:
        nlu_utils.create_dir(output)

    if os.path.isfile(model):
        model_path = get_model(model)

    if model_path:
        # Single model: Normal evaluation
        loop = asyncio.get_event_loop()
        # NOTE: do not re-derive model_path from `model` here — doing so would
        # discard a caller-supplied `model_path` when `model` is a directory.
        core_path, nlu_path = get_model_subdirectories(model_path)

        _interpreter = NaturalLanguageInterpreter.create(nlu_path,
                                                         _endpoints.nlu)

        _agent = Agent.load(core_path, interpreter=_interpreter)

        kwargs = minimal_kwargs(kwargs, rasa.core.test)
        loop.run_until_complete(
            rasa.core.test(stories, _agent, out_directory=output, **kwargs))

    else:
        # Several models: comparative evaluation with learning curves.
        from rasa.core.test import compare, plot_curve

        compare(model, stories, output)

        story_n_path = os.path.join(model, 'num_stories.json')

        number_of_stories = core_utils.read_json_file(story_n_path)
        plot_curve(output, number_of_stories)
Example #10
0
 def project_fingerprint_from_model(
     _model_directory: Optional[Text], ) -> Optional[Text]:
     """Get project fingerprint from an app's loaded model."""
     if not _model_directory:
         return None
     try:
         with model.get_model(_model_directory) as unpacked_model:
             # Look up the project entry of the model's fingerprint.
             fingerprint = model.fingerprint_from_path(unpacked_model)
             return fingerprint.get(model.FINGERPRINT_PROJECT)
     except Exception:
         # Fingerprinting is best-effort; any failure means "unknown".
         return None
Example #11
0
def test_get_model_from_directory_with_subdirectories(
        trained_rasa_model, tmpdir_factory: TempdirFactory):
    """An unpacked model has core and nlu parts; an empty directory raises."""
    unpacked = get_model(trained_rasa_model)
    core_dir, nlu_dir = get_model_subdirectories(unpacked)

    assert core_dir
    assert nlu_dir

    # A freshly created, empty directory contains no model sub-directories.
    empty_dir = tmpdir_factory.mktemp("empty_model_dir").strpath
    with pytest.raises(ModelNotFound):
        get_model_subdirectories(empty_dir)
Example #12
0
def set_fingerprint(trained_rasa_model: Text, fingerprint: Fingerprint,
                    tmp_path: Path) -> Text:
    """Repackage a trained model with the given fingerprint; return the archive path."""
    unpacked = get_model(trained_rasa_model)

    # Drop the existing fingerprint so the package gets the replacement one.
    os.remove(os.path.join(unpacked, FINGERPRINT_FILE_PATH))

    output_path = str(tmp_path / "test.tar.gz")
    create_package_rasa(unpacked, output_path, fingerprint)

    return output_path
Example #13
0
def set_fingerprint(trained_rasa_model: Text, fingerprint: Fingerprint) -> Text:
    """Repackage a trained model with the given fingerprint; return the archive path."""
    unpacked = get_model(trained_rasa_model)

    # Drop the existing fingerprint so the package gets the replacement one.
    os.remove(os.path.join(unpacked, FINGERPRINT_FILE_PATH))

    output_path = os.path.join(tempfile.mkdtemp(), "test.tar.gz")
    create_package_rasa(unpacked, output_path, fingerprint)

    return output_path
Example #14
0
 def get_interpreter(model_path):
     """Create an NLU interpreter from a model archive; return None on failure."""
     from rasa.model import get_model, get_model_subdirectories
     from rasa.core.interpreter import create_interpreter

     interpreter = None
     try:
         with get_model(model_path) as unpacked:
             _, nlu_dir = get_model_subdirectories(unpacked)
             interpreter = create_interpreter(nlu_dir)
     except Exception:
         # Best effort: a missing/broken model simply yields no interpreter.
         logger.debug(f"Could not load interpreter from '{model_path}'.")
     return interpreter
Example #15
0
    async def on_trynow(self, request):
        """Export a session project, train a model for it and load an agent.

        Responds with a JSON status object; errors are reported in the
        response body rather than raised to the caller.
        """
        res_data = await request.json()
        print("----------- Inside Try now --from SID {}--------------".format(res_data['sessionId']))
        # Export the project files for this session before training.
        result = await ExportProject.main(res_data['sessionId'], res_data['projectObjectId'], 'SESSION')
        print(result)

        if result is not None:
            # Export failed; surface the error message to the client.
            return web.json_response({"status": "Error", "message": result})

        import rasa.model as model
        from rasa.core.agent import Agent
        from rasa.core.tracker_store import MongoTrackerStore
        from rasa.core.domain import Domain
        from rasa.train import train_async
        from rasa.utils.endpoints import EndpointConfig

        base_path = CONFIG.get('api_gateway', 'SESSION_MODEL_PATH')
        config = "config.yml"
        training_files = "data/"
        domain = "domain.yml"
        output = "models/"

        endpoints = EndpointConfig(url="http://action_server:5055/webhook")

        # All project files live under the session-specific directory.
        base_path = base_path + res_data['sessionId'] + "/"

        config = base_path + config
        training_files = base_path + training_files
        domain = base_path + domain
        output = base_path + output
        start_time = time.time()
        try:
            model_path = await train_async(domain, config, [training_files], output, additional_arguments={"augmentation_factor": 10})
            end_time = time.time()
            print("it took this long to run: {}".format(end_time - start_time))
            unpacked = model.get_model(model_path)
            # Note: `domain` is rebound from a path string to a Domain object.
            domain = Domain.load(domain)
            _tracker_store = MongoTrackerStore(domain=domain,
                                                host=CONFIG.get('api_gateway', 'MONGODB_URL'),
                                                db=CONFIG.get('api_gateway', 'MONGODB_NAME'),
                                                username=None,
                                                password=None,
                                                auth_source="admin",
                                                collection="conversations",
                                                event_broker=None)
            print("***************  Actions Endpoint as per data ********** {}".format(endpoints.url))
            self.agent = Agent.load(unpacked, tracker_store=_tracker_store, action_endpoint=endpoints)
            return web.json_response({"status": "Success", "message": "Ready to chat"})
            #await sio.emit('chatResponse', {"status": "Success", "message": "Ready to chat"}, namespace='/trynow', room=sid)
        except Exception as e:
            # Any training/loading failure is reported back to the client.
            print("Exception while try Now ---  "+str(e))
            #await sio.emit('chatResponse', {"status": "Error", "message": repr(e)}, namespace='/trynow', room=sid)
            return web.json_response({"status": "Error", "message": repr(e)})
Example #16
0
File: run.py Project: yuanlida/rasa
def run(
    model: Text,
    endpoints: Text,
    connector: Text = None,
    credentials: Text = None,
    **kwargs: Dict
):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be use (overwrites `credentials`
        field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
        `rasa.core.run.serve_application`.

    """
    import rasa.core.run
    import rasa.nlu.run
    from rasa.core.utils import AvailableEndpoints

    # Unpack the archive; bail out early when there is nothing to serve.
    model_path = get_model(model)
    if not model_path:
        print_error(
            "No model found. Train a model before running the "
            "server using `rasa train`."
        )
        return

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        # Default to the REST channel when no channel is configured at all.
        connector = "rest"
        print_warning(
            "No chat connector configured, falling back to the "
            "REST input channel. To connect your bot to another channel, "
            "read the docs here: {}/user-guide/"
            "messaging-and-voice-channels".format(DOCS_BASE_URL)
        )

    # Forward only the kwargs that serve_application actually accepts.
    kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application)
    rasa.core.run.serve_application(
        model,
        channel=connector,
        credentials=credentials,
        endpoints=_endpoints,
        **kwargs
    )

    # The unpacked model directory is temporary; clean it up after serving.
    shutil.rmtree(model_path)
Example #17
0
def run(model: Text,
        endpoints: Text,
        connector: Text = None,
        credentials: Text = None,
        **kwargs: Dict):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be use (overwrites `credentials`
        field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
        `rasa.core.run.serve_application`.

    """
    import rasa.core.run
    import rasa.nlu.run
    from rasa.core.utils import AvailableEndpoints

    # Unpack the archive; bail out early when there is nothing to serve.
    model_path = get_model(model)
    if not model_path:
        logger.error("No model found. Train a model before running the "
                     "server using `rasa train`.")
        return

    core_path, nlu_path = get_model_subdirectories(model_path)
    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        # Default to the command line when no channel is configured at all.
        channel = "cmdline"
        logger.info("No chat connector configured, falling back to the "
                    "command line. Use `rasa configure channel` to connect"
                    "the bot to e.g. facebook messenger.")
    else:
        channel = connector

    if os.path.exists(core_path):
        # Forward only the kwargs that serve_application actually accepts.
        kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application)
        rasa.core.run.serve_application(core_path,
                                        nlu_path,
                                        channel=channel,
                                        credentials_file=credentials,
                                        endpoints=_endpoints,
                                        **kwargs)

    # TODO: No core model was found, run only nlu server for now
    elif os.path.exists(nlu_path):
        rasa.nlu.run.run_cmdline(nlu_path)

    # The unpacked model directory is temporary; clean it up after serving.
    shutil.rmtree(model_path)
Example #18
0
def test_rasa_packaging(trained_model, project, use_fingerprint):
    """Packaging an unpacked model yields a valid archive and removes the source."""
    unpacked_model_path = get_model(trained_model)

    # Remove the existing fingerprint so packaging controls its presence.
    os.remove(os.path.join(unpacked_model_path, FINGERPRINT_FILE_PATH))
    if use_fingerprint:
        fingerprint = model_fingerprint(**_project_files(project))
    else:
        fingerprint = None

    output_path = os.path.join(tempfile.mkdtemp(), "test.tar.gz")
    create_package_rasa(unpacked_model_path, output_path, fingerprint)

    unpacked = get_model(output_path)

    # The fingerprint file exists exactly when one was supplied.
    assert os.path.exists(
        os.path.join(unpacked, FINGERPRINT_FILE_PATH)) == use_fingerprint
    for part in ("core", "nlu"):
        assert os.path.exists(os.path.join(unpacked, part))

    # Packaging consumes (deletes) the original unpacked directory.
    assert not os.path.exists(unpacked_model_path)
Example #19
0
async def test_rasa_packaging(trained_rasa_model: Text, project: Text,
                              use_fingerprint: bool, tmp_path: Path):
    """Packaging an unpacked model yields a valid archive and removes the source."""
    unpacked_model_path = get_model(trained_rasa_model)

    # Remove the existing fingerprint so packaging controls its presence.
    os.remove(os.path.join(unpacked_model_path, FINGERPRINT_FILE_PATH))
    fingerprint = (
        await model_fingerprint(_project_files(project)) if use_fingerprint else None
    )

    output_path = str(tmp_path / "test.tar.gz")
    create_package_rasa(unpacked_model_path, output_path, fingerprint)

    unpacked = get_model(output_path)

    # The fingerprint file exists exactly when one was supplied.
    assert os.path.exists(
        os.path.join(unpacked, FINGERPRINT_FILE_PATH)) == use_fingerprint
    assert os.path.exists(
        os.path.join(unpacked, DEFAULT_CORE_SUBDIRECTORY_NAME))
    assert os.path.exists(os.path.join(unpacked, "nlu"))

    # Packaging consumes (deletes) the original unpacked directory.
    assert not os.path.exists(unpacked_model_path)
Example #20
0
def run(args: argparse.Namespace):
    """Validate CLI arguments and start the Rasa server."""
    import rasa.run

    args.model = _validate_model_path(args.model, "model", DEFAULT_MODELS_PATH)

    if not args.enable_api:
        # if the API is enabled you can start without a model as you can train a
        # model via the API once the server is up and running
        from rasa.model import get_model

        try:
            get_model(args.model)
        except ModelNotFound:
            print_error("No model found. Train a model before running the "
                        "server using `rasa train`.")
            return

    args.endpoints = get_validated_path(
        args.endpoints, "endpoints", DEFAULT_ENDPOINTS_PATH, True)
    args.credentials = get_validated_path(
        args.credentials, "credentials", DEFAULT_CREDENTIALS_PATH, True)

    rasa.run(**vars(args))
Example #21
0
async def test_rasa_packaging(trained_rasa_model, project, use_fingerprint):
    """Packaging yields an archive with core plus per-language NLU directories."""
    unpacked_model_path = get_model(trained_rasa_model)

    # Remove the existing fingerprint so packaging controls its presence.
    os.remove(os.path.join(unpacked_model_path, FINGERPRINT_FILE_PATH))
    fingerprint = (
        await model_fingerprint(_project_files(project)) if use_fingerprint else None
    )

    output_path = os.path.join(tempfile.mkdtemp(), "test.tar.gz")
    create_package_rasa(unpacked_model_path, output_path, fingerprint)

    unpacked = get_model(output_path)

    # The fingerprint file exists exactly when one was supplied.
    assert (
        os.path.exists(os.path.join(unpacked, FINGERPRINT_FILE_PATH)) == use_fingerprint
    )
    assert os.path.exists(os.path.join(unpacked, DEFAULT_CORE_SUBDIRECTORY_NAME))
    assert os.path.exists(os.path.join(unpacked, "nlu-en")) # bf
    assert os.path.exists(os.path.join(unpacked, "nlu-fr")) # bf

    # Packaging consumes (deletes) the original unpacked directory.
    assert not os.path.exists(unpacked_model_path)
def load_model():
    """Load the most recently created Rasa NLU model from ``models/``.

    Takes no arguments; candidate archives are discovered by scanning the
    ``models/`` directory for file names containing "nlu" and picking the
    newest one by creation time.

    Returns:
        Returns the agent responsible for parsing text
    """
    ROOT = "models/"
    MODELS = [
        "{}{}".format(ROOT, file) for file in os.listdir(ROOT) if "nlu" in file
    ]
    # Newest archive first (by filesystem creation time).
    MODEL_PATH = sorted(MODELS, key=os.path.getctime, reverse=True)[0]
    # NOTE(review): despite the name, this is the unpacked model *path*,
    # not an interpreter object.
    get_interpreter = get_model(MODEL_PATH)

    return Agent.load(get_interpreter)
Example #23
0
def chat(
    model_path: Optional[Text] = None,
    agent: Optional["Agent"] = None,
    interpreter: Optional[NaturalLanguageInterpreter] = None,
) -> None:
    """Chat to the bot within a Jupyter notebook.

    Args:
        model_path: Path to a Rasa Stack model.
        agent: Rasa Core agent (used if no Rasa Stack model given).
        interpreter: Rasa NLU interpreter (used with Rasa Core agent if no
                     Rasa Stack model is given).
    """

    if model_path:
        from rasa.run import create_agent

        # A model path wins over a passed-in agent/interpreter pair.
        unpacked = model.get_model(model_path)
        agent = create_agent(unpacked)

    elif agent is not None and interpreter is not None:
        # HACK: this skips loading the interpreter and directly
        # sets it afterwards
        nlu_interpreter = RasaNLUInterpreter(
            "skip this and use given interpreter", lazy_init=True)
        nlu_interpreter.interpreter = interpreter
        agent.interpreter = interpreter
    else:
        print_error(
            "You either have to define a model path or an agent and an interpreter."
        )
        return

    print(
        "Your bot is ready to talk! Type your messages here or send '/stop'.")
    # Simple REPL: read user input until the stop command is given.
    loop = asyncio.get_event_loop()
    while True:
        message = input()
        if message == "/stop":
            break

        responses = loop.run_until_complete(agent.handle_text(message))
        for response in responses:
            _display_bot_response(response)
Example #24
0
 def __init__(self, config, loop):
     """Construct the local Rasa service: load the model and build an Agent.

     Args:
         config: Service configuration; model and action-server locations
             are read from the 'RasaServiceLocal' section.
         loop: Event loop handed through to the base service.
     """
     super(RasaServiceLocal, self).__init__(config, loop)
     self.config = config
     # MQTT-style topics this service listens on.
     self.subscribe_to = 'hermod/+/rasa/get_domain,hermod/+/rasa/set_slots' \
     + ',hermod/+/dialog/ended,hermod/+/dialog/init,hermod/+/nlu/externalparse,' \
     + 'hermod/+/nlu/parse,hermod/+/intent,hermod/+/intent,hermod/+/dialog/started'
     model_path = get_model(
         config['services']['RasaServiceLocal'].get('model_path'))
     endpoint = EndpointConfig(
         config['services']['RasaServiceLocal'].get('rasa_actions_url'))
     domain = 'domain.yml'
     self.tracker_store = InMemoryTrackerStore(domain)
     # The agent itself uses a regex interpreter; full NLU parsing is done
     # separately via `text_interpreter`.
     regex_interpreter = RegexInterpreter()
     self.text_interpreter = RasaNLUInterpreter(model_path + '/nlu')
     self.agent = Agent.load(model_path,
                             action_endpoint=endpoint,
                             tracker_store=self.tracker_store,
                             interpreter=regex_interpreter)
Example #25
0
def test_nlu(model: Optional[Text], nlu_data: Optional[Text], kwargs: Optional[Dict]):
    """Evaluate the NLU part of a model, reporting an error when none exists."""
    from rasa.nlu.test import run_evaluation

    unpacked_model = get_model(model)
    if unpacked_model is None:
        print_error(
            "Could not find any model. Use 'rasa train nlu' to train an NLU model."
        )
        return

    nlu_model = os.path.join(unpacked_model, "nlu")
    if not os.path.exists(nlu_model):
        print_error(
            "Could not find any model. Use 'rasa train nlu' to train an NLU model."
        )
        return

    # Keep only kwargs that run_evaluation accepts, minus the ones we pass.
    kwargs = minimal_kwargs(kwargs, run_evaluation, ["data_path", "model"])
    run_evaluation(nlu_data, nlu_model, **kwargs)
Example #26
0
def run(model: Text,
        endpoints: Text,
        connector: Text = None,
        credentials: Text = None,
        **kwargs: Dict):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be use (overwrites `credentials`
        field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
        `rasa_core.run.serve_application`.

    """
    import rasa_core.run
    from rasa_core.utils import AvailableEndpoints

    # Unpack the archive and locate its core/nlu parts.
    model_path = get_model(model)
    core_path, nlu_path = get_model_subdirectories(model_path)
    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        # Default to the command line when no channel is configured at all.
        channel = "cmdline"
        logger.info("No chat connector configured, falling back to the "
                    "command line. Use `rasa configure channel` to connect"
                    "the bot to e.g. facebook messenger.")
    else:
        channel = connector

    # Forward only the kwargs that serve_application actually accepts.
    kwargs = minimal_kwargs(kwargs, rasa_core.run.serve_application)
    rasa_core.run.serve_application(core_path,
                                    nlu_path,
                                    channel=channel,
                                    credentials_file=credentials,
                                    endpoints=_endpoints,
                                    **kwargs)
    # The unpacked model directory is temporary; clean it up after serving.
    shutil.rmtree(model_path)
Example #27
0
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
):
    """Evaluate a trained Core model on test stories.

    Args:
        model: Path to a model archive or directory.
        stories: Path to the test stories.
        endpoints: Path to an endpoints file (used for the NLU endpoint).
        output: Directory the evaluation results are written to.
        kwargs: Additional arguments forwarded to `rasa.core.test`.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa.nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        nlu_utils.create_dir(output)

    loop = asyncio.get_event_loop()
    model_path = get_model(model)
    core_path, nlu_path = get_model_subdirectories(model_path)

    # Both parts are required: core for the policies, nlu for the interpreter.
    if os.path.exists(core_path) and os.path.exists(nlu_path):
        _interpreter = NaturalLanguageInterpreter.create(
            nlu_path, _endpoints.nlu)

        _agent = Agent.load(model_path, interpreter=_interpreter)

        # Forward only the kwargs that rasa.core.test actually accepts.
        kwargs = minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])

        loop.run_until_complete(
            rasa.core.test(stories, _agent, out_directory=output, **kwargs))
    else:
        print_error(
            "Not able to test. Make sure both models - core and nlu - are available."
        )
Example #28
0
    def __init__(self, path_to_models: str, **kwargs) -> None:
        """
        Constructs RASA Agent as a DeepPavlov skill:
            read model folder,
            initialize rasa.core.agent.Agent and wrap its interfaces

        Args:
            path_to_models: string path to folder with RASA models

        """
        # we need absolute path (expanded for user home and resolved if it relative path):
        self.path_to_models = Path(path_to_models).expanduser().resolve()

        model = get_validated_path(self.path_to_models, "model", DEFAULT_MODELS_PATH)

        model_path = get_model(model)
        if not model_path:
            # cannot load the model from this path
            raise Exception("can not load model path: %s" % model)

        self._agent = Agent.load(model_path)
        # Dedicated event loop for driving the agent's async interface.
        self.ioloop = asyncio.new_event_loop()
        logger.info(f"path to RASA models is: `{self.path_to_models}`")
Example #29
0
def test_train_nlu(run_in_simple_project: Callable[..., RunResult]):
    """`rasa train nlu` writes exactly one model that excludes training data."""
    run_in_simple_project(
        "train",
        "nlu",
        "-c",
        "config.yml",
        "--nlu",
        "data/nlu.md",
        "--out",
        "train_models",
    )

    assert os.path.exists("train_models")

    # Exactly one model archive, named with the nlu- prefix.
    files = rasa.shared.utils.io.list_files("train_models")
    assert len(files) == 1
    assert os.path.basename(files[0]).startswith("nlu-")

    model_dir = model.get_model("train_models")
    assert model_dir is not None

    # The packaged model must not contain the raw training data.
    nlu_dir = os.path.join(model_dir, "nlu")
    metadata = Metadata.load(nlu_dir)
    assert metadata.get("training_data") is None
    assert not os.path.exists(
        os.path.join(nlu_dir, training_data.DEFAULT_TRAINING_DATA_OUTPUT_PATH))
Example #30
0
def shell_nlu(args: argparse.Namespace):
    """Start an interactive command-line session against the NLU model only."""
    from rasa.cli.utils import get_validated_path
    from rasa.constants import DEFAULT_MODELS_PATH
    from rasa.model import get_model, get_model_subdirectories
    import rasa.nlu.run

    args.connector = "cmdline"

    validated = get_validated_path(args.model, "model", DEFAULT_MODELS_PATH)
    unpacked = get_model(validated)
    if not unpacked:
        print_error("No model found. Train a model before running the "
                    "server using `rasa train nlu`.")
        return

    _, nlu_dir = get_model_subdirectories(unpacked)
    if not os.path.exists(nlu_dir):
        print_error("No NLU model found. Train a model before running the "
                    "server using `rasa train nlu`.")
        return

    rasa.nlu.run.run_cmdline(nlu_dir)