Пример #1
0
def run_babi(serve_forever=True):
    """Load the weather dialogue model backed by a remote NLU server.

    When *serve_forever* is true this blocks, handling console input;
    either way the loaded agent is returned.
    """
    nlu_interpreter = RasaNLUHttpInterpreter(
        model_name='model_20171013-084449',
        token=None,
        server='http://localhost:7000')
    agent = Agent.load("examples_weather/babi/models/policy/weather",
                       interpreter=nlu_interpreter)

    if serve_forever:
        agent.handle_channel(ConsoleInputChannel())
    return agent
Пример #2
0
def test_http_interpreter():
    """The interpreter should send project, text and message id as query params."""
    expected = {
        'project': ['default'],
        'q': ['message_text'],
        'message_id': ['1134']
    }

    httpretty.register_uri(httpretty.GET, 'https://interpreter.com/parse')
    httpretty.enable()

    interpreter = RasaNLUHttpInterpreter(
        endpoint=EndpointConfig('https://interpreter.com'))
    interpreter.parse(text='message_text', message_id='1134')

    actual = httpretty.last_request.querystring
    httpretty.disable()

    assert actual == expected
Пример #3
0
def interpreter_from_args(
    nlu_model,  # type: Union[Text, NaturalLanguageInterpreter, None]
    nlu_endpoint  # type: Optional[EndpointConfig]
):
    # type: (...) -> Optional[NaturalLanguageInterpreter]
    """Create an interpreter from the commandline arguments.

    Depending on which values are passed for model and endpoint, this
    will create the corresponding interpreter (either loading the model
    locally or setting up an endpoint based interpreter).

    Raises:
        Exception: if an endpoint is configured but the model spec has
            more components than a single ``project/model`` pair.
    """

    if isinstance(nlu_model, NaturalLanguageInterpreter):
        # already a ready-made interpreter - pass it through untouched
        return nlu_model

    # BUG FIX: `os.path.split` always returns a 2-tuple, which made the
    # `len == 1` branch below unreachable and silently passed '' as the
    # project name for bare model names. Split on '/' instead, matching
    # the documented `--nlu project/model` convention in the error text.
    if nlu_model:
        name_parts = nlu_model.split('/')
    else:
        name_parts = []

    if len(name_parts) == 1:
        if nlu_endpoint:
            # using the default project name
            return RasaNLUHttpInterpreter(name_parts[0], nlu_endpoint)
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
    elif len(name_parts) == 2:
        if nlu_endpoint:
            return RasaNLUHttpInterpreter(name_parts[1], nlu_endpoint,
                                          name_parts[0])
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
    else:
        if nlu_endpoint:
            raise Exception("You have configured an endpoint to use for "
                            "the NLU model. To use it, you need to "
                            "specify the model to use with "
                            "`--nlu project/model`.")
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
Пример #4
0
def run(channel='console'):
    """Load the dialogue agent and serve it on the requested channel.

    Parameters:
        channel: 'console' runs a local REPL with the on-disk NLU model;
            any other value starts an HTTP server with the custom web
            channel on port 4000.
    """
    if channel == 'console':
        interpreter = RasaNLUInterpreter(model_directory=nluModelDir)
    else:
        # NOTE(review): the HTTP interpreter is created without a server
        # address (the original left it commented out), so it falls back
        # to the interpreter's default server - confirm this is intended.
        interpreter = RasaNLUHttpInterpreter(model_name="default")

    agent = Agent.load(modelPath, interpreter=interpreter)
    if channel == 'console':
        agent.handle_channel(ConsoleInputChannel())
    else:
        input_channel = CustomWebChannel()
        http_channel = HttpInputChannel(4000, '/', input_channel)
        agent.handle_channel(http_channel)
Пример #5
0
def test_http_parsing():
    """Parsing through the HTTP interpreter forwards the message id."""
    message = UserMessage('lunch?')

    httpretty.register_uri(httpretty.GET, 'https://interpreter.com/parse')
    httpretty.enable()

    interpreter = RasaNLUHttpInterpreter(
        endpoint=EndpointConfig('https://interpreter.com'))
    processor = MessageProcessor(interpreter, None, None, None, None)
    try:
        processor._parse_message(message)
    except KeyError:
        # logger looks for intent and entities, so we except
        pass

    sent_query = httpretty.last_request.querystring
    httpretty.disable()

    assert sent_query['message_id'][0] == message.message_id
Пример #6
0
def start_server_local(dialogue_model_path, server_endpoints):
    """Serve a locally stored dialogue model over socket.io and REST.

    Environment variables:
    ----------
    SOCKET_PORT:    int
                    Set the port, the server should listen on
                    (default: 5005)

    Parameters:
    ----------
    dialogue_model_path:    str
                            Path to the local model directory
    server_endpoints:       AvailableEndpoints
                            tuple with the endpoints nlg, nlu, action and model
    """

    # port may be overridden through the environment, default 5005
    socket_port = int(os.environ.get("SOCKET_PORT", 5005))

    # remote NLU server backing the dialogue model
    nlu_interpreter = RasaNLUHttpInterpreter(
        project_name="damage_report_1.0.0",
        endpoint=server_endpoints.nlu)

    agent = Agent.load(dialogue_model_path,
                       interpreter=nlu_interpreter,
                       action_endpoint=server_endpoints.action)

    # listen on socket.io (webchat) as well as the REST api
    input_channels = [
        SocketIOInput(
            # event name for messages sent from the user
            user_message_evt="user_uttered",
            # event name for messages sent from the bot
            bot_message_evt="bot_uttered",
            # socket.io namespace to use for the messages
            namespace=None),
        RestInput(),
    ]

    agent.handle_channels(input_channels, socket_port)
Пример #7
0
async def test_http_parsing():
    """The message id should be forwarded to the NLU /parse endpoint."""
    message = UserMessage('lunch?')
    endpoint = EndpointConfig('https://interpreter.com')

    with aioresponses() as mocked:
        mocked.post('https://interpreter.com/parse', repeat=True, status=200)

        interpreter = RasaNLUHttpInterpreter(endpoint=endpoint)
        processor = MessageProcessor(interpreter, None, None, None, None)
        try:
            await processor._parse_message(message)
        except KeyError:
            # logger looks for intent and entities, so we except
            pass

        request = latest_request(mocked, 'POST',
                                 "https://interpreter.com/parse")

        assert request
        assert json_of_latest_request(request)['message_id'] == \
            message.message_id
Пример #8
0
def run_babi_online():
    """Train the weather policy online from a markdown transcript.

    Trains interactively with a regex interpreter against scripted user
    turns, then swaps in the remote NLU interpreter before returning
    the agent.
    """
    training_data = 'data/weather.md'
    logger.info("Starting to train policy")
    agent = Agent("../weather_domain.yml",
                  policies=[MemoizationPolicy(),
                            WeatherPolicy()],
                  interpreter=RegexInterpreter())

    # FIX: raw string - '\s'/'\*' are regex escapes, and as plain string
    # escapes they raise a DeprecationWarning on Python 3.6+. The value
    # is unchanged.
    input_c = FileInputChannel(training_data,
                               message_line_pattern=r'^\s*\*\s(.*)$',
                               max_messages=10)
    agent.train_online(training_data, input_channel=input_c, epochs=10)

    # after online training, parse real messages via the NLU server
    agent.interpreter = RasaNLUHttpInterpreter(
        model_name='model_20171013-084449',
        token=None,
        server='http://localhost:7000')
    return agent
Пример #9
0
async def test_http_interpreter():
    """All parse parameters should be POSTed to the /parse endpoint."""
    expected = {
        'project': 'default',
        'q': 'message_text',
        'message_id': '1134',
        'model': None,
        'token': None
    }

    with aioresponses() as mocked:
        mocked.post("https://example.com/parse")

        interpreter = RasaNLUHttpInterpreter(
            endpoint=EndpointConfig('https://example.com'))
        await interpreter.parse(text='message_text', message_id='1134')

        request = latest_request(mocked, "POST", "https://example.com/parse")
        sent_body = json_of_latest_request(request)

        assert sent_body == expected
Пример #10
0
def start_online_training(dialogue_model_path, server_endpoints):
    """Start a server in interactive mode.
        Parameters:
        ----------
        dialogue_model_path:    str
                                Path to the local model directory
        server_endpoints:       AvailableEndpoints
                                tuple with the endpoints nlg, nlu, action and model
    """

    # remote NLU server backing the dialogue model
    nlu_interpreter = RasaNLUHttpInterpreter(
        project_name="damage_report_1.0.0",
        endpoint=server_endpoints.nlu)

    agent = Agent.load(dialogue_model_path,
                       interpreter=nlu_interpreter,
                       action_endpoint=server_endpoints.action)

    # swap in the custom io handler before the trainer starts
    online._start_online_learning_io = _start_online_learning_io

    run_online_learning(agent=agent)
Пример #11
0
import os
from os import environ as env
from gevent.pywsgi import WSGIServer

from server import create_app
from rasa_core import utils
from rasa_core.interpreter import RasaNLUHttpInterpreter

# Entry script: wire a remote NLU interpreter into the core server app
# and serve it with gevent's WSGI server.
utils.configure_colored_logging("DEBUG")

# make sure the per-project directory for raw user input exists
user_input_dir = "/app/nlu/" + os.environ["RASA_NLU_PROJECT_NAME"] + "/user_input"
if not os.path.exists(user_input_dir):
    os.makedirs(user_input_dir)

# NLU connection is configured entirely through the environment; a missing
# variable raises KeyError at startup rather than failing later at runtime.
nlu_interpreter = RasaNLUHttpInterpreter(
    model_name = env["RASA_NLU_MODEL_NAME"],
    token = env["RASA_NLU_SERVER_TOKEN"],
    server = env["RASA_NLU_SERVER_ADDRESS"],
    project_name = env["RASA_NLU_PROJECT_NAME"])

app = create_app(
    model_directory = env["RASA_CORE_MODEL_PATH"],
    cors_origins="*",
    loglevel = "DEBUG",
    logfile = "./logs/rasa_core.log",
    interpreter = nlu_interpreter)

# blocks forever serving the app on all interfaces, port 5005
http_server = WSGIServer(('0.0.0.0', 5005), app)
http_server.serve_forever()
from rasa_core.agent import Agent
from rasa_core.channels.channel import InputChannel
from rasa_core.channels.slack import SlackInput
from rasa_core.interpreter import RasaNLUHttpInterpreter
from rasa_core.utils import EndpointConfig
import yaml

# load your trained agent
MODEL_PATH = './models/dialogue'
action_endpoint = EndpointConfig(url="http://localhost:5055/webhook")
# NOTE(review): the action endpoint is also passed as the second positional
# argument to RasaNLUHttpInterpreter - confirm it should not be a dedicated
# NLU endpoint instead.
agent = Agent.load(MODEL_PATH,
                   interpreter=RasaNLUHttpInterpreter(
                       './models/nlu/default/weathernlu', action_endpoint),
                   action_endpoint=action_endpoint)

# SECURITY: hard-coded Slack bot token checked into source - rotate it and
# load it from the environment or a secrets store instead.
input_channel = SlackInput(
    "xoxb-510293626996-511519984071-kL4oH87tyMDYvyY0W04TmkuM", True)
# this is the `bot_user_o_auth_access_token`

# the name of your channel to which the bot posts (optional)

# set serve_forever=True if you want to keep the server running
s = agent.handle_channels([input_channel], serve_forever=True)
from rasa_core.server import RasaCoreServer
from rasa_core.server import create_argument_parser

# NOTE(review): `logging` and `os` are used below but not imported in this
# fragment - confirm they are imported elsewhere in the file.
logger = logging.getLogger(__name__)

if __name__ == '__main__':
    # Script to run Rasa NLU HTTP Interpreter
    arg_parser = create_argument_parser()
    cmdline_args = arg_parser.parse_args()

    logging.basicConfig(level=cmdline_args.loglevel)
    if cmdline_args.nlu == 'RasaNLUHttpInterpreter':
        logger.info("Creating RasaNLU HTTP Interpreter")
        # Fill in the modelname and server details.
        # Connection details come from the environment; absent variables
        # yield None and are passed through to the interpreter as-is.
        cmdline_args.nlu = RasaNLUHttpInterpreter(
            model_name=os.environ.get("rasa_nlu_model_name"),
            token="",
            server=os.environ.get("rasa_nlu_http_server"),
            project_name=os.environ.get("rasa_nlu_project_name"))

    rasa = RasaCoreServer(cmdline_args.core,
                          cmdline_args.nlu,
                          cmdline_args.loglevel,
                          cmdline_args.log_file,
                          cmdline_args.cors,
                          auth_token=cmdline_args.auth_token)

    logger.info("Starting Rasa Core http server on port %s" %
                cmdline_args.port)
    rasa.app.run("0.0.0.0", cmdline_args.port)