def run(serve_forever=True):
    """Load the current dialogue agent with its NLU interpreter and serve it
    on the command line.

    Args:
        serve_forever: kept for interface compatibility; ``serve_application``
            blocks regardless of its value.

    Raises:
        Exception: wraps any failure while loading models or starting the
            server, with the original error chained as the cause.
    """
    # Minimal stand-in for rasa_core's endpoint bundle; only `action` is used.
    AvailableEndpoints = namedtuple('AvailableEndpoints', 'nlg nlu action model')
    # Default webhook of the rasa_core_sdk custom-action server.
    _endpoints = EndpointConfig(url="http://localhost:5055/webhook")
    try:
        endpoints = AvailableEndpoints(action=_endpoints,
                                       nlg=None,
                                       nlu=None,
                                       model=None)
        _interpreter = RasaNLUInterpreter("models/current/nlu/")
        _agent = load_agent("models/current/dialogue",
                            interpreter=_interpreter,
                            endpoints=endpoints)
        serve_application(
            _agent,
            "cmdline",
            constants.DEFAULT_SERVER_PORT,
        )
    except Exception as e:
        # Was a bare `except:` that also swallowed KeyboardInterrupt/SystemExit
        # and dropped the original traceback; narrow the catch and chain the
        # cause so the real failure stays visible.
        raise Exception("Failed to run") from e
def run_dialogue():
    """Serve the chatbot on the command line and return the loaded agent."""
    # NLU model trained under the `chatbotnlu` project.
    nlu_interpreter = RasaNLUInterpreter('./models/chatbotnlu/default/chatbotnlu')
    dialogue_agent = Agent.load('./models/current/dialogue',
                                interpreter=nlu_interpreter)
    serve_application(dialogue_agent, channel='cmdline')
    return dialogue_agent
def run(serve_forever=True):
    """Load the trained NLU + dialogue models; serve on the CLI when asked.

    Args:
        serve_forever: when true, start the (blocking) command-line channel.

    Returns:
        The loaded Agent.
    """
    agent = Agent.load(
        "models/dialogue",
        interpreter=RasaNLUInterpreter("models/nlu/default/current"),
    )
    if serve_forever:
        serve_application(agent, channel='cmdline')
    return agent
def run():
    """Serve the ticket bot over the REST channel and return the agent."""
    # Endpoint configuration (NLU server, etc.) read from the project config.
    endpoints = AvailableEndpoints.read_endpoints('config/endpoints.yml')
    nlu_interpreter = NaturalLanguageInterpreter.create('models/ticket/nlu_bot',
                                                        endpoints.nlu)
    agent = load_agent("models/dialogue",
                       interpreter=nlu_interpreter,
                       endpoints=endpoints)
    serve_application(agent, channel='rest')
    return agent
def run_malu_bot(serve_forever=True):
    """Load the Malu bot (NLU + dialogue models) and serve it on the CLI."""
    # Load the trained NLU model.
    nlu = RasaNLUInterpreter('./models/nlu/default/malu')
    # Webhook where the custom-action server listens.
    actions = EndpointConfig(url="http://localhost:5055/webhook")
    # Assemble the agent from the dialogue model.
    bot = Agent.load('./models/dialogue',
                     interpreter=nlu,
                     action_endpoint=actions)
    serve_application(bot, channel='cmdline')
    return bot
def run_cmd(core="models/current/dialogue",
            nlu="models/current/nlu",
            endpoint="endpoints.yml"):
    """Build an agent from the given model paths and endpoint file, then serve it.

    Args:
        core: path to the trained dialogue model.
        nlu: path to the trained NLU model.
        endpoint: path to the endpoints YAML file.
    """
    from rasa_core import run

    endpoints = run.AvailableEndpoints.read_endpoints(endpoint)
    interpreter = run.NaturalLanguageInterpreter.create(nlu, endpoints.nlu)
    agent = run.load_agent(core_model=core,
                           interpreter=interpreter,
                           endpoints=endpoints)
    run.serve_application(agent)
def run_bot(serve_forever=True):
    """Wire up the interpreter and action endpoint, then serve the bot on the CLI."""
    # Agent = dialogue model + NLU interpreter + custom-action webhook.
    agent = Agent.load(
        'models/dialogue',
        interpreter=RasaNLUInterpreter('models/nlu/default/chat'),
        action_endpoint=EndpointConfig(url="http://localhost:5055/webhook"),
    )
    run.serve_application(agent, channel='cmdline')
    return agent
def start_core(platform_token):
    """Start a Rasa Core server wired to a local platform instance.

    Pulls the production model from the platform, sends NLG requests back to
    it, logs events to a file broker, and serves the agent on port 5005.

    Args:
        platform_token: auth token used for the platform's model and NLG
            endpoints.
    """
    from rasa_core.utils import AvailableEndpoints
    _endpoints = AvailableEndpoints(
        # TODO: make endpoints more configurable, esp ports
        # Poll the platform every second for a new production model.
        model=EndpointConfig(
            "http://localhost:5002"
            "/api/projects/default/models/tags/production",
            token=platform_token,
            wait_time_between_pulls=1),
        # File-backed event broker.
        event_broker=EndpointConfig(**{"type": "file"}),
        # Response generation is delegated to the platform's NLG endpoint.
        nlg=EndpointConfig("http://localhost:5002"
                           "/api/nlg",
                           token=platform_token))
    from rasa_core import broker
    _broker = broker.from_endpoint_config(_endpoints.event_broker)
    from rasa_core.tracker_store import TrackerStore
    # No domain yet (None) — the tracker store is resolved from the endpoint
    # config; NOTE(review): `_endpoints.tracker_store` is not set above, so
    # this presumably falls back to the default store — confirm.
    _tracker_store = TrackerStore.find_tracker_store(None,
                                                     _endpoints.tracker_store,
                                                     _broker)
    from rasa_core.run import load_agent
    _agent = load_agent("models",
                        interpreter=None,
                        tracker_store=_tracker_store,
                        endpoints=_endpoints)
    from rasa_core.run import serve_application
    print_success("About to start core")
    # Positional args: agent, channel, port, credentials file, CORS origins,
    # auth token, enable_api.
    serve_application(
        _agent,
        "rasa",
        5005,
        "credentials.yml",
        "*",
        None,  # TODO: configure auth token
        True)
def start_core(platform_token):
    """Start a Rasa Core server wired to a local platform instance.

    Variant that hands the model path and all settings directly to
    ``serve_application`` instead of pre-loading an agent.

    Args:
        platform_token: auth token used for the platform's model and NLG
            endpoints.
    """
    from rasa_core.utils import AvailableEndpoints
    from rasa_core.run import serve_application
    from rasa_core.utils import EndpointConfig
    _endpoints = AvailableEndpoints(
        # TODO: make endpoints more configurable, esp ports
        # Poll the platform every second for a new production model.
        model=EndpointConfig("http://localhost:5002"
                             "/api/projects/default/models/tags/production",
                             token=platform_token,
                             wait_time_between_pulls=1),
        # File-backed event broker.
        event_broker=EndpointConfig(**{"type": "file"}),
        # Response generation is delegated to the platform's NLG endpoint.
        nlg=EndpointConfig("http://localhost:5002"
                           "/api/nlg",
                           token=platform_token))
    # Serve directly from the model path; the HTTP API is enabled and CORS is
    # wide open ("*") for local development.
    serve_application("models",
                      nlu_model=None,
                      channel="rasa",
                      credentials_file="credentials.yml",
                      cors="*",
                      auth_token=None,  # TODO: configure auth token
                      enable_api=True,
                      endpoints=_endpoints)
if __name__ == '__main__':
    # Running as standalone python application
    from rasa_core import run

    # Parse the standard rasa_core.run command line (model paths, channel,
    # port, credentials, CORS, auth/JWT settings, ...).
    arg_parser = run.create_argument_parser()
    cmdline_args = arg_parser.parse_args()

    # Silence noisy third-party loggers.
    logging.getLogger('werkzeug').setLevel(logging.WARN)
    logging.getLogger('matplotlib').setLevel(logging.WARN)

    utils.configure_colored_logging(cmdline_args.loglevel)
    utils.configure_file_logging(cmdline_args.loglevel,
                                 cmdline_args.log_file)

    logger.warning("USING `rasa_core.server` is deprecated and will be "
                   "removed in the future. Use `rasa_core.run --enable_api` "
                   "instead.")
    logger.info("Rasa process starting")

    # Resolve endpoints, build the NLU interpreter, and load the agent.
    _endpoints = AvailableEndpoints.read_endpoints(cmdline_args.endpoints)
    _interpreter = NaturalLanguageInterpreter.create(cmdline_args.nlu,
                                                     _endpoints.nlu)
    _agent = run.load_agent(cmdline_args.core,
                            interpreter=_interpreter,
                            endpoints=_endpoints)

    # Positional args: agent, channel, port, credentials, CORS, auth token,
    # enable_api, JWT secret, JWT method.
    run.serve_application(_agent,
                          cmdline_args.connector,
                          cmdline_args.port,
                          cmdline_args.credentials,
                          cmdline_args.cors,
                          cmdline_args.auth_token,
                          cmdline_args.enable_api,
                          cmdline_args.jwt_secret,
                          cmdline_args.jwt_method)
# Parse the standard rasa_core.run command line (model paths, channel, port,
# credentials, CORS, auth settings, ...).
arg_parser = run.create_argument_parser()
cmdline_args = arg_parser.parse_args()

# Silence noisy third-party loggers.
logging.getLogger('werkzeug').setLevel(logging.WARN)
logging.getLogger('matplotlib').setLevel(logging.WARN)

utils.configure_colored_logging(cmdline_args.loglevel)
utils.configure_file_logging(cmdline_args.loglevel,
                             cmdline_args.log_file)

logger.warning("USING `rasa_core.server` is deprecated and will be "
               "removed in the future. Use `rasa_core.run --enable_api` "
               "instead.")
logger.info("Rasa process starting")

# Resolve endpoints, build the NLU interpreter, and load the agent.
_endpoints = run.read_endpoints(cmdline_args.endpoints)
_interpreter = NaturalLanguageInterpreter.create(cmdline_args.nlu,
                                                 _endpoints.nlu)
_agent = run.load_agent(cmdline_args.core,
                        interpreter=_interpreter,
                        endpoints=_endpoints)

# Positional args: agent, channel, port, credentials, CORS, auth token,
# enable_api.
run.serve_application(_agent,
                      cmdline_args.connector,
                      cmdline_args.port,
                      cmdline_args.credentials,
                      cmdline_args.cors,
                      cmdline_args.auth_token,
                      cmdline_args.enable_api)
# When no credentials file was given on the command line, fetch the project's
# credentials from the Botfront server instead.
if not cmdline_args.credentials:
    logger.info("Fetching credentials from server")
    url = "{}/project/{}/{}".format(bf_url, project_id, "credentials")
    try:
        cmdline_args.credentials = bf_utils.load_from_remote(
            EndpointConfig(url=url), "credentials")
    except Exception as e:
        print(e)
        raise ValueError(
            'No credentials found for project {}.'.format(project_id))

# Wire up the event broker and tracker store from the endpoint config.
_endpoints = MoreAvailableEndpoints.read_endpoints(cmdline_args.endpoints)
_broker = PikaProducer.from_endpoint_config(_endpoints.event_broker)
_tracker_store = TrackerStore.find_tracker_store(None,
                                                 _endpoints.tracker_store,
                                                 _broker)

# NLU models are resolved from the endpoint config's project rather than
# passed explicitly (nlu_models=None).
_agent = load_agent(cmdline_args.core,
                    nlu_models=None,
                    project_id=_endpoints.nlu.kwargs.get('project'),
                    tracker_store=_tracker_store,
                    endpoints=_endpoints,
                    rules=cmdline_args.rules)

# Positional args: agent, channel, port, credentials, CORS, auth token,
# enable_api, JWT secret, JWT method. The auth token comes from the
# environment, not the command line.
run.serve_application(_agent,
                      cmdline_args.connector,
                      cmdline_args.port,
                      cmdline_args.credentials,
                      cmdline_args.cors,
                      os.environ.get('AUTH_TOKEN', None),
                      cmdline_args.enable_api,
                      cmdline_args.jwt_secret,
                      cmdline_args.jwt_method)
import yaml

from rasa_core.agent import Agent
from rasa_core.channels.slack import SlackInput
from rasa_core.interpreter import RasaNLUInterpreter
from rasa_core.run import serve_application
from rasa_core.utils import EndpointConfig

# NLU model for the movie bot.
nlu_interpreter = RasaNLUInterpreter('./models/nlu/default/movie')

# Webhook where the custom-action server listens.
action_endpoint = EndpointConfig(url="http://localhost:5055/webhook")

# Assemble the agent and serve it on port 3000 with open CORS.
agent = Agent.load('./models/dialogue',
                   interpreter=nlu_interpreter,
                   action_endpoint=action_endpoint)
serve_application(agent, port=3000, cors='*')
def run_bot(serve_forever=True):
    """Load the chat agent with its NLU interpreter and serve it on the CLI."""
    chat_interpreter = RasaNLUInterpreter('./models/nlu/default/chat')
    chat_agent = Agent.load('./models/dialogue', interpreter=chat_interpreter)
    run.serve_application(chat_agent, channel='cmdline')
    return chat_agent