def serve_application(model_directory: Text,
                      nlu_model: Optional[Text] = None,
                      tracker_dump: Optional[Text] = None,
                      port: int = constants.DEFAULT_SERVER_PORT,
                      endpoints: Optional[Text] = None,
                      enable_api: bool = True):
    """Serve a Core model over HTTP plus a cmdline channel, replaying a tracker.

    Loads the agent from ``model_directory``, restores the conversation
    state dumped in ``tracker_dump``, then serves until interrupted.
    """
    from rasa_core import run

    endpoint_config = AvailableEndpoints.read_endpoints(endpoints)
    interpreter = NaturalLanguageInterpreter.create(nlu_model,
                                                    endpoint_config.nlu)
    channels = run.create_http_input_channels("cmdline", None)
    agent = load_agent(model_directory,
                       interpreter=interpreter,
                       endpoints=endpoint_config)

    server = run.start_server(channels, None, None,
                              port=port,
                              initial_agent=agent,
                              enable_api=enable_api)

    # Restore the dumped conversation and keep chatting under its sender id.
    tracker = load_tracker_from_json(tracker_dump, agent.domain)
    run.start_cmdline_io(constants.DEFAULT_SERVER_FORMAT.format(port),
                         server.stop,
                         sender_id=tracker.sender_id)
    replay_events(tracker, agent)

    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
async def load_agent_on_start(core_model, endpoints, nlu_model, app, loop):
    """Load an agent and attach it to `app`. Used to be scheduled on server
    start (hence the `app` and `loop` arguments)."""
    from rasa_core import broker
    from rasa_core.agent import Agent

    # Build the NLU interpreter from a model path and/or endpoint config.
    _interpreter = NaturalLanguageInterpreter.create(nlu_model, endpoints.nlu)
    # Optional event broker feeding the tracker store.
    _broker = broker.from_endpoint_config(endpoints.event_broker)
    _tracker_store = TrackerStore.find_tracker_store(None, endpoints.tracker_store, _broker)

    if endpoints and endpoints.model:
        from rasa_core import agent
        # A remote model server is configured: start with an empty agent
        # and let the model be pulled from the server asynchronously.
        app.agent = Agent(interpreter=_interpreter, generator=endpoints.nlg, tracker_store=_tracker_store, action_endpoint=endpoints.action)
        # NOTE: `agent` here is the rasa_core.agent MODULE, not the instance.
        await agent.load_from_server(app.agent, model_server=endpoints.model)
    else:
        # No model server: load the local core model directly.
        app.agent = Agent.load(core_model, interpreter=_interpreter, generator=endpoints.nlg, tracker_store=_tracker_store, action_endpoint=endpoints.action)
    return app.agent
def run(core_dir, nlu_dir):
    """Serve the bot on Telegram, persisting trackers in Elasticsearch."""
    endpoint_config = AvailableEndpoints.read_endpoints('endpoints.yml')
    interpreter = NaturalLanguageInterpreter.create(nlu_dir)

    telegram = TelegramInput(
        access_token=os.getenv('TELEGRAM_ACCESS_TOKEN', ''),
        verify=os.getenv('VERIFY', ''),
        webhook_url=os.getenv('WEBHOOK_URL', ''))

    # Authenticated Elasticsearch access only when a user is configured.
    es_url = os.getenv('ELASTICSEARCH_URL', 'elasticsearch:9200')
    if os.getenv('ELASTICSEARCH_USER') is None:
        tracker_store = ElasticTrackerStore(domain=es_url)
    else:
        tracker_store = ElasticTrackerStore(
            domain=es_url,
            user=os.getenv('ELASTICSEARCH_USER', 'user'),
            password=os.getenv('ELASTICSEARCH_PASSWORD', 'password'),
            scheme=os.getenv('ELASTICSEARCH_HTTP_SCHEME', 'http'),
            scheme_port=os.getenv('ELASTICSEARCH_PORT', '80'))

    agent = load_agent(core_dir,
                       interpreter=interpreter,
                       tracker_store=tracker_store,
                       endpoints=endpoint_config)

    server = agent.handle_channels([telegram], 5001, "")
    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
def run(core_dir, nlu_dir):
    """Serve the bot over a Socket.IO webchat channel."""
    webchat = SocketIOInput(user_message_evt="user_uttered",
                            bot_message_evt="bot_uttered",
                            session_persistence=True,
                            namespace=None)

    endpoint_config = AvailableEndpoints.read_endpoints('endpoints.yml')
    interpreter = NaturalLanguageInterpreter.create(nlu_dir)

    # Authenticated Elasticsearch access only when a user is configured.
    es_url = os.getenv('ELASTICSEARCH_URL', 'elasticsearch:9200')
    if os.getenv('ELASTICSEARCH_USER') is None:
        tracker_store = ElasticTrackerStore(domain=es_url)
    else:
        tracker_store = ElasticTrackerStore(
            domain=es_url,
            user=os.getenv('ELASTICSEARCH_USER', 'user'),
            password=os.getenv('ELASTICSEARCH_PASSWORD', 'password'),
            scheme=os.getenv('ELASTICSEARCH_HTTP_SCHEME', 'http'),
            scheme_port=os.getenv('ELASTICSEARCH_PORT', '80'))

    agent = load_agent(core_dir,
                       interpreter=interpreter,
                       tracker_store=tracker_store,
                       endpoints=endpoint_config)

    webchat_port = int(os.getenv('WEBCHAT_PORT', 3000))
    server = start_server([webchat], "", "", webchat_port, agent)
    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
def __init__(
        self,
        domain=None,  # type: Union[Text, Domain]
        policies=None,  # type: Union[PolicyEnsemble, List[Policy], None]
        interpreter=None,  # type: Optional[NaturalLanguageInterpreter]
        generator=None,  # type: Union[EndpointConfig, NLG, None]
        tracker_store=None,  # type: Optional[TrackerStore]
        action_endpoint=None,  # type: Optional[EndpointConfig]
        fingerprint=None  # type: Optional[Text]
):
    """Initialize the agent from the passed components (or their specs)."""
    # Initializing variables with the passed parameters.
    self.domain = self._create_domain(domain)
    self.policy_ensemble = self._create_ensemble(policies)

    if not isinstance(interpreter, NaturalLanguageInterpreter):
        if interpreter is not None:
            # FIX: the original concatenated "before" and "passing"
            # without a space, rendering "beforepassing" in the log.
            logger.warning(
                "Passing a value for interpreter to an agent "
                "where the value is not an interpreter "
                "is deprecated. Construct the interpreter, before "
                "passing it to the agent, e.g. "
                "`interpreter = NaturalLanguageInterpreter.create("
                "nlu)`.")
        # Fall back to the factory for model paths / None.
        interpreter = NaturalLanguageInterpreter.create(interpreter, None)

    self.interpreter = interpreter
    self.nlg = NaturalLanguageGenerator.create(generator, self.domain)
    self.tracker_store = self.create_tracker_store(tracker_store,
                                                   self.domain)
    self.action_endpoint = action_endpoint
    self._set_fingerprint(fingerprint)
def __init__(
        self,
        domain=None,  # type: Union[Text, Domain]
        policies=None,  # type: Union[PolicyEnsemble, List[Policy], None]
        interpreter=None,  # type: Optional[NaturalLanguageInterpreter]
        generator=None,  # type: Union[EndpointConfig, NLG, None]
        tracker_store=None,  # type: Optional[TrackerStore]
        action_endpoint=None,  # type: Optional[EndpointConfig]
        fingerprint=None  # type: Optional[Text]
):
    """Initialize the agent from the passed components (or their specs)."""
    # Initializing variables with the passed parameters.
    self.domain = self._create_domain(domain)
    self.policy_ensemble = self._create_ensemble(policies)

    if not isinstance(interpreter, NaturalLanguageInterpreter):
        if interpreter is not None:
            # FIX: the original concatenated "before" and "passing"
            # without a space, rendering "beforepassing" in the log.
            logger.warning(
                "Passing a value for interpreter to an agent "
                "where the value is not an interpreter "
                "is deprecated. Construct the interpreter, before "
                "passing it to the agent, e.g. "
                "`interpreter = NaturalLanguageInterpreter.create("
                "nlu)`.")
        # Fall back to the factory for model paths / None.
        interpreter = NaturalLanguageInterpreter.create(interpreter, None)

    self.interpreter = interpreter
    self.nlg = NaturalLanguageGenerator.create(generator, self.domain)
    self.tracker_store = self.create_tracker_store(
        tracker_store, self.domain)
    self.action_endpoint = action_endpoint
    self._set_fingerprint(fingerprint)
def load(
        cls,
        path,  # type: Text
        interpreter=None,  # type: Union[NLI, Text, None]
        tracker_store=None,  # type: Optional[TrackerStore]
        action_factory=None  # type: Optional[Text]
):
    # type: (Text, Any, Optional[TrackerStore]) -> Agent
    """Load a persisted model from the passed path."""
    # Guard clauses: a path is required and must be a model directory.
    if path is None:
        raise ValueError("No domain path specified.")
    if os.path.isfile(path):
        raise ValueError("You are trying to load a MODEL from a file "
                         "('{}'), which is not possible. \n"
                         "The persisted path should be a directory "
                         "containing the various model files. \n\n"
                         "If you want to load training data instead of "
                         "a model, use `agent.load_data(...)` "
                         "instead.".format(path))

    domain = TemplateDomain.load(os.path.join(path, "domain.yml"),
                                 action_factory)
    # ensures the domain hasn't changed between test and train
    domain.compare_with_specification(path)
    ensemble = PolicyEnsemble.load(path)

    nli = NaturalLanguageInterpreter.create(interpreter)
    store = cls.create_tracker_store(tracker_store, domain)
    return cls(domain, ensemble, nli, store)
def __init__(
        self,
        domain: Union[Text, Domain] = None,
        policies: Union[PolicyEnsemble, List[Policy], None] = None,
        interpreter: Optional[NaturalLanguageInterpreter] = None,
        generator: Union[EndpointConfig, 'NLG', None] = None,
        tracker_store: Optional['TrackerStore'] = None,
        action_endpoint: Optional[EndpointConfig] = None,
        fingerprint: Optional[Text] = None
):
    """Assemble the agent and validate the policy/domain combination."""
    resolved = self._create_domain(domain)
    self.domain = resolved
    if resolved:
        # Forms require the special `requested_slot` slot on the domain.
        resolved.add_requested_slot()

    self.policy_ensemble = self._create_ensemble(policies)
    if self._form_policy_not_present():
        raise InvalidDomain(
            "You have defined a form action, but haven't added the "
            "FormPolicy to your policy ensemble."
        )

    self.interpreter = NaturalLanguageInterpreter.create(interpreter)
    self.nlg = NaturalLanguageGenerator.create(generator, resolved)
    self.tracker_store = self.create_tracker_store(tracker_store, resolved)
    self.action_endpoint = action_endpoint
    self._set_fingerprint(fingerprint)
def load(cls, path, interpreter=None, tracker_store=None,
         action_factory=None, rules_file=None, generator=None,
         create_dispatcher=None):
    # type: (Text, Any, Optional[TrackerStore]) -> Agent
    """Restore a persisted agent (domain, policies, stores) from `path`."""
    if path is None:
        raise ValueError("No domain path specified.")

    domain = TemplateDomain.load(os.path.join(path, "domain.yml"),
                                 action_factory)
    # ensures the domain hasn't changed between test and train
    domain.compare_with_specification(path)

    ensemble = PolicyEnsemble.load(path)
    nli = NaturalLanguageInterpreter.create(interpreter)
    store = cls.create_tracker_store(tracker_store, domain)

    return cls(domain=domain,
               policies=ensemble,
               interpreter=nli,
               tracker_store=store,
               rules_file=rules_file,
               generator=generator,
               create_dispatcher=create_dispatcher)
def run(core_dir, nlu_dir):
    """Serve the bot on Rocket.Chat; optionally publish events via Pika."""
    # Analytics: forward tracker events to a Pika (RabbitMQ) queue.
    pika_broker = None
    if ENABLE_ANALYTICS:
        pika_broker = PikaProducer(url, username, password, queue=queue)

    rocketchat = RocketChatInput(
        user=os.getenv('ROCKETCHAT_BOT_USERNAME'),
        password=os.getenv('ROCKETCHAT_BOT_PASSWORD'),
        server_url=os.getenv('ROCKETCHAT_URL'))

    tracker_store = InMemoryTrackerStore(domain=None, event_broker=pika_broker)
    endpoint_config = AvailableEndpoints.read_endpoints(None)
    interpreter = NaturalLanguageInterpreter.create(nlu_dir)

    agent = load_agent(core_dir,
                       interpreter=interpreter,
                       tracker_store=tracker_store,
                       endpoints=endpoint_config)

    server = start_server([rocketchat], "", "", 5005, agent)
    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
def run(core_dir, nlu_dir, endpoints_file):
    """Serve the bot on Rocket.Chat, storing trackers in Elasticsearch."""
    rocketchat = RocketChatInput(
        user=os.getenv('ROCKETCHAT_BOT_USERNAME'),
        password=os.getenv('ROCKETCHAT_BOT_PASSWORD'),
        server_url=os.getenv('ROCKETCHAT_URL'))

    endpoint_config = AvailableEndpoints.read_endpoints(endpoints_file)
    interpreter = NaturalLanguageInterpreter.create(nlu_dir,
                                                    endpoint_config.nlu)
    tracker_store = ElasticTrackerStore()

    agent = load_agent(core_dir,
                       interpreter=interpreter,
                       tracker_store=tracker_store,
                       endpoints=endpoint_config)

    server = start_server([rocketchat], "", "", 5005, agent)
    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
def test_dialog():
    """Chat with the trained agent on the console input channel."""
    interpreter = NaturalLanguageInterpreter.create(None)
    agent = Agent.load(output_path, interpreter=interpreter)
    agent.handle_channel(ConsoleInputChannel())
    return agent
def create_agent():
    """Build an agent from the current NLU and dialogue models."""
    from rasa_core.utils import EndpointConfig

    nlu_interpreter = NaturalLanguageInterpreter.create("models/nlu/current")
    # Custom actions are served by a local action server.
    webhook = EndpointConfig(url="http://localhost:5056/webhook")
    return Agent.load("models/dialog",
                      interpreter=nlu_interpreter,
                      action_endpoint=webhook)
def run():
    """Serve the ticket bot over the REST channel and return the agent."""
    endpoint_config = AvailableEndpoints.read_endpoints('config/endpoints.yml')
    interpreter = NaturalLanguageInterpreter.create('models/ticket/nlu_bot',
                                                    endpoint_config.nlu)
    agent = load_agent("models/dialogue",
                       interpreter=interpreter,
                       endpoints=endpoint_config)
    serve_application(agent, channel='rest')
    return agent
def load(cls, path, interpreter=None, tracker_store=None):
    # type: (Text, Any, Optional[TrackerStore]) -> Agent
    """Restore a persisted agent (domain, featurizer, policies) from `path`."""
    domain = TemplateDomain.load(os.path.join(path, "domain.yml"))
    # ensures the domain hasn't changed between test and train
    domain.compare_with_specification(path)

    featurizer = Featurizer.load(path)
    ensemble = PolicyEnsemble.load(path, featurizer)
    nli = NaturalLanguageInterpreter.create(interpreter)
    store = cls._create_tracker_store(tracker_store, domain)
    return cls(domain, ensemble, featurizer, nli, store)
def trainingBot(to_bot_queue, to_human_queue, base_model, output_model, nlu_model, training_data):
    """Run interactive (online) training of the dialogue model.

    User/bot messages flow through `to_bot_queue` / `to_human_queue`;
    the trained model is persisted to `output_model`.
    """
    utils.configure_colored_logging(loglevel="INFO")
    max_history = None
    interactive_learning_on = True  # NOTE(review): set but never read here
    channel = TrainingInputChannel(to_bot_queue, to_human_queue)
    preloaded_model = True  # hard-coded switch: always takes the preloaded path
    if preloaded_model:
        # Continue online training from an existing core + NLU model.
        agent = CustomAgent.load(base_model, NaturalLanguageInterpreter.create(nlu_model))
        training_data = agent.load_data(training_data)
        agent.train_online_preloaded_model(training_data, input_channel=channel, model_path=output_model)
    else:
        # Dead branch (preloaded_model is always True): train from scratch
        # with a memoization + Keras + fallback policy ensemble.
        agent = CustomAgent(
            "domain.yml",
            policies=[
                MemoizationPolicy(max_history=max_history),
                KerasPolicy(
                    MaxHistoryTrackerFeaturizer(BinarySingleStateFeaturizer(),
                                                max_history=max_history)),
                FallbackPolicy(fallback_action_name="utter_fallback",
                               nlu_threshold=0.3)
            ])
        training_data = agent.load_data(training_data)
        agent.interpreter = NaturalLanguageInterpreter.create(nlu_model)
        agent.train_online(training_data, input_channel=channel, model_path=output_model, augmentation_factor=50, epochs=250, batch_size=10, validation_split=0.2)
    agent.persist(output_model)
def interpreter_from_args(
        nlu_model,  # type: Union[Text, NaturalLanguageInterpreter, None]
        nlu_endpoint  # type: Optional[EndpointConfig]
):
    # type: (...) -> Optional[NaturalLanguageInterpreter]
    """Create an interpreter from the commandline arguments.

    Depending on which values are passed for model and endpoint, this
    will create the corresponding interpreter (either loading the model
    locally or setting up an endpoint based interpreter)."""
    # A ready interpreter instance is passed through untouched.
    if isinstance(nlu_model, NaturalLanguageInterpreter):
        return nlu_model
    if nlu_model:
        # Split "project/model" into its (project, model) parts.
        name_parts = os.path.split(nlu_model)
    else:
        name_parts = []
    # NOTE(review): os.path.split always returns a 2-tuple, so the
    # len == 1 branch below appears unreachable — a bare "model" splits
    # into ("", "model") and is handled by the len == 2 branch. Confirm
    # before relying on the single-part path.
    if len(name_parts) == 1:
        if nlu_endpoint:
            # using the default project name
            return RasaNLUHttpInterpreter(name_parts[0], nlu_endpoint)
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
    elif len(name_parts) == 2:
        if nlu_endpoint:
            return RasaNLUHttpInterpreter(name_parts[1], nlu_endpoint, name_parts[0])
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
    else:
        if nlu_endpoint:
            raise Exception("You have configured an endpoint to use for "
                            "the NLU model. To use it, you need to "
                            "specify the model to use with "
                            "`--nlu project/model`.")
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
def __init__(
        self,
        domain,  # type: Union[Text, Domain]
        policies=None,  # type: Union[PolicyEnsemble, List[Policy], None]
        interpreter=None,  # type: Union[NLI, Text, None]
        tracker_store=None  # type: Optional[TrackerStore]
):
    """Assemble the agent from a domain, policies, interpreter and store."""
    resolved = self._create_domain(domain)
    self.domain = resolved
    self.policy_ensemble = self._create_ensemble(policies)
    # Accepts a ready interpreter or a spec such as a model path.
    self.interpreter = NaturalLanguageInterpreter.create(interpreter)
    self.tracker_store = self.create_tracker_store(tracker_store, resolved)
def __init__(self, domain, policies=None, featurizer=None,
             interpreter=None, tracker_store=None):
    """Assemble the agent: domain, featurizer, policies, NLU and store."""
    resolved = self._create_domain(domain)
    self.domain = resolved
    self.featurizer = self._create_featurizer(featurizer)
    self.policy_ensemble = self._create_ensemble(policies)
    # Accepts a ready interpreter or a spec such as a model path.
    self.interpreter = NaturalLanguageInterpreter.create(interpreter)
    self.tracker_store = self._create_tracker_store(tracker_store, resolved)
async def train_agent_on_start(args, endpoints, additional_arguments, app,
                               loop):
    """Train a core model at server start and attach the agent to `app`.

    `args` mirrors the CLI argument dict ("nlu", "out", "domain",
    "stories", "dump_stories", "config"); `loop` is accepted for
    scheduling parity with other on-start callbacks.
    """
    _interpreter = NaturalLanguageInterpreter.create(args.get("nlu"),
                                                     endpoints.nlu)

    # FIX: only create a temp directory when no output dir was given.
    # dict.get evaluates its default eagerly, so the original
    # `args.get("out", tempfile.mkdtemp(...))` leaked an unused temp
    # directory on every call that DID provide "out".
    model_directory = args.get("out")
    if model_directory is None:
        model_directory = tempfile.mkdtemp(suffix="_core_model")

    # NOTE(review): assumes args["config"] is a non-empty list — confirm
    # against the argument parser that populates it.
    _agent = await train(args.get("domain"), args.get("stories"),
                         model_directory, _interpreter, endpoints,
                         args.get("dump_stories"), args.get("config")[0],
                         None, additional_arguments)
    app.agent = _agent
def interpreter_from_args(
        nlu_model,  # type: Union[Text, NaturalLanguageInterpreter, None]
        nlu_endpoint  # type: Optional[EndpointConfig]
):
    # type: (...) -> Optional[NaturalLanguageInterpreter]
    """Create an interpreter from the commandline arguments.

    Depending on which values are passed for model and endpoint, this
    will create the corresponding interpreter (either loading the model
    locally or setting up an endpoint based interpreter)."""
    # A ready interpreter instance is passed through untouched.
    if isinstance(nlu_model, NaturalLanguageInterpreter):
        return nlu_model
    if nlu_model:
        # Split "project/model" into its (project, model) parts.
        name_parts = os.path.split(nlu_model)
    else:
        name_parts = []
    # NOTE(review): os.path.split always returns a 2-tuple, so the
    # len == 1 branch below appears unreachable — a bare "model" splits
    # into ("", "model") and is handled by the len == 2 branch. Confirm
    # before relying on the single-part path.
    if len(name_parts) == 1:
        if nlu_endpoint:
            # using the default project name
            return RasaNLUHttpInterpreter(name_parts[0], nlu_endpoint)
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
    elif len(name_parts) == 2:
        if nlu_endpoint:
            return RasaNLUHttpInterpreter(name_parts[1], nlu_endpoint, name_parts[0])
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
    else:
        if nlu_endpoint:
            raise Exception("You have configured an endpoint to use for "
                            "the NLU model. To use it, you need to "
                            "specify the model to use with "
                            "`--nlu project/model`.")
        else:
            return NaturalLanguageInterpreter.create(nlu_model)
def main():
    """CLI entry point: evaluate a single core model or compare several."""
    from rasa_core.agent import Agent
    from rasa_core.interpreter import NaturalLanguageInterpreter
    from rasa_core.utils import (AvailableEndpoints, set_default_subparser)
    import rasa_nlu.utils as nlu_utils
    import rasa_core.cli
    from rasa_core import utils

    loop = asyncio.get_event_loop()

    # Running as standalone python application
    arg_parser = create_argument_parser()
    set_default_subparser(arg_parser, 'default')
    cmdline_arguments = arg_parser.parse_args()

    logging.basicConfig(level=cmdline_arguments.loglevel)
    _endpoints = AvailableEndpoints.read_endpoints(cmdline_arguments.endpoints)

    if cmdline_arguments.output:
        nlu_utils.create_dir(cmdline_arguments.output)

    # A core model directory is mandatory for both modes.
    if not cmdline_arguments.core:
        raise ValueError("you must provide a core model directory to evaluate "
                         "using -d / --core")

    if cmdline_arguments.mode == 'default':
        # Evaluate one model against the stories from the CLI args.
        _interpreter = NaturalLanguageInterpreter.create(cmdline_arguments.nlu, _endpoints.nlu)
        _agent = Agent.load(cmdline_arguments.core, interpreter=_interpreter)
        stories = loop.run_until_complete(
            rasa_core.cli.train.stories_from_cli_args(cmdline_arguments))
        loop.run_until_complete(
            test(stories, _agent, cmdline_arguments.max_stories,
                 cmdline_arguments.output,
                 cmdline_arguments.fail_on_prediction_errors,
                 cmdline_arguments.e2e))
    elif cmdline_arguments.mode == 'compare':
        # Compare several trained models and plot the resulting curves.
        compare(cmdline_arguments.core, cmdline_arguments.stories,
                cmdline_arguments.output)
        story_n_path = os.path.join(cmdline_arguments.core, 'num_stories.json')
        number_of_stories = utils.read_json_file(story_n_path)
        plot_curve(cmdline_arguments.output, number_of_stories)

    logger.info("Finished evaluation")
def __init__(
        self,
        domain,  # type: Union[Text, Domain]
        policies=None,  # type: Union[PolicyEnsemble, List[Policy], None]
        interpreter=None,  # type: Union[NLI, Text, None]
        generator=None,  # type: Union[EndpointConfig, NLG]
        tracker_store=None  # type: Optional[TrackerStore]
):
    """Initialize the agent from the given components or their specs."""
    resolved = self._create_domain(domain)
    self.domain = resolved
    self.policy_ensemble = self._create_ensemble(policies)
    self.interpreter = NaturalLanguageInterpreter.create(interpreter)
    self.nlg = NaturalLanguageGenerator.create(generator, resolved)
    self.tracker_store = self.create_tracker_store(tracker_store, resolved)
def __init__(
        self,
        domain,  # type: Union[Text, Domain]
        policies=None,  # type: Union[PolicyEnsemble, List[Policy], None]
        interpreter=None,  # type: Union[NLI, Text, None]
        generator=None,  # type: Union[EndpointConfig, NLG]
        tracker_store=None  # type: Optional[TrackerStore]
):
    """Assemble the agent from the given components or their specs."""
    resolved = self._create_domain(domain)
    self.domain = resolved
    self.policy_ensemble = self._create_ensemble(policies)
    # Accepts a ready interpreter or a spec such as a model path.
    self.interpreter = NaturalLanguageInterpreter.create(interpreter)
    self.nlg = NaturalLanguageGenerator.create(generator, resolved)
    self.tracker_store = self.create_tracker_store(tracker_store, resolved)
def run(core_dir, nlu_dir):
    """Serve the bot on the Facebook Messenger channel (blocking)."""
    endpoint_config = AvailableEndpoints.read_endpoints('endpoints.yml')
    interpreter = NaturalLanguageInterpreter.create(nlu_dir)

    messenger = FacebookInput(
        fb_verify=VERIFY,  # token facebook uses to confirm your webhook URL
        fb_secret=SECRET,  # your app secret
        fb_access_token=FACEBOOK_ACCESS_TOKEN  # page access token
    )

    agent = load_agent(core_dir,
                       interpreter=interpreter,
                       endpoints=endpoint_config)
    agent.handle_channels([messenger], 5001, serve_forever=True)
def test_core(model: Text, stories: Text, endpoints: Text = None,
              output: Text = DEFAULT_RESULTS_PATH, model_path: Text = None,
              **kwargs: Dict):
    """Evaluate a core model (or compare several models) on test stories."""
    import rasa_core.test
    import rasa_core.utils as core_utils
    from rasa_nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa_core.interpreter import NaturalLanguageInterpreter
    from rasa_core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if output:
        nlu_utils.create_dir(output)

    # A packed model file is unpacked into a model directory first.
    if os.path.isfile(model):
        model_path = get_model(model)

    if model_path:
        # Single model: Normal evaluation
        loop = asyncio.get_event_loop()
        # NOTE(review): this unconditionally overwrites `model_path`
        # (including a caller-supplied one) by unpacking `model` a second
        # time — confirm this is intended.
        model_path = get_model(model)
        core_path, nlu_path = get_model_subdirectories(model_path)

        _interpreter = NaturalLanguageInterpreter.create(nlu_path, _endpoints.nlu)
        _agent = Agent.load(core_path, interpreter=_interpreter)

        kwargs = minimal_kwargs(kwargs, rasa_core.test)
        # NOTE(review): `rasa_core.test` is imported as a module above;
        # calling it relies on the package exposing a callable under that
        # name — verify against the rasa_core version in use.
        loop.run_until_complete(
            rasa_core.test(stories, _agent, out_directory=output, **kwargs))
    else:
        # Multiple models: compare their results and plot the curves.
        from rasa_core.test import compare, plot_curve
        compare(model, stories, output)

        story_n_path = os.path.join(model, 'num_stories.json')
        number_of_stories = core_utils.read_json_file(story_n_path)
        plot_curve(output, number_of_stories)
def update_model(self,
                 domain: Union[Text, Domain],
                 policy_ensemble: PolicyEnsemble,
                 fingerprint: Optional[Text],
                 interpreter: Optional[NaturalLanguageInterpreter] = None
                 ) -> None:
    """Swap a freshly loaded model into this running agent in place."""
    self.domain = domain
    self.policy_ensemble = policy_ensemble

    # Only replace the interpreter when a new one was supplied.
    if interpreter:
        self.interpreter = NaturalLanguageInterpreter.create(interpreter)

    self._set_fingerprint(fingerprint)

    # Propagate the new domain to dependent components.
    self.tracker_store.domain = domain
    nlg = self.nlg
    if hasattr(nlg, "templates"):
        nlg.templates = domain.templates or []
def load_assistant():
    """Start an interactive console chat with the trained assistant.

    Loads the NLU interpreter and dialogue model, wires up the local
    action endpoint, then reads user messages from stdin until 'stop'.
    """
    # FIX: removed the unused `messages` greeting list the original
    # built but never printed (dead code).
    interpreter = NaturalLanguageInterpreter.create(
        './models/nlu/default/con_nlu')
    endpoint = EndpointConfig('http://localhost:5055/webhook')
    agent = Agent.load('./models/dialogue',
                       interpreter=interpreter,
                       action_endpoint=endpoint)

    print("Your bot is ready to talk! Type your messages here or send 'stop'")
    while True:
        a = input()
        if a == 'stop':
            break
        responses = agent.handle_text(a)
        for response in responses:
            print(response["text"])
def run_evaluation(file_to_evaluate, fail_on_prediction_errors=False,
                   max_stories=None, use_e2e=False):
    """Evaluate the stories in one file and record results in module globals.

    Appends to the module-level `files_results` and `all_failed_stories`
    collections and reads the module-level `CORE_DIR` / `NLU_DIR` paths.
    """
    _endpoints = AvailableEndpoints.read_endpoints(None)
    _interpreter = NaturalLanguageInterpreter.create(NLU_DIR)
    _agent = load_agent(CORE_DIR, interpreter=_interpreter, endpoints=_endpoints)

    # Run every story through the agent and collect the predictions.
    completed_trackers = _generate_trackers(file_to_evaluate, _agent, max_stories, use_e2e)
    story_evaluation, _ = collect_story_predictions(completed_trackers, _agent, fail_on_prediction_errors, use_e2e)
    _failed_stories = story_evaluation.failed_stories

    _num_stories = len(completed_trackers)
    _file_result = FileResult(num_stories=_num_stories, num_failed_stories=len(_failed_stories))

    file_message = "EVALUATING STORIES FOR FILE '{}':".format(file_to_evaluate)
    utils.print_color('\n' + '#' * 80, BOLD_COLOR)
    utils.print_color(file_message, BOLD_COLOR)

    files_results[file_to_evaluate] = _file_result

    if len(_failed_stories) == 0:
        success_message = "The stories have passed for file '{}'!!" \
            .format(file_to_evaluate)
        utils.print_color('\n' + '=' * len(success_message), BLUE_COLOR)
        utils.print_color(success_message, BLUE_COLOR)
        utils.print_color('=' * len(success_message), BLUE_COLOR)
    else:
        # Report each failed story and remember it for the final summary.
        for failed_story in _failed_stories:
            process_failed_story(failed_story.export_stories())
            story_name = re.search('## (.*)', failed_story.export_stories()).group(1)
            all_failed_stories.append(file_to_evaluate + ' - ' + story_name)
    utils.print_color('#' * 80 + '\n', BOLD_COLOR)
def run(core_dir, nlu_dir):
    """Serve the bot on the Telegram channel."""
    endpoint_config = AvailableEndpoints.read_endpoints('endpoints.yml')
    interpreter = NaturalLanguageInterpreter.create(nlu_dir)

    telegram = TelegramInput(
        access_token=os.getenv('TELEGRAM_ACCESS_TOKEN', ''),
        verify=os.getenv('VERIFY', ''),
        webhook_url=os.getenv('WEBHOOK_URL', ''))

    agent = load_agent(core_dir,
                       interpreter=interpreter,
                       endpoints=endpoint_config)

    server = agent.handle_channels([telegram], 5001, "")
    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
def loadAgent(path, interpreter=None, tracker_store=None,
              action_factory=None, core_server=None):
    # type: (Text, Any, Optional[TrackerStore]) -> Agent
    """Load a persisted Snips MQTT agent from `path`."""
    if path is None:
        raise ValueError("No domain path specified.")

    domain = SnipsDomain.load(os.path.join(path, "domain.yml"),
                              action_factory,
                              core_server)
    # ensures the domain hasn't changed between test and train
    domain.compare_with_specification(path)

    featurizer = Featurizer.load(path)
    ensemble = PolicyEnsemble.load(path, featurizer)
    nli = NaturalLanguageInterpreter.create(interpreter)
    store = SnipsMqttAgent.create_tracker_store(tracker_store, domain)
    print("CREATED SNIPS AGENT")
    return SnipsMqttAgent(domain, ensemble, featurizer, nli, store)
def __init__(self,
             domain: Union[Text, Domain] = None,
             policies: Union[PolicyEnsemble, List[Policy], None] = None,
             interpreter: Optional[NaturalLanguageInterpreter] = None,
             generator: Union[EndpointConfig, 'NLG', None] = None,
             tracker_store: Optional['TrackerStore'] = None,
             action_endpoint: Optional[EndpointConfig] = None,
             fingerprint: Optional[Text] = None):
    """Initialize the agent; `domain` may be a Domain, a file path, or a
    directory of YAML domain files which are merged together."""
    # If `domain` points to a folder, merge every ".yml" domain file in it.
    # FIX: guard with isinstance — os.path.isdir raises TypeError for the
    # declared None default (and for Domain instances).
    if isinstance(domain, str) and os.path.isdir(domain):
        # FIX: start from an explicit None so a directory with no .yml
        # files cannot leave `self.domain` unset (AttributeError later).
        self.domain = None
        for file in os.listdir(domain):
            if not file.endswith(".yml"):
                continue
            # FIX: os.path.join instead of `domain + file`, which only
            # worked when `domain` had a trailing path separator.
            filepath = os.path.join(domain, file)
            logger.info("Loading domain {}...".format(filepath))
            loaded_domain = self._create_domain(filepath)
            if self.domain is None:
                self.domain = loaded_domain
            else:
                logger.info("Merging domain {}...".format(filepath))
                self.domain = self.domain.merge(loaded_domain)
    else:
        self.domain = self._create_domain(domain)

    if self.domain:
        # Forms require the special `requested_slot` slot on the domain.
        self.domain.add_requested_slot()

    self.policy_ensemble = self._create_ensemble(policies)
    if self._form_policy_not_present():
        raise InvalidDomain(
            "You have defined a form action, but haven't added the "
            "FormPolicy to your policy ensemble.")

    self.interpreter = NaturalLanguageInterpreter.create(interpreter)
    self.nlg = NaturalLanguageGenerator.create(generator, self.domain)
    self.tracker_store = self.create_tracker_store(tracker_store, self.domain)
    self.action_endpoint = action_endpoint
    self._set_fingerprint(fingerprint)
def run(core_dir, nlu_dir):
    """Serve the bot on Rocket.Chat with an Elasticsearch tracker store."""
    rocketchat = RocketChatInput(
        user=os.getenv('ROCKETCHAT_BOT_USERNAME'),
        password=os.getenv('ROCKETCHAT_BOT_PASSWORD'),
        server_url=os.getenv('ROCKETCHAT_URL'))

    endpoint_config = AvailableEndpoints.read_endpoints(None)
    interpreter = NaturalLanguageInterpreter.create(nlu_dir)

    # Authenticated Elasticsearch access only when a user is configured.
    es_url = os.getenv('ELASTICSEARCH_URL', 'elasticsearch:9200')
    if os.getenv('ELASTICSEARCH_USER') is None:
        tracker_store = ElasticTrackerStore(domain=es_url)
    else:
        tracker_store = ElasticTrackerStore(
            domain=es_url,
            user=os.getenv('ELASTICSEARCH_USER', 'user'),
            password=os.getenv('ELASTICSEARCH_PASSWORD', 'password'),
            scheme=os.getenv('ELASTICSEARCH_HTTP_SCHEME', 'http'),
            scheme_port=os.getenv('ELASTICSEARCH_PORT', '80'))

    agent = load_agent(core_dir,
                       interpreter=interpreter,
                       tracker_store=tracker_store,
                       endpoints=endpoint_config)

    server = start_server([rocketchat], "", "", 5005, agent)
    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
def serve_application(model_directory,  # type: Text
                      nlu_model=None,  # type: Optional[Text]
                      tracker_dump=None,  # type: Optional[Text]
                      port=constants.DEFAULT_SERVER_PORT,  # type: int
                      endpoints=None,  # type: Optional[Text]
                      enable_api=True  # type: bool
                      ):
    """Serve the model over HTTP plus a cmdline channel, replaying a tracker."""
    from rasa_core import run

    endpoint_config = run.read_endpoints(endpoints)
    interpreter = NaturalLanguageInterpreter.create(nlu_model,
                                                    endpoint_config.nlu)
    channels = run.create_http_input_channels("cmdline", None)
    agent = load_agent(model_directory,
                       interpreter=interpreter,
                       endpoints=endpoint_config)

    server = run.start_server(channels, None, None,
                              port=port,
                              initial_agent=agent,
                              enable_api=enable_api)

    # Restore the dumped conversation and continue it on the cmdline channel.
    tracker = load_tracker_from_json(tracker_dump, agent.domain)
    run.start_cmdline_io(constants.DEFAULT_SERVER_URL,
                         server.stop,
                         sender_id=tracker.sender_id)
    replay_events(tracker, agent)

    try:
        server.serve_forever()
    except Exception as exc:
        logger.exception(exc)
def _create_interpreter(
        interp  # type: Union[Text, NLI, None]
):
    # type: (...) -> NLI
    """Resolve `interp` (a model path, a ready interpreter, or None) into
    an interpreter via the NaturalLanguageInterpreter factory."""
    return NaturalLanguageInterpreter.create(interp)
# Parse CLI args and serve a core model via the deprecated server entry point.
arg_parser = run.create_argument_parser()
cmdline_args = arg_parser.parse_args()

# Quieten noisy third-party loggers before configuring our own logging.
logging.getLogger('werkzeug').setLevel(logging.WARN)
logging.getLogger('matplotlib').setLevel(logging.WARN)

utils.configure_colored_logging(cmdline_args.loglevel)
utils.configure_file_logging(cmdline_args.loglevel, cmdline_args.log_file)

logger.warning("USING `rasa_core.server` is deprecated and will be "
               "removed in the future. Use `rasa_core.run --enable_api` "
               "instead.")
logger.info("Rasa process starting")

# Resolve endpoint configuration, interpreter and agent from the CLI args.
_endpoints = run.read_endpoints(cmdline_args.endpoints)
_interpreter = NaturalLanguageInterpreter.create(cmdline_args.nlu, _endpoints.nlu)
_agent = run.load_agent(cmdline_args.core, interpreter=_interpreter, endpoints=_endpoints)

# Serve the agent over the selected connector until interrupted.
run.serve_application(_agent, cmdline_args.connector, cmdline_args.port, cmdline_args.credentials, cmdline_args.cors, cmdline_args.auth_token, cmdline_args.enable_api)