def run(
    model: Text,
    endpoints: Text,
    connector: Text = None,
    credentials: Text = None,
    **kwargs: Dict
):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be used (overwrites `credentials` field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
            `rasa_core.run.serve_application`.
    """
    import rasa_core.run

    # Unpack the model archive; the unpacked directory is removed again below.
    model_path = get_model(model)
    _agent = create_agent(model_path, endpoints)

    if not connector and not credentials:
        # No channel configured: fall back to the interactive command line.
        channel = "cmdline"
        logger.info(
            "No chat connector configured, falling back to the "
            "command line. Use `rasa configure channel` to connect "
            "the bot to e.g. facebook messenger."
        )
    else:
        channel = connector

    # Only forward the keyword arguments `serve_application` actually accepts.
    kwargs = minimal_kwargs(kwargs, rasa_core.run.serve_application)
    rasa_core.run.serve_application(
        _agent, channel=channel, credentials_file=credentials, **kwargs
    )
    # Clean up the unpacked model directory once the server shuts down.
    shutil.rmtree(model_path)
def perform_nlu_cross_validation(
    config: Text, nlu: Text, kwargs: Optional[Dict[Text, Any]]
):
    """Runs cross validation on NLU data and logs intent/entity results.

    Args:
        config: Path to the NLU config file.
        nlu: Path to the NLU training data.
        kwargs: Optional additional arguments (e.g. `folds`), forwarded to
            `cross_validate` after filtering.
    """
    import rasa.nlu.config
    from rasa.nlu.test import (
        drop_intents_below_freq,
        cross_validate,
        return_results,
        return_entity_results,
    )

    kwargs = kwargs or {}
    folds = int(kwargs.get("folds", 3))

    nlu_configuration = rasa.nlu.config.load(config)
    training_data = rasa.nlu.training_data.load_data(nlu)
    # Intents with fewer examples than folds can't be split across all folds.
    training_data = drop_intents_below_freq(training_data, cutoff=folds)

    filtered_kwargs = minimal_kwargs(kwargs, cross_validate)
    intent_results, entity_results = cross_validate(
        training_data, folds, nlu_configuration, **filtered_kwargs
    )
    logger.info("CV evaluation (n={})".format(folds))

    if any(intent_results):
        logger.info("Intent evaluation results")
        for label in ("train", "test"):
            return_results(getattr(intent_results, label), label)

    if any(entity_results):
        logger.info("Entity evaluation results")
        for label in ("train", "test"):
            return_entity_results(getattr(entity_results, label), label)
def test_nlu(model: Text, nlu_data: Text, **kwargs: Dict):
    """Tests an NLU model against a set of test NLU data.

    Args:
        model: Path to model archive.
        nlu_data: Path to the NLU test data.
        **kwargs: Additional arguments which are passed to `run_evaluation`.
    """
    from rasa_nlu.test import run_evaluation

    unpacked_model = get_model(model)
    nlu_model = os.path.join(unpacked_model, "nlu")

    if not os.path.exists(nlu_model):
        # Fail early with a clear message instead of letting
        # `run_evaluation` crash on a nonexistent model path.
        logger.error(
            "Could not find an NLU model at '{}'. Use `rasa train` to "
            "train a model.".format(nlu_model)
        )
        return

    # Only forward the keyword arguments `run_evaluation` actually accepts.
    kwargs = minimal_kwargs(kwargs, run_evaluation)
    run_evaluation(nlu_data, nlu_model, **kwargs)
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
):
    """Tests a trained Core model against a set of test stories.

    Args:
        model: Path to model archive.
        stories: Path to the test stories.
        endpoints: Path to endpoints file.
        output: Directory where evaluation results are written.
        kwargs: Additional arguments (e.g. `e2e`), forwarded to
            `rasa.core.test` after filtering.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa.nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        nlu_utils.create_dir(output)

    unpacked_model = get_model(model)
    if unpacked_model is None:
        print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model."
        )
        return

    core_path, nlu_path = get_model_subdirectories(unpacked_model)

    if not os.path.exists(core_path):
        print_error(
            "Unable to test: could not find a Core model. Use 'rasa train' to "
            "train a model."
        )
        # Bug fix: previously execution fell through after reporting the
        # missing Core model and crashed further down; abort instead.
        return

    use_e2e = kwargs.get("e2e", False)

    # Default to the regex interpreter; upgrade to a full NLU interpreter
    # only for end-to-end evaluation when an NLU model is available.
    _interpreter = RegexInterpreter()
    if use_e2e:
        if os.path.exists(nlu_path):
            _interpreter = NaturalLanguageInterpreter.create(nlu_path, _endpoints.nlu)
        else:
            print_warning(
                "No NLU model found. Using default 'RegexInterpreter' for end-to-end "
                "evaluation."
            )

    _agent = Agent.load(unpacked_model, interpreter=_interpreter)

    # `stories` and `agent` are passed explicitly below; filter them out.
    kwargs = minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])

    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        rasa.core.test(stories, _agent, out_directory=output, **kwargs)
    )
def test_nlu(model: Optional[Text], nlu_data: Optional[Text], kwargs: Optional[Dict]):
    """Tests an NLU model against a set of test NLU data.

    Args:
        model: Path to model archive.
        nlu_data: Path to the NLU test data.
        kwargs: Optional additional arguments, forwarded to `run_evaluation`
            after filtering.
    """
    from rasa.nlu.test import run_evaluation

    # Guard: the parameter is Optional, but `minimal_kwargs` expects a dict.
    kwargs = kwargs or {}

    unpacked_model = get_model(model)
    nlu_model = os.path.join(unpacked_model, "nlu")

    if os.path.exists(nlu_model):
        # `data_path` and `model` are passed explicitly below; filter them out.
        kwargs = minimal_kwargs(kwargs, run_evaluation, ["data_path", "model"])
        run_evaluation(nlu_data, nlu_model, **kwargs)
    else:
        # Previously this silently did nothing when no NLU model was present.
        print_error(
            "Could not find any NLU model. Use 'rasa train nlu' to train an NLU model."
        )
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    model_path: Optional[Text] = None,
    kwargs: Optional[Dict] = None,
):
    """Tests Core models against a set of test stories.

    A single model archive (`model`) or unpacked model directory
    (`model_path`) is evaluated normally; otherwise `model` is treated as a
    directory of multiple models to compare.

    Args:
        model: Path to a model archive, or a directory of models to compare.
        stories: Path to the test stories.
        endpoints: Path to endpoints file.
        output: Directory where evaluation results are written.
        model_path: Path to an already unpacked model directory.
        kwargs: Additional arguments, forwarded to `rasa.core.test` after
            filtering.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa.nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        nlu_utils.create_dir(output)

    # Guard against `model=None` (its default): `os.path.isfile(None)` raises.
    if model is not None and os.path.isfile(model):
        model_path = get_model(model)

    if model_path:
        # Single model: normal evaluation.
        # Bug fix: `get_model(model)` was previously called a second time
        # here, redundantly re-unpacking and clobbering an explicitly
        # passed `model_path`.
        loop = asyncio.get_event_loop()
        core_path, nlu_path = get_model_subdirectories(model_path)

        if os.path.exists(core_path) and os.path.exists(nlu_path):
            _interpreter = NaturalLanguageInterpreter.create(nlu_path, _endpoints.nlu)
            _agent = Agent.load(core_path, interpreter=_interpreter)

            # `stories` and `agent` are passed explicitly below.
            kwargs = minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])
            loop.run_until_complete(
                rasa.core.test(stories, _agent, out_directory=output, **kwargs)
            )
        else:
            logger.warning(
                "Not able to test. Make sure both models, core and "
                "nlu, are available."
            )
    else:
        # Multiple models: compare their performance on the test stories.
        from rasa.core.test import compare, plot_curve

        compare(model, stories, output)

        story_n_path = os.path.join(model, "num_stories.json")
        number_of_stories = core_utils.read_json_file(story_n_path)
        plot_curve(output, number_of_stories)
def run(
    model: Text,
    endpoints: Text,
    connector: Text = None,
    credentials: Text = None,
    **kwargs: Dict
):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be used (overwrites `credentials` field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
            `rasa.core.run.serve_application`.
    """
    import rasa.core.run
    import rasa.nlu.run
    from rasa.core.utils import AvailableEndpoints

    model_path = get_model(model)
    if not model_path:
        logger.error(
            "No model found. Train a model before running the "
            "server using `rasa train`."
        )
        return

    core_path, nlu_path = get_model_subdirectories(model_path)
    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        # No channel configured: fall back to the interactive command line.
        channel = "cmdline"
        logger.info(
            "No chat connector configured, falling back to the "
            "command line. Use `rasa configure channel` to connect "
            "the bot to e.g. facebook messenger."
        )
    else:
        channel = connector

    if os.path.exists(core_path):
        # Only forward the keyword arguments `serve_application` accepts.
        kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application)
        rasa.core.run.serve_application(
            core_path,
            nlu_path,
            channel=channel,
            credentials_file=credentials,
            endpoints=_endpoints,
            **kwargs
        )
    # TODO: No core model was found, run only nlu server for now
    elif os.path.exists(nlu_path):
        rasa.nlu.run.run_cmdline(nlu_path)

    # Clean up the unpacked model directory once the server shuts down.
    shutil.rmtree(model_path)
def run(
    model: Text,
    endpoints: Text,
    connector: Text = None,
    credentials: Text = None,
    **kwargs: Dict
):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be use (overwrites `credentials` field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
            `rasa.core.run.serve_application`.
    """
    import rasa.core.run
    import rasa.nlu.run
    from rasa.core.utils import AvailableEndpoints

    # Make sure a trained model exists before starting anything.
    unpacked = get_model(model)
    if not unpacked:
        print_error(
            "No model found. Train a model before running the "
            "server using `rasa train`."
        )
        return

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    # With neither a connector nor credentials, default to the REST channel.
    if not (connector or credentials):
        connector = "rest"
        print_warning(
            "No chat connector configured, falling back to the "
            "REST input channel. To connect your bot to another channel, "
            "read the docs here: {}/user-guide/"
            "messaging-and-voice-channels".format(DOCS_BASE_URL)
        )

    # Forward only the keyword arguments `serve_application` accepts.
    serve_kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application)
    rasa.core.run.serve_application(
        model,
        channel=connector,
        credentials=credentials,
        endpoints=_endpoints,
        **serve_kwargs
    )

    # Remove the unpacked model directory after the server stops.
    shutil.rmtree(unpacked)
def test_core(
    model: Text,
    stories: Text,
    endpoints: Text = None,
    output: Text = DEFAULT_RESULTS_PATH,
    model_path: Text = None,
    **kwargs: Dict
):
    """Tests Core models against a set of test stories.

    A single model archive (`model`) or unpacked model directory
    (`model_path`) is evaluated normally; otherwise `model` is treated as a
    directory of multiple models to compare.

    Args:
        model: Path to a model archive, or a directory of models to compare.
        stories: Path to the test stories.
        endpoints: Path to endpoints file.
        output: Directory where evaluation results are written.
        model_path: Path to an already unpacked model directory.
        **kwargs: Additional arguments, forwarded to `rasa.core.test` after
            filtering.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa_nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if output:
        nlu_utils.create_dir(output)

    if os.path.isfile(model):
        model_path = get_model(model)

    if model_path:
        # Single model: Normal evaluation
        # Bug fix: `get_model(model)` was previously called a second time
        # here, redundantly re-unpacking and clobbering an explicitly
        # passed `model_path`.
        loop = asyncio.get_event_loop()
        core_path, nlu_path = get_model_subdirectories(model_path)

        _interpreter = NaturalLanguageInterpreter.create(nlu_path, _endpoints.nlu)
        _agent = Agent.load(core_path, interpreter=_interpreter)

        kwargs = minimal_kwargs(kwargs, rasa.core.test)
        loop.run_until_complete(
            rasa.core.test(stories, _agent, out_directory=output, **kwargs)
        )
    else:
        # Multiple models: compare their performance on the test stories.
        from rasa.core.test import compare, plot_curve

        compare(model, stories, output)

        story_n_path = os.path.join(model, 'num_stories.json')
        number_of_stories = core_utils.read_json_file(story_n_path)
        plot_curve(output, number_of_stories)
def test_nlu(model: Optional[Text], nlu_data: Optional[Text], kwargs: Optional[Dict]):
    """Tests an NLU model against a set of test NLU data.

    Args:
        model: Path to model archive.
        nlu_data: Path to the NLU test data.
        kwargs: Optional additional arguments, forwarded to `run_evaluation`
            after filtering.
    """
    from rasa.nlu.test import run_evaluation

    # Guard: the parameter is Optional, but `minimal_kwargs` expects a dict.
    kwargs = kwargs or {}

    unpacked_model = get_model(model)
    if unpacked_model is None:
        print_error(
            "Could not find any model. Use 'rasa train nlu' to train an NLU model."
        )
        return

    nlu_model = os.path.join(unpacked_model, "nlu")

    if os.path.exists(nlu_model):
        # `data_path` and `model` are passed explicitly below; filter them out.
        kwargs = minimal_kwargs(kwargs, run_evaluation, ["data_path", "model"])
        run_evaluation(nlu_data, nlu_model, **kwargs)
    else:
        # A model was found, but it has no NLU part — say so explicitly
        # rather than repeating the "no model at all" message.
        print_error(
            "Could not find any NLU model. Use 'rasa train nlu' to train an NLU model."
        )
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
):
    """Tests a trained Core model against a set of test stories.

    Args:
        model: Path to model archive.
        stories: Path to the test stories.
        endpoints: Path to endpoints file.
        output: Directory where evaluation results are written.
        kwargs: Additional arguments, forwarded to `rasa.core.test` after
            filtering.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    from rasa.nlu import utils as nlu_utils
    from rasa.model import get_model
    from rasa.core.interpreter import NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        nlu_utils.create_dir(output)

    model_path = get_model(model)
    if model_path is None:
        # Bug fix: previously a missing model crashed
        # `get_model_subdirectories(None)`; report it cleanly instead.
        print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model."
        )
        return

    core_path, nlu_path = get_model_subdirectories(model_path)

    if os.path.exists(core_path) and os.path.exists(nlu_path):
        _interpreter = NaturalLanguageInterpreter.create(nlu_path, _endpoints.nlu)
        _agent = Agent.load(model_path, interpreter=_interpreter)

        # `stories` and `agent` are passed explicitly below; filter them out.
        kwargs = minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])

        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            rasa.core.test(stories, _agent, out_directory=output, **kwargs)
        )
    else:
        print_error(
            "Not able to test. Make sure both models - core and nlu - are available."
        )