def test_nlu(
    model: Optional[Text],
    nlu_data: Optional[Text],
    output_directory: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
) -> None:
    """Run NLU evaluation for a trained model against test data.

    Args:
        model: Path to a (packed or unpacked) trained model.
        nlu_data: Path to the NLU test data.
        output_directory: Directory where evaluation reports are written.
        kwargs: Additional arguments forwarded to
            `rasa.nlu.test.run_evaluation` (filtered via `minimal_kwargs`).
    """
    from rasa.nlu.test import run_evaluation
    from rasa.model import get_model

    try:
        unpacked_model = get_model(model)
    except ModelNotFound:
        print_error(
            "Could not find any model. Use 'rasa train nlu' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
        return

    io_utils.create_directory(output_directory)

    nlu_model = os.path.join(unpacked_model, "nlu")

    if os.path.exists(nlu_model):
        # BUG FIX: `kwargs` defaults to `None`; guard before filtering so
        # `minimal_kwargs` always receives a dict.
        kwargs = utils.minimal_kwargs(
            kwargs or {}, run_evaluation, ["data_path", "model"]
        )
        run_evaluation(
            nlu_data, nlu_model, output_directory=output_directory, **kwargs
        )
    else:
        print_error(
            "Could not find any model. Use 'rasa train nlu' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
def perform_nlu_cross_validation(
    config: Text, nlu: Text, output: Text, kwargs: Optional[Dict[Text, Any]]
):
    """Run k-fold cross-validation on NLU data and log the evaluation results.

    Args:
        config: Path to the NLU model configuration file.
        nlu: Path to the NLU training data.
        output: Directory where evaluation output is written.
        kwargs: Optional extra arguments; `folds` selects the number of folds
            (default 3), remaining keys are forwarded to `cross_validate`.
    """
    import rasa.nlu.config
    from rasa.nlu.test import (
        drop_intents_below_freq,
        cross_validate,
        return_results,
        return_entity_results,
    )

    if kwargs is None:
        kwargs = {}
    folds = int(kwargs.get("folds", 3))

    nlu_config = rasa.nlu.config.load(config)
    # Intents with fewer examples than the number of folds cannot be split.
    data = drop_intents_below_freq(
        rasa.nlu.training_data.load_data(nlu), cutoff=folds
    )
    cv_kwargs = utils.minimal_kwargs(kwargs, cross_validate)
    results, entity_results = cross_validate(
        data, folds, nlu_config, output, **cv_kwargs
    )
    logger.info("CV evaluation (n={})".format(folds))

    if any(results):
        logger.info("Intent evaluation results")
        for split in ("train", "test"):
            return_results(getattr(results, split), split)

    if any(entity_results):
        logger.info("Entity evaluation results")
        for split in ("train", "test"):
            return_entity_results(getattr(entity_results, split), split)
def _importer_from_dict(
    importer_config: Dict,
    config_path: Text,
    domain_path: Optional[Text] = None,
    training_data_paths: Optional[List[Text]] = None,
    training_type: Optional[TrainingType] = TrainingType.BOTH,
) -> Optional["TrainingDataImporter"]:
    """Instantiate a single training-data importer from its config dict.

    Returns `None` when the configured importer class cannot be resolved.
    """
    from rasa.importers.multi_project import MultiProjectImporter
    from rasa.importers.rasa import RasaFileImporter

    module_path = importer_config.pop("name", None)

    # Built-in importers are matched by class name; anything else is treated
    # as a dotted module path to a custom importer class.
    builtin_importers = {
        RasaFileImporter.__name__: RasaFileImporter,
        MultiProjectImporter.__name__: MultiProjectImporter,
    }
    importer_class = builtin_importers.get(module_path)
    if importer_class is None:
        try:
            importer_class = rasa.shared.utils.common.class_from_module_path(
                module_path
            )
        except (AttributeError, ImportError):
            logging.warning(f"Importer '{module_path}' not found.")
            return None

    importer_config = dict(training_type=training_type, **importer_config)

    constructor_arguments = common_utils.minimal_kwargs(
        importer_config, importer_class
    )
    return importer_class(
        config_path, domain_path, training_data_paths, **constructor_arguments
    )
def _importer_from_dict(
    importer_config: Dict,
    config_path: Text,
    domain_path: Optional[Text] = None,
    training_data_paths: Optional[List[Text]] = None,
) -> Optional["TrainingDataImporter"]:
    """Build one importer instance from its configuration dictionary.

    Returns `None` when the named importer class cannot be loaded.
    """
    from rasa.importers.skill import SkillSelector
    from rasa.importers.rasa import RasaFileImporter

    module_path = importer_config.pop("name", None)

    # Resolve the two well-known importers directly; fall back to loading a
    # custom class from the given dotted module path.
    known_classes = {
        RasaFileImporter.__name__: RasaFileImporter,
        SkillSelector.__name__: SkillSelector,
    }
    importer_class = known_classes.get(module_path)
    if importer_class is None:
        try:
            importer_class = common_utils.class_from_module_path(module_path)
        except (AttributeError, ImportError):
            logging.warning("Importer '{}' not found.".format(module_path))
            return None

    constructor_arguments = common_utils.minimal_kwargs(
        importer_config, importer_class
    )
    return importer_class(
        config_path, domain_path, training_data_paths, **constructor_arguments
    )
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    kwargs: Optional[Dict] = None,
) -> None:
    """Run story evaluation against a trained Core model.

    Args:
        model: Path to a trained model.
        stories: Path to the test stories.
        endpoints: Path to the endpoints configuration file.
        output: Directory where evaluation results are written.
        kwargs: Additional arguments; `e2e` enables end-to-end evaluation,
            remaining keys are forwarded to `rasa.core.test`.
    """
    import rasa.core.test
    import rasa.core.utils as core_utils
    import rasa.model
    from rasa.core.interpreter import RegexInterpreter, NaturalLanguageInterpreter
    from rasa.core.agent import Agent

    _endpoints = core_utils.AvailableEndpoints.read_endpoints(endpoints)

    if kwargs is None:
        kwargs = {}

    if output:
        io_utils.create_directory(output)

    try:
        unpacked_model = rasa.model.get_model(model)
    except ModelNotFound:
        print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
        return

    core_path, nlu_path = rasa.model.get_model_subdirectories(unpacked_model)

    if not core_path:
        print_error(
            "Unable to test: could not find a Core model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
        # BUG FIX: previously fell through and attempted to load/test an
        # agent without a Core model after printing "Unable to test".
        return

    use_e2e = kwargs.get("e2e", False)

    _interpreter = RegexInterpreter()
    if use_e2e:
        if nlu_path:
            _interpreter = NaturalLanguageInterpreter.create(
                _endpoints.nlu or nlu_path
            )
        else:
            print_warning(
                "No NLU model found. Using default 'RegexInterpreter' for end-to-end "
                "evaluation."
            )

    _agent = Agent.load(unpacked_model, interpreter=_interpreter)

    # Keep only the arguments `rasa.core.test` actually accepts.
    kwargs = utils.minimal_kwargs(kwargs, rasa.core.test, ["stories", "agent"])

    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        rasa.core.test(stories, _agent, out_directory=output, **kwargs)
    )
def test_core(
    model: Optional[Text] = None,
    stories: Optional[Text] = None,
    output: Text = DEFAULT_RESULTS_PATH,
    additional_arguments: Optional[Dict] = None,
) -> None:
    """Run story evaluation against a trained Core model.

    Args:
        model: Path to a trained model.
        stories: Path to the test stories.
        output: Directory where evaluation results are written.
        additional_arguments: Extra arguments forwarded (after filtering) to
            `rasa.core.test.test`.
    """
    import rasa.model
    from rasa.core.interpreter import RegexInterpreter
    from rasa.core.agent import Agent

    if additional_arguments is None:
        additional_arguments = {}

    if output:
        io_utils.create_directory(output)

    try:
        unpacked_model = rasa.model.get_model(model)
    except ModelNotFound:
        cli_utils.print_error(
            "Unable to test: could not find a model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
        return

    _agent = Agent.load(unpacked_model)

    if _agent.policy_ensemble is None:
        cli_utils.print_error(
            "Unable to test: could not find a Core model. Use 'rasa train' to train a "
            "Rasa model and provide it via the '--model' argument."
        )
        # BUG FIX: without this return, testing proceeded on an agent that
        # has no Core model despite reporting "Unable to test".
        return

    if isinstance(_agent.interpreter, RegexInterpreter):
        cli_utils.print_warning(
            "No NLU model found. Using default 'RegexInterpreter' for end-to-end "
            "evaluation. If you added actual user messages to your test stories "
            "this will likely lead to the tests failing. In that case, you need "
            "to train a NLU model first, e.g. using `rasa train`."
        )

    from rasa.core.test import test

    # Keep only the arguments `test` actually accepts.
    kwargs = utils.minimal_kwargs(additional_arguments, test, ["stories", "agent"])

    _test_core(stories, _agent, output, **kwargs)
def run(
    model: Text,
    endpoints: Text,
    connector: Optional[Text] = None,
    credentials: Optional[Text] = None,
    **kwargs: Dict,
):
    """Runs a Rasa model.

    Args:
        model: Path to model archive.
        endpoints: Path to endpoints file.
        connector: Connector which should be use (overwrites `credentials` field).
        credentials: Path to channel credentials file.
        **kwargs: Additional arguments which are passed to
            `rasa.core.run.serve_application`.
    """
    # FIX: `connector` and `credentials` defaulted to `None` while annotated
    # as plain `Text`; PEP 484 requires an explicit `Optional` here.
    import rasa.core.run
    import rasa.nlu.run
    from rasa.core.utils import AvailableEndpoints
    import rasa.utils.common as utils

    _endpoints = AvailableEndpoints.read_endpoints(endpoints)

    if not connector and not credentials:
        # No channel configured at all: default to the REST channel so the
        # server is still reachable.
        connector = "rest"
        print_warning(
            "No chat connector configured, falling back to the "
            "REST input channel. To connect your bot to another channel, "
            "read the docs here: {}/user-guide/"
            "messaging-and-voice-channels".format(DOCS_BASE_URL)
        )

    # Keep only the arguments `serve_application` actually accepts.
    kwargs = utils.minimal_kwargs(kwargs, rasa.core.run.serve_application)
    rasa.core.run.serve_application(
        model,
        channel=connector,
        credentials=credentials,
        endpoints=_endpoints,
        **kwargs,
    )
def test_nlu(model: Optional[Text], nlu_data: Optional[Text], kwargs: Optional[Dict]):
    """Run NLU evaluation for a trained model against test data.

    Args:
        model: Path to a (packed or unpacked) trained model.
        nlu_data: Path to the NLU test data.
        kwargs: Additional arguments forwarded to
            `rasa.nlu.test.run_evaluation` (filtered via `minimal_kwargs`).
    """
    from rasa.nlu.test import run_evaluation

    try:
        unpacked_model = get_model(model)
    except ModelNotFound:
        print_error(
            "Could not find any model. Use 'rasa train nlu' to train an NLU model."
        )
        return

    nlu_model = os.path.join(unpacked_model, "nlu")

    if os.path.exists(nlu_model):
        # BUG FIX: `kwargs` is declared Optional; default to an empty dict
        # before filtering so `minimal_kwargs` never receives `None`.
        kwargs = utils.minimal_kwargs(
            kwargs or {}, run_evaluation, ["data_path", "model"]
        )
        run_evaluation(nlu_data, nlu_model, **kwargs)
    else:
        print_error(
            "Could not find any model. Use 'rasa train nlu' to train an NLU model."
        )
def perform_nlu_cross_validation(
    config: Text,
    nlu: Text,
    output: Text,
    additional_arguments: Optional[Dict[Text, Any]],
):
    """Run k-fold cross-validation for NLU and log intent, entity, and
    response-selection evaluation results.

    Args:
        config: Path to the NLU model configuration file.
        nlu: Path to the NLU training data.
        output: Directory where evaluation output is written.
        additional_arguments: Optional extra arguments; `folds` selects the
            number of folds (default 3), remaining keys are forwarded to
            `cross_validate`.
    """
    import rasa.nlu.config
    from rasa.nlu.test import (
        drop_intents_below_freq,
        cross_validate,
        log_results,
        log_entity_results,
    )

    if additional_arguments is None:
        additional_arguments = {}
    folds = int(additional_arguments.get("folds", 3))

    nlu_config = rasa.nlu.config.load(config)
    # Intents with fewer examples than the number of folds cannot be split.
    data = drop_intents_below_freq(
        rasa.shared.nlu.training_data.loading.load_data(nlu), cutoff=folds
    )
    cv_kwargs = utils.minimal_kwargs(additional_arguments, cross_validate)
    results, entity_results, response_selection_results = cross_validate(
        data, folds, nlu_config, output, **cv_kwargs
    )
    logger.info(f"CV evaluation (n={folds})")

    # Log each non-empty result section with its matching logger helper.
    sections = (
        ("Intent evaluation results", results, log_results),
        ("Entity evaluation results", entity_results, log_entity_results),
        (
            "Response Selection evaluation results",
            response_selection_results,
            log_results,
        ),
    )
    for headline, section_results, log in sections:
        if any(section_results):
            logger.info(headline)
            log(section_results.train, "train")
            log(section_results.test, "test")