def interactive(args: argparse.Namespace) -> None:
    """Start an interactive learning session.

    When no model path is supplied, an initial model is trained first from the
    provided training data; otherwise the existing model archive is used.
    Exits with an error message if neither is available.
    """
    _set_not_required_args(args)
    importer = TrainingDataImporter.load_from_config(
        args.config, args.domain, args.data
    )

    if args.model is not None:
        # A model was supplied explicitly — verify the archive actually exists.
        model_archive = get_provided_model(args.model)
        if not (model_archive and os.path.exists(model_archive)):
            utils.print_error_and_exit(
                f"Interactive learning process cannot be started as no initial model was "
                f"found at path '{args.model}'. Use 'rasa train' to train a model."
            )
    else:
        # No model given: we need story data to be able to train one.
        event_loop = asyncio.get_event_loop()
        stories = event_loop.run_until_complete(importer.get_stories())
        if not stories or stories.is_empty():
            utils.print_error_and_exit(
                "Could not run interactive learning without either core data or a model containing core data."
            )

        if args.core_only:
            model_archive = train.train_core(args)
        else:
            model_archive = train.train(args)
        if not model_archive:
            utils.print_error_and_exit(
                "Could not train an initial model. Either pass paths "
                "to the relevant training files (`--data`, `--config`, `--domain`), "
                "or use 'rasa train' to train a model."
            )

    if not args.skip_visualization:
        logger.info(f"Loading visualization data from {args.data}.")

    perform_interactive_learning(args, model_archive, importer)
def get_training_data():
    """Return the set of lowercased, stripped texts of all NLU intent examples."""
    importer = TrainingDataImporter.load_from_config(
        "../config.yml", "../base/domain-eng.yml", ["../base/data/"]
    )
    nlu_data = asyncio.get_event_loop().run_until_complete(importer.get_nlu_data())
    # Normalize each example text so duplicates differing only in case or
    # surrounding whitespace collapse to a single entry.
    return {example.text.lower().strip() for example in nlu_data.intent_examples}
async def train_async(
    domain: Union[Domain, Text],
    config: Text,
    training_files: Optional[Union[Text, List[Text]]],
    output_path: Text = DEFAULT_MODELS_PATH,
    force_training: bool = False,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    core_additional_arguments: Optional[Dict] = None,
    nlu_additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    """Train a combined Rasa model (Core and NLU) asynchronously.

    Args:
        domain: Path to the domain file.
        config: Path to the config for Core and NLU.
        training_files: Paths to the training data for Core and NLU.
        output_path: Output path.
        force_training: If `True` retrain model even if data has not changed.
        fixed_model_name: Name of model to be stored.
        persist_nlu_training_data: `True` if the NLU training data should be
            persisted with the model.
        core_additional_arguments: Additional training parameters for core
            training.
        nlu_additional_arguments: Additional training parameters forwarded to
            training method of each NLU component.

    Returns:
        Path of the trained model archive.
    """
    file_importer = TrainingDataImporter.load_from_config(
        config, domain, training_files
    )

    # The temporary training directory is cleaned up when the `with` block
    # exits, whichever return path is taken.
    with TempDirectoryPath(tempfile.mkdtemp()) as train_path:
        domain = await file_importer.get_domain()

        if domain.is_empty():
            return await handle_domain_if_not_exists(
                file_importer, output_path, fixed_model_name
            )

        return await _train_async_internal(
            file_importer,
            train_path,
            output_path,
            force_training,
            fixed_model_name,
            persist_nlu_training_data,
            core_additional_arguments=core_additional_arguments,
            nlu_additional_arguments=nlu_additional_arguments,
        )
def test_load_from_config(tmpdir: Path):
    """A config declaring `MultiProjectImporter` yields that importer wrapped
    in a `CombinedDataImporter`."""
    import rasa.utils.io as io_utils

    config_file = str(tmpdir / "config.yml")
    io_utils.write_yaml(
        {"importers": [{"name": "MultiProjectImporter"}]}, config_file
    )

    loaded = TrainingDataImporter.load_from_config(config_file)

    assert isinstance(loaded, CombinedDataImporter)
    assert isinstance(loaded._importers[0], MultiProjectImporter)
async def test_example_bot_training_data_not_raises(
    config_file: Text, domain_file: Text, data_folder: Text
):
    """Loading the example bot's NLU data and stories should emit no warnings."""
    importer = TrainingDataImporter.load_from_config(
        config_file, domain_file, data_folder
    )

    # `pytest.warns(None)` records every warning raised inside the block.
    with pytest.warns(None) as captured:
        await importer.get_nlu_data()
        await importer.get_stories()

    assert len(captured) == 0
def test_load_from_config(tmpdir: Path):
    """A config declaring `MultiProjectImporter` produces the expected importer
    nesting: E2EImporter -> RetrievalModelsDataImporter -> MultiProjectImporter."""
    config_file = str(tmpdir / "config.yml")
    rasa.shared.utils.io.write_yaml(
        {"importers": [{"name": "MultiProjectImporter"}]}, config_file
    )

    loaded = TrainingDataImporter.load_from_config(config_file)

    assert isinstance(loaded, E2EImporter)
    assert isinstance(loaded.importer, RetrievalModelsDataImporter)
    assert isinstance(loaded.importer._importer._importers[0], MultiProjectImporter)
async def test_example_bot_training_on_initial_project(tmp_path: Path):
    """Training data of a freshly scaffolded project loads without warnings.

    Tested separately (not in place) because configuration suggestions would
    otherwise modify the initial file scaffold.
    """
    scaffold.create_initial_project(str(tmp_path))

    importer = TrainingDataImporter.load_from_config(
        str(tmp_path / "config.yml"),
        str(tmp_path / "domain.yml"),
        str(tmp_path / "data"),
    )

    with pytest.warns(None) as warning_records:
        await importer.get_nlu_data()
        await importer.get_stories()

    assert len(warning_records) == 0
async def test_formbot_example():
    """End-to-end test of the formbot example: trains a dialogue model and
    drives the restaurant form through happy and unhappy conversation paths,
    mocking the action server over HTTP.
    """
    sys.path.append("examples/formbot/")
    project = Path("examples/formbot/")
    config = str(project / "config.yml")
    domain = str(project / "domain.yml")
    training_dir = project / "data"
    training_files = [
        str(training_dir / "rules.yml"),
        str(training_dir / "stories.yml"),
    ]
    importer = TrainingDataImporter.load_from_config(config, domain, training_files)
    # Point the agent at a mock action server endpoint (intercepted below).
    endpoint = EndpointConfig("https://example.com/webhooks/actions")
    endpoints = AvailableEndpoints(action=endpoint)
    agent = await train(
        domain,
        importer,
        str(project / "models" / "dialogue"),
        endpoints=endpoints,
        policy_config="examples/formbot/config.yml",
    )

    async def mock_form_happy_path(input_text, output_text, slot=None):
        # With a slot, the mocked action server keeps the form active and asks
        # for that slot; without one it deactivates the form and submits.
        if slot:
            form = "restaurant_form"
            template = f"utter_ask_{slot}"
        else:
            form = None
            template = "utter_submit"
        response = {
            "events": [
                {"event": "form", "name": form, "timestamp": None},
                {
                    "event": "slot",
                    "timestamp": None,
                    "name": "requested_slot",
                    "value": slot,
                },
            ],
            "responses": [{"template": template}],
        }
        with aioresponses() as mocked:
            mocked.post(
                "https://example.com/webhooks/actions",
                payload=response,
                repeat=True,
            )
            responses = await agent.handle_text(input_text)
            assert responses[0]["text"] == output_text

    async def mock_form_unhappy_path(input_text, output_text, slot):
        # Simulate the action server failing to extract the slot, which the
        # form policy should recover from.
        response_error = {
            "error": f"Failed to extract slot {slot} with action restaurant_form",
            "action_name": "restaurant_form",
        }
        with aioresponses() as mocked:
            # noinspection PyTypeChecker
            mocked.post(
                "https://example.com/webhooks/actions",
                repeat=True,
                exception=ClientResponseError(400, "", json.dumps(response_error)),
            )
            responses = await agent.handle_text(input_text)
            assert responses[0]["text"] == output_text

    # First pass through the form, including a chitchat interruption.
    await mock_form_happy_path("/request_restaurant", "what cuisine?", slot="cuisine")
    await mock_form_unhappy_path("/chitchat", "chitchat", slot="cuisine")
    await mock_form_happy_path(
        '/inform{"cuisine": "mexican"}', "how many people?", slot="num_people"
    )
    await mock_form_happy_path(
        '/inform{"number": "2"}',
        "do you want to seat outside?",
        slot="outdoor_seating",
    )
    await mock_form_happy_path(
        "/affirm", "please provide additional preferences", slot="preferences"
    )
    # Restart wipes the conversation; greet again and run the full form,
    # this time exercising the /stop interruption and completing submission.
    responses = await agent.handle_text("/restart")
    assert responses[0]["text"] == "restarted"
    responses = await agent.handle_text("/greet")
    assert (
        responses[0]["text"]
        == "Hello! I am restaurant search assistant! How can I help?"
    )
    await mock_form_happy_path("/request_restaurant", "what cuisine?", slot="cuisine")
    await mock_form_happy_path(
        '/inform{"cuisine": "mexican"}', "how many people?", slot="num_people"
    )
    await mock_form_happy_path(
        '/inform{"number": "2"}',
        "do you want to seat outside?",
        slot="outdoor_seating",
    )
    await mock_form_unhappy_path(
        "/stop", "do you want to continue?", slot="outdoor_seating"
    )
    await mock_form_happy_path(
        "/affirm", "do you want to seat outside?", slot="outdoor_seating"
    )
    await mock_form_happy_path(
        "/affirm", "please provide additional preferences", slot="preferences"
    )
    await mock_form_happy_path(
        "/deny",
        "please give your feedback on your experience so far",
        slot="feedback",
    )
    # No slot: form submits.
    await mock_form_happy_path('/inform{"feedback": "great"}', "All done!")
    responses = await agent.handle_text("/thankyou")
    assert responses[0]["text"] == "you are welcome :)"