def test_auto_configure(language: Text, keys_to_configure: Set[Text]):
    if sys.platform == "win32" or language != "en":
        default_config = io_utils.read_config_file(DEFAULT_CONFIG_OTHER_LANGUAGE)
    else:
        default_config = io_utils.read_config_file(DEFAULT_CONFIG_EN)

    config = autoconfig._auto_configure({"language": language}, keys_to_configure)

    for k in keys_to_configure:
        assert config[k] == default_config[k]  # given keys are configured correctly

    assert config.get("language") == language
    config.pop("language")
    assert len(config) == len(keys_to_configure)  # no other keys are configured
def _auto_configure(
    config: Dict[Text, Any], keys_to_configure: Set[Text]
) -> Dict[Text, Any]:
    """Complete a config by adding automatic configuration for the specified keys.

    Args:
        config: The provided configuration.
        keys_to_configure: Keys to be configured automatically (e.g. `policies`).

    Returns:
        The resulting configuration including both the provided and the
        automatically configured keys.
    """
    import pkg_resources

    if keys_to_configure:
        logger.debug(
            f"The provided configuration does not contain the key(s) "
            f"{common_utils.transform_collection_to_sentence(keys_to_configure)}. "
            f"Values will be provided from the default configuration."
        )

    if sys.platform == "win32":
        filename = "default_config_other_language.yml"
    elif config.get("language") == "en":
        filename = "default_config_en.yml"
    else:
        filename = "default_config_other_language.yml"

    default_config_file = pkg_resources.resource_filename(__name__, filename)
    default_config = io_utils.read_config_file(default_config_file)

    config = copy.deepcopy(config)
    for key in keys_to_configure:
        config[key] = default_config[key]

    return config
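# Hedged usage sketch (not from the original source). It shows how the
# _auto_configure helper above is expected to behave when only "language" is
# provided; the key names "pipeline" and "policies" are assumptions.
partial_config = {"language": "en"}
completed = _auto_configure(partial_config, {"pipeline", "policies"})
# `completed` keeps the provided "language" key and adds "pipeline" and
# "policies" from the bundled default config (the English one on non-Windows
# platforms). `partial_config` itself is unchanged, since the helper works on
# a deep copy.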
def __init__(
    self,
    config_file: Text,
    domain_path: Optional[Text] = None,
    training_data_paths: Optional[Union[List[Text], Text]] = None,
    project_directory: Optional[Text] = None,
):
    self.config = io_utils.read_config_file(config_file)

    if domain_path:
        self._domain_paths = [domain_path]
    else:
        self._domain_paths = []
    self._story_paths = []
    self._nlu_paths = []
    self._imports = set()
    self._additional_paths = training_data_paths or []
    self._project_directory = project_directory or os.path.dirname(config_file)

    self._init_from_dict(self.config, self._project_directory)

    extra_story_files, extra_nlu_files = data.get_core_nlu_files(training_data_paths)
    self._story_paths += list(extra_story_files)
    self._nlu_paths += list(extra_nlu_files)

    logger.debug(
        "Selected skills: {}".format(
            "".join(["\n-{}".format(i) for i in self._imports])
        )
    )
def __init__(
    self,
    config_file: Text,
    domain_path: Optional[Text] = None,
    training_data_paths: Optional[Union[List[Text], Text]] = None,
    project_directory: Optional[Text] = None,
):
    self.config = io_utils.read_config_file(config_file)

    if domain_path:
        self._domain_paths = [domain_path]
    else:
        self._domain_paths = []
    self._story_paths = []
    self._nlu_paths = []
    self._imports = []
    self._additional_paths = training_data_paths or []
    self._project_directory = project_directory or os.path.dirname(config_file)

    self._init_from_dict(self.config, self._project_directory)

    extra_story_files, extra_nlu_files = data.get_core_nlu_files(training_data_paths)
    self._story_paths += list(extra_story_files)
    self._nlu_paths += list(extra_nlu_files)

    logger.debug(
        "Selected projects: {}".format("".join([f"\n-{i}" for i in self._imports]))
    )

    rasa.utils.common.mark_as_experimental_feature(
        feature_name="MultiProjectImporter"
    )
def get_configuration(
    config_file_path: Text,
    training_type: Optional[TrainingType] = TrainingType.BOTH,
) -> Dict[Text, Any]:
    """Determine configuration from a configuration file.

    Keys that are provided and have a value in the file are kept.
    Keys that are not provided are configured automatically.

    Args:
        config_file_path: The path to the configuration file.
        training_type: NLU, CORE or BOTH depending on what is trained.
    """
    if not config_file_path or not os.path.exists(config_file_path):
        logger.debug("No configuration file was provided to the TrainingDataImporter.")
        return {}

    config = io_utils.read_config_file(config_file_path)

    missing_keys = _get_missing_config_keys(config, training_type)
    keys_to_configure = _get_unspecified_autoconfigurable_keys(config, training_type)

    if keys_to_configure:
        config = _auto_configure(config, keys_to_configure)
        _dump_config(
            config, config_file_path, missing_keys, keys_to_configure, training_type
        )

    return config
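# Hedged usage sketch: the file name is a placeholder. Calling the function
# above on an existing project config returns its contents with any
# unspecified autoconfigurable keys filled in (and dumped back to the file);
# a missing file yields an empty dict.
configuration = get_configuration("config.yml", TrainingType.BOTH)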
async def save_from_path(
    self, path: Text, bot: Text, overwrite: bool = True, user="******"
):
    try:
        story_files, nlu_files = get_core_nlu_files(
            os.path.join(path, DEFAULT_DATA_PATH)
        )
        nlu = utils.training_data_from_paths(nlu_files, "en")
        domain = Domain.from_file(os.path.join(path, DEFAULT_DOMAIN_PATH))
        domain.check_missing_templates()
        story_steps = await StoryFileReader.read_from_files(story_files, domain)
        config = read_config_file(os.path.join(path, DEFAULT_CONFIG_PATH))

        self.save_domain(domain, bot, user)
        self.save_stories(story_steps, bot, user)
        self.save_nlu(nlu, bot, user)
        self.save_config(config, bot, user)
    except InvalidDomain as e:
        logging.info(e)
        raise AppException(
            """Failed to validate yaml file.
            Please make sure the file is initial and all mandatory parameters are specified"""
        )
    except Exception as e:
        logging.info(e)
        raise AppException(e)
def fetch_configs(self, bot: Text):
    try:
        configs = Configs.objects.get(bot=bot)
    except DoesNotExist as e:
        logging.info(e)
        configs = Configs._from_son(read_config_file("./template/config.yml"))
    return configs
def _init_from_file(self, path: Text) -> None:
    path = os.path.abspath(path)
    if os.path.exists(path) and data.is_config_file(path):
        config = io_utils.read_config_file(path)

        parent_directory = os.path.dirname(path)
        self._init_from_dict(config, parent_directory)
    else:
        raise_warning(f"'{path}' does not exist or is not a valid config file.")
def test_add_missing_config_keys_to_file(
    tmp_path: Path, config_path: Path, missing_keys: Set[Text]
):
    config_file = str(tmp_path / "config.yml")
    shutil.copyfile(str(config_path), config_file)

    autoconfig._add_missing_config_keys_to_file(config_file, missing_keys)

    config_after_addition = io_utils.read_config_file(config_file)

    assert all(key in config_after_addition for key in missing_keys)
def test_auto_configure(keys_to_configure: Set[Text]):
    default_config = io_utils.read_config_file(DEFAULT_CONFIG)

    config = autoconfig._auto_configure({}, keys_to_configure)

    for k in keys_to_configure:
        assert config[k] == default_config[k]  # given keys are configured correctly
    assert len(config) == len(keys_to_configure)  # no other keys are configured
def load_from_config(
    config_path: Text,
    domain_path: Optional[Text] = None,
    training_data_paths: Optional[List[Text]] = None,
) -> "TrainingDataImporter":
    """Loads a ``TrainingDataImporter`` instance from a configuration file."""
    config = io_utils.read_config_file(config_path)
    return TrainingDataImporter.load_from_dict(
        config, config_path, domain_path, training_data_paths
    )
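# Hedged usage sketch, assuming load_from_config is exposed on
# TrainingDataImporter as suggested by the snippet above; all paths are
# placeholders.
importer = TrainingDataImporter.load_from_config(
    config_path="config.yml",
    domain_path="domain.yml",
    training_data_paths=["data/"],
)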
def _is_config_file_as_expected(
    config_file_path: Text, missing_keys: Set[Text], auto_configured_keys: Set[Text]
) -> bool:
    try:
        content = io_utils.read_config_file(config_file_path)
    except ValueError:
        content = ""

    return (
        bool(content)
        and missing_keys == _get_missing_config_keys(content)
        and auto_configured_keys == _get_unspecified_autoconfigurable_keys(content)
    )
def test_dump_config_missing_file(tmp_path: Path, capsys: CaptureFixture):
    config_path = tmp_path / "non_existent_config.yml"

    config = io_utils.read_config_file(str(SOME_CONFIG))

    autoconfig._dump_config(config, str(config_path), set(), {"policies"})

    assert not config_path.exists()

    captured = capsys.readouterr()
    assert "has been removed or modified" in captured.out
def __init__(
    self,
    config_paths: Optional[Dict[Text, Text]] = None,
    domain_path: Optional[Text] = None,
    training_data_path: Optional[Text] = None,
):
    # keep only policies in core_config
    self.core_config = {
        'policies': io_utils.read_config_file(
            config_paths[list(config_paths.keys())[0]]
        )['policies']
    }

    self._stories_path = os.path.join(training_data_path, 'stories.md')

    # keep all but policies in nlu_config
    self.nlu_config = {}
    for lang in config_paths:
        self.nlu_config[lang] = io_utils.read_config_file(config_paths[lang])
        del self.nlu_config[lang]['policies']
        # so rasa.nlu.train.train makes the right get_nlu_data call
        self.nlu_config[lang]['data'] = 'data_for_' + lang
        self.nlu_config[lang]['path'] = os.path.join(
            training_data_path, 'nlu', '{}.md'.format(lang)
        )

    self._domain_path = domain_path
def _from_file(cls, path: Text, skill_selector: "SkillSelector") -> "SkillSelector":
    from rasa import data  # pytype: disable=pyi-error

    path = os.path.abspath(path)
    if os.path.exists(path) and data.is_config_file(path):
        config = io_utils.read_config_file(path)

        parent_directory = os.path.dirname(path)
        return cls._from_dict(config, parent_directory, skill_selector)

    return cls.all_skills()
def test_prepare_credentials_for_rasa_x_if_rasa_channel_not_given(tmpdir: Path):
    credentials_path = str(tmpdir / "credentials.yml")

    io_utils.write_yaml_file({}, credentials_path)

    tmp_credentials = x._prepare_credentials_for_rasa_x(
        credentials_path, "http://localhost:5002"
    )

    actual = io_utils.read_config_file(tmp_credentials)

    assert actual["rasa"]["url"] == "http://localhost:5002"
def test_set_config():
    response = client.put(
        "/api/bot/config",
        headers={"Authorization": pytest.token_type + " " + pytest.access_token},
        json=read_config_file('./template/config/default.yml'),
    )
    actual = response.json()
    assert actual['data'] is None
    assert actual['error_code'] == 0
    assert actual['message'] == "Config saved!"
    assert actual['success']
def test_prepare_credentials_if_already_valid(tmpdir: Path):
    credentials_path = str(tmpdir / "credentials.yml")

    credentials = {
        "rasa": {"url": "my-custom-url"},
        "another-channel": {"url": "some-url"},
    }
    io_utils.write_yaml_file(credentials, credentials_path)

    x._prepare_credentials_for_rasa_x(credentials_path)

    actual = io_utils.read_config_file(credentials_path)

    assert actual == credentials
def _is_config_file_as_expected(
    config_file_path: Text,
    missing_keys: Set[Text],
    auto_configured_keys: Set[Text],
    training_type: Optional[TrainingType] = TrainingType.BOTH,
) -> bool:
    try:
        content = io_utils.read_config_file(config_file_path)
    except ValueError:
        content = ""

    return (
        bool(content)
        and missing_keys == _get_missing_config_keys(content, training_type)
        and auto_configured_keys
        == _get_unspecified_autoconfigurable_keys(content, training_type)
    )
def __init__(
    self,
    config_file: Optional[Text] = None,
    domain_path: Optional[Text] = None,
    training_data_paths: Optional[Union[List[Text], Text]] = None,
):
    if config_file and os.path.exists(config_file):
        self.config = io_utils.read_config_file(config_file)
    else:
        self.config = {}

    self._domain_path = domain_path

    self._story_files, self._nlu_files = data.get_core_nlu_files(training_data_paths)
def _prepare_credentials_for_rasa_x(
    credentials_path: Optional[Text], rasa_x_url: Optional[Text] = None
) -> Text:
    credentials_path = get_validated_path(
        credentials_path, "credentials", DEFAULT_CREDENTIALS_PATH, True
    )
    if credentials_path:
        credentials = io_utils.read_config_file(credentials_path)
    else:
        credentials = {}

    # this makes sure Rasa X is properly configured no matter what
    if rasa_x_url:
        credentials["rasa"] = {"url": rasa_x_url}

    dumped_credentials = yaml.dump(credentials, default_flow_style=False)
    tmp_credentials = io_utils.create_temporary_file(dumped_credentials, "yml")

    return tmp_credentials
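# Hedged usage sketch: the URL is a placeholder. The helper above returns the
# path of a temporary YAML file whose "rasa" entry points at the given Rasa X
# URL, which is what the tests earlier in this section assert.
tmp_credentials_path = _prepare_credentials_for_rasa_x(
    "credentials.yml", "http://localhost:5002"
)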
def test_set_config_policy_error():
    data = read_config_file('./template/config/default.yml')
    data['policies'].append({"name": "TestPolicy"})
    response = client.put(
        "/api/bot/config",
        headers={"Authorization": pytest.token_type + " " + pytest.access_token},
        json=data,
    )
    actual = response.json()
    assert actual['data'] is None
    assert actual['error_code'] == 422
    assert (
        actual['message']
        == "Module for policy 'TestPolicy' could not be loaded. Please make sure the name is a valid policy."
    )
    assert not actual['success']
async def train(request):
    body = await request.json()

    if 'rasa_nlu_data' not in body:
        raise HTTPException(422)

    model_name = request.path_params["model_name"]
    nlu_data = body["rasa_nlu_data"]

    if 'config' in body:
        config = read_yaml(body['config'])
    else:
        config = read_config_file('./config.yml')

    async_trainer.train(nlu_data, model_name, config)
    return JSONResponse({"message": "Training started"})
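# Hedged client-side sketch for the endpoint above: the host, route, HTTP
# method and pipeline template are assumptions; only the "rasa_nlu_data" key
# is required by the handler, and "config" is optional YAML.
import requests

payload = {
    "rasa_nlu_data": {"common_examples": []},
    "config": "language: en\npipeline: supervised_embeddings",
}
requests.post("http://localhost:8000/models/my-model/train", json=payload)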
def test_set_config_pipeline_error():
    data = read_config_file('./template/config/default.yml')
    data['pipeline'].append({"name": "TestFeaturizer"})
    response = client.put(
        "/api/bot/config",
        headers={"Authorization": pytest.token_type + " " + pytest.access_token},
        json=data,
    )
    actual = response.json()
    print(actual)
    assert actual['data'] is None
    assert actual['error_code'] == 422
    assert (
        actual['message']
        == """Cannot find class 'TestFeaturizer' from global namespace. Please check that there is no typo in the class name and that you have imported the class into the global namespace."""
    )
    assert not actual['success']
def save_from_path(self, path: Text, bot: Text, user="******"):
    try:
        story_files, nlu_files = get_core_nlu_files(
            os.path.join(path, DEFAULT_DATA_PATH)
        )
        nlu = utils.training_data_from_paths(nlu_files, "en")
        domain = Domain.from_file(os.path.join(path, DEFAULT_DOMAIN_PATH))
        loop = asyncio.new_event_loop()
        story_steps = loop.run_until_complete(
            StoryFileReader.read_from_files(story_files, domain)
        )

        self.save_domain(domain, bot, user)
        self.save_stories(story_steps, bot, user)
        self.save_nlu(nlu, bot, user)
        self.__save_config(
            read_config_file(os.path.join(path, DEFAULT_CONFIG_PATH)), bot, user
        )
    except InvalidDomain as e:
        logging.info(e)
        raise AppException(
            """Failed to validate yaml file.
            Please make sure the file is initial and all mandatory parameters are specified"""
        )
    except Exception as e:
        logging.info(e)
        raise AppException(e)
def __init__(
    self,
    config_file: Optional[Union[List[Text], Text]] = None,
    domain_path: Optional[Text] = None,
    training_data_paths: Optional[Union[List[Text], Text]] = None,
):
    self._domain_path = domain_path

    self._story_files, self._nlu_files = data.get_core_nlu_files(training_data_paths)

    self.core_config = {}
    self.nlu_config = {}
    if config_file:
        if not isinstance(config_file, list):
            config_file = [config_file]
        for file in config_file:
            if not os.path.exists(file):
                continue
            config = io_utils.read_config_file(file)
            lang = config["language"]
            self.core_config = {"policies": config["policies"]}
            self.nlu_config[lang] = {
                "pipeline": config["pipeline"],
                "language": lang,
            }
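# Hedged illustration (assumed file contents): given two config files, each
# defining "language", "pipeline" and "policies", the constructor above ends
# up with roughly:
#
#   self.nlu_config == {
#       "en": {"pipeline": <en pipeline>, "language": "en"},
#       "fr": {"pipeline": <fr pipeline>, "language": "fr"},
#   }
#   self.core_config == {"policies": <policies of the last existing file>}
#
# core_config is overwritten on every loop iteration, so only the policies of
# the last config file that exists are kept.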
import requests
from http import HTTPStatus
from typing import Any, Text, Dict, List, Union, Optional

from rasa.constants import DEFAULT_CREDENTIALS_PATH
from rasa.utils.io import read_config_file
from rasa_sdk import Action, Tracker
from rasa_sdk.forms import FormAction
from rasa_sdk.events import UserUtteranceReverted, SlotSet
from rasa_sdk.executor import CollectingDispatcher

credentials_file = DEFAULT_CREDENTIALS_PATH
all_credentials = read_config_file(credentials_file)
restdb_credentials = all_credentials.get('restdb')
database = restdb_credentials['database']
collection = restdb_credentials['collection']

headers = {
    "content-type": "application/json",
    "x-apikey": restdb_credentials['apikey'],
    "cache-control": "no-cache",
}

"""Revertible mapped actions for explanations"""


class ActionExplainOxygen(Action):
    def name(self):
        return "action_explain_oxygen"
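# Hedged sketch (not part of the original action file): one way the
# module-level `database`, `collection` and `headers` above could be used to
# query restdb.io. The URL layout and the helper name `fetch_records` are
# assumptions for illustration only.
def fetch_records() -> List[Dict[Text, Any]]:
    url = f"https://{database}.restdb.io/rest/{collection}"
    response = requests.get(url, headers=headers)
    if response.status_code != HTTPStatus.OK:
        return []
    return response.json()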