def test_expand_raw_dict_error_if_missing_key():
    mapping = {'another_key': 'value'}
    d = {'some_setting': '{{key}}'}

    with pytest.raises(KeyError) as excinfo:
        expand_raw_dictionary(d, mapping)

    expected = ('"Error replacing placeholder: \'key\' is undefined.'
                ' Loaded env: {\'another_key\': \'value\'}"')
    assert expected in str(excinfo.value)
def test_expand_raw_dict_nested():
    mapping = EnvDict({'key': 'value'})
    d = {
        'section': {
            'some_setting': '{{key}}'
        },
        'list': ['{{key}}', '{{key}}']
    }

    assert (expand_raw_dictionary(d, mapping) == {
        'section': {
            'some_setting': 'value'
        },
        'list': ['value', 'value']
    })
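# Illustrative sketch (added for clarity, not part of the original test
# suite): the same nested expansion is what DAGSpec relies on when a
# pipeline.yaml uses placeholders inside task definitions. The spec dict and
# env values below are hypothetical.
def _example_nested_expansion_in_a_spec():
    spec = {'tasks': [{'source': 'clean.py',
                       'product': '{{root}}/clean.ipynb'}]}
    env = EnvDict({'root': '/data'})
    assert (expand_raw_dictionary(spec, env) == {
        'tasks': [{'source': 'clean.py',
                   'product': '/data/clean.ipynb'}]
    })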
def __init__(self,
             data,
             env=None,
             lazy_import=False,
             reload=False,
             parent_path=None):
    if isinstance(data, (str, Path)):
        if parent_path is not None:
            raise ValueError('parent_path must be None when '
                             f'initializing {type(self).__name__} with '
                             'a path to a YAML spec')

        # this is only used to display an error message with the path
        # to the loaded file
        path_for_errors = data

        # resolve the parent path to make sources and products unambiguous
        # even if the current working directory changes
        path_to_entry_point = Path(data).resolve()
        self._parent_path = str(path_to_entry_point.parent)

        content = Path(data).read_text()

        try:
            data = yaml.safe_load(content)
        except (yaml.parser.ParserError,
                yaml.constructor.ConstructorError) as e:
            error = e
        else:
            error = None

        if error:
            if '{{' in content or '}}' in content:
                raise DAGSpecInitializationError(
                    'Failed to initialize spec. It looks like '
                    'you\'re using placeholders (i.e. {{placeholder}}). '
                    'Make sure values are enclosed in quotes '
                    '(e.g. key: "{{placeholder}}"). Original '
                    'parser error:\n\n'
                    f'{error}')
            else:
                raise error
    else:
        path_for_errors = None

        # FIXME: add test cases, some of those features won't work if
        # _parent_path is None. We should make sure that we either raise
        # an error if _parent_path is needed or use the current working
        # directory if it's appropriate - this is mostly to make relative
        # paths consistent: they should be relative to the file that
        # contains them
        self._parent_path = (None if not parent_path else str(
            Path(parent_path).resolve()))

    # try to look for env.yaml in default locations
    env_default_path = default.path_to_env(self._parent_path)

    self.data = data

    if isinstance(self.data, list):
        self.data = {'tasks': self.data}

    # validate keys defined at the top (nested keys are not validated here)
    self._validate_top_keys(self.data, path_for_errors)

    logger.debug('DAGSpec environment:\n%s', pp.pformat(env))

    env = env or dict()

    # NOTE: when loading from a path, EnvDict recursively looks
    # at parent folders, this is useful when loading envs
    # in nested directories where scripts/functions need the env
    # but here, since we just need this for the spec, we might
    # want to turn it off. should we add a parameter to EnvDict
    # to control this?
    if env_default_path:
        defaults = yaml.safe_load(Path(env_default_path).read_text())
        self.env = EnvDict(env,
                           path_to_here=self._parent_path,
                           defaults=defaults)
    else:
        self.env = EnvDict(env, path_to_here=self._parent_path)

    self.data = expand_raw_dictionary(self.data, self.env)

    logger.debug('Expanded DAGSpec:\n%s', pp.pformat(data))

    # if there is a "location" top key, we don't have to do anything else
    # as we will just load the dotted path when .to_dag() is called
    if 'location' not in self.data:
        Meta.initialize_inplace(self.data)

        import_tasks_from = self.data['meta']['import_tasks_from']

        if import_tasks_from is not None:
            # when using a relative path in "import_tasks_from", we must
            # make it absolute...
            if not Path(import_tasks_from).is_absolute():
                # use _parent_path if there is one
                if self._parent_path:
                    self.data['meta']['import_tasks_from'] = str(
                        Path(self._parent_path, import_tasks_from))
                # otherwise just make it absolute
                else:
                    self.data['meta']['import_tasks_from'] = str(
                        Path(import_tasks_from).resolve())

            imported = yaml.safe_load(
                Path(self.data['meta']['import_tasks_from']).read_text())

            if self.env is not None:
                imported = expand_raw_dictionaries(imported, self.env)

            # relative paths here are relative to the file where they
            # are declared
            base_path = Path(self.data['meta']['import_tasks_from']).parent

            for task in imported:
                add_base_path_to_source_if_relative(task, base_path=base_path)

            self.data['tasks'].extend(imported)

        self.data['tasks'] = [
            normalize_task(task) for task in self.data['tasks']
        ]

        # make sure the folder where the pipeline is located is in sys.path
        # otherwise dynamic imports needed by TaskSpec will fail
        with add_to_sys_path(self._parent_path, chdir=False):
            self.data['tasks'] = [
                TaskSpec(t,
                         self.data['meta'],
                         project_root=self._parent_path,
                         lazy_import=lazy_import,
                         reload=reload) for t in self.data['tasks']
            ]
    else:
        self.data['meta'] = Meta.empty()
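# Illustrative usage sketch (added for clarity; `pipeline.yaml`, `env.yaml`,
# and the values below are hypothetical examples, not part of this module).
# Given a spec that uses placeholders and an env.yaml next to it, __init__
# resolves the parent path, loads env.yaml as defaults, expands the
# placeholders, and turns each entry into a TaskSpec; the DAG itself is built
# later by .to_dag():
#
#     # env.yaml
#     # root: /data
#
#     # pipeline.yaml
#     # tasks:
#     #   - source: clean.py
#     #     product: "{{root}}/clean.ipynb"
#
#     spec = DAGSpec('pipeline.yaml')  # env.yaml found via default.path_to_env
#     dag = spec.to_dag()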
def test_expand_raw_dictionary_parses_literals():
    raw = {'a': '{{a}}', 'b': '{{b}}', 'c': '{{c}}'}
    mapping = {'a': {1, 2, 3}, 'b': [1, 2, 3], 'c': {'z': 1}}
    out = expand_raw_dictionary(raw, mapping)
    assert out == mapping
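# Illustrative sketch (added for clarity, not part of the original test
# suite): the literal parsing exercised above means a placeholder whose env
# value is a non-string object comes back as that object, not as its string
# repr. The key names below are hypothetical.
def _example_literal_expansion():
    out = expand_raw_dictionary({'params': '{{b}}'}, {'b': [1, 2, 3]})
    assert out == {'params': [1, 2, 3]}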
def test_expand_raw_dictionary():
    mapping = EnvDict({'key': 'value'})
    d = {'some_setting': '{{key}}'}
    assert expand_raw_dictionary(d, mapping) == {'some_setting': 'value'}