Code example #1
def test_bogus_imports():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    # ensure ImportError on known-bogus imports
    bogus_imports = ['bogus_toplevel', 'ansible_collections.bogusns', 'ansible_collections.testns.boguscoll',
                     'ansible_collections.testns.testcoll.bogussub', 'ansible_collections.ansible.builtin.bogussub']
    for bogus_import in bogus_imports:
        with pytest.raises(ImportError):
            import_module(bogus_import)
Code example #2
    def run(self, terms, variables=None, **kwargs):
        result = []
        self.set_options(var_options=variables, direct=kwargs)
        not_found = self.get_option('result_not_found')
        no_version = self.get_option('result_no_version')

        for term in terms:
            if not FQCN_RE.match(term):
                raise AnsibleLookupError(
                    '"{term}" is not a FQCN'.format(term=term))

            try:
                collection_pkg = import_module(
                    'ansible_collections.{fqcn}'.format(fqcn=term))
            except ImportError:
                # Collection not found
                result.append(not_found)
                continue

            try:
                data = load_collection_meta(collection_pkg,
                                            no_version=no_version)
            except Exception as exc:
                raise AnsibleLookupError(
                    'Error while loading metadata for {fqcn}: {error}'.format(
                        fqcn=term, error=exc))

            result.append(data.get('version', no_version))

        return result
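The core of this lookup is a probe import: build the ansible_collections.<namespace>.<name> package name and let ImportError signal that the collection is not installed. Below is a minimal standalone sketch of that probe, with hypothetical sentinel values and an assumed FQCN regex (the real plugin takes both from its configured options).

import re
from importlib import import_module

# Hypothetical stand-ins for the plugin's 'result_not_found' / 'result_no_version' options.
NOT_FOUND = None
NO_VERSION = '*'

# Assumed shape of a collection FQCN: <namespace>.<collection>
FQCN_RE = re.compile(r'^[A-Za-z0-9_]+\.[A-Za-z0-9_]+$')


def probe_collection_version(fqcn):
    """Return NOT_FOUND, the collection's version, or NO_VERSION if unknown."""
    if not FQCN_RE.match(fqcn):
        raise ValueError('"%s" is not a FQCN' % fqcn)
    try:
        pkg = import_module('ansible_collections.%s' % fqcn)
    except ImportError:
        return NOT_FOUND  # collection is not installed
    # Collection packages loaded by the Ansible loader carry a _collection_meta
    # dict (see code example #9); fall back to NO_VERSION if it has no version.
    meta = getattr(pkg, '_collection_meta', None) or {}
    return meta.get('version', NO_VERSION)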
Code example #3
    def get_collection_path(self, collection_name):
        if not AnsibleCollectionRef.is_valid_collection_name(collection_name):
            raise ValueError('{0} is not a valid collection name'.format(to_native(collection_name)))

        m = import_module('ansible_collections.{0}'.format(collection_name))

        return m.__file__
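import_module hands back the module object, so the collection's on-disk location is simply the directory of its __file__. The same trick works for any regular package; a quick stdlib illustration:

import os
from importlib import import_module

# For a regular package, __file__ points at its __init__.py; the dirname of
# that is the package directory (collection packages expose a synthetic
# __file__ that dirname() resolves the same way).
pkg = import_module('json')
print(os.path.dirname(pkg.__file__))  # e.g. /usr/lib/python3.x/json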
Code example #4
def get_collection_role_path(role_name, collection_list=None):
    acr = AnsibleCollectionRef.try_parse_fqcr(role_name, 'role')

    if acr:
        # looks like a valid qualified collection ref; skip the collection_list
        collection_list = [acr.collection]
        subdirs = acr.subdirs
        resource = acr.resource
    elif not collection_list:
        return None  # not a FQ role and no collection search list spec'd, nothing to do
    else:
        resource = role_name  # treat as unqualified, loop through the collection search list to try and resolve
        subdirs = ''

    for collection_name in collection_list:
        try:
            acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type='role')
            # FIXME: error handling/logging; need to catch any import failures and move along

            # FIXME: this line shouldn't be necessary, but py2 pkgutil.get_data is delegating back to built-in loader when it shouldn't
            pkg = import_module(acr.n_python_package_name)

            if pkg is not None:
                # the package is now loaded, get the collection's package and ask where it lives
                path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
                return resource, to_text(path, errors='surrogate_or_strict'), collection_name

        except IOError:
            continue
        except Exception as ex:
            # FIXME: pick out typical import errors first, then error logging
            continue

    return None
Code example #5
    def _add_module(self, name, wrapper=False):
        m, ext = name
        m = to_text(m)
        if m.startswith("Ansible."):
            # Builtin util, use plugin loader to get the data
            mu_path = ps_module_utils_loader.find_plugin(m, ext)

            if not mu_path:
                raise AnsibleError(
                    'Could not find imported module support code '
                    'for \'%s\'' % m)

            module_util_data = to_bytes(_slurp(mu_path))
        else:
            # Collection util, load the package data based on the util import.
            submodules = tuple(m.split("."))
            n_package_name = to_native('.'.join(submodules[:-1]),
                                       errors='surrogate_or_strict')
            n_resource_name = to_native(submodules[-1] + ext,
                                        errors='surrogate_or_strict')

            try:
                module_util = import_module(to_native(n_package_name))
                module_util_data = to_bytes(pkgutil.get_data(
                    n_package_name, n_resource_name),
                                            errors='surrogate_or_strict')

                # Get the path of the util which is required for coverage collection.
                resource_paths = list(module_util.__path__)
                if len(resource_paths) != 1:
                    # This should never happen with a collection but we are just being defensive about it.
                    raise AnsibleError(
                        "Internal error: Referenced module_util package '%s' contains 0 or multiple "
                        "import locations when we only expect 1." %
                        n_package_name)
                mu_path = os.path.join(resource_paths[0], n_resource_name)
            except OSError as err:
                if err.errno == errno.ENOENT:
                    raise AnsibleError(
                        'Could not find collection imported module support code for \'%s\''
                        % to_native(m))
                else:
                    raise

        util_info = {
            'data': module_util_data,
            'path': to_text(mu_path),
        }
        if ext == ".psm1":
            self.ps_modules[m] = util_info
        else:
            if wrapper:
                self.cs_utils_wrapper[m] = util_info
            else:
                self.cs_utils_module[m] = util_info
        self.scan_module(module_util_data,
                         wrapper=wrapper,
                         powershell=(ext == ".psm1"))
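The collection branch pairs import_module (to ensure the util's package is importable) with pkgutil.get_data (to read the sibling .psm1/.cs file as bytes). Here is a hedged sketch of that pairing, written as a helper so nothing is imported until it is called with a real collection util name.

import os
import pkgutil
from importlib import import_module


def load_collection_util(util_fqn, ext):
    """Return (bytes, path) for a collection module_util.

    util_fqn is a dotted name such as
    'ansible_collections.<ns>.<coll>.plugins.module_utils.<Name>' (hypothetical).
    """
    package_name, _, leaf = util_fqn.rpartition('.')
    resource_name = leaf + ext

    module_util = import_module(package_name)              # ImportError if the package is absent
    data = pkgutil.get_data(package_name, resource_name)   # bytes, or None if the file is missing

    # A collection package has exactly one __path__ entry, which is why the
    # code above treats any other count as an internal error.
    path = os.path.join(list(module_util.__path__)[0], resource_name)
    return data, path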
Code example #6
File: loader.py  Project: taftsanders/ansible
    def _find_fq_plugin(self, fq_name, extension):
        """Search builtin paths to find a plugin. No external paths are searched,
        meaning plugins inside roles inside collections will be ignored.
        """

        plugin_type = AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(
            self.subdir)

        acr = AnsibleCollectionRef.from_fqcr(fq_name, plugin_type)

        n_resource = to_native(acr.resource, errors='strict')
        # we want this before the extension is added
        full_name = '{0}.{1}'.format(acr.n_python_package_name, n_resource)

        if extension:
            n_resource += extension

        pkg = sys.modules.get(acr.n_python_package_name)
        if not pkg:
            # FIXME: there must be cheaper/safer way to do this
            pkg = import_module(acr.n_python_package_name)

        # if the package is one of our flatmaps, we need to consult its loader to find the path, since the file could be
        # anywhere in the tree
        if hasattr(pkg, '__loader__') and isinstance(pkg.__loader__,
                                                     AnsibleFlatMapLoader):
            try:
                file_path = pkg.__loader__.find_file(n_resource)
                return full_name, to_text(file_path)
            except IOError:
                # this loader already takes care of extensionless files, so if we didn't find it, just bail
                return None, None

        pkg_path = os.path.dirname(pkg.__file__)

        n_resource_path = os.path.join(pkg_path, n_resource)

        # FIXME: and is file or file link or ...
        if os.path.exists(n_resource_path):
            return full_name, to_text(n_resource_path)

        # look for any matching extension in the package location (sans filter)
        ext_blacklist = ['.pyc', '.pyo']
        found_files = [
            f for f in glob.iglob(os.path.join(pkg_path, n_resource) + '.*') if
            os.path.isfile(f) and os.path.splitext(f)[1] not in ext_blacklist
        ]

        if not found_files:
            return None, None

        if len(found_files) > 1:
            # TODO: warn?
            pass

        return full_name, to_text(found_files[0])
Code example #7
File: doc.py  Project: zhuozecheng/ansible
    def _get_keywords_docs(keys):

        data = {}
        descs = DocCLI._list_keywords()
        for keyword in keys:
            if keyword.startswith('with_'):
                keyword = 'loop'
            try:
                # if no desc, typeerror raised ends this block
                kdata = {'description': descs[keyword]}

                # get playbook objects for keyword and use first to get keyword attributes
                kdata['applies_to'] = []
                for pobj in PB_OBJECTS:
                    if pobj not in PB_LOADED:
                        obj_class = 'ansible.playbook.%s' % pobj.lower()
                        loaded_class = importlib.import_module(obj_class)
                        PB_LOADED[pobj] = getattr(loaded_class, pobj, None)

                    if keyword in PB_LOADED[pobj]._valid_attrs:
                        kdata['applies_to'].append(pobj)

                        # we should only need these once
                        if 'type' not in kdata:

                            fa = getattr(PB_LOADED[pobj], '_%s' % keyword)
                            if getattr(fa, 'private'):
                                kdata = {}
                                raise KeyError

                            kdata['type'] = getattr(fa, 'isa', 'string')

                            if keyword.endswith('when'):
                                kdata['template'] = 'implicit'
                            elif getattr(fa, 'static'):
                                kdata['template'] = 'static'
                            else:
                                kdata['template'] = 'explicit'

                            # those that require no processing
                            for visible in ('alias', 'priority'):
                                kdata[visible] = getattr(fa, visible)

                # remove None keys
                for k in list(kdata.keys()):
                    if kdata[k] is None:
                        del kdata[k]

                data[keyword] = kdata

            except KeyError as e:
                display.warning("Skipping Invalid keyword '%s' specified: %s" %
                                (keyword, to_native(e)))

        return data
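The keyword lookup resolves each playbook class by dotted module name and then pulls the attribute off the imported module with getattr. The same import-then-getattr idiom works for any dotted path; a stdlib example:

import importlib

# Resolve 'collections.abc.Mapping' the way DocCLI resolves 'ansible.playbook.<Name>':
# import the module, then look the symbol up on it.
module_name, _, attr_name = 'collections.abc.Mapping'.rpartition('.')
mod = importlib.import_module(module_name)
mapping_cls = getattr(mod, attr_name, None)
assert mapping_cls is not None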
Code example #8
def test_on_collection_load():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    load_handler = MagicMock()
    AnsibleCollectionConfig.on_collection_load += load_handler

    m = import_module('ansible_collections.testns.testcoll')
    load_handler.assert_called_once_with(collection_name='testns.testcoll', collection_path=os.path.dirname(m.__file__))

    _meta = _get_collection_metadata('testns.testcoll')
    assert _meta
    # FIXME: compare to disk

    finder = get_default_finder()
    reset_collections_loader_state(finder)

    AnsibleCollectionConfig.on_collection_load += MagicMock(side_effect=Exception('bang'))
    with pytest.raises(Exception) as ex:
        import_module('ansible_collections.testns.testcoll')
    assert 'bang' in str(ex.value)
Code example #9
def test_collpkg_loader_load_module():
    reset_collections_loader_state()
    with patch('ansible.utils.collection_loader.AnsibleCollectionConfig') as p:
        for name in [
                'ansible_collections.ansible.builtin',
                'ansible_collections.testns.testcoll'
        ]:
            parent_pkg = name.rpartition('.')[0]
            module_to_load = name.rpartition('.')[2]
            paths = extend_paths(default_test_collection_paths, parent_pkg)
            existing_child_paths = [
                p for p in extend_paths(paths, module_to_load)
                if os.path.exists(p)
            ]
            is_builtin = 'ansible.builtin' in name
            if name in sys.modules:
                del sys.modules[name]
            loader = _AnsibleCollectionPkgLoader(name, path_list=paths)
            assert repr(loader).startswith('_AnsibleCollectionPkgLoader(path=')
            module = loader.load_module(name)
            assert module.__name__ == name
            assert isinstance(module.__loader__, _AnsibleCollectionPkgLoader)
            if is_builtin:
                assert module.__path__ == []
            else:
                assert module.__path__ == [existing_child_paths[0]]

            assert module.__package__ == name
            if is_builtin:
                assert module.__file__ == '<ansible_synthetic_collection_package>'
            else:
                assert module.__file__.endswith(
                    '__synthetic__') and os.path.isdir(
                        os.path.dirname(module.__file__))
            assert sys.modules.get(name) == module

            assert hasattr(module, '_collection_meta') and isinstance(
                module._collection_meta, dict)

            # FIXME: validate _collection_meta contents match what's on disk (or not)

            # if the module has metadata, try loading it with busted metadata
            if module._collection_meta:
                _collection_finder = import_module(
                    'ansible.utils.collection_loader._collection_finder')
                with patch.object(_collection_finder,
                                  '_meta_yml_to_dict',
                                  side_effect=Exception('bang')):
                    with pytest.raises(Exception) as ex:
                        _AnsibleCollectionPkgLoader(
                            name, path_list=paths).load_module(name)
                    assert 'error parsing collection metadata' in str(ex.value)
Code example #10
    def __init__(self, config=None):
        if config:
            paths = config.get_config_value('COLLECTIONS_PATHS')
        else:
            paths = os.environ.get('ANSIBLE_COLLECTIONS_PATHS',
                                   '').split(os.pathsep)

        if isinstance(paths, string_types):
            paths = [paths]
        elif paths is None:
            paths = []

        # expand any placeholders in configured paths
        paths = [
            to_native(os.path.expanduser(p), errors='surrogate_or_strict')
            for p in paths
        ]

        # Append all ``ansible_collections`` dirs from sys.path to the end
        for path in sys.path:
            if (path not in paths and os.path.isdir(
                    to_bytes(
                        os.path.join(path, 'ansible_collections'),
                        errors='surrogate_or_strict',
                    ))):
                paths.append(path)

        self._n_configured_paths = paths

        self._n_playbook_paths = []
        self._default_collection = None
        # pre-inject grafted package maps so we can force them to use the right loader instead of potentially delegating to a "normal" loader
        for syn_pkg_def in (p for p in iteritems(_SYNTHETIC_PACKAGES)
                            if p[1].get('graft')):
            pkg_name = syn_pkg_def[0]
            pkg_def = syn_pkg_def[1]

            newmod = ModuleType(pkg_name)
            newmod.__package__ = pkg_name
            newmod.__file__ = '<ansible_synthetic_collection_package>'
            pkg_type = pkg_def.get('type')

            # TODO: need to rethink map style so we can just delegate all the loading

            if pkg_type == 'flatmap':
                newmod.__loader__ = AnsibleFlatMapLoader(
                    import_module(pkg_def['flatmap']))
            newmod.__path__ = []

            sys.modules[pkg_name] = newmod
Code example #11
    def __getitem__(self, key):
        if not isinstance(key, string_types):
            raise ValueError('key must be a string')

        key = to_native(key)

        if '.' not in key:  # might be a built-in value, delegate to base dict
            return self._delegatee.__getitem__(key)

        func = self._collection_jinja_func_cache.get(key)

        if func:
            return func

        acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)

        if not acr:
            raise KeyError('invalid plugin name: {0}'.format(key))

        try:
            pkg = import_module(acr.n_python_package_name)
        except ImportError:
            raise KeyError()

        parent_prefix = acr.collection

        if acr.subdirs:
            parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)

        for dummy, module_name, ispkg in pkgutil.iter_modules(
                pkg.__path__, prefix=parent_prefix + '.'):
            if ispkg:
                continue

            try:
                plugin_impl = self._pluginloader.get(module_name)
            except Exception as e:
                raise TemplateSyntaxError(to_native(e), 0)

            method_map = getattr(plugin_impl, self._method_map_name)

            for f in iteritems(method_map()):
                fq_name = '.'.join((parent_prefix, f[0]))
                # FIXME: detect/warn on intra-collection function name collisions
                self._collection_jinja_func_cache[fq_name] = f[1]

        function_impl = self._collection_jinja_func_cache[key]
        return function_impl
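Enumerating every plugin module in the collection package hinges on pkgutil.iter_modules being fed the package's __path__ plus a dotted prefix. The same call works against any regular package; for example, listing the submodules of the stdlib json package:

import pkgutil
from importlib import import_module

pkg = import_module('json')
for _finder, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix='json.'):
    if ispkg:
        continue  # the cache-building loop above skips subpackages the same way
    print(module_name)  # json.decoder, json.encoder, json.scanner, json.tool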
Code example #12
def test_collection_get_data():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    # something that's there
    d = pkgutil.get_data('ansible_collections.testns.testcoll', 'plugins/action/my_action.py')
    assert b'hello from my_action.py' in d

    # something that's not there
    d = pkgutil.get_data('ansible_collections.testns.testcoll', 'bogus/bogus')
    assert d is None

    with pytest.raises(ValueError):
        plugins_pkg = import_module('ansible_collections.ansible.builtin')
        assert not os.path.exists(os.path.dirname(plugins_pkg.__file__))
        d = pkgutil.get_data('ansible_collections.ansible.builtin', 'plugins/connection/local.py')
Code example #13
    def _find_module(self, fullname, path, load):
        # this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
        if not fullname.startswith(
                'ansible_collections.') and fullname != 'ansible_collections':
            return False, None

        if sys.modules.get(fullname):
            if not load:
                return True, None

            return True, sys.modules[fullname]

        newmod = None

        # this loader implements key functionality for Ansible collections
        # * implicit distributed namespace packages for the root Ansible namespace (no pkgutil.extend_path hackery reqd)
        # * implicit package support for Python 2.7 (no need for __init__.py in collections, except to use standard Py2.7 tooling)
        # * preventing controller-side code injection during collection loading
        # * (default loader would execute arbitrary package code from all __init__.py's)

        parent_pkg_name = '.'.join(fullname.split('.')[:-1])

        parent_pkg = sys.modules.get(parent_pkg_name)

        if parent_pkg_name and not parent_pkg:
            raise ImportError(
                'parent package {0} not found'.format(parent_pkg_name))

        # are we at or below the collection level? eg a.mynamespace.mycollection.something.else
        # if so, we don't want distributed namespace behavior; first mynamespace.mycollection on the path is where
        # we'll load everything from (ie, don't fall back to another mynamespace.mycollection lower on the path)
        sub_collection = fullname.count('.') > 1

        synpkg_def = _SYNTHETIC_PACKAGES.get(fullname)
        synpkg_remainder = ''

        if not synpkg_def:
            # if the parent is a grafted package, we have some special work to do, otherwise just look for stuff on disk
            parent_synpkg_def = _SYNTHETIC_PACKAGES.get(parent_pkg_name)
            if parent_synpkg_def and parent_synpkg_def.get('graft'):
                synpkg_def = parent_synpkg_def
                synpkg_remainder = '.' + fullname.rpartition('.')[2]

        # FUTURE: collapse as much of this back to on-demand as possible (maybe stub packages that get replaced when actually loaded?)
        if synpkg_def:
            pkg_type = synpkg_def.get('type')
            if not pkg_type:
                raise KeyError(
                    'invalid synthetic package type (no package "type" specified)'
                )
            if pkg_type == 'map':
                map_package = synpkg_def.get('map')

                if not map_package:
                    raise KeyError(
                        'invalid synthetic map package definition (no target "map" defined)'
                    )

                if not load:
                    return True, None

                mod = import_module(map_package + synpkg_remainder)

                sys.modules[fullname] = mod

                return True, mod
            elif pkg_type == 'flatmap':
                raise NotImplementedError()
            elif pkg_type == 'pkg_only':
                if not load:
                    return True, None

                newmod = ModuleType(fullname)
                newmod.__package__ = fullname
                newmod.__file__ = '<ansible_synthetic_collection_package>'
                newmod.__loader__ = self
                newmod.__path__ = []

                if not synpkg_def.get('allow_external_subpackages'):
                    # if external subpackages are NOT allowed, we're done
                    sys.modules[fullname] = newmod
                    return True, newmod

                # if external subpackages ARE allowed, check for on-disk implementations and return a normal
                # package if we find one, otherwise return the one we created here

        if not parent_pkg:  # top-level package, look for NS subpackages on all collection paths
            package_paths = [
                self._extend_path_with_ns(p, fullname)
                for p in self.n_collection_paths
            ]
        else:  # subpackage; search in all subpaths (we'll limit later inside a collection)
            package_paths = [
                self._extend_path_with_ns(p, fullname)
                for p in parent_pkg.__path__
            ]

        for candidate_child_path in package_paths:
            code_object = None
            is_package = True
            location = None
            # check for implicit sub-package first
            if os.path.isdir(to_bytes(candidate_child_path)):
                # Py3.x implicit namespace packages don't have a file location, so they don't support get_data
                # (which assumes the parent dir or that the loader has an internal mapping); so we have to provide
                # a bogus leaf file on the __file__ attribute for pkgutil.get_data to strip off
                location = os.path.join(candidate_child_path, '__synthetic__')
            else:
                for source_path in [
                        os.path.join(candidate_child_path, '__init__.py'),
                        candidate_child_path + '.py'
                ]:
                    if not os.path.isfile(to_bytes(source_path)):
                        continue

                    if not load:
                        return True, None

                    with open(to_bytes(source_path), 'rb') as fd:
                        source = fd.read()

                    code_object = compile(source=source,
                                          filename=source_path,
                                          mode='exec',
                                          flags=0,
                                          dont_inherit=True)
                    location = source_path
                    is_package = source_path.endswith('__init__.py')
                    break

                if not location:
                    continue

            newmod = ModuleType(fullname)
            newmod.__file__ = location
            newmod.__loader__ = self

            if is_package:
                if sub_collection:  # we never want to search multiple instances of the same collection; use first found
                    newmod.__path__ = [candidate_child_path]
                else:
                    newmod.__path__ = package_paths

                newmod.__package__ = fullname
            else:
                newmod.__package__ = parent_pkg_name

            sys.modules[fullname] = newmod

            if code_object:
                # FIXME: decide cases where we don't actually want to exec the code?
                exec(code_object, newmod.__dict__)

            return True, newmod

        # even if we didn't find one on disk, fall back to a synthetic package if we have one...
        if newmod:
            sys.modules[fullname] = newmod
            return True, newmod

        # FIXME: need to handle the "no dirs present" case for at least the root and synthetic internal collections like ansible.builtin

        return False, None
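The on-disk branch is ordinary module machinery: compile the source, create a bare ModuleType, register it in sys.modules before executing, then exec the code object into the module's namespace. A self-contained sketch of that sequence, using an in-memory source string instead of a file:

import sys
from types import ModuleType

fullname = 'demo_synthetic_pkg'  # hypothetical module name
source = "GREETING = 'hello from a synthetic module'\n"

# dont_inherit mirrors the loader above: don't leak this file's __future__
# flags into the compiled module code.
code_object = compile(source, filename='<demo>', mode='exec', dont_inherit=True)

newmod = ModuleType(fullname)
newmod.__file__ = '<demo>'

# Register before exec, as the loader does, so imports performed by the
# executed code can already see the (partially initialised) module.
sys.modules[fullname] = newmod
exec(code_object, newmod.__dict__)

assert sys.modules[fullname].GREETING == 'hello from a synthetic module'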
Code example #14
File: __init__.py  Project: littlekign/ansible
    def __getitem__(self, key):
        original_key = key
        self._load_ansible_plugins()

        try:
            if not isinstance(key, string_types):
                raise ValueError('key must be a string')

            key = to_native(key)

            if '.' not in key:  # might be a built-in or legacy, check the delegatee dict first, then try for a last-chance base redirect
                func = self._delegatee.get(key)

                if func:
                    return func

            key, leaf_key = get_fqcr_and_name(key)
            seen = set()

            while True:
                if key in seen:
                    raise TemplateSyntaxError(
                        'recursive collection redirect found for %r' %
                        original_key, 0)
                seen.add(key)

                acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)

                if not acr:
                    raise KeyError('invalid plugin name: {0}'.format(key))

                ts = _get_collection_metadata(acr.collection)

                # TODO: implement cycle detection (unified across collection redir as well)

                routing_entry = ts.get('plugin_routing',
                                       {}).get(self._dirname,
                                               {}).get(leaf_key, {})

                deprecation_entry = routing_entry.get('deprecation')
                if deprecation_entry:
                    warning_text = deprecation_entry.get('warning_text')
                    removal_date = deprecation_entry.get('removal_date')
                    removal_version = deprecation_entry.get('removal_version')

                    if not warning_text:
                        warning_text = '{0} "{1}" is deprecated'.format(
                            self._dirname, key)

                    display.deprecated(warning_text,
                                       version=removal_version,
                                       date=removal_date,
                                       collection_name=acr.collection)

                tombstone_entry = routing_entry.get('tombstone')

                if tombstone_entry:
                    warning_text = tombstone_entry.get('warning_text')
                    removal_date = tombstone_entry.get('removal_date')
                    removal_version = tombstone_entry.get('removal_version')

                    if not warning_text:
                        warning_text = '{0} "{1}" has been removed'.format(
                            self._dirname, key)

                    exc_msg = display.get_deprecation_message(
                        warning_text,
                        version=removal_version,
                        date=removal_date,
                        collection_name=acr.collection,
                        removed=True)

                    raise AnsiblePluginRemovedError(exc_msg)

                redirect = routing_entry.get('redirect', None)
                if redirect:
                    next_key, leaf_key = get_fqcr_and_name(
                        redirect, collection=acr.collection)
                    display.vvv(
                        'redirecting (type: {0}) {1}.{2} to {3}'.format(
                            self._dirname, acr.collection, acr.resource,
                            next_key))
                    key = next_key
                else:
                    break

            func = self._collection_jinja_func_cache.get(key)

            if func:
                return func

            try:
                pkg = import_module(acr.n_python_package_name)
            except ImportError:
                raise KeyError()

            parent_prefix = acr.collection

            if acr.subdirs:
                parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)

            # TODO: implement collection-level redirect

            for dummy, module_name, ispkg in pkgutil.iter_modules(
                    pkg.__path__, prefix=parent_prefix + '.'):
                if ispkg:
                    continue

                try:
                    plugin_impl = self._pluginloader.get(module_name)
                except Exception as e:
                    raise TemplateSyntaxError(to_native(e), 0)

                try:
                    method_map = getattr(plugin_impl, self._method_map_name)
                    func_items = method_map().items()
                except Exception as e:
                    display.warning(
                        "Skipping %s plugin %s as it seems to be invalid: %r" %
                        (self._dirname, to_text(
                            plugin_impl._original_path), e), )
                    continue

                for func_name, func in func_items:
                    fq_name = '.'.join((parent_prefix, func_name))
                    # FIXME: detect/warn on intra-collection function name collisions
                    if self._pluginloader.class_name == 'FilterModule':
                        if fq_name.startswith(('ansible.builtin.', 'ansible.legacy.')) and \
                                func_name in C.STRING_TYPE_FILTERS:
                            self._collection_jinja_func_cache[
                                fq_name] = _wrap_native_text(func)
                        else:
                            self._collection_jinja_func_cache[
                                fq_name] = _unroll_iterator(func)
                    else:
                        self._collection_jinja_func_cache[fq_name] = func

            function_impl = self._collection_jinja_func_cache[key]
            return function_impl
        except AnsiblePluginRemovedError as apre:
            raise TemplateSyntaxError(to_native(apre), 0)
        except KeyError:
            raise
        except Exception as ex:
            display.warning(
                'an unexpected error occurred during Jinja2 environment setup: {0}'
                .format(to_native(ex)))
            display.vvv(
                'exception during Jinja2 environment setup: {0}'.format(
                    format_exc()))
            raise TemplateSyntaxError(to_native(ex), 0)
Code example #15
    def __getitem__(self, key):
        try:
            if not isinstance(key, string_types):
                raise ValueError('key must be a string')

            key = to_native(key)

            if '.' not in key:  # might be a built-in or legacy, check the delegatee dict first, then try for a last-chance base redirect
                func = self._delegatee.get(key)

                if func:
                    return func

                ts = _get_collection_metadata('ansible.builtin')

                # TODO: implement support for collection-backed redirect (currently only builtin)
                # TODO: implement cycle detection (unified across collection redir as well)
                redirect_fqcr = ts.get('plugin_routing', {}).get(self._dirname, {}).get(key, {}).get('redirect', None)
                if redirect_fqcr:
                    acr = AnsibleCollectionRef.from_fqcr(ref=redirect_fqcr, ref_type=self._dirname)
                    display.vvv('redirecting {0} {1} to {2}.{3}'.format(self._dirname, key, acr.collection, acr.resource))
                    key = redirect_fqcr
                # TODO: handle recursive forwarding (not necessary for builtin, but definitely for further collection redirs)

            func = self._collection_jinja_func_cache.get(key)

            if func:
                return func

            acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)

            if not acr:
                raise KeyError('invalid plugin name: {0}'.format(key))

            try:
                pkg = import_module(acr.n_python_package_name)
            except ImportError:
                raise KeyError()

            parent_prefix = acr.collection

            if acr.subdirs:
                parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)

            # TODO: implement collection-level redirect

            for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
                if ispkg:
                    continue

                try:
                    plugin_impl = self._pluginloader.get(module_name)
                except Exception as e:
                    raise TemplateSyntaxError(to_native(e), 0)

                method_map = getattr(plugin_impl, self._method_map_name)

                for f in iteritems(method_map()):
                    fq_name = '.'.join((parent_prefix, f[0]))
                    # FIXME: detect/warn on intra-collection function name collisions
                    self._collection_jinja_func_cache[fq_name] = f[1]

            function_impl = self._collection_jinja_func_cache[key]
            return function_impl
        except KeyError:
            raise
        except Exception as ex:
            display.warning('an unexpected error occurred during Jinja2 environment setup: {0}'.format(to_native(ex)))
            display.vvv('exception during Jinja2 environment setup: {0}'.format(format_exc()))
            raise
Code example #16
File: __init__.py  Project: alexey74/ansible
    def __getitem__(self, key):
        try:
            if not isinstance(key, string_types):
                raise ValueError('key must be a string')

            key = to_native(key)

            if '.' not in key:  # might be a built-in or legacy, check the delegatee dict first, then try for a last-chance base redirect
                func = self._delegatee.get(key)

                if func:
                    return func

                # didn't find it in the pre-built Jinja env, assume it's a former builtin and follow the normal routing path
                leaf_key = key
                key = 'ansible.builtin.' + key
            else:
                leaf_key = key.split('.')[-1]

            acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)

            if not acr:
                raise KeyError('invalid plugin name: {0}'.format(key))

            ts = _get_collection_metadata(acr.collection)

            # TODO: implement support for collection-backed redirect (currently only builtin)
            # TODO: implement cycle detection (unified across collection redir as well)

            routing_entry = ts.get('plugin_routing', {}).get(self._dirname, {}).get(leaf_key, {})

            deprecation_entry = routing_entry.get('deprecation')
            if deprecation_entry:
                warning_text = deprecation_entry.get('warning_text')
                removal_date = deprecation_entry.get('removal_date')
                removal_version = deprecation_entry.get('removal_version')

                if not warning_text:
                    warning_text = '{0} "{1}" is deprecated'.format(self._dirname, key)

                display.deprecated(warning_text, version=removal_version, date=removal_date, collection_name=acr.collection)

            tombstone_entry = routing_entry.get('tombstone')

            if tombstone_entry:
                warning_text = tombstone_entry.get('warning_text')
                removal_date = tombstone_entry.get('removal_date')
                removal_version = tombstone_entry.get('removal_version')

                if not warning_text:
                    warning_text = '{0} "{1}" has been removed'.format(self._dirname, key)

                exc_msg = display.get_deprecation_message(warning_text, version=removal_version, date=removal_date,
                                                          collection_name=acr.collection, removed=True)

                raise AnsiblePluginRemovedError(exc_msg)

            redirect_fqcr = routing_entry.get('redirect', None)
            if redirect_fqcr:
                acr = AnsibleCollectionRef.from_fqcr(ref=redirect_fqcr, ref_type=self._dirname)
                display.vvv('redirecting {0} {1} to {2}.{3}'.format(self._dirname, key, acr.collection, acr.resource))
                key = redirect_fqcr
            # TODO: handle recursive forwarding (not necessary for builtin, but definitely for further collection redirs)

            func = self._collection_jinja_func_cache.get(key)

            if func:
                return func

            try:
                pkg = import_module(acr.n_python_package_name)
            except ImportError:
                raise KeyError()

            parent_prefix = acr.collection

            if acr.subdirs:
                parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)

            # TODO: implement collection-level redirect

            for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
                if ispkg:
                    continue

                try:
                    plugin_impl = self._pluginloader.get(module_name)
                except Exception as e:
                    raise TemplateSyntaxError(to_native(e), 0)

                method_map = getattr(plugin_impl, self._method_map_name)

                for func_name, func in iteritems(method_map()):
                    fq_name = '.'.join((parent_prefix, func_name))
                    # FIXME: detect/warn on intra-collection function name collisions
                    if USE_JINJA2_NATIVE and func_name in C.STRING_TYPE_FILTERS:
                        self._collection_jinja_func_cache[fq_name] = _wrap_native_text(func)
                    else:
                        self._collection_jinja_func_cache[fq_name] = _unroll_iterator(func)

            function_impl = self._collection_jinja_func_cache[key]
            return function_impl
        except AnsiblePluginRemovedError as apre:
            raise TemplateSyntaxError(to_native(apre), 0)
        except KeyError:
            raise
        except Exception as ex:
            display.warning('an unexpected error occurred during Jinja2 environment setup: {0}'.format(to_native(ex)))
            display.vvv('exception during Jinja2 environment setup: {0}'.format(format_exc()))
            raise TemplateSyntaxError(to_native(ex), 0)
Code example #17
    def _add_module(self, name, ext, fqn, optional, wrapper=False):
        m = to_text(name)

        util_fqn = None

        if m.startswith("Ansible."):
            # Builtin util, use plugin loader to get the data
            mu_path = ps_module_utils_loader.find_plugin(m, ext)

            if not mu_path:
                if optional:
                    return

                raise AnsibleError(
                    'Could not find imported module support code '
                    'for \'%s\'' % m)

            module_util_data = to_bytes(_slurp(mu_path))
        else:
            # Collection util, load the package data based on the util import.

            submodules = m.split(".")
            if m.startswith('.'):
                fqn_submodules = fqn.split('.')
                for submodule in submodules:
                    if submodule:
                        break
                    del fqn_submodules[-1]

                submodules = fqn_submodules + [s for s in submodules if s]

            n_package_name = to_native('.'.join(submodules[:-1]),
                                       errors='surrogate_or_strict')
            n_resource_name = to_native(submodules[-1] + ext,
                                        errors='surrogate_or_strict')

            try:
                module_util = import_module(n_package_name)
                pkg_data = pkgutil.get_data(n_package_name, n_resource_name)
                if pkg_data is None:
                    raise ImportError("No package data found")

                module_util_data = to_bytes(pkg_data,
                                            errors='surrogate_or_strict')
                util_fqn = to_text("%s.%s " % (n_package_name, submodules[-1]),
                                   errors='surrogate_or_strict')

                # Get the path of the util which is required for coverage collection.
                resource_paths = list(module_util.__path__)
                if len(resource_paths) != 1:
                    # This should never happen with a collection but we are just being defensive about it.
                    raise AnsibleError(
                        "Internal error: Referenced module_util package '%s' contains 0 or multiple "
                        "import locations when we only expect 1." %
                        n_package_name)
                mu_path = os.path.join(resource_paths[0], n_resource_name)
            except (ImportError, OSError) as err:
                if getattr(err, "errno", errno.ENOENT) == errno.ENOENT:
                    if optional:
                        return

                    raise AnsibleError(
                        'Could not find collection imported module support code for \'%s\''
                        % to_native(m))

                else:
                    raise

        util_info = {
            'data': module_util_data,
            'path': to_text(mu_path),
        }
        if ext == ".psm1":
            self.ps_modules[m] = util_info
        else:
            if wrapper:
                self.cs_utils_wrapper[m] = util_info
            else:
                self.cs_utils_module[m] = util_info
        self.scan_module(module_util_data,
                         fqn=util_fqn,
                         wrapper=wrapper,
                         powershell=(ext == ".psm1"))
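Compared with code example #5, the new piece here is support for relative util imports: leading dots strip trailing segments from the requesting module's FQN before the remaining parts are appended. A small standalone sketch of just that resolution step (the names are illustrative):

def resolve_relative_util(name, fqn):
    """Expand a possibly-relative util name against the requesting module's FQN.

    e.g. name='..module_utils.MyUtil',
         fqn='ansible_collections.ns.coll.plugins.modules.my_module'
    ->   'ansible_collections.ns.coll.plugins.module_utils.MyUtil'
    """
    submodules = name.split('.')
    if name.startswith('.'):
        fqn_parts = fqn.split('.')
        for part in submodules:
            if part:             # first non-empty part ends the run of leading dots
                break
            del fqn_parts[-1]
        submodules = fqn_parts + [p for p in submodules if p]
    return '.'.join(submodules)


print(resolve_relative_util('..module_utils.MyUtil',
                            'ansible_collections.ns.coll.plugins.modules.my_module'))
# -> ansible_collections.ns.coll.plugins.module_utils.MyUtil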