Example #1
    def get_man_text(doc):

        DocCLI.IGNORE = DocCLI.IGNORE + (context.CLIARGS['type'], )
        opt_indent = "        "
        text = []
        pad = display.columns * 0.20
        limit = max(display.columns - int(pad), 70)

        text.append(
            "> %s    (%s)\n" %
            (doc.get(context.CLIARGS['type'],
                     doc.get('plugin_type')).upper(), doc.pop('filename')))

        if isinstance(doc['description'], list):
            desc = " ".join(doc.pop('description'))
        else:
            desc = doc.pop('description')

        text.append("%s\n" % textwrap.fill(DocCLI.tty_ify(desc),
                                           limit,
                                           initial_indent=opt_indent,
                                           subsequent_indent=opt_indent))

        if 'deprecated' in doc and doc['deprecated'] is not None and len(
                doc['deprecated']) > 0:
            text.append("DEPRECATED: \n")
            if isinstance(doc['deprecated'], dict):
                if 'version' in doc['deprecated'] and 'removed_in' not in doc[
                        'deprecated']:
                    doc['deprecated']['removed_in'] = doc['deprecated'][
                        'version']
                text.append(
                    "\tReason: %(why)s\n\tWill be removed in: Ansible %(removed_in)s\n\tAlternatives: %(alternative)s"
                    % doc.pop('deprecated'))
            else:
                text.append("%s" % doc.pop('deprecated'))
            text.append("\n")

        try:
            support_block = DocCLI.get_support_block(doc)
            if support_block:
                text.extend(support_block)
        except Exception:
            pass  # FIXME: not supported by plugins

        if doc.pop('action', False):
            text.append("  * note: %s\n" %
                        "This module has a corresponding action plugin.")

        if 'options' in doc and doc['options']:
            text.append("OPTIONS (= is mandatory):\n")
            DocCLI.add_fields(text, doc.pop('options'), limit, opt_indent)
            text.append('')

        if 'notes' in doc and doc['notes'] and len(doc['notes']) > 0:
            text.append("NOTES:")
            for note in doc['notes']:
                text.append(
                    textwrap.fill(DocCLI.tty_ify(note),
                                  limit - 6,
                                  initial_indent=opt_indent[:-2] + "* ",
                                  subsequent_indent=opt_indent))
            text.append('')
            text.append('')
            del doc['notes']

        if 'seealso' in doc and doc['seealso']:
            text.append("SEE ALSO:")
            for item in doc['seealso']:
                if 'module' in item:
                    text.append(
                        textwrap.fill(DocCLI.tty_ify('Module %s' %
                                                     item['module']),
                                      limit - 6,
                                      initial_indent=opt_indent[:-2] + "* ",
                                      subsequent_indent=opt_indent))
                    description = item.get(
                        'description',
                        'The official documentation on the %s module.' %
                        item['module'])
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(description),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(
                            get_versioned_doclink('modules/%s_module.html' %
                                                  item['module'])),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent))
                elif 'name' in item and 'link' in item and 'description' in item:
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['name']),
                                      limit - 6,
                                      initial_indent=opt_indent[:-2] + "* ",
                                      subsequent_indent=opt_indent))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['description']),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['link']),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                elif 'ref' in item and 'description' in item:
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(
                            'Ansible documentation [%s]' % item['ref']),
                                      limit - 6,
                                      initial_indent=opt_indent[:-2] + "* ",
                                      subsequent_indent=opt_indent))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['description']),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(
                            get_versioned_doclink('/#stq=%s&stp=1' %
                                                  item['ref'])),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))

            text.append('')
            text.append('')
            del doc['seealso']

        if 'requirements' in doc and doc['requirements'] is not None and len(
                doc['requirements']) > 0:
            req = ", ".join(doc.pop('requirements'))
            text.append("REQUIREMENTS:%s\n" %
                        textwrap.fill(DocCLI.tty_ify(req),
                                      limit - 16,
                                      initial_indent="  ",
                                      subsequent_indent=opt_indent))

        # Generic handler
        for k in sorted(doc):
            if k in DocCLI.IGNORE or not doc[k]:
                continue
            if isinstance(doc[k], string_types):
                text.append('%s: %s' %
                            (k.upper(),
                             textwrap.fill(DocCLI.tty_ify(doc[k]),
                                           limit - (len(k) + 2),
                                           subsequent_indent=opt_indent)))
            elif isinstance(doc[k], (list, tuple)):
                text.append('%s: %s' % (k.upper(), ', '.join(doc[k])))
            else:
                text.append(DocCLI._dump_yaml({k.upper(): doc[k]}, opt_indent))
            del doc[k]
        text.append('')

        if 'plainexamples' in doc and doc['plainexamples'] is not None:
            text.append("EXAMPLES:")
            text.append('')
            if isinstance(doc['plainexamples'], string_types):
                text.append(doc.pop('plainexamples').strip())
            else:
                text.append(
                    yaml.dump(doc.pop('plainexamples'),
                              indent=2,
                              default_flow_style=False))
            text.append('')
            text.append('')

        if 'returndocs' in doc and doc['returndocs'] is not None:
            text.append("RETURN VALUES:")
            if isinstance(doc['returndocs'], string_types):
                text.append(doc.pop('returndocs'))
            else:
                text.append(
                    yaml.dump(doc.pop('returndocs'),
                              indent=2,
                              default_flow_style=False))
        text.append('')

        try:
            metadata_block = DocCLI.get_metadata_block(doc)
            if metadata_block:
                text.extend(metadata_block)
                text.append('')
        except Exception:
            pass  # metadata is optional

        return "\n".join(text)
Example #2
def discover_interpreter(action, interpreter_name, discovery_mode, task_vars):
    # interpreter discovery is a 2-step process with the target. First, we use a simple shell-agnostic bootstrap to
    # get the system type from uname, and find any random Python that can get us the info we need. For supported
    # target OS types, we'll dispatch a Python script that calls platform.dist() (for older platforms, where available)
    # and brings back /etc/os-release (if present). The proper Python path is looked up in a table of known
    # distros/versions with included Pythons; if nothing is found, depending on the discovery mode, either the
    # default fallback of /usr/bin/python is used (if we know it's there), or discovery fails.

    # FUTURE: add logical equivalence for "python3" in the case of py3-only modules?
    if interpreter_name != 'python':
        raise ValueError('Interpreter discovery not supported for {0}'.format(
            interpreter_name))

    host = task_vars.get('inventory_hostname', 'unknown')
    res = None
    platform_type = 'unknown'
    found_interpreters = [u'/usr/bin/python']  # fallback value
    is_auto_legacy = discovery_mode.startswith('auto_legacy')
    is_silent = discovery_mode.endswith('_silent')

    try:
        platform_python_map = C.config.get_config_value(
            'INTERPRETER_PYTHON_DISTRO_MAP', variables=task_vars)
        bootstrap_python_list = C.config.get_config_value(
            'INTERPRETER_PYTHON_FALLBACK', variables=task_vars)

        display.vvv(msg=u"Attempting {0} interpreter discovery".format(
            interpreter_name),
                    host=host)

        if getattr(action._connection._shell, "_IS_OPENVMS", False):
            shell_bootstrap = 'WRITE SYS$OUTPUT "PLATFORM" ; SHOW SYSTEM/NOPROCESS ; WRITE SYS$OUTPUT "FOUND" ; SHOW SYMBOL python* ; WRITE SYS$OUTPUT "ENDFOUND"'
        else:
            # not all command -v impls accept a list of commands, so we have to call it once per python
            command_list = [
                "command -v '%s'" % py for py in bootstrap_python_list
            ]
            shell_bootstrap = "echo PLATFORM; uname; echo FOUND; {0}; echo ENDFOUND".format(
                '; '.join(command_list))

        # FUTURE: in most cases we probably don't want to use become, but maybe sometimes we do?
        res = action._low_level_execute_command(shell_bootstrap,
                                                sudoable=False)

        raw_stdout = res.get('stdout', u'').strip()

        match = foundre.match(raw_stdout)

        if not match:
            display.debug(
                u'raw interpreter discovery output: {0}'.format(raw_stdout),
                host=host)
            raise ValueError(
                'unexpected output from Python interpreter discovery')

        platform_type = match.groups()[0].lower().strip()

        if getattr(action._connection._shell, "_IS_OPENVMS", False):
            found_interpreters = [
                interp.strip().split(" ")[0]
                for interp in match.groups()[1].splitlines()
                if interp.strip().startswith('PYTHON')
            ]
        else:
            found_interpreters = [
                interp.strip() for interp in match.groups()[1].splitlines()
                if interp.startswith('/')
            ]

        display.debug(u"found interpreters: {0}".format(found_interpreters),
                      host=host)

        if not found_interpreters:
            action._discovery_warnings.append(
                u'No python interpreters found for host {0} (tried {1})'.
                format(host, bootstrap_python_list))
            # this is lame, but returning None or throwing an exception is uglier
            return u'/usr/bin/python'

        if platform_type != 'linux':
            raise NotImplementedError(
                'unsupported platform for extended discovery: {0}'.format(
                    to_native(platform_type)))

        platform_script = pkgutil.get_data('ansible.executor.discovery',
                                           'python_target.py')

        # FUTURE: respect pipelining setting instead of just if the connection supports it?
        if action._connection.has_pipelining:
            res = action._low_level_execute_command(found_interpreters[0],
                                                    sudoable=False,
                                                    in_data=platform_script)
        else:
            # FUTURE: implement on-disk case (via script action or ?)
            raise NotImplementedError(
                'pipelining support required for extended interpreter discovery'
            )

        platform_info = json.loads(res.get('stdout'))

        distro, version = _get_linux_distro(platform_info)

        if not distro or not version:
            raise NotImplementedError(
                'unable to get Linux distribution/version info')

        version_map = platform_python_map.get(distro.lower().strip())
        if not version_map:
            raise NotImplementedError(
                'unsupported Linux distribution: {0}'.format(distro))

        platform_interpreter = to_text(_version_fuzzy_match(
            version, version_map),
                                       errors='surrogate_or_strict')

        # provide a transition period for hosts that were using /usr/bin/python previously (but shouldn't have been)
        if is_auto_legacy:
            if platform_interpreter != u'/usr/bin/python' and u'/usr/bin/python' in found_interpreters:
                # FIXME: support comments in sivel's deprecation scanner so we can get reminded on this
                if not is_silent:
                    action._discovery_deprecation_warnings.append(
                        dict(
                            msg=
                            u"Distribution {0} {1} on host {2} should use {3}, but is using "
                            u"/usr/bin/python for backward compatibility with prior Ansible releases. "
                            u"A future Ansible release will default to using the discovered platform "
                            u"python for this host. See {4} for more information"
                            .format(
                                distro, version, host, platform_interpreter,
                                get_versioned_doclink(
                                    'reference_appendices/interpreter_discovery.html'
                                )),
                            version='2.12'))
                return u'/usr/bin/python'

        if platform_interpreter not in found_interpreters:
            if platform_interpreter not in bootstrap_python_list:
                # sanity check to make sure we looked for it
                if not is_silent:
                    action._discovery_warnings \
                        .append(u"Platform interpreter {0} on host {1} is missing from bootstrap list"
                                .format(platform_interpreter, host))

            if not is_silent:
                action._discovery_warnings \
                    .append(u"Distribution {0} {1} on host {2} should use {3}, but is using {4}, since the "
                            u"discovered platform python interpreter was not present. See {5} "
                            u"for more information."
                            .format(distro, version, host, platform_interpreter, found_interpreters[0],
                                    get_versioned_doclink('reference_appendices/interpreter_discovery.html')))
            return found_interpreters[0]

        return platform_interpreter
    except NotImplementedError as ex:
        display.vvv(msg=u'Python interpreter discovery fallback ({0})'.format(
            to_text(ex)),
                    host=host)
    except Exception as ex:
        if not is_silent:
            display.warning(
                msg=
                u'Unhandled error in Python interpreter discovery for host {0}: {1}'
                .format(host, to_text(ex)))
            display.debug(msg=u'Interpreter discovery traceback:\n{0}'.format(
                to_text(format_exc())),
                          host=host)
            if res and res.get('stderr'):
                display.vvv(
                    msg=u'Interpreter discovery remote stderr:\n{0}'.format(
                        to_text(res.get('stderr'))),
                    host=host)

    if not is_silent:
        action._discovery_warnings \
            .append(u"Platform {0} on host {1} is using the discovered Python interpreter at {2}, but future installation of "
                    u"another Python interpreter could change the meaning of that path. See {3} "
                    u"for more information."
                    .format(platform_type, host, found_interpreters[0],
                            get_versioned_doclink('reference_appendices/interpreter_discovery.html')))
    return found_interpreters[0]
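
The module-level foundre regex used above is not part of this excerpt. As a hedged illustration only (the exact pattern in Ansible may differ), a regex of the following shape can pull the uname output and the discovered interpreter paths out of the PLATFORM/FOUND/ENDFOUND markers that the shell bootstrap emits:

import re

# Illustrative stand-in for foundre: group 1 captures the uname output,
# group 2 the block of `command -v` results between FOUND and ENDFOUND.
foundre = re.compile(r'(?s)PLATFORM[\r\n]+(.*)FOUND(.*)ENDFOUND')

raw_stdout = u"PLATFORM\nLinux\nFOUND\n/usr/bin/python3\n/usr/bin/python\nENDFOUND"
match = foundre.match(raw_stdout)
platform_type = match.groups()[0].lower().strip()      # -> 'linux'
found_interpreters = [
    line.strip() for line in match.groups()[1].splitlines() if line.startswith('/')
]
print(platform_type, found_interpreters)               # -> linux ['/usr/bin/python3', '/usr/bin/python']
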
Example #3
    def get_man_text(doc, collection_name=''):
        # Create a copy so we don't modify the original
        doc = dict(doc)

        DocCLI.IGNORE = DocCLI.IGNORE + (context.CLIARGS['type'], )
        opt_indent = "        "
        text = []
        pad = display.columns * 0.20
        limit = max(display.columns - int(pad), 70)

        plugin_name = doc.get(context.CLIARGS['type'], doc.get('plugin_type'))
        if collection_name:
            plugin_name = '%s.%s' % (collection_name, plugin_name)

        text.append("> %s    (%s)\n" %
                    (plugin_name.upper(), doc.pop('filename')))

        if isinstance(doc['description'], list):
            desc = " ".join(doc.pop('description'))
        else:
            desc = doc.pop('description')

        text.append("%s\n" % textwrap.fill(DocCLI.tty_ify(desc),
                                           limit,
                                           initial_indent=opt_indent,
                                           subsequent_indent=opt_indent))

        if doc.get('deprecated', False):
            text.append("DEPRECATED: \n")
            if isinstance(doc['deprecated'], dict):
                if 'removed_at_date' in doc['deprecated']:
                    text.append(
                        "\tReason: %(why)s\n\tWill be removed in a release after %(removed_at_date)s\n\tAlternatives: %(alternative)s"
                        % doc.pop('deprecated'))
                else:
                    if 'version' in doc[
                            'deprecated'] and 'removed_in' not in doc[
                                'deprecated']:
                        doc['deprecated']['removed_in'] = doc['deprecated'][
                            'version']
                    text.append(
                        "\tReason: %(why)s\n\tWill be removed in: Ansible %(removed_in)s\n\tAlternatives: %(alternative)s"
                        % doc.pop('deprecated'))
            else:
                text.append("%s" % doc.pop('deprecated'))
            text.append("\n")

        if doc.pop('action', False):
            text.append("  * note: %s\n" %
                        "This module has a corresponding action plugin.")

        if doc.get('options', False):
            text.append("OPTIONS (= is mandatory):\n")
            DocCLI.add_fields(text, doc.pop('options'), limit, opt_indent)
            text.append('')

        if doc.get('notes', False):
            text.append("NOTES:")
            for note in doc['notes']:
                text.append(
                    textwrap.fill(DocCLI.tty_ify(note),
                                  limit - 6,
                                  initial_indent=opt_indent[:-2] + "* ",
                                  subsequent_indent=opt_indent))
            text.append('')
            text.append('')
            del doc['notes']

        if doc.get('seealso', False):
            text.append("SEE ALSO:")
            for item in doc['seealso']:
                if 'module' in item:
                    text.append(
                        textwrap.fill(DocCLI.tty_ify('Module %s' %
                                                     item['module']),
                                      limit - 6,
                                      initial_indent=opt_indent[:-2] + "* ",
                                      subsequent_indent=opt_indent))
                    description = item.get(
                        'description',
                        'The official documentation on the %s module.' %
                        item['module'])
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(description),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(
                            get_versioned_doclink('modules/%s_module.html' %
                                                  item['module'])),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent))
                elif 'name' in item and 'link' in item and 'description' in item:
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['name']),
                                      limit - 6,
                                      initial_indent=opt_indent[:-2] + "* ",
                                      subsequent_indent=opt_indent))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['description']),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['link']),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                elif 'ref' in item and 'description' in item:
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(
                            'Ansible documentation [%s]' % item['ref']),
                                      limit - 6,
                                      initial_indent=opt_indent[:-2] + "* ",
                                      subsequent_indent=opt_indent))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(item['description']),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))
                    text.append(
                        textwrap.fill(DocCLI.tty_ify(
                            get_versioned_doclink('/#stq=%s&stp=1' %
                                                  item['ref'])),
                                      limit - 6,
                                      initial_indent=opt_indent + '   ',
                                      subsequent_indent=opt_indent + '   '))

            text.append('')
            text.append('')
            del doc['seealso']

        if doc.get('requirements', False):
            req = ", ".join(doc.pop('requirements'))
            text.append("REQUIREMENTS:%s\n" %
                        textwrap.fill(DocCLI.tty_ify(req),
                                      limit - 16,
                                      initial_indent="  ",
                                      subsequent_indent=opt_indent))

        # Generic handler
        for k in sorted(doc):
            if k in DocCLI.IGNORE or not doc[k]:
                continue
            if isinstance(doc[k], string_types):
                text.append('%s: %s' %
                            (k.upper(),
                             textwrap.fill(DocCLI.tty_ify(doc[k]),
                                           limit - (len(k) + 2),
                                           subsequent_indent=opt_indent)))
            elif isinstance(doc[k], (list, tuple)):
                text.append('%s: %s' % (k.upper(), ', '.join(doc[k])))
            else:
                # use empty indent since this affects the start of the yaml doc, not its keys
                text.append(DocCLI._dump_yaml({k.upper(): doc[k]}, ''))
            del doc[k]
            text.append('')

        if doc.get('plainexamples', False):
            text.append("EXAMPLES:")
            text.append('')
            if isinstance(doc['plainexamples'], string_types):
                text.append(doc.pop('plainexamples').strip())
            else:
                text.append(
                    yaml.dump(doc.pop('plainexamples'),
                              indent=2,
                              default_flow_style=False))
            text.append('')
            text.append('')

        if doc.get('returndocs', False):
            text.append("RETURN VALUES:")
            DocCLI.add_fields(text,
                              doc.pop('returndocs'),
                              limit,
                              opt_indent,
                              return_values=True)

        return "\n".join(text)
Example #4
    def execute_init(self):
        """
        Creates the skeleton framework of a role or collection that complies with the Galaxy metadata format.
        """

        galaxy_type = context.CLIARGS['type']
        init_path = context.CLIARGS['init_path']
        force = context.CLIARGS['force']
        obj_skeleton = context.CLIARGS['{0}_skeleton'.format(galaxy_type)]

        obj_name = context.CLIARGS['{0}_name'.format(galaxy_type)]

        inject_data = dict(
            author='your name',
            description='your description',
            company='your company (optional)',
            license='license (GPL-2.0-or-later, MIT, etc)',
            issue_tracker_url='http://example.com/issue/tracker',
            repository_url='http://example.com/repository',
            documentation_url='http://docs.example.com',
            homepage_url='http://example.com',
            min_ansible_version=ansible_version[:3],  # x.y
            ansible_plugin_list_dir=get_versioned_doclink('plugins/plugins.html'),
        )

        if galaxy_type == 'role':
            inject_data['role_name'] = obj_name
            inject_data['role_type'] = context.CLIARGS['role_type']
            inject_data['license'] = 'license (GPL-2.0-or-later, MIT, etc)'
            obj_path = os.path.join(init_path, obj_name)
        elif galaxy_type == 'collection':
            namespace, collection_name = obj_name.split('.', 1)

            inject_data['namespace'] = namespace
            inject_data['collection_name'] = collection_name
            inject_data['license'] = 'GPL-2.0-or-later'
            obj_path = os.path.join(init_path, namespace, collection_name)
        b_obj_path = to_bytes(obj_path, errors='surrogate_or_strict')

        if os.path.exists(b_obj_path):
            if os.path.isfile(obj_path):
                raise AnsibleError("- the path %s already exists, but is a file - aborting" % to_native(obj_path))
            elif not force:
                raise AnsibleError("- the directory %s already exists. "
                                   "You can use --force to re-initialize this directory,\n"
                                   "however it will reset any main.yml files that may have\n"
                                   "been modified there already." % to_native(obj_path))

        if obj_skeleton is not None:
            skeleton_ignore_expressions = C.GALAXY_ROLE_SKELETON_IGNORE
        else:
            obj_skeleton = self.galaxy.default_role_skeleton_path
            skeleton_ignore_expressions = ['^.*/.git_keep$']

        obj_skeleton = os.path.expanduser(obj_skeleton)
        skeleton_ignore_re = [re.compile(x) for x in skeleton_ignore_expressions]

        if not os.path.exists(obj_skeleton):
            raise AnsibleError("- the skeleton path '{0}' does not exist, cannot init {1}".format(
                to_native(obj_skeleton), galaxy_type)
            )

        template_env = Environment(loader=FileSystemLoader(obj_skeleton))

        # create role directory
        if not os.path.exists(b_obj_path):
            os.makedirs(b_obj_path)

        for root, dirs, files in os.walk(obj_skeleton, topdown=True):
            rel_root = os.path.relpath(root, obj_skeleton)
            rel_dirs = rel_root.split(os.sep)
            rel_root_dir = rel_dirs[0]
            if galaxy_type == 'collection':
                # A collection can contain templates in playbooks/*/templates and roles/*/templates
                in_templates_dir = rel_root_dir in ['playbooks', 'roles'] and 'templates' in rel_dirs
            else:
                in_templates_dir = rel_root_dir == 'templates'

            dirs[:] = [d for d in dirs if not any(r.match(d) for r in skeleton_ignore_re)]

            for f in files:
                filename, ext = os.path.splitext(f)
                if any(r.match(os.path.join(rel_root, f)) for r in skeleton_ignore_re):
                    continue
                elif ext == ".j2" and not in_templates_dir:
                    src_template = os.path.join(rel_root, f)
                    dest_file = os.path.join(obj_path, rel_root, filename)
                    template_env.get_template(src_template).stream(inject_data).dump(dest_file, encoding='utf-8')
                else:
                    f_rel_path = os.path.relpath(os.path.join(root, f), obj_skeleton)
                    shutil.copyfile(os.path.join(root, f), os.path.join(obj_path, f_rel_path))

            for d in dirs:
                b_dir_path = to_bytes(os.path.join(obj_path, rel_root, d), errors='surrogate_or_strict')
                if not os.path.exists(b_dir_path):
                    os.makedirs(b_dir_path)

        display.display("- %s was created successfully" % obj_name)
Example #5
def load_list_of_tasks(ds,
                       play,
                       block=None,
                       role=None,
                       task_include=None,
                       use_handlers=False,
                       variable_manager=None,
                       loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar
    from ansible.utils.plugin_docs import get_versioned_doclink

    if not isinstance(ds, list):
        raise AnsibleAssertionError(
            'The ds (%s) should be a list but was a %s' % (ds, type(ds)))

    task_list = []
    for task_ds in ds:
        if not isinstance(task_ds, dict):
            raise AnsibleAssertionError(
                'The ds (%s) should be a dict but was a %s' % (ds, type(ds)))

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            args_parser = ModuleArgsParser(task_ds)
            try:
                (action, args,
                 delegate_to) = args_parser.parse(skip_action_validation=True)
            except AnsibleParserError as e:
                # if the raised exception was created with obj=ds args, then it includes the detail,
                # so we don't need to add it and can just re-raise.
                if e.obj:
                    raise
                # But if it wasn't, we can add the yaml object now to get more detail
                raise AnsibleParserError(to_native(e), obj=task_ds, orig_exc=e)

            if action in C._ACTION_ALL_INCLUDE_IMPORT_TASKS:

                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(task_ds,
                                       block=block,
                                       role=role,
                                       task_include=None,
                                       variable_manager=variable_manager,
                                       loader=loader)

                all_vars = variable_manager.get_vars(play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if action in C._ACTION_INCLUDE_TASKS:
                    is_static = False
                elif action in C._ACTION_IMPORT_TASKS:
                    is_static = True
                else:
                    include_link = get_versioned_doclink(
                        'user_guide/playbooks_reuse_includes.html')
                    display.deprecated(
                        '"include" is deprecated, use include_tasks/import_tasks instead. See %s for details'
                        % include_link, "2.16")
                    is_static = not templar.is_template(
                        t.args['_raw_params']) and t.all_parents_static(
                        ) and not t.loop

                if is_static:
                    if t.loop is not None:
                        if action in C._ACTION_IMPORT_TASKS:
                            raise AnsibleParserError(
                                "You cannot use loops on 'import_tasks' statements. You should use 'include_tasks' instead.",
                                obj=task_ds)
                        else:
                            raise AnsibleParserError(
                                "You cannot use 'static' on an include with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        try:
                            parent_include_dir = os.path.dirname(
                                templar.template(
                                    parent_include.args.get('_raw_params')))
                        except AnsibleUndefinedVariable as e:
                            if not parent_include.statically_loaded:
                                raise AnsibleParserError(
                                    "Error when evaluating variable in dynamic parent include path: %s. "
                                    "When using static imports, the parent dynamic include cannot utilize host facts "
                                    "or variables from inventory" %
                                    parent_include.args.get('_raw_params'),
                                    obj=task_ds,
                                    suppress_extended_error=True,
                                    orig_exc=e)
                            raise
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(
                                parent_include_dir, cumulative_path)
                        include_target = templar.template(
                            t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path,
                                                       subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(
                                new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(
                                loader.get_basedir(), cumulative_path,
                                include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        try:
                            include_target = templar.template(
                                t.args['_raw_params'])
                        except AnsibleUndefinedVariable as e:
                            raise AnsibleParserError(
                                "Error when evaluating variable in import path: %s.\n\n"
                                "When using static imports, ensure that any variables used in their names are defined in vars/vars_files\n"
                                "or extra-vars passed in from the command line. Static imports cannot use variables from facts or inventory\n"
                                "sources like group or host vars." %
                                t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                                orig_exc=e)
                        if t._role:
                            include_file = loader.path_dwim_relative(
                                t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    data = loader.load_from_file(include_file)
                    if not data:
                        display.warning(
                            'file %s is empty and had no tasks to include' %
                            include_file)
                        continue
                    elif not isinstance(data, list):
                        raise AnsibleParserError(
                            "included task files must contain a list of tasks",
                            obj=data)

                    # since we can't send callbacks here, we display a message directly in
                    # the same fashion used by the on_include callback. We also do it here,
                    # because the recursive nature of helper methods means we may be loading
                    # nested includes, and we want the include order printed correctly
                    display.vv("statically imported: %s" % include_file)

                    ti_copy = t.copy(exclude_parent=True)
                    ti_copy._parent = block
                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=ti_copy,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    tags = ti_copy.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    t.is_static = False
                    task_list.append(t)

            elif action in C._ACTION_ALL_PROPER_INCLUDE_IMPORT_ROLES:
                ir = IncludeRole.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader,
                )

                # check to see if this include_role is dynamic or static:
                #   1. the user has set the 'static' option to false or true
                #   2. one of the appropriate config options was set
                is_static = False
                if action in C._ACTION_IMPORT_ROLE:
                    is_static = True

                if is_static:
                    if ir.loop is not None:
                        if action in C._ACTION_IMPORT_ROLE:
                            raise AnsibleParserError(
                                "You cannot use loops on 'import_role' statements. You should use 'include_role' instead.",
                                obj=task_ds)
                        else:
                            raise AnsibleParserError(
                                "You cannot use 'static' on an include_role with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    ir.statically_loaded = True

                    # template the role name now, if needed
                    all_vars = variable_manager.get_vars(play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    ir._role_name = templar.template(ir._role_name)

                    # uses compiled list from object
                    blocks, _ = ir.get_block_list(
                        variable_manager=variable_manager, loader=loader)
                    task_list.extend(blocks)
                else:
                    # passes task object itself for latter generation of list
                    task_list.append(ir)
            else:
                if use_handlers:
                    t = Handler.load(task_ds,
                                     block=block,
                                     role=role,
                                     task_include=task_include,
                                     variable_manager=variable_manager,
                                     loader=loader)
                else:
                    t = Task.load(task_ds,
                                  block=block,
                                  role=role,
                                  task_include=task_include,
                                  variable_manager=variable_manager,
                                  loader=loader)

                task_list.append(t)

    return task_list
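
The docstring above says ds is a list of task datastructures parsed from playbook YAML. A minimal illustration of that input shape, with made-up task names and modules, covering the three branches the loop distinguishes:

# Illustrative ds value: what the YAML parser hands to load_list_of_tasks().
ds = [
    {'name': 'plain task', 'debug': {'msg': 'hello'}},   # -> Task.load() / Handler.load()
    {'block': [{'name': 'nested', 'ping': {}}]},         # -> Block.load()
    {'import_tasks': 'setup.yml'},                       # -> include/import branch (static)
]
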
Example #6
    def execute_init(self):
        """
        Creates the skeleton framework of a role or collection that complies with the Galaxy metadata format.
        Requires a role or collection name. The collection name must be in the format ``<namespace>.<collection>``.
        """

        galaxy_type = context.CLIARGS['type']
        init_path = context.CLIARGS['init_path']
        force = context.CLIARGS['force']
        obj_skeleton = context.CLIARGS['{0}_skeleton'.format(galaxy_type)]

        obj_name = context.CLIARGS['{0}_name'.format(galaxy_type)]

        inject_data = dict(
            description='your description',
            ansible_plugin_list_dir=get_versioned_doclink('plugins/plugins.html'),
        )
        if galaxy_type == 'role':
            inject_data.update(dict(
                author='your name',
                company='your company (optional)',
                license='license (GPL-2.0-or-later, MIT, etc)',
                role_name=obj_name,
                role_type=context.CLIARGS['role_type'],
                issue_tracker_url='http://example.com/issue/tracker',
                repository_url='http://example.com/repository',
                documentation_url='http://docs.example.com',
                homepage_url='http://example.com',
                min_ansible_version=ansible_version[:3],  # x.y
            ))

            obj_path = os.path.join(init_path, obj_name)
        elif galaxy_type == 'collection':
            namespace, collection_name = obj_name.split('.', 1)

            inject_data.update(dict(
                namespace=namespace,
                collection_name=collection_name,
                version='1.0.0',
                readme='README.md',
                authors=['your name <*****@*****.**>'],
                license=['GPL-2.0-or-later'],
                repository='http://example.com/repository',
                documentation='http://docs.example.com',
                homepage='http://example.com',
                issues='http://example.com/issue/tracker',
            ))

            obj_path = os.path.join(init_path, namespace, collection_name)

        b_obj_path = to_bytes(obj_path, errors='surrogate_or_strict')

        if os.path.exists(b_obj_path):
            if os.path.isfile(obj_path):
                raise AnsibleError("- the path %s already exists, but is a file - aborting" % to_native(obj_path))
            elif not force:
                raise AnsibleError("- the directory %s already exists. "
                                   "You can use --force to re-initialize this directory,\n"
                                   "however it will reset any main.yml files that may have\n"
                                   "been modified there already." % to_native(obj_path))

        if obj_skeleton is not None:
            own_skeleton = False
            skeleton_ignore_expressions = C.GALAXY_ROLE_SKELETON_IGNORE
        else:
            own_skeleton = True
            obj_skeleton = self.galaxy.default_role_skeleton_path
            skeleton_ignore_expressions = ['^.*/.git_keep$']

        obj_skeleton = os.path.expanduser(obj_skeleton)
        skeleton_ignore_re = [re.compile(x) for x in skeleton_ignore_expressions]

        if not os.path.exists(obj_skeleton):
            raise AnsibleError("- the skeleton path '{0}' does not exist, cannot init {1}".format(
                to_native(obj_skeleton), galaxy_type)
            )

        template_env = Environment(loader=FileSystemLoader(obj_skeleton))

        # create role directory
        if not os.path.exists(b_obj_path):
            os.makedirs(b_obj_path)

        for root, dirs, files in os.walk(obj_skeleton, topdown=True):
            rel_root = os.path.relpath(root, obj_skeleton)
            rel_dirs = rel_root.split(os.sep)
            rel_root_dir = rel_dirs[0]
            if galaxy_type == 'collection':
                # A collection can contain templates in playbooks/*/templates and roles/*/templates
                in_templates_dir = rel_root_dir in ['playbooks', 'roles'] and 'templates' in rel_dirs
            else:
                in_templates_dir = rel_root_dir == 'templates'

            dirs[:] = [d for d in dirs if not any(r.match(d) for r in skeleton_ignore_re)]

            for f in files:
                filename, ext = os.path.splitext(f)

                if any(r.match(os.path.join(rel_root, f)) for r in skeleton_ignore_re):
                    continue
                elif galaxy_type == 'collection' and own_skeleton and rel_root == '.' and f == 'galaxy.yml.j2':
                    # Special use case for galaxy.yml.j2 in our own default collection skeleton. We build the options
                    # dynamically which requires special options to be set.

                    # The template expects a key named 'name', but the inject data uses 'collection_name'
                    # instead. We make a copy and rename the key back to 'name' just for this file.
                    template_data = inject_data.copy()
                    template_data['name'] = template_data.pop('collection_name')

                    meta_value = GalaxyCLI._get_skeleton_galaxy_yml(os.path.join(root, rel_root, f), template_data)
                    b_dest_file = to_bytes(os.path.join(obj_path, rel_root, filename), errors='surrogate_or_strict')
                    with open(b_dest_file, 'wb') as galaxy_obj:
                        galaxy_obj.write(to_bytes(meta_value, errors='surrogate_or_strict'))
                elif ext == ".j2" and not in_templates_dir:
                    src_template = os.path.join(rel_root, f)
                    dest_file = os.path.join(obj_path, rel_root, filename)
                    template_env.get_template(src_template).stream(inject_data).dump(dest_file, encoding='utf-8')
                else:
                    f_rel_path = os.path.relpath(os.path.join(root, f), obj_skeleton)
                    shutil.copyfile(os.path.join(root, f), os.path.join(obj_path, f_rel_path))

            for d in dirs:
                b_dir_path = to_bytes(os.path.join(obj_path, rel_root, d), errors='surrogate_or_strict')
                if not os.path.exists(b_dir_path):
                    os.makedirs(b_dir_path)

        display.display("- %s was created successfully" % obj_name)