Ejemplo n.º 1
0
    def _play_prereqs():
        """Build the loader/inventory/variable-manager trio shared by CLI runs.

        All configuration is read from ``context.CLIARGS``.  Returns the three
        base objects in the order (loader, inventory, variable_manager).
        """
        cli_args = context.CLIARGS

        # The DataLoader is needed by everything created below.
        file_loader = DataLoader()

        playbook_basedir = cli_args.get('basedir', False)
        if playbook_basedir:
            file_loader.set_basedir(playbook_basedir)
            add_all_plugin_dirs(playbook_basedir)
            set_collection_playbook_paths(playbook_basedir)

        # Configured default vault identities come first, then CLI-supplied ones.
        combined_vault_ids = list(C.DEFAULT_VAULT_IDENTITY_LIST) + list(cli_args['vault_ids'])

        secrets = CLI.setup_vault_secrets(
            file_loader,
            vault_ids=combined_vault_ids,
            vault_password_files=list(cli_args['vault_password_files']),
            ask_vault_pass=cli_args['ask_vault_pass'],
            auto_prompt=False)
        file_loader.set_vault_secrets(secrets)

        # Build the inventory; any host subset filtering happens in the caller.
        inventory = InventoryManager(loader=file_loader, sources=cli_args['inventory'])

        # One shared variable manager keeps a consistent view of global variables
        # throughout the run.
        variable_manager = VariableManager(
            loader=file_loader, inventory=inventory,
            version_info=CLI.version_info(gitinfo=False))

        return file_loader, inventory, variable_manager
Ejemplo n.º 2
0
    def run(self, playbooks, gather_facts='no'):
        """Run tasks in ansible-playbook mode.

        :param playbooks: a single playbook path or a list of playbook paths
        :param gather_facts: kept for interface compatibility; not read here
        :returns: ``self.callback`` when the executor result is not iterable,
            otherwise None after processing listing blocks
        :raises AnsibleError: re-raised after cleaning up executor temp files
        """
        # Normalize to a list *before* indexing: the previous code indexed
        # playbooks[0] first, which would take the first character of a bare
        # string path.
        playbooks = playbooks if isinstance(playbooks, list) else [playbooks]

        # C.DEFAULT_ROLES_PATH = self.options.roles_path
        b_playbook_dir = os.path.dirname(playbooks[0])
        add_all_plugin_dirs(b_playbook_dir)
        set_collection_playbook_paths([b_playbook_dir])
        loader, inventory, variable_manager = self._play_prereqs(self.options)

        # NOTE(review): `data` is built but never read afterwards; kept in case
        # host.serialize() side effects matter — confirm and drop.  Also note
        # each group keeps only its *last* host's info (dict key is the group
        # name, overwritten per host) — looks unintended; verify.
        groups = inventory.groups
        data = {}
        for name, group in groups.items():
            if name == 'all':
                continue
            for host in group.get_hosts():
                host_info = host.serialize()
                del host_info['groups']
                data[name] = host_info

        executor = PlaybookExecutor(playbooks=playbooks,
                                    inventory=inventory,
                                    variable_manager=variable_manager,
                                    loader=loader,
                                    passwords=self.passwords)
        # Route executor stdout through our callback when one is configured.
        if executor._tqm and self.callback:
            executor._tqm._stdout_callback = self.callback
        try:
            results = executor.run()
            if not isinstance(results, Iterable):
                return self.callback
            for p in results:
                for idx, play in enumerate(p['plays']):
                    # Resolve relative paths against the play's own directory.
                    if play._included_path is not None:
                        loader.set_basedir(play._included_path)
                    else:
                        pb_dir = os.path.realpath(
                            os.path.dirname(p['playbook']))
                        loader.set_basedir(pb_dir)
                    if self.options['listtags'] or self.options['listtasks']:
                        all_vars = variable_manager.get_vars()
                        for block in play.compile():
                            block = block.filter_tagged_tasks(all_vars)
                            if not block.has_tasks():
                                continue
                            self._process_block(block)

        except AnsibleError:
            executor._tqm.cleanup()
            # Fixed: was self.loader, an attribute never assigned on this
            # object; clean up the local loader created above instead.
            loader.cleanup_all_tmp_files()
            # Bare raise preserves the original traceback.
            raise
Ejemplo n.º 3
0
def _set_collections_basedir(basedir: str):
    """Register *basedir* as the playbook path for Ansible's collection loader.

    Uses the Ansible 2.10+ ``AnsibleCollectionConfig`` API when available and
    falls back to the 2.8/2.9 ``set_collection_playbook_paths`` helper.
    """
    try:
        # noqa: # pylint:disable=cyclic-import,import-outside-toplevel
        from ansible.utils.collection_loader import AnsibleCollectionConfig
    except ImportError:
        # Older Ansible (2.8 / 2.9) exposes a setter function instead.
        # noqa: # pylint:disable=cyclic-import,import-outside-toplevel
        from ansible.utils.collection_loader import set_collection_playbook_paths

        set_collection_playbook_paths(basedir)
    else:
        # Ansible 2.10+ path: plain attribute assignment on the config object.
        AnsibleCollectionConfig.playbook_paths = basedir
Ejemplo n.º 4
0
    def run(self):
        """Entry point for ansible-doc: list plugins, dump metadata, or show
        documentation for specific plugins, as JSON or paged text.

        :returns: 0 on success
        :raises AnsibleOptionsError: for unknown plugin types or missing args
        """

        super(DocCLI, self).run()

        plugin_type = context.CLIARGS['type']

        do_json = context.CLIARGS['json_format']

        if plugin_type in C.DOCUMENTABLE_PLUGINS:
            loader = getattr(plugin_loader, '%s_loader' % plugin_type)
        else:
            raise AnsibleOptionsError(
                "Unknown or undocumentable plugin type: %s" % plugin_type)

        # add to plugin paths from command line
        basedir = context.CLIARGS['basedir']
        if basedir:
            set_collection_playbook_paths(basedir)
            loader.add_directory(basedir, with_subdir=True)
        if context.CLIARGS['module_path']:
            for path in context.CLIARGS['module_path']:
                if path:
                    loader.add_directory(path)

        # save only top level paths for errors
        search_paths = DocCLI.print_paths(loader)
        loader._paths = None  # reset so we can use subdirs below

        # list plugins names and filepath for type
        if context.CLIARGS['list_files']:
            paths = loader._get_paths()
            for path in paths:
                self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))

            plugins = self._get_plugin_list_filenames(loader)
            if do_json:
                jdump(plugins)
            else:
                # format for user: name column padded to the longest plugin name
                displace = max(len(x) for x in self.plugin_list)
                linelimit = display.columns - displace - 5
                text = []

                for plugin in plugins.keys():
                    filename = plugins[plugin]
                    text.append(
                        "%-*s %-*.*s" %
                        (displace, plugin, linelimit, len(filename), filename))

                DocCLI.pager("\n".join(text))

        # list file plugins for type (does not read docs, very fast)
        elif context.CLIARGS['list_dir']:
            paths = loader._get_paths()
            for path in paths:
                self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))

            descs = self._get_plugin_list_descriptions(loader)
            if do_json:
                jdump(descs)
            else:
                displace = max(len(x) for x in self.plugin_list)
                linelimit = display.columns - displace - 5
                text = []
                deprecated = []
                for plugin in descs.keys():

                    desc = DocCLI.tty_ify(descs[plugin])

                    if len(desc) > linelimit:
                        desc = desc[:linelimit] + '...'

                    if plugin.startswith('_'):  # Handle deprecated
                        deprecated.append(
                            "%-*s %-*.*s" %
                            (displace, plugin[1:], linelimit, len(desc), desc))
                    else:
                        text.append(
                            "%-*s %-*.*s" %
                            (displace, plugin, linelimit, len(desc), desc))

                # BUG FIX: this block used to sit inside the loop above, which
                # appended a fresh "DEPRECATED:" header plus all entries
                # collected so far once per remaining plugin.  Emit the
                # deprecated section exactly once, after the loop.
                if len(deprecated) > 0:
                    text.append("\nDEPRECATED:")
                    text.extend(deprecated)

                DocCLI.pager("\n".join(text))

        # dump plugin desc/metadata as JSON
        elif context.CLIARGS['dump']:
            plugin_data = {}
            plugin_names = DocCLI.get_all_plugins_of_type(plugin_type)
            for plugin_name in plugin_names:
                plugin_info = DocCLI.get_plugin_metadata(
                    plugin_type, plugin_name)
                if plugin_info is not None:
                    plugin_data[plugin_name] = plugin_info

            jdump(plugin_data)

        else:
            # display specific plugin docs
            if len(context.CLIARGS['args']) == 0:
                raise AnsibleOptionsError("Incorrect options passed")

            # process command line list
            if do_json:
                dump = {}
                for plugin in context.CLIARGS['args']:
                    doc, plainexamples, returndocs, metadata = DocCLI._get_plugin_doc(
                        plugin, loader, plugin_type, search_paths)
                    try:
                        # NOTE(review): yaml.load without an explicit Loader is
                        # deprecated in PyYAML and unsafe on untrusted input;
                        # returndocs comes from plugin files — consider
                        # yaml.safe_load here.
                        returndocs = yaml.load(returndocs)
                    except Exception:
                        # best effort: leave returndocs as the raw string
                        pass
                    if doc:
                        dump[plugin] = {
                            'doc': doc,
                            'examples': plainexamples,
                            'return': returndocs,
                            'metadata': metadata
                        }
                jdump(dump)
            else:
                text = ''
                for plugin in context.CLIARGS['args']:
                    textret = DocCLI.format_plugin_doc(plugin, loader,
                                                       plugin_type,
                                                       search_paths)

                    if textret:
                        text += textret

                if text:
                    DocCLI.pager(text)

        return 0
Ejemplo n.º 5
0
    def run(self):
        """Entry point for ansible-doc: list plugins, dump metadata, or show
        documentation for the plugins named on the command line.

        :returns: 0 on success
        :raises AnsibleOptionsError: for unknown plugin types or missing args
        :raises AnsibleError: when a plugin's documentation cannot be parsed
        """

        super(DocCLI, self).run()

        plugin_type = context.CLIARGS['type']

        do_json = context.CLIARGS['json_format']

        if plugin_type in C.DOCUMENTABLE_PLUGINS:
            loader = getattr(plugin_loader, '%s_loader' % plugin_type)
        else:
            raise AnsibleOptionsError(
                "Unknown or undocumentable plugin type: %s" % plugin_type)

        # add to plugin paths from command line
        basedir = context.CLIARGS['basedir']
        if basedir:
            set_collection_playbook_paths(basedir)
            loader.add_directory(basedir, with_subdir=True)
        if context.CLIARGS['module_path']:
            for path in context.CLIARGS['module_path']:
                if path:
                    loader.add_directory(path)

        # save only top level paths for errors
        search_paths = DocCLI.print_paths(loader)
        loader._paths = None  # reset so we can use subdirs below

        # list plugins names and filepath for type
        if context.CLIARGS['list_files']:
            paths = loader._get_paths()
            for path in paths:
                self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))

            plugins = self._get_plugin_list_filenames(loader)
            if do_json:
                jdump(plugins)
            else:
                # format for user: name column padded to the longest plugin name
                displace = max(len(x) for x in self.plugin_list)
                linelimit = display.columns - displace - 5
                text = []

                for plugin in plugins.keys():
                    filename = plugins[plugin]
                    text.append(
                        "%-*s %-*.*s" %
                        (displace, plugin, linelimit, len(filename), filename))

                DocCLI.pager("\n".join(text))

        # list file plugins for type (does not read docs, very fast)
        elif context.CLIARGS['list_dir']:
            paths = loader._get_paths()
            for path in paths:
                self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))

            descs = self._get_plugin_list_descriptions(loader)
            if do_json:
                jdump(descs)
            else:
                displace = max(len(x) for x in self.plugin_list)
                linelimit = display.columns - displace - 5
                text = []
                deprecated = []
                for plugin in descs.keys():

                    desc = DocCLI.tty_ify(descs[plugin])

                    if len(desc) > linelimit:
                        desc = desc[:linelimit] + '...'

                    if plugin.startswith('_'):  # Handle deprecated
                        deprecated.append(
                            "%-*s %-*.*s" %
                            (displace, plugin[1:], linelimit, len(desc), desc))
                    else:
                        text.append(
                            "%-*s %-*.*s" %
                            (displace, plugin, linelimit, len(desc), desc))

                # BUG FIX: this block used to sit inside the loop above, which
                # appended a fresh "DEPRECATED:" header plus all entries
                # collected so far once per remaining plugin.  Emit the
                # deprecated section exactly once, after the loop.
                if len(deprecated) > 0:
                    text.append("\nDEPRECATED:")
                    text.extend(deprecated)

                DocCLI.pager("\n".join(text))

        # dump plugin desc/metadata as JSON
        elif context.CLIARGS['dump']:
            plugin_data = {}
            plugin_names = DocCLI.get_all_plugins_of_type(plugin_type)
            for plugin_name in plugin_names:
                plugin_info = DocCLI.get_plugin_metadata(
                    plugin_type, plugin_name)
                if plugin_info is not None:
                    plugin_data[plugin_name] = plugin_info

            jdump(plugin_data)

        else:
            # display specific plugin docs
            if len(context.CLIARGS['args']) == 0:
                raise AnsibleOptionsError("Incorrect options passed")

            # get the docs for plugins in the command line list
            plugin_docs = {}
            for plugin in context.CLIARGS['args']:
                try:
                    doc, plainexamples, returndocs, metadata = DocCLI._get_plugin_doc(
                        plugin, loader, search_paths)
                except PluginNotFound:
                    display.warning("%s %s not found in:\n%s\n" %
                                    (plugin_type, plugin, search_paths))
                    continue
                except RemovedPlugin:
                    display.warning("%s %s has been removed\n" %
                                    (plugin_type, plugin))
                    continue
                except Exception as e:
                    display.vvv(traceback.format_exc())
                    raise AnsibleError(
                        "%s %s missing documentation (or could not parse"
                        " documentation): %s\n" %
                        (plugin_type, plugin, to_native(e)))

                if not doc:
                    # The doc section existed but was empty
                    continue

                plugin_docs[plugin] = {
                    'doc': doc,
                    'examples': plainexamples,
                    'return': returndocs,
                    'metadata': metadata
                }

            if do_json:
                # Some changes to how json docs are formatted
                for plugin, doc_data in plugin_docs.items():
                    try:
                        # NOTE(review): yaml.load without an explicit Loader is
                        # deprecated in PyYAML and unsafe on untrusted input;
                        # consider yaml.safe_load for the return docs.
                        doc_data['return'] = yaml.load(doc_data['return'])
                    except Exception:
                        # best effort: keep the raw string on parse failure
                        pass

                jdump(plugin_docs)

            else:
                # Some changes to how plain text docs are formatted
                text = []
                for plugin, doc_data in plugin_docs.items():
                    textret = DocCLI.format_plugin_doc(plugin, plugin_type,
                                                       doc_data['doc'],
                                                       doc_data['examples'],
                                                       doc_data['return'],
                                                       doc_data['metadata'])
                    if textret:
                        text.append(textret)

                if text:
                    DocCLI.pager(''.join(text))

        return 0
Ejemplo n.º 6
0
    def run(self):
        """Entry point for ansible-playbook.

        Validates every playbook path, gathers connection/become passwords
        when a real run is requested, builds the loader/inventory/variable
        manager, and executes the playbooks via PlaybookExecutor.  When a
        listing mode (--list-hosts/--list-tasks/--list-tags) is active, the
        executor returns a list and this method renders the listing instead.

        :returns: 0 after rendering a listing, otherwise the executor's result
        :raises AnsibleError: when a playbook path is missing or not a file
        """

        super(PlaybookCLI, self).run()

        # Note: slightly wrong, this is written so that implicit localhost
        # manages passwords
        sshpass = None
        becomepass = None
        passwords = {}

        # initial error check, to make sure all specified playbooks are accessible
        # before we start running anything through the playbook executor

        b_playbook_dirs = []
        for playbook in context.CLIARGS['args']:
            if not os.path.exists(playbook):
                raise AnsibleError("the playbook: %s could not be found" % playbook)
            if not (os.path.isfile(playbook) or stat.S_ISFIFO(os.stat(playbook).st_mode)):
                raise AnsibleError("the playbook: %s does not appear to be a file" % playbook)

            # work in bytes to survive non-UTF-8 filesystem paths
            b_playbook_dir = os.path.dirname(os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict')))
            # load plugins from all playbooks in case they add callbacks/inventory/etc
            add_all_plugin_dirs(b_playbook_dir)

            b_playbook_dirs.append(b_playbook_dir)

        set_collection_playbook_paths(b_playbook_dirs)

        # don't deal with privilege escalation or passwords when we don't need to
        if not (context.CLIARGS['listhosts'] or context.CLIARGS['listtasks'] or
                context.CLIARGS['listtags'] or context.CLIARGS['syntax']):
            (sshpass, becomepass) = self.ask_passwords()
            passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

        # create base objects
        loader, inventory, variable_manager = self._play_prereqs()

        # The implicit localhost (which is not returned in list_hosts()) is taken
        # into account for warning if inventory is empty.  But it can't be taken
        # into account for checking if limit doesn't match any hosts.  Instead we
        # don't worry about limit if only implicit localhost was in inventory to
        # start with.
        #
        # Fix this when we rewrite inventory by making localhost a real host (and thus show up in list_hosts())
        CLI.get_host_list(inventory, context.CLIARGS['subset'])

        # flush fact cache if requested
        if context.CLIARGS['flush_cache']:
            self._flush_cache(inventory, variable_manager)

        # create the playbook executor, which manages running the plays via a task queue manager
        pbex = PlaybookExecutor(playbooks=context.CLIARGS['args'], inventory=inventory,
                                variable_manager=variable_manager, loader=loader,
                                passwords=passwords)

        results = pbex.run()

        # A list result means a listing mode was requested; render it here.
        if isinstance(results, list):
            for p in results:

                display.display('\nplaybook: %s' % p['playbook'])
                for idx, play in enumerate(p['plays']):
                    # resolve relative paths against the play's source directory
                    if play._included_path is not None:
                        loader.set_basedir(play._included_path)
                    else:
                        pb_dir = os.path.realpath(os.path.dirname(p['playbook']))
                        loader.set_basedir(pb_dir)

                    msg = "\n  play #%d (%s): %s" % (idx + 1, ','.join(play.hosts), play.name)
                    mytags = set(play.tags)
                    msg += '\tTAGS: [%s]' % (','.join(mytags))

                    if context.CLIARGS['listhosts']:
                        playhosts = set(inventory.get_hosts(play.hosts))
                        msg += "\n    pattern: %s\n    hosts (%d):" % (play.hosts, len(playhosts))
                        for host in playhosts:
                            msg += "\n      %s" % host

                    display.display(msg)

                    all_tags = set()
                    if context.CLIARGS['listtags'] or context.CLIARGS['listtasks']:
                        taskmsg = ''
                        if context.CLIARGS['listtasks']:
                            taskmsg = '    tasks:\n'

                        def _process_block(b):
                            # Recursively format the tasks in a block; as a side
                            # effect, collects every task's tags into the
                            # enclosing `all_tags` set.
                            taskmsg = ''
                            for task in b.block:
                                if isinstance(task, Block):
                                    taskmsg += _process_block(task)
                                else:
                                    # meta tasks are internal; skip in listings
                                    if task.action == 'meta':
                                        continue

                                    all_tags.update(task.tags)
                                    if context.CLIARGS['listtasks']:
                                        cur_tags = list(mytags.union(set(task.tags)))
                                        cur_tags.sort()
                                        if task.name:
                                            taskmsg += "      %s" % task.get_name()
                                        else:
                                            taskmsg += "      %s" % task.action
                                        taskmsg += "\tTAGS: [%s]\n" % ', '.join(cur_tags)

                            return taskmsg

                        all_vars = variable_manager.get_vars(play=play)
                        for block in play.compile():
                            # only blocks that still have tasks after tag filtering
                            block = block.filter_tagged_tasks(all_vars)
                            if not block.has_tasks():
                                continue
                            taskmsg += _process_block(block)

                        if context.CLIARGS['listtags']:
                            cur_tags = list(mytags.union(all_tags))
                            cur_tags.sort()
                            taskmsg += "      TASK TAGS: [%s]\n" % ', '.join(cur_tags)

                        display.display(taskmsg)

            return 0
        else:
            return results
# NOTE(review): this fragment appears to be script-level code lifted from
# PlaybookCLI.run(); `b_playbook_dirs` (list) and `pb_cli` are initialized
# outside the visible chunk — confirm against the full file.
for playbook in context.CLIARGS['args']:
    # Fail fast if any named playbook is missing or not a regular file/FIFO.
    if not os.path.exists(playbook):
        raise AnsibleError("the playbook: %s could not be found" % playbook)
    if not (os.path.isfile(playbook)
            or stat.S_ISFIFO(os.stat(playbook).st_mode)):
        raise AnsibleError("the playbook: %s does not appear to be a file" %
                           playbook)

    # work in bytes to survive non-UTF-8 filesystem paths
    b_playbook_dir = os.path.dirname(
        os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict')))
    # load plugins from all playbooks in case they add callbacks/inventory/etc
    add_all_plugin_dirs(b_playbook_dir)

    b_playbook_dirs.append(b_playbook_dir)

# Register every playbook directory with the collection loader.
set_collection_playbook_paths(b_playbook_dirs)

# Warn when the first playbook lives inside a collection, and make that
# collection the default for unqualified references.
playbook_collection = get_collection_name_from_path(b_playbook_dirs[0])

if playbook_collection:
    display.warning(
        "running playbook inside collection {0}".format(playbook_collection))
    AnsibleCollectionLoader().set_default_collection(playbook_collection)

# don't deal with privilege escalation or passwords when we don't need to
if not (context.CLIARGS['listhosts'] or context.CLIARGS['listtasks']
        or context.CLIARGS['listtags'] or context.CLIARGS['syntax']):
    (sshpass, becomepass) = pb_cli.ask_passwords()
    passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

# create base objects
Ejemplo n.º 8
0
    def run(self):
        """Entry point for ansible-doc: list plugins, dump metadata as JSON,
        or page the documentation for the plugins named on the command line.

        :returns: 0 on success
        :raises AnsibleOptionsError: for unknown plugin types or missing args
        """

        super(DocCLI, self).run()

        plugin_type = context.CLIARGS['type']

        # Reject plugin types that have no documentation support.
        if plugin_type not in C.DOCUMENTABLE_PLUGINS:
            raise AnsibleOptionsError("Unknown or undocumentable plugin type: %s" % plugin_type)
        loader = getattr(plugin_loader, '%s_loader' % plugin_type)

        # Extend the loader's search path with directories given on the CLI.
        basedir = context.CLIARGS['basedir']
        if basedir:
            set_collection_playbook_paths(basedir)
            loader.add_directory(basedir, with_subdir=True)
        module_paths = context.CLIARGS['module_path']
        if module_paths:
            for extra_path in module_paths:
                if extra_path:
                    loader.add_directory(extra_path)

        # Remember only the top-level search paths for error reporting, then
        # reset the loader so subdirectories are rescanned below.
        search_paths = DocCLI.print_paths(loader)
        loader._paths = None

        # list plugin names together with their source file paths
        if context.CLIARGS['list_files']:
            for search_dir in loader._get_paths():
                self.plugin_list.update(DocCLI.find_plugins(search_dir, plugin_type))

            DocCLI.pager(self.get_plugin_list_filenames(loader))
            return 0

        # list plugin names with short descriptions
        if context.CLIARGS['list_dir']:
            for search_dir in loader._get_paths():
                self.plugin_list.update(DocCLI.find_plugins(search_dir, plugin_type))

            DocCLI.pager(self.get_plugin_list_text(loader))
            return 0

        # machine-readable metadata for every plugin of this type
        if context.CLIARGS['json_dump']:
            plugin_data = {}
            for plugin_name in DocCLI.get_all_plugins_of_type(plugin_type):
                plugin_info = DocCLI.get_plugin_metadata(plugin_type, plugin_name)
                if plugin_info is not None:
                    plugin_data[plugin_name] = plugin_info

            DocCLI.pager(json.dumps(plugin_data, sort_keys=True, indent=4))

            return 0

        if len(context.CLIARGS['args']) == 0:
            raise AnsibleOptionsError("Incorrect options passed")

        # default mode: render docs for each plugin named on the command line
        rendered = []
        for plugin in context.CLIARGS['args']:
            doc_text = DocCLI.format_plugin_doc(plugin, loader, plugin_type, search_paths)

            if doc_text:
                rendered.append(doc_text)

        if rendered:
            DocCLI.pager(''.join(rendered))

        return 0