Esempio n. 1
0
    def run(self):
        """Build the playbook graph, render it to SVG and post-process the result.

        Returns the path of the generated SVG file.
        """
        super(GrapherCLI, self).run()

        loader, inventory, variable_manager = CLI._play_prereqs()
        # Display is a singleton: Display() returns the shared instance, not a
        # new one. This is why we set the verbosity here -- someone may have
        # set it before us.
        display = Display()
        display.verbosity = self.options.verbosity

        grapher = PlaybookGrapher(
            data_loader=loader,
            inventory_manager=inventory,
            variable_manager=variable_manager,
            display=display,
            tags=self.options.tags,
            skip_tags=self.options.skip_tags,
            playbook_filename=self.options.playbook_filename,
            include_role_tasks=self.options.include_role_tasks)

        grapher.make_graph()

        svg_path = grapher.render_graph(self.options.output_filename,
                                        self.options.save_dot_file)
        post_processor = PostProcessor(svg_path=svg_path)
        post_processor.post_process(
            graph_representation=grapher.graph_representation)
        post_processor.write()

        # Fixed: the message used to read "fThe graph ..." because of a stray
        # 'f' left inside the f-string literal.
        display.display(f"The graph has been exported to {svg_path}")

        return svg_path
Esempio n. 2
0
def test_Display_display_lock_fork(monkeypatch):
    """When a final queue is set, display() forwards to it without taking the lock."""
    fake_lock = MagicMock()
    disp = Display()
    monkeypatch.setattr(disp, '_final_q', MagicMock())
    monkeypatch.setattr(disp, '_lock', fake_lock)
    disp.display('foo')
    fake_lock.__enter__.assert_not_called()
Esempio n. 3
0
def cleanup_tmp_file(path, warn=False):
    """
    Removes temporary file or directory. Optionally display a warning if unable
    to remove the file or directory.

    :arg path: Path to file or directory to be removed
    :kwarg warn: Whether or not to display a warning when the file or directory
        cannot be removed
    """
    try:
        if not os.path.exists(path):
            return
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            elif os.path.isfile(path):
                os.unlink(path)
        except Exception as e:
            if warn:
                # Importing here to avoid circular import
                from ansible.utils.display import Display
                Display().display(
                    u'Unable to remove temporary file {0}'.format(to_text(e)))
    except Exception:
        # Best effort only -- cleanup must never raise.
        pass
Esempio n. 4
0
 def test():
     """display() with a queue set must forward the message to the queue."""
     q = MagicMock()
     disp = Display()
     disp.set_queue(q)
     disp.display('foo')
     q.send_display.assert_called_once_with(
         'foo', newline=True, color=None, stderr=False, screen_only=False,
         log_only=False
     )
def test_display_basic_message(capsys, mocker):
    """A plain display() call writes the message plus newline to stdout only."""
    # Disable logging
    mocker.patch('ansible.utils.display.logger', return_value=None)

    disp = Display()
    disp.display(u'Some displayed message')
    captured_out, captured_err = capsys.readouterr()
    assert captured_out == 'Some displayed message\n'
    assert captured_err == ''
Esempio n. 6
0
 def display(self,
             msg,
             color=None,
             stderr=False,
             screen_only=False,
             log_only=False):
     """Forward *msg* to the original Display implementation, then mirror it
     to the module logger at DEBUG level."""
     forwarded = dict(color=color,
                      stderr=stderr,
                      screen_only=screen_only,
                      log_only=log_only)
     OrigDisplay.display(self, msg, **forwarded)
     logger.debug(msg)
    def _hide_sensitive_values(self, result):
        """Decrypt the configured vault vars files and register every value in
        them as sensitive, then censor those values in ``result``.

        NOTE(review): when the command line is not valid utf-8 this method
        sets ``exit_code = 6`` but never returns or raises, so execution
        falls through with the un-decoded ``args`` -- looks like a bug.
        """

        display = Display()

        # Derive the matching CLI class from our own executable name,
        # e.g. "ansible-playbook" -> sub "playbook" -> "PlaybookCLI".
        me = os.path.basename(sys.argv[0])
        target = me.split('-')
        sub = target[1]
        myclass = "%sCLI" % sub.capitalize()
        mycli = getattr(__import__("ansible.cli.%s" % sub, fromlist=[myclass]),
                        myclass)

        try:
            args = [to_text(a, errors='surrogate_or_strict') for a in sys.argv]
        except UnicodeError:
            display.error(
                'Command line args are not in utf-8, unable to continue.  Ansible currently only understands utf-8'
            )
            display.display(u"The full traceback was:\n\n%s" %
                            to_text(traceback.format_exc()))
            exit_code = 6

        # Re-parse the command line so the vault options can be read below.
        cli = mycli(args)
        cli.parse()

        # Get vault_password_file from CLI
        if len(cli.options.vault_password_files) != 0:
            self.vault_password_file = cli.options.vault_password_files[0]
        # Get vault_password_file from ansible.cfg
        else:
            self.vault_password_file = self.get_option('vault_password_file')

        # Decrypt each configured vars file with ansible-vault and collect
        # every value found as sensitive.
        for encrypted_vars_file in self.encrypted_vars_files_list:
            vars_file_decrypted = subprocess.check_output(
                [
                    "ansible-vault", "view", encrypted_vars_file,
                    "--vault-password-file", self.vault_password_file
                ],
                universal_newlines=True)

            sensitive_values_dict = yaml.safe_load(vars_file_decrypted)

            for v in sensitive_values_dict:
                CallbackModule.SENSITIVE_VALUES.add(sensitive_values_dict[v])

        self.check_sensitive_values(result._result)
Esempio n. 8
0
 def __init__(self):
     """
     Creates the instance and sets the logger.
     """
     self.log = logging.getLogger('transport')
     display = Display()
     # TODO: Make verbosity more configurable
     if logging.getLevelName(self.log.level) == 'DEBUG':
         display.verbosity = 5
     else:
         display.verbosity = 1
     # replace Displays display method with our own
     display.display = lambda msg, *a, **k: self.log.info(msg)
     super(LogForward, self).__init__(display)
Esempio n. 9
0
 def __init__(self):
     """
     Creates the instance and sets the logger.

     Verbosity is forced to 1 (or 5 when the 'transport' logger is at
     DEBUG), and Display.display is rerouted into that logger.
     """
     display = Display()
     self.log = logging.getLogger('transport')
     # TODO: Make verbosity more configurable
     display.verbosity = 1
     if logging.getLevelName(self.log.level) == 'DEBUG':
         display.verbosity = 5
     # replace Displays display method with our own so that everything
     # Ansible would print is forwarded to the 'transport' logger instead.
     display.display = lambda msg, *a, **k: self.log.info(msg)
     super(LogForward, self).__init__(display)
Esempio n. 10
0
class CallbackModule(CallbackModule_default):
    """Stdout callback that censors ``--token <value>`` occurrences in
    displayed results, rewriting them to ``--token ****``.

    Fixed for Python 3: the original used ``dict.iteritems`` and the
    ``unicode`` builtin, which do not exist on Python 3 (this file already
    uses f-strings, so it targets Python 3).
    """

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'stdout'
    CALLBACK_NAME = 'protect_data'

    def __init__(self):
        # From CallbackModule
        self._display = Display()
        self.disabled = False
        self.super_ref = super(CallbackModule, self)
        self.super_ref.__init__()

    def _get_item(self, result):
        """Return the loop item label for *result* with --token values masked."""
        if result.get('_ansible_no_log', False):
            item = "(censored due to no_log)"
        elif result.get('_ansible_item_label', False):
            item = result.get('_ansible_item_label')
        else:
            item = result.get('item', None)

        # NOTE(review): re.sub requires a string; a non-dict, non-string item
        # (e.g. None or a list) would raise TypeError here -- confirm callers.
        if not isinstance(item, dict):
            item = re.sub(r'(.*)\s--token\s([\w.-]+)\s(.*)',
                          r'\1 --token **** \3', item)
        return item

    def v2_runner_item_on_ok(self, result):
        """Print a per-item 'ok'/'changed' line with sensitive data censored."""
        delegated_vars = result._result.get('_ansible_delegated_vars', None)
        self._clean_results(result._result, result._task.action)
        if isinstance(result._task, TaskInclude):
            return
        elif result._result.get('changed', False):
            msg = 'changed'
            color = C.COLOR_CHANGED
        else:
            msg = 'ok'
            color = C.COLOR_OK

        if delegated_vars:
            msg += ": [%s -> %s]" % (result._host.get_name(),
                                     delegated_vars['ansible_host'])
        else:
            msg += ": [%s]" % result._host.get_name()

        msg += " => (item=%s)" % (self._get_item(result._result), )

        # Dump the full result when verbose (unless explicitly overridden).
        if (self._display.verbosity > 0
                or '_ansible_verbose_always' in result._result
            ) and '_ansible_verbose_override' not in result._result:
            msg += " => %s" % self._dump_results(result._result)
        self._display.display(msg, color=color)

    def v2_runner_item_on_failed(self, result):
        """Print a per-item failure line with sensitive data censored."""
        delegated_vars = result._result.get('_ansible_delegated_vars', None)
        self._clean_results(result._result, result._task.action)
        self._handle_exception(result._result)

        msg = "failed: "
        if delegated_vars:
            msg += "[%s -> %s]" % (result._host.get_name(),
                                   delegated_vars['ansible_host'])
        else:
            msg += "[%s]" % (result._host.get_name())

        self._handle_warnings(result._result)
        self._display.display(msg + " (item=%s) => %s" % (self._get_item(
            result._result), self._dump_results(result._result)),
                              color=C.COLOR_ERROR)

    def hide_keyword(self, result):
        """Return a copy of *result* with --token values masked in every
        string value; non-string values are copied unchanged."""
        ret = {}
        # Python 3 fix: iteritems() -> items(), unicode -> str.
        for key, value in result.items():
            if isinstance(value, str):
                ret[key] = re.sub(r'(.*)\s--token\s([\w.-]+)\s(.*)',
                                  r'\1 --token **** \3', value)
            else:
                ret[key] = value
        return ret

    def _dump_results(self,
                      result,
                      indent=None,
                      sort_keys=True,
                      keep_invocation=False):
        """Dump results through the parent callback after masking tokens."""
        return super(CallbackModule,
                     self)._dump_results(self.hide_keyword(result), indent,
                                         sort_keys, keep_invocation)
 def display(*args, **kwargs):
     """Proxy all arguments to the (singleton) Display instance's display()."""
     Display().display(*args, **kwargs)
Esempio n. 12
0
class CLI(object):
    """ code behind bin/ansible* programs """

    VALID_ACTIONS = ["No Actions"]

    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL = re.compile(r"U\(([^)]+)\)")
    _CONST = re.compile(r"C\(([^)]+)\)")

    PAGER = "less"
    LESS_OPTS = "FRSX"  # -F (quit-if-one-screen) -R (allow raw ansi control chars)
    # -S (chop long lines) -X (disable termcap init and de-init)

    def __init__(self, args, display=None):
        """
        Base init method for all command line programs
        """
        self.args = args
        self.options = None
        self.parser = None
        self.action = None

        # Fall back to the shared Display singleton when none is supplied.
        self.display = Display() if display is None else display

    def set_action(self):
        """
        Get the action the user wants to execute from the sys argv list.
        """
        # Take the first recognized action and remove it from the arg list.
        for idx, candidate in enumerate(self.args):
            if candidate in self.VALID_ACTIONS:
                self.action = candidate
                del self.args[idx]
                break

        if not self.action:
            raise AnsibleOptionsError("Missing required action")

    def execute(self):
        """
        Actually runs a child defined method using the execute_<action> pattern
        """
        handler = getattr(self, "execute_%s" % self.action)
        handler()

    def parse(self):
        # Subclasses must override this to build self.parser / self.options.
        raise Exception("Need to implement!")

    def run(self):
        """Report which config file is in use when running verbosely.

        Matches the other CLI.run implementation in this file: when no config
        file was found, say so instead of printing "Using None as config file".
        """
        if self.options.verbosity > 0:
            if C.CONFIG_FILE:
                self.display.display("Using %s as config file" % C.CONFIG_FILE)
            else:
                self.display.display("No config file found; using defaults")

    @staticmethod
    def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
        """ prompt for vault password and/or password change

        Returns a (vault_pass, new_vault_pass) tuple of byte strings.

        NOTE(review): the two getpass lines below look scrape-corrupted --
        the prompt strings were mangled into `"..."******"..."` sequences,
        which is not valid Python. Restore them from upstream before running.
        """

        vault_pass = None
        new_vault_pass = None

        try:
            if ask_vault_pass:
                vault_pass = getpass.getpass(prompt="Vault password: "******"Confirm Vault password: "******"Passwords do not match")

            if ask_new_vault_pass:
                new_vault_pass = getpass.getpass(prompt="New Vault password: "******"Confirm New Vault password: "******"Passwords do not match")
        except EOFError:
            pass

        # enforce no newline chars at the end of passwords
        if vault_pass:
            vault_pass = to_bytes(vault_pass, errors="strict", nonstring="simplerepr").strip()
        if new_vault_pass:
            new_vault_pass = to_bytes(new_vault_pass, errors="strict", nonstring="simplerepr").strip()

        return vault_pass, new_vault_pass

    def ask_passwords(self):
        """ prompt for connection and become passwords if needed

        Returns an (sshpass, becomepass) tuple.

        NOTE(review): the lines inside the try block look scrape-corrupted
        (`"..."******...` sequences are not valid Python). Restore them from
        upstream before running.
        """

        op = self.options
        sshpass = None
        becomepass = None
        become_prompt = ""

        try:
            if op.ask_pass:
                sshpass = getpass.getpass(prompt="SSH password: "******"%s password[defaults to SSH password]: " % op.become_method.upper()
                if sshpass:
                    sshpass = to_bytes(sshpass, errors="strict", nonstring="simplerepr")
            else:
                become_prompt = "%s password: "******"":
                    becomepass = sshpass
                if becomepass:
                    becomepass = to_bytes(becomepass)
        except EOFError:
            pass

        return (sshpass, becomepass)

    def normalize_become_options(self):
        """ this keeps backwards compatibility with sudo/su self.options """
        opts = self.options
        opts.become_ask_pass = (opts.become_ask_pass or opts.ask_sudo_pass
                                or opts.ask_su_pass or C.DEFAULT_BECOME_ASK_PASS)
        opts.become_user = (opts.become_user or opts.sudo_user
                            or opts.su_user or C.DEFAULT_BECOME_USER)

        # Map the deprecated sudo/su flags onto become, unless become was
        # requested explicitly.
        if not opts.become:
            if opts.sudo:
                opts.become = True
                opts.become_method = "sudo"
            elif opts.su:
                opts.become = True
                opts.become_method = "su"

    def validate_conflicts(self, vault_opts=False, runas_opts=False):
        """ check for conflicting options """
        op = self.options

        if vault_opts:
            # Check for vault related conflicts
            if op.ask_vault_pass and op.vault_password_file:
                self.parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")

        if runas_opts:
            # Check for privilege escalation conflicts: at most one of the
            # su, sudo and become option families may be used at a time.
            uses_su = op.su or op.su_user or op.ask_su_pass
            uses_sudo = op.sudo or op.sudo_user or op.ask_sudo_pass
            uses_become = op.become or op.become_user or op.become_ask_pass

            if (uses_su and uses_sudo) or (uses_su and uses_become) or (uses_sudo and uses_become):
                self.parser.error(
                    "Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') "
                    "and su arguments ('-su', '--su-user', and '--ask-su-pass') "
                    "and become arguments ('--become', '--become-user', and '--ask-become-pass')"
                    " are exclusive of each other"
                )

    @staticmethod
    def expand_tilde(option, opt, value, parser):
        """optparse callback: store *value* with '~' expanded to the home dir."""
        expanded = os.path.expanduser(value)
        setattr(parser.values, option.dest, expanded)

    @staticmethod
    def base_parser(
        usage="",
        output_opts=False,
        runas_opts=False,
        meta_opts=False,
        runtask_opts=False,
        vault_opts=False,
        async_opts=False,
        connect_opts=False,
        subset_opts=False,
        check_opts=False,
        diff_opts=False,
        epilog=None,
        fork_opts=False,
    ):
        """ create an options parser for most ansible scripts

        Each boolean keyword switches on a group of related command line
        options; returns the configured SortedOptParser.

        NOTE(review): output_opts registers "-t/--tree" while subset_opts
        registers "-t/--tags" -- enabling both groups on one parser would
        make optparse raise an option conflict. Confirm callers never
        combine them.
        """

        # FIXME: implement epilog parsing
        # OptionParser.format_epilog = lambda self, formatter: self.epilog

        # base opts
        parser = SortedOptParser(usage, version=CLI.version("%prog"))
        parser.add_option(
            "-v",
            "--verbose",
            dest="verbosity",
            default=0,
            action="count",
            help="verbose mode (-vvv for more, -vvvv to enable connection debugging)",
        )

        # inventory / task-running related options
        if runtask_opts:
            parser.add_option(
                "-i",
                "--inventory-file",
                dest="inventory",
                help="specify inventory host file (default=%s)" % C.DEFAULT_HOST_LIST,
                default=C.DEFAULT_HOST_LIST,
                action="callback",
                callback=CLI.expand_tilde,
                type=str,
            )
            parser.add_option(
                "--list-hosts",
                dest="listhosts",
                action="store_true",
                help="outputs a list of matching hosts; does not execute anything else",
            )
            parser.add_option(
                "-M",
                "--module-path",
                dest="module_path",
                help="specify path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH,
                default=None,
                action="callback",
                callback=CLI.expand_tilde,
                type=str,
            )
            parser.add_option(
                "-e",
                "--extra-vars",
                dest="extra_vars",
                action="append",
                help="set additional variables as key=value or YAML/JSON",
                default=[],
            )

        # parallelism / host-subset options
        if fork_opts:
            parser.add_option(
                "-f",
                "--forks",
                dest="forks",
                default=C.DEFAULT_FORKS,
                type="int",
                help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS,
            )
            parser.add_option(
                "-l",
                "--limit",
                default=C.DEFAULT_SUBSET,
                dest="subset",
                help="further limit selected hosts to an additional pattern",
            )

        # vault password options
        if vault_opts:
            parser.add_option(
                "--ask-vault-pass",
                default=False,
                dest="ask_vault_pass",
                action="store_true",
                help="ask for vault password",
            )
            parser.add_option(
                "--vault-password-file",
                default=C.DEFAULT_VAULT_PASSWORD_FILE,
                dest="vault_password_file",
                help="vault password file",
                action="callback",
                callback=CLI.expand_tilde,
                type=str,
            )

        # tag-based filtering options
        if subset_opts:
            parser.add_option(
                "-t", "--tags", dest="tags", default="all", help="only run plays and tasks tagged with these values"
            )
            parser.add_option(
                "--skip-tags", dest="skip_tags", help="only run plays and tasks whose tags do not match these values"
            )

        # output formatting options
        if output_opts:
            parser.add_option("-o", "--one-line", dest="one_line", action="store_true", help="condense output")
            parser.add_option("-t", "--tree", dest="tree", default=None, help="log output to this directory")

        if runas_opts:
            # priv user defaults to root later on to enable detecting when this option was given here
            parser.add_option(
                "-K",
                "--ask-sudo-pass",
                default=C.DEFAULT_ASK_SUDO_PASS,
                dest="ask_sudo_pass",
                action="store_true",
                help="ask for sudo password (deprecated, use become)",
            )
            parser.add_option(
                "--ask-su-pass",
                default=C.DEFAULT_ASK_SU_PASS,
                dest="ask_su_pass",
                action="store_true",
                help="ask for su password (deprecated, use become)",
            )
            parser.add_option(
                "-s",
                "--sudo",
                default=C.DEFAULT_SUDO,
                action="store_true",
                dest="sudo",
                help="run operations with sudo (nopasswd) (deprecated, use become)",
            )
            parser.add_option(
                "-U",
                "--sudo-user",
                dest="sudo_user",
                default=None,
                help="desired sudo user (default=root) (deprecated, use become)",
            )
            parser.add_option(
                "-S",
                "--su",
                default=C.DEFAULT_SU,
                action="store_true",
                help="run operations with su (deprecated, use become)",
            )
            parser.add_option(
                "-R",
                "--su-user",
                default=None,
                help="run operations with su as this user (default=%s) (deprecated, use become)" % C.DEFAULT_SU_USER,
            )

            # consolidated privilege escalation (become)
            parser.add_option(
                "-b",
                "--become",
                default=C.DEFAULT_BECOME,
                action="store_true",
                dest="become",
                help="run operations with become (nopasswd implied)",
            )
            parser.add_option(
                "--become-method",
                dest="become_method",
                default=C.DEFAULT_BECOME_METHOD,
                type="string",
                help="privilege escalation method to use (default=%s), valid choices: [ %s ]"
                % (C.DEFAULT_BECOME_METHOD, " | ".join(C.BECOME_METHODS)),
            )
            parser.add_option(
                "--become-user",
                default=None,
                dest="become_user",
                type="string",
                help="run operations as this user (default=%s)" % C.DEFAULT_BECOME_USER,
            )
            parser.add_option(
                "--ask-become-pass",
                default=False,
                dest="become_ask_pass",
                action="store_true",
                help="ask for privilege escalation password",
            )

        # connection-related options
        if connect_opts:
            parser.add_option(
                "-k",
                "--ask-pass",
                default=False,
                dest="ask_pass",
                action="store_true",
                help="ask for connection password",
            )
            parser.add_option(
                "--private-key",
                "--key-file",
                default=C.DEFAULT_PRIVATE_KEY_FILE,
                dest="private_key_file",
                help="use this file to authenticate the connection",
            )
            parser.add_option(
                "-u",
                "--user",
                default=C.DEFAULT_REMOTE_USER,
                dest="remote_user",
                help="connect as this user (default=%s)" % C.DEFAULT_REMOTE_USER,
            )
            parser.add_option(
                "-c",
                "--connection",
                dest="connection",
                default=C.DEFAULT_TRANSPORT,
                help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT,
            )
            parser.add_option(
                "-T",
                "--timeout",
                default=C.DEFAULT_TIMEOUT,
                type="int",
                dest="timeout",
                help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT,
            )

        # async task options
        if async_opts:
            parser.add_option(
                "-P",
                "--poll",
                default=C.DEFAULT_POLL_INTERVAL,
                type="int",
                dest="poll_interval",
                help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL,
            )
            parser.add_option(
                "-B",
                "--background",
                dest="seconds",
                type="int",
                default=0,
                help="run asynchronously, failing after X seconds (default=N/A)",
            )

        # dry-run / syntax-check options
        if check_opts:
            parser.add_option(
                "-C",
                "--check",
                default=False,
                dest="check",
                action="store_true",
                help="don't make any changes; instead, try to predict some of the changes that may occur",
            )
            parser.add_option(
                "--syntax-check",
                dest="syntax",
                action="store_true",
                help="perform a syntax check on the playbook, but do not execute it",
            )

        if diff_opts:
            parser.add_option(
                "-D",
                "--diff",
                default=False,
                dest="diff",
                action="store_true",
                help="when changing (small) files and templates, show the differences in those files; works great with --check",
            )

        if meta_opts:
            parser.add_option(
                "--force-handlers", dest="force_handlers", action="store_true", help="run handlers even if a task fails"
            )
            parser.add_option("--flush-cache", dest="flush_cache", action="store_true", help="clear the fact cache")

        return parser

    @staticmethod
    def version(prog):
        """ return ansible version """
        parts = ["{0} {1}".format(prog, __version__)]
        gitinfo = CLI._gitinfo()
        if gitinfo:
            parts.append(" {0}".format(gitinfo))
        parts.append("\n  configured module search path = %s" % C.DEFAULT_MODULE_PATH)
        return "".join(parts)

    @staticmethod
    def version_info(gitinfo=False):
        """ return full ansible version info as a dict

        Keys: string, full, major, minor, revision. Missing numeric
        components are padded with 0.
        """
        if gitinfo:
            # expensive call, use with care
            ansible_version_string = version("")
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split(".")
        for counter in range(len(ansible_versions)):
            if ansible_versions[counter] == "":
                ansible_versions[counter] = 0
            try:
                ansible_versions[counter] = int(ansible_versions[counter])
            except ValueError:
                # Fixed: was a bare "except:". Non-numeric components
                # (e.g. "0b1") are deliberately kept as-is.
                pass
        if len(ansible_versions) < 3:
            for counter in range(len(ansible_versions), 3):
                ansible_versions.append(0)
        return {
            "string": ansible_version_string.strip(),
            "full": ansible_version,
            "major": ansible_versions[0],
            "minor": ansible_versions[1],
            "revision": ansible_versions[2],
        }

    @staticmethod
    def _git_repo_info(repo_path):
        """ returns a string containing git branch, commit id and commit date

        Returns "" when repo_path does not exist or cannot be resolved.
        Fixed: files are now opened via context managers -- the original
        leaked the handle from yaml.safe_load(open(...)) and relied on
        manual close() calls elsewhere.
        """
        result = None
        if os.path.exists(repo_path):
            # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
            if os.path.isfile(repo_path):
                try:
                    with open(repo_path) as gitfile:
                        gitdir = yaml.safe_load(gitfile).get("gitdir")
                    # There is a possibility the .git file to have an absolute path.
                    if os.path.isabs(gitdir):
                        repo_path = gitdir
                    else:
                        repo_path = os.path.join(repo_path[:-4], gitdir)
                except (IOError, AttributeError):
                    return ""
            # HEAD contains "ref: refs/heads/<branch>" for an attached HEAD.
            with open(os.path.join(repo_path, "HEAD")) as f:
                branch = f.readline().split("/")[-1].rstrip("\n")
            branch_path = os.path.join(repo_path, "refs", "heads", branch)
            if os.path.exists(branch_path):
                with open(branch_path) as f:
                    commit = f.readline()[:10]
            else:
                # detached HEAD
                commit = branch[:10]
                branch = "detached HEAD"
                branch_path = os.path.join(repo_path, "HEAD")

            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(
                branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36)
            )
        else:
            result = ""
        return result

    @staticmethod
    def _gitinfo():
        """Collect git info for the main repo and every submodule listed in
        .gitmodules, one line per submodule."""
        basedir = os.path.join(os.path.dirname(__file__), "..", "..", "..")
        repo_path = os.path.join(basedir, ".git")
        result = CLI._git_repo_info(repo_path)
        submodules = os.path.join(basedir, ".gitmodules")
        if not os.path.exists(submodules):
            return result
        # Fixed: use a context manager so the file is closed even if an
        # exception escapes the loop (the original leaked it in that case).
        with open(submodules) as f:
            for line in f:
                tokens = line.strip().split(" ")
                if tokens[0] == "path":
                    submodule_path = tokens[2]
                    submodule_info = CLI._git_repo_info(os.path.join(basedir, submodule_path, ".git"))
                    if not submodule_info:
                        submodule_info = " not found - use git submodule update --init " + submodule_path
                    result += "\n  {0}: {1}".format(submodule_path, submodule_info)
        return result

    @staticmethod
    def pager(text):
        """ find reasonable way to display text """
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            print(text)
            return
        if "PAGER" in os.environ:
            if sys.platform == "win32":
                print(text)
            else:
                CLI.pager_pipe(text, os.environ["PAGER"])
            return
        if subprocess.call("(less --version) 2> /dev/null", shell=True) == 0:
            CLI.pager_pipe(text, "less")
        else:
            print(text)

    @staticmethod
    def pager_pipe(text, cmd):
        """ pipe text through a pager """
        os.environ.setdefault("LESS", CLI.LESS_OPTS)
        try:
            proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
            proc.communicate(input=text)
        except (IOError, KeyboardInterrupt):
            # Broken pipe or the user quit the pager -- nothing to do.
            pass

    @classmethod
    def tty_ify(cls, text):
        """Convert doc markup macros (I/B/M/U/C) into plain tty text."""
        replacements = (
            (cls._ITALIC, "`" + r"\1" + "'"),  # I(word) => `word'
            (cls._BOLD, "*" + r"\1" + "*"),    # B(word) => *word*
            (cls._MODULE, "[" + r"\1" + "]"),  # M(word) => [word]
            (cls._URL, r"\1"),                 # U(word) => word
            (cls._CONST, "`" + r"\1" + "'"),   # C(word) => `word'
        )
        t = text
        for pattern, template in replacements:
            t = pattern.sub(template, t)
        return t
Esempio n. 13
0
def test_Display_display_lock(monkeypatch):
    """Without a final queue, display() must serialize output via _lock."""
    fake_lock = MagicMock()
    disp = Display()
    monkeypatch.setattr(disp, '_lock', fake_lock)
    disp.display('foo')
    fake_lock.__enter__.assert_called_once_with()
def display(*args, **kwargs):
    """Forward all arguments to a Display instance's display()."""
    Display().display(*args, **kwargs)
def display(*args, **kwargs):
    """Set up display function for Ansible v2"""
    instance = Display()
    instance.display(*args, **kwargs)
Esempio n. 16
0
class CLI(object):
    ''' code behind bin/ansible* programs '''

    # Subclasses override this with the sub-commands they accept; this base
    # placeholder matches nothing real.
    VALID_ACTIONS = ['No Actions']

    # Documentation-markup patterns consumed by tty_ify():
    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL = re.compile(r"U\(([^)]+)\)")
    _CONST = re.compile(r"C\(([^)]+)\)")

    PAGER = 'less'
    LESS_OPTS = 'FRSX'  # -F (quit-if-one-screen) -R (allow raw ansi control chars)

    # -S (chop long lines) -X (disable termcap init and de-init)

    def __init__(self, args, display=None):
        """
        Base init method for all command line programs

        :arg args: raw argv-style argument list for this invocation
        :kwarg display: optional Display instance; a fresh one is created
            when not supplied
        """

        self.args = args
        # populated later by parse()/set_action()
        self.options = None
        self.parser = None
        self.action = None

        if display is None:
            self.display = Display()
        else:
            self.display = display

    def set_action(self):
        """
        Get the action the user wants to execute from the sys argv list.
        """
        # take the first argument that names a valid action and consume it
        for idx, candidate in enumerate(self.args):
            if candidate in self.VALID_ACTIONS:
                self.action = candidate
                del self.args[idx]
                break

        if not self.action:
            raise AnsibleOptionsError("Missing required action")

    def execute(self):
        """
        Actually runs a child defined method using the execute_<action> pattern
        """
        # dispatch by name; raises AttributeError if the subclass forgot the method
        getattr(self, "execute_%s" % self.action)()

    def parse(self):
        # Abstract hook: every subclass must build self.parser and self.options.
        raise Exception("Need to implement!")

    def run(self):
        """When verbose, announce which config file (if any) is in effect."""
        if self.options.verbosity > 0:
            if C.CONFIG_FILE:
                message = "Using %s as config file" % C.CONFIG_FILE
            else:
                message = "No config file found; using defaults"
            self.display.display(message)

    @staticmethod
    def ask_vault_passwords(ask_vault_pass=False,
                            ask_new_vault_pass=False,
                            confirm_vault=False,
                            confirm_new=False):
        ''' prompt for vault password and/or password change '''
        # Returns a (vault_pass, new_vault_pass) tuple of stripped byte strings
        # (or None for any password that was not requested / not entered).

        vault_pass = None
        new_vault_pass = None

        try:
            if ask_vault_pass:
                # NOTE(review): the two lines below look redaction-corrupted
                # ("******" replaced string content and apparently swallowed the
                # confirm_vault prompt/compare logic) -- restore from upstream.
                vault_pass = getpass.getpass(prompt="Vault password: "******"Confirm Vault password: "******"Passwords do not match")

            if ask_new_vault_pass:
                new_vault_pass = getpass.getpass(prompt="New Vault password: "******"Confirm New Vault password: "******"Passwords do not match")
        except EOFError:
            # stdin closed mid-prompt: fall through with whatever we collected
            pass

        # enforce no newline chars at the end of passwords
        if vault_pass:
            vault_pass = to_bytes(vault_pass,
                                  errors='strict',
                                  nonstring='simplerepr').strip()
        if new_vault_pass:
            new_vault_pass = to_bytes(new_vault_pass,
                                      errors='strict',
                                      nonstring='simplerepr').strip()

        return vault_pass, new_vault_pass

    def ask_passwords(self):
        ''' prompt for connection and become passwords if needed '''
        # NOTE(review): this span appears to be redaction-corrupted -- "******"
        # replaced string content and the tail (runas_opts/fork_opts checks)
        # clearly belongs to a separate option-validation method whose def line
        # was lost. Restore both methods from upstream before relying on this.

        op = self.options
        sshpass = None
        becomepass = None
        become_prompt = ''

        try:
            if op.ask_pass:
                sshpass = getpass.getpass(prompt="SSH password: "******"%s password[defaults to SSH password]: " % op.become_method.upper(
                )
                if sshpass:
                    sshpass = to_bytes(sshpass,
                                       errors='strict',
                                       nonstring='simplerepr')
            else:
                become_prompt = "%s password: "******"--ask-vault-pass and --vault-password-file are mutually exclusive"
                )

        if runas_opts:
            # Check for privilege escalation conflicts
            # sudo/su/become option families are mutually exclusive pairs
            if (op.su or op.su_user or op.ask_su_pass) and \
                        (op.sudo or op.sudo_user or op.ask_sudo_pass) or \
                (op.su or op.su_user or op.ask_su_pass) and \
                        (op.become or op.become_user or op.become_ask_pass) or \
                (op.sudo or op.sudo_user or op.ask_sudo_pass) and \
                        (op.become or op.become_user or op.become_ask_pass):

                self.parser.error(
                    "Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') "
                    "and su arguments ('-su', '--su-user', and '--ask-su-pass') "
                    "and become arguments ('--become', '--become-user', and '--ask-become-pass')"
                    " are exclusive of each other")

        if fork_opts:
            if op.forks < 1:
                self.parser.error(
                    "The number of processes (--forks) must be >= 1")

    @staticmethod
    def expand_tilde(option, opt, value, parser):
        """optparse callback: store *value* with a leading '~' expanded to the user's home."""
        setattr(parser.values, option.dest, os.path.expanduser(value))

    @staticmethod
    def base_parser(usage="",
                    output_opts=False,
                    runas_opts=False,
                    meta_opts=False,
                    runtask_opts=False,
                    vault_opts=False,
                    module_opts=False,
                    async_opts=False,
                    connect_opts=False,
                    subset_opts=False,
                    check_opts=False,
                    inventory_opts=False,
                    epilog=None,
                    fork_opts=False):
        ''' create an options parser for most ansible scripts

        Each *_opts flag switches on one optional group of command-line
        options; returns the configured SortedOptParser instance.
        '''

        #FIXME: implemente epilog parsing
        #OptionParser.format_epilog = lambda self, formatter: self.epilog

        # base opts
        parser = SortedOptParser(usage, version=CLI.version("%prog"))
        parser.add_option(
            '-v',
            '--verbose',
            dest='verbosity',
            default=0,
            action="count",
            help=
            "verbose mode (-vvv for more, -vvvv to enable connection debugging)"
        )

        # inventory selection / host limiting
        if inventory_opts:
            parser.add_option('-i',
                              '--inventory-file',
                              dest='inventory',
                              help="specify inventory host file (default=%s)" %
                              C.DEFAULT_HOST_LIST,
                              default=C.DEFAULT_HOST_LIST,
                              action="callback",
                              callback=CLI.expand_tilde,
                              type=str)
            parser.add_option(
                '--list-hosts',
                dest='listhosts',
                action='store_true',
                help=
                'outputs a list of matching hosts; does not execute anything else'
            )
            parser.add_option(
                '-l',
                '--limit',
                default=C.DEFAULT_SUBSET,
                dest='subset',
                help='further limit selected hosts to an additional pattern')

        if module_opts:
            parser.add_option(
                '-M',
                '--module-path',
                dest='module_path',
                default=None,
                help="specify path(s) to module library (default=%s)" %
                C.DEFAULT_MODULE_PATH,
                action="callback",
                callback=CLI.expand_tilde,
                type=str)
        if runtask_opts:
            parser.add_option(
                '-e',
                '--extra-vars',
                dest="extra_vars",
                action="append",
                help="set additional variables as key=value or YAML/JSON",
                default=[])

        if fork_opts:
            parser.add_option(
                '-f',
                '--forks',
                dest='forks',
                default=C.DEFAULT_FORKS,
                type='int',
                help="specify number of parallel processes to use (default=%s)"
                % C.DEFAULT_FORKS)

        # vault password sourcing and re-key options
        if vault_opts:
            parser.add_option('--ask-vault-pass',
                              default=False,
                              dest='ask_vault_pass',
                              action='store_true',
                              help='ask for vault password')
            parser.add_option('--vault-password-file',
                              default=C.DEFAULT_VAULT_PASSWORD_FILE,
                              dest='vault_password_file',
                              help="vault password file",
                              action="callback",
                              callback=CLI.expand_tilde,
                              type=str)
            parser.add_option('--new-vault-password-file',
                              dest='new_vault_password_file',
                              help="new vault password file for rekey",
                              action="callback",
                              callback=CLI.expand_tilde,
                              type=str)
            parser.add_option(
                '--output',
                default=None,
                dest='output_file',
                help='output file name for encrypt or decrypt; use - for stdout'
            )

        if subset_opts:
            parser.add_option(
                '-t',
                '--tags',
                dest='tags',
                default='all',
                help="only run plays and tasks tagged with these values")
            parser.add_option(
                '--skip-tags',
                dest='skip_tags',
                help=
                "only run plays and tasks whose tags do not match these values"
            )

        if output_opts:
            parser.add_option('-o',
                              '--one-line',
                              dest='one_line',
                              action='store_true',
                              help='condense output')
            parser.add_option('-t',
                              '--tree',
                              dest='tree',
                              default=None,
                              help='log output to this directory')

        # privilege-escalation options: legacy sudo/su plus the unified become
        if runas_opts:
            # priv user defaults to root later on to enable detecting when this option was given here
            parser.add_option(
                '-K',
                '--ask-sudo-pass',
                default=C.DEFAULT_ASK_SUDO_PASS,
                dest='ask_sudo_pass',
                action='store_true',
                help='ask for sudo password (deprecated, use become)')
            parser.add_option(
                '--ask-su-pass',
                default=C.DEFAULT_ASK_SU_PASS,
                dest='ask_su_pass',
                action='store_true',
                help='ask for su password (deprecated, use become)')
            parser.add_option(
                "-s",
                "--sudo",
                default=C.DEFAULT_SUDO,
                action="store_true",
                dest='sudo',
                help=
                "run operations with sudo (nopasswd) (deprecated, use become)")
            parser.add_option(
                '-U',
                '--sudo-user',
                dest='sudo_user',
                default=None,
                help='desired sudo user (default=root) (deprecated, use become)'
            )
            parser.add_option(
                '-S',
                '--su',
                default=C.DEFAULT_SU,
                action='store_true',
                help='run operations with su (deprecated, use become)')
            parser.add_option(
                '-R',
                '--su-user',
                default=None,
                help=
                'run operations with su as this user (default=%s) (deprecated, use become)'
                % C.DEFAULT_SU_USER)

            # consolidated privilege escalation (become)
            parser.add_option(
                "-b",
                "--become",
                default=C.DEFAULT_BECOME,
                action="store_true",
                dest='become',
                help="run operations with become (nopasswd implied)")
            parser.add_option(
                '--become-method',
                dest='become_method',
                default=C.DEFAULT_BECOME_METHOD,
                type='string',
                help=
                "privilege escalation method to use (default=%s), valid choices: [ %s ]"
                % (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
            parser.add_option('--become-user',
                              default=None,
                              dest='become_user',
                              type='string',
                              help='run operations as this user (default=%s)' %
                              C.DEFAULT_BECOME_USER)
            parser.add_option('--ask-become-pass',
                              default=False,
                              dest='become_ask_pass',
                              action='store_true',
                              help='ask for privilege escalation password')

        # transport / ssh tuning
        if connect_opts:
            parser.add_option('-k',
                              '--ask-pass',
                              default=C.DEFAULT_ASK_PASS,
                              dest='ask_pass',
                              action='store_true',
                              help='ask for connection password')
            parser.add_option(
                '--private-key',
                '--key-file',
                default=C.DEFAULT_PRIVATE_KEY_FILE,
                dest='private_key_file',
                help='use this file to authenticate the connection')
            parser.add_option('-u',
                              '--user',
                              default=C.DEFAULT_REMOTE_USER,
                              dest='remote_user',
                              help='connect as this user (default=%s)' %
                              C.DEFAULT_REMOTE_USER)
            parser.add_option('-c',
                              '--connection',
                              dest='connection',
                              default=C.DEFAULT_TRANSPORT,
                              help="connection type to use (default=%s)" %
                              C.DEFAULT_TRANSPORT)
            parser.add_option(
                '-T',
                '--timeout',
                default=C.DEFAULT_TIMEOUT,
                type='int',
                dest='timeout',
                help="override the connection timeout in seconds (default=%s)"
                % C.DEFAULT_TIMEOUT)
            parser.add_option(
                '--ssh-common-args',
                default='',
                dest='ssh_common_args',
                help=
                "specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)"
            )
            parser.add_option(
                '--sftp-extra-args',
                default='',
                dest='sftp_extra_args',
                help=
                "specify extra arguments to pass to sftp only (e.g. -f, -l)")
            parser.add_option(
                '--scp-extra-args',
                default='',
                dest='scp_extra_args',
                help="specify extra arguments to pass to scp only (e.g. -l)")
            parser.add_option(
                '--ssh-extra-args',
                default='',
                dest='ssh_extra_args',
                help="specify extra arguments to pass to ssh only (e.g. -R)")

        if async_opts:
            parser.add_option(
                '-P',
                '--poll',
                default=C.DEFAULT_POLL_INTERVAL,
                type='int',
                dest='poll_interval',
                help="set the poll interval if using -B (default=%s)" %
                C.DEFAULT_POLL_INTERVAL)
            parser.add_option(
                '-B',
                '--background',
                dest='seconds',
                type='int',
                default=0,
                help='run asynchronously, failing after X seconds (default=N/A)'
            )

        if check_opts:
            parser.add_option(
                "-C",
                "--check",
                default=False,
                dest='check',
                action='store_true',
                help=
                "don't make any changes; instead, try to predict some of the changes that may occur"
            )
            parser.add_option(
                '--syntax-check',
                dest='syntax',
                action='store_true',
                help=
                "perform a syntax check on the playbook, but do not execute it"
            )
            parser.add_option(
                "-D",
                "--diff",
                default=False,
                dest='diff',
                action='store_true',
                help=
                "when changing (small) files and templates, show the differences in those files; works great with --check"
            )

        if meta_opts:
            parser.add_option('--force-handlers',
                              default=C.DEFAULT_FORCE_HANDLERS,
                              dest='force_handlers',
                              action='store_true',
                              help="run handlers even if a task fails")
            parser.add_option('--flush-cache',
                              dest='flush_cache',
                              action='store_true',
                              help="clear the fact cache")

        return parser

    @staticmethod
    def version(prog):
        ''' return ansible version '''
        # assemble the banner from its pieces, then join once
        parts = ["{0} {1}".format(prog, __version__)]
        gitinfo = CLI._gitinfo()
        if gitinfo:
            parts.append(" {0}".format(gitinfo))
        parts.append("\n  config file = %s" % C.CONFIG_FILE)
        parts.append("\n  configured module search path = %s" % C.DEFAULT_MODULE_PATH)
        return "".join(parts)

    @staticmethod
    def version_info(gitinfo=False):
        ''' return full ansible version info

        :kwarg gitinfo: when True include (expensive) git metadata in the
            version string
        :returns: dict with 'string', 'full', 'major', 'minor', 'revision'
        '''
        if gitinfo:
            # expensive call, use with care
            ansible_version_string = CLI.version('')
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split('.')
        for counter, piece in enumerate(ansible_versions):
            # empty segments (e.g. trailing dot) count as zero
            if piece == "":
                piece = 0
            try:
                ansible_versions[counter] = int(piece)
            except ValueError:
                # BUGFIX: was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit; non-numeric segments such as
                # "0b1" are deliberately left as-is.
                pass
        # pad to at least (major, minor, revision)
        if len(ansible_versions) < 3:
            for counter in range(len(ansible_versions), 3):
                ansible_versions.append(0)
        return {
            'string': ansible_version_string.strip(),
            'full': ansible_version,
            'major': ansible_versions[0],
            'minor': ansible_versions[1],
            'revision': ansible_versions[2]
        }

    @staticmethod
    def _git_repo_info(repo_path):
        ''' returns a string containing git branch, commit id and commit date

        :arg repo_path: path to a .git directory (or .git file for submodules)
        :returns: formatted info string, or '' when the repo cannot be read
        '''
        if not os.path.exists(repo_path):
            return ''

        # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
        if os.path.isfile(repo_path):
            try:
                # BUGFIX: the file handle was previously never closed; use a
                # context manager so it is released on every path.
                with open(repo_path) as gitfile:
                    gitdir = yaml.safe_load(gitfile).get('gitdir')
                # There is a possibility the .git file to have an absolute path.
                if os.path.isabs(gitdir):
                    repo_path = gitdir
                else:
                    repo_path = os.path.join(repo_path[:-4], gitdir)
            except (IOError, AttributeError):
                return ''

        # BUGFIX: HEAD/branch files were opened without close on error paths;
        # context managers guarantee cleanup.
        with open(os.path.join(repo_path, "HEAD")) as head_file:
            branch = head_file.readline().split('/')[-1].rstrip("\n")
        branch_path = os.path.join(repo_path, "refs", "heads", branch)
        if os.path.exists(branch_path):
            with open(branch_path) as branch_file:
                commit = branch_file.readline()[:10]
        else:
            # detached HEAD
            commit = branch[:10]
            branch = 'detached HEAD'
            branch_path = os.path.join(repo_path, "HEAD")

        date = time.localtime(os.stat(branch_path).st_mtime)
        if time.daylight == 0:
            offset = time.timezone
        else:
            offset = time.altzone
        return "({0} {1}) last updated {2} (GMT {3:+04d})".format(
            branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date),
            int(offset / -36))

    @staticmethod
    def _gitinfo():
        ''' collect git info for the ansible checkout and each configured submodule '''
        basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
        repo_path = os.path.join(basedir, '.git')
        result = CLI._git_repo_info(repo_path)
        submodules = os.path.join(basedir, '.gitmodules')
        if not os.path.exists(submodules):
            return result
        # BUGFIX: the .gitmodules handle leaked if an exception occurred while
        # iterating; a context manager guarantees it is closed.
        with open(submodules) as f:
            for line in f:
                tokens = line.strip().split(' ')
                # lines look like: "path = <submodule_path>"
                if tokens[0] == 'path':
                    submodule_path = tokens[2]
                    submodule_info = CLI._git_repo_info(
                        os.path.join(basedir, submodule_path, '.git'))
                    if not submodule_info:
                        submodule_info = ' not found - use git submodule update --init ' + submodule_path
                    result += "\n  {0}: {1}".format(submodule_path, submodule_info)
        return result

    def pager(self, text):
        ''' find reasonable way to display text '''
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            # output is redirected/piped: never page
            self.display.display(text)
            return
        if 'PAGER' in os.environ:
            # honor the user's pager, except on Windows where piping is unreliable
            if sys.platform == 'win32':
                self.display.display(text)
            else:
                self.pager_pipe(text, os.environ['PAGER'])
            return
        if subprocess.call('(less --version) 2> /dev/null', shell=True) == 0:
            self.pager_pipe(text, 'less')
        else:
            self.display.display(text)

    @staticmethod
    def pager_pipe(text, cmd):
        ''' pipe text through a pager

        Silently ignores broken pipes and Ctrl-C while the pager runs.
        '''
        if 'LESS' not in os.environ:
            os.environ['LESS'] = CLI.LESS_OPTS
        try:
            # avoid shadowing the `cmd` argument with the Popen object
            proc = subprocess.Popen(cmd,
                                    shell=True,
                                    stdin=subprocess.PIPE,
                                    stdout=sys.stdout)
            # BUGFIX: sys.stdout.encoding is None when stdout is not a real
            # terminal, which made .encode() raise TypeError; fall back to utf-8.
            proc.communicate(input=text.encode(sys.stdout.encoding or 'utf-8'))
        except IOError:
            pass
        except KeyboardInterrupt:
            pass

    @classmethod
    def tty_ify(cls, text):
        """Render inline doc markup macros as plain terminal text."""
        # (pattern, replacement) pairs; each macro is independent so order
        # does not matter, but the original sequence is kept.
        substitutions = (
            (cls._ITALIC, r"`\1'"),  # I(word) => `word'
            (cls._BOLD, r"*\1*"),    # B(word) => *word*
            (cls._MODULE, r"[\1]"),  # M(word) => [word]
            (cls._URL, r"\1"),       # U(word) => word
            (cls._CONST, r"`\1'"),   # C(word) => `word'
        )
        for pattern, replacement in substitutions:
            text = pattern.sub(replacement, text)
        return text

    @staticmethod
    def read_vault_password_file(vault_password_file, loader):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT

        :arg vault_password_file: path (may contain '~') to the password file/script
        :arg loader: data loader used only for its is_executable() check
        :returns: the password as bytes, stripped of trailing newlines
        :raises AnsibleError: when the file is missing, unreadable, or the script fails to start
        """

        this_path = os.path.realpath(os.path.expanduser(vault_password_file))
        if not os.path.exists(this_path):
            raise AnsibleError("The vault password file %s was not found" %
                               this_path)

        if loader.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                # BUGFIX: was "' '.join(this_path)", which space-separated every
                # character of the path in the error message.
                raise AnsibleError(
                    "Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file."
                    % (this_path, e))
            stdout, stderr = p.communicate()
            # BUGFIX: communicate() returns bytes, so strip with a bytes
            # argument -- stripping with a str raised TypeError on Python 3.
            vault_pass = stdout.strip(b'\r\n')
        else:
            try:
                # use a context manager so the handle is closed even on error
                with open(this_path, "rb") as f:
                    vault_pass = f.read().strip()
            except (OSError, IOError) as e:
                raise AnsibleError(
                    "Could not read vault password file %s: %s" %
                    (this_path, e))

        return vault_pass

    def get_opt(self, k, defval=""):
        """
        Returns an option from an Optparse values instance.

        :arg k: option attribute name to look up on self.options
        :kwarg defval: value returned when the option is not defined
        """
        try:
            data = getattr(self.options, k)
        except AttributeError:
            # BUGFIX: was a bare "except:"; getattr on a values object only
            # raises AttributeError, and the bare form also hid real errors.
            return defval
        if k == "roles_path":
            # roles_path may be a pathsep-separated list; only the first entry
            # is honored here (historical behavior)
            if os.pathsep in data:
                data = data.split(os.pathsep)[0]
        return data
Esempio n. 17
0
class TaskExecutor:

    '''
    This is the main worker class for the executor pipeline, which
    handles loading an action plugin to actually dispatch the task to
    a given host. This class roughly corresponds to the old Runner()
    class.
    '''

    # Modules that we optimize by squashing loop items into a single call to
    # the module
    SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)

    def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj):
        # all collaborators are injected; this class owns none of them
        self._host              = host
        self._task              = task
        self._job_vars          = job_vars
        self._play_context      = play_context
        self._new_stdin         = new_stdin
        self._loader            = loader
        self._shared_loader_obj = shared_loader_obj

        # prefer the process-wide display from __main__ when running under the
        # CLI; fall back to a private instance (e.g. in tests/forked workers)
        try:
            from __main__ import display
            self._display = display
        except ImportError:
            from ansible.utils.display import Display
            self._display = Display()

    def run(self):
        '''
        The main executor entrypoint, where we determine if the specified
        task requires looping: with a loop, each item is executed and the
        per-item results are collated; without one the task runs once.
        Returns the task result dict (failed=True on AnsibleError).
        '''

        self._display.debug("in run()")

        try:
            # lookup plugins need to know if this task is executing from
            # a role, so that it can properly find files/templates/etc.
            roledir = None
            if self._task._role:
                roledir = self._task._role._role_path
            self._job_vars['roledir'] = roledir

            items = self._get_loop_items()
            if items is not None:
                if len(items) > 0:
                    item_results = self._run_loop(items)

                    # loop through the item results, and remember the changed/failed
                    # result flags based on any item there.
                    changed = False
                    failed  = False
                    for item in item_results:
                        if 'changed' in item and item['changed']:
                           changed = True
                        if 'failed' in item and item['failed']:
                           failed = True

                    # create the overall result item, and set the changed/failed
                    # flags there to reflect the overall result of the loop
                    res = dict(results=item_results)

                    if changed:
                        res['changed'] = True

                    if failed:
                        res['failed'] = True
                        res['msg'] = 'One or more items failed'
                    else:
                        res['msg'] = 'All items completed'
                else:
                    # empty loop list: the task is skipped entirely
                    res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
            else:
                # no loop: single direct execution
                self._display.debug("calling self._execute()")
                res = self._execute()
                self._display.debug("_execute() done")

            # make sure changed is set in the result, if it's not present
            if 'changed' not in res:
                res['changed'] = False

            # recursively unwrap UnsafeProxy objects so the result is plain data
            def _clean_res(res):
                if isinstance(res, dict):
                    for k in res.keys():
                        res[k] = _clean_res(res[k])
                elif isinstance(res, list):
                    for idx,item in enumerate(res):
                        res[idx] = _clean_res(item)
                elif isinstance(res, UnsafeProxy):
                    return res._obj
                return res

            self._display.debug("dumping result to json")
            res = _clean_res(res)
            self._display.debug("done dumping result, returning")
            return res
        except AnsibleError as e:
            return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
        finally:
            # best-effort cleanup: the connection may never have been opened
            try:
                self._connection.close()
            except AttributeError:
                pass
            except Exception as e:
                self._display.debug("error closing connection: %s" % to_unicode(e))

    def _get_loop_items(self):
        '''
        Loads a lookup plugin to handle the with_* portion of a task (if specified),
        and returns the items result (None when the task has no loop).
        '''

        # work on a copy of the job vars so callers keep a pristine version
        # for other parts of the code that might still need one
        vars_copy = self._job_vars.copy()

        # fold the play context vars into the copy
        self._play_context.update_vars(vars_copy)

        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=vars_copy)

        items = None
        if self._task.loop:
            if self._task.loop not in self._shared_loader_obj.lookup_loader:
                raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
            # TODO: remove convert_bare=True and deprecate this in with_*
            try:
                loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
            except AnsibleUndefinedVariable as e:
                if 'has no attribute' not in str(e):
                    raise
                loop_terms = []
                self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
            lookup = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar)
            items = lookup.run(terms=loop_terms, variables=vars_copy)

        if items:
            # wrap every item so the templating engine treats it as unsafe
            from ansible.vars.unsafe_proxy import UnsafeProxy
            for idx, item in enumerate(items):
                if item is not None and not isinstance(item, UnsafeProxy):
                    items[idx] = UnsafeProxy(item)
        return items

    def _run_loop(self, items):
        '''
        Runs the task with the loop items specified and collates the result
        into an array named 'results' which is inserted into the final result
        along with the item for which the loop ran.
        '''

        results = []

        # make copies of the job vars and task so we can add the item to
        # the variables and re-validate the task with the item variable
        task_vars = self._job_vars.copy()

        # may collapse the items into a single comma-joined entry for
        # squashable modules (e.g. package managers)
        items = self._squash_items(items, task_vars)
        for item in items:
            task_vars['item'] = item

            try:
                tmp_task = self._task.copy()
                tmp_play_context = self._play_context.copy()
            except AnsibleParserError as e:
                # record the per-item failure and keep looping
                results.append(dict(failed=True, msg=str(e)))
                continue

            # now we swap the internal task and play context with their copies,
            # execute, and swap them back so we can do the next iteration cleanly
            # (NOTE: the swap-back does not run if _execute raises)
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
            res = self._execute(variables=task_vars)
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)

            # now update the result with the item info, and append the result
            # to the list of results
            res['item'] = item
            results.append(res)

        return results

    def _squash_items(self, items, variables):
        '''
        Squash items down to a comma-separated list for certain modules which support it
        (typically package management modules).
        '''
        if len(items) > 0 and self._task.action in self.SQUASH_ACTIONS:
            final_items = []
            name = self._task.args.pop('name', None) or self._task.args.pop('pkg', None)
            for item in items:
                variables['item'] = item
                templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
                if self._task.evaluate_conditional(templar, variables):
                    if templar._contains_vars(name):
                        new_item = templar.template(name)
                        final_items.append(new_item)
                    else:
                        final_items.append(item)
            joined_items = ",".join(final_items)
            self._task.args['name'] = joined_items
            return [joined_items]
        else:
            return items

    def _execute(self, variables=None):
        '''
        The primary workhorse of the executor system, this runs the task
        on the specified host (which may be the delegated_to host) and handles
        the retry/until and block rescue/always execution.

        :kwarg variables: variable dict used for templating and conditionals;
            defaults to the executor's job vars when not given
        :returns: a result dict (may carry 'failed', 'changed', 'skipped',
            'unreachable', 'include', '_ansible_no_log', etc.)
        '''

        if variables is None:
            variables = self._job_vars

        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)

        # apply the given task's information to the connection info,
        # which may override some fields already set by the play or
        # the options specified on the command line
        self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)

        # fields set from the play/task may be based on variables, so we have to
        # do the same kind of post validation step on it here before we use it.
        # We also add "magic" variables back into the variables dict to make sure
        # a certain subset of variables exist.
        self._play_context.update_vars(variables)
        self._play_context.post_validate(templar=templar)

        # Evaluate the conditional (if any) for this task, which we do before running
        # the final task post-validation. We do this before the post validation due to
        # the fact that the conditional may specify that the task be skipped due to a
        # variable not being present which would otherwise cause validation to fail
        if not self._task.evaluate_conditional(templar, variables):
            self._display.debug("when evaulation failed, skipping this task")
            return dict(changed=False, skipped=True, skip_reason='Conditional check failed', _ansible_no_log=self._play_context.no_log)

        # Now we do final validation on the task, which sets all fields to their final values.
        # In the case of debug tasks, we save any 'var' params and restore them after validating
        # so that variables are not replaced too early.
        prev_var = None
        if self._task.action == 'debug' and 'var' in self._task.args:
            prev_var = self._task.args.pop('var')

        # keep a pre-validation copy of the args; 'include' needs the raw values
        original_args = self._task.args.copy()
        self._task.post_validate(templar=templar)
        if '_variable_params' in self._task.args:
            variable_params = self._task.args.pop('_variable_params')
            if isinstance(variable_params, dict):
                self._display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
                # explicit args win over the variable-provided params
                variable_params.update(self._task.args)
                self._task.args = variable_params

        if prev_var is not None:
            self._task.args['var'] = prev_var

        # if this task is a TaskInclude, we just return now with a success code so the
        # main thread can expand the task list for the given host
        if self._task.action == 'include':
            include_variables = original_args
            include_file = include_variables.get('_raw_params')
            del include_variables['_raw_params']
            return dict(include=include_file, include_variables=include_variables)

        # get the connection and the handler for this execution
        self._connection = self._get_connection(variables=variables, templar=templar)
        self._connection.set_host_overrides(host=self._host)

        self._handler = self._get_action_handler(connection=self._connection, templar=templar)

        # And filter out any fields which were set to default(omit), and got the omit token value
        omit_token = variables.get('omit')
        if omit_token is not None:
            self._task.args = dict((i[0], i[1]) for i in iteritems(self._task.args) if i[1] != omit_token)

        # Read some values from the task, so that we can modify them if need be
        retries = self._task.retries
        if retries <= 0:
            retries = 1

        delay = self._task.delay
        # NOTE(review): only negative delays are normalized (to 1s); a delay of 0
        # is allowed through, unlike retries — confirm this asymmetry is intended.
        if delay < 0:
            delay = 1

        # make a copy of the job vars here, in case we need to update them
        # with the registered variable value later on when testing conditions
        vars_copy = variables.copy()

        self._display.debug("starting attempt loop")
        result = None
        for attempt in range(retries):
            if attempt > 0:
                # FIXME: this should use the self._display.callback/message passing mechanism
                self._display.display("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result), color="red")
                result['attempts'] = attempt + 1

            self._display.debug("running the handler")
            try:
                result = self._handler.run(task_vars=variables)
            except AnsibleConnectionFailure as e:
                # connection problems mark the host unreachable rather than failed
                return dict(unreachable=True, msg=str(e))
            self._display.debug("handler run complete")

            if self._task.async > 0:
                # the async_wrapper module returns dumped JSON via its stdout
                # response, so we parse it here and replace the result
                try:
                    result = json.loads(result.get('stdout'))
                except (TypeError, ValueError) as e:
                    return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))

                if self._task.poll > 0:
                    result = self._poll_async_result(result=result, templar=templar)

            # update the local copy of vars with the registered value, if specified,
            # or any facts which may have been generated by the module execution
            if self._task.register:
                vars_copy[self._task.register] = result

            if 'ansible_facts' in result:
                vars_copy.update(result['ansible_facts'])

            # create a conditional object to evaluate task conditions
            cond = Conditional(loader=self._loader)

            # closures over `cond`/`templar`/`vars_copy`: evaluate the task's
            # changed_when / failed_when expressions against the current result
            def _evaluate_changed_when_result(result):
                if self._task.changed_when is not None:
                    cond.when = [ self._task.changed_when ]
                    result['changed'] = cond.evaluate_conditional(templar, vars_copy)

            def _evaluate_failed_when_result(result):
                if self._task.failed_when is not None:
                    cond.when = [ self._task.failed_when ]
                    failed_when_result = cond.evaluate_conditional(templar, vars_copy)
                    result['failed_when_result'] = result['failed'] = failed_when_result
                    return failed_when_result
                return False

            # until / changed_when / failed_when / rc decide whether to retry
            if self._task.until:
                cond.when = self._task.until
                if cond.evaluate_conditional(templar, vars_copy):
                    _evaluate_changed_when_result(result)
                    _evaluate_failed_when_result(result)
                    break
            elif (self._task.changed_when is not None or self._task.failed_when is not None) and 'skipped' not in result:
                    _evaluate_changed_when_result(result)
                    if _evaluate_failed_when_result(result):
                        break
            elif 'failed' not in result:
                if result.get('rc', 0) != 0:
                    result['failed'] = True
                else:
                    # if the result is not failed, stop trying
                    break

            if attempt < retries - 1:
                time.sleep(delay)
            else:
                # last attempt: still apply changed_when/failed_when to the final result
                _evaluate_changed_when_result(result)
                _evaluate_failed_when_result(result)

        # do the final update of the local variables here, for both registered
        # values and any facts which may have been created
        if self._task.register:
            variables[self._task.register] = result

        if 'ansible_facts' in result:
            variables.update(result['ansible_facts'])

        # save the notification target in the result, if it was specified, as
        # this task may be running in a loop in which case the notification
        # may be item-specific, ie. "notify: service {{item}}"
        if self._task.notify is not None:
            result['_ansible_notify'] = self._task.notify

        # preserve no_log setting
        result["_ansible_no_log"] = self._play_context.no_log

        # and return
        self._display.debug("attempt loop complete, returning result")
        return result
Esempio n. 18
0
 def display(*args, **kwargs):
     """Forward the given arguments to a fresh Ansible v2 Display instance."""
     Display().display(*args, **kwargs)
Esempio n. 19
0
class Grapher(object):
    """
    Main class to make the graph: parses the playbook and draws a Graphviz
    graph of plays, roles and tasks, mirroring Ansible's execution order.
    """
    # Default Graphviz attributes for the whole graph.
    DEFAULT_GRAPH_ATTR = {
        "ratio": "fill",
        "rankdir": "LR",
        "concentrate": "true",
        "ordering": "in"
    }
    # Default Graphviz attributes for every edge.
    DEFAULT_EDGE_ATTR = {"sep": "10", "esep": "5"}

    def __init__(self,
                 data_loader,
                 inventory_manager,
                 variable_manager,
                 playbook_filename,
                 options,
                 graph=None):
        """
        Main grapher responsible to parse the playbook and draw graph
        :param data_loader:
        :type data_loader: ansible.parsing.dataloader.DataLoader
        :param inventory_manager:
        :type inventory_manager: ansible.inventory.manager.InventoryManager
        :param variable_manager:
        :type variable_manager: ansible.vars.manager.VariableManager
        :param options: Command line options
        :type options: optparse.Values
        :param playbook_filename:
        :type playbook_filename: str
        :param graph: optional pre-built graph to draw into; a default one is
            created when omitted
        :type graph: Digraph
        """
        self.options = options
        self.variable_manager = variable_manager
        self.inventory_manager = inventory_manager
        self.data_loader = data_loader
        self.playbook_filename = playbook_filename
        self.rendered_file_path = None
        self.display = Display(verbosity=options.verbosity)

        # normalize tag options so evaluate_tags always gets lists
        if self.options.tags is None:
            self.options.tags = ["all"]

        if self.options.skip_tags is None:
            self.options.skip_tags = []

        self.graph_representation = GraphRepresentation()

        self.playbook = Playbook.load(self.playbook_filename,
                                      loader=self.data_loader,
                                      variable_manager=self.variable_manager)

        if graph is None:
            # NOTE(review): "CustomDigrah" spelling assumed to match the class
            # defined elsewhere in the project — verify.
            self.graph = CustomDigrah(edge_attr=self.DEFAULT_EDGE_ATTR,
                                      graph_attr=self.DEFAULT_GRAPH_ATTR,
                                      format="svg")
        else:
            # Bug fix: a caller-supplied graph was previously ignored,
            # leaving self.graph undefined and breaking make_graph().
            self.graph = graph

    def template(self, data, variables, fail_on_undefined=False):
        """
        Template the data using Jinja. Return data if an error occurs during the templating
        :param fail_on_undefined: re-raise templating errors instead of
            falling back to the raw data
        :type fail_on_undefined: bool
        :param data:
        :type data: Union[str, ansible.parsing.yaml.objects.AnsibleUnicode]
        :param variables:
        :type variables: dict
        :return: the templated data, or the original data on (tolerated) error
        """
        try:
            templar = Templar(loader=self.data_loader, variables=variables)
            return templar.template(data, fail_on_undefined=fail_on_undefined)
        except AnsibleError as ansible_error:
            # Sometime we need to export
            if fail_on_undefined:
                raise
            self.display.warning(ansible_error)
            return data

    def make_graph(self):
        """
        Loop through the playbook and make the graph.

        The graph is drawn following this order (https://docs.ansible.com/ansible/2.4/playbooks_reuse_roles.html#using-roles)
        for each play:
            draw pre_tasks
            draw roles
                if  include_role_tasks
                    draw role_tasks
            draw tasks
            draw post_tasks
        :return:
        :rtype:
        """

        # the root node
        self.graph.node(self.playbook_filename, style="dotted", id="root_node")

        # loop through the plays
        for play_counter, play in enumerate(self.playbook.get_plays(), 1):

            # the load basedir is relative to the playbook path
            if play._included_path is not None:
                self.data_loader.set_basedir(play._included_path)
            else:
                self.data_loader.set_basedir(self.playbook._basedir)
            self.display.vvv("Loader basedir set to {}".format(
                self.data_loader.get_basedir()))

            play_vars = self.variable_manager.get_vars(play)
            # resolve the play's host pattern against the inventory
            play_hosts = [
                h.get_name() for h in self.inventory_manager.get_hosts(
                    self.template(play.hosts, play_vars))
            ]
            play_name = "Play #{}: {} ({})".format(play_counter,
                                                   clean_name(play.get_name()),
                                                   len(play_hosts))
            play_name = self.template(play_name, play_vars)

            self.display.banner("Graphing " + play_name)

            play_id = "play_" + str(uuid.uuid4())

            self.graph_representation.add_node(play_id)

            with self.graph.subgraph(name=play_name) as play_subgraph:
                color, play_font_color = get_play_colors(play)
                # play node
                play_subgraph.node(play_name,
                                   id=play_id,
                                   style="filled",
                                   shape="box",
                                   color=color,
                                   fontcolor=play_font_color,
                                   tooltip="     ".join(play_hosts))

                # edge from root node to plays
                play_edge_id = "edge_" + str(uuid.uuid4())
                play_subgraph.edge(self.playbook_filename,
                                   play_name,
                                   id=play_edge_id,
                                   style="bold",
                                   label=str(play_counter),
                                   color=color,
                                   fontcolor=color)

                # loop through the pre_tasks
                self.display.v("Graphing pre_tasks...")
                nb_pre_tasks = 0
                for pre_task_block in play.pre_tasks:
                    nb_pre_tasks = self._include_tasks_in_blocks(
                        current_play=play,
                        graph=play_subgraph,
                        parent_node_name=play_name,
                        parent_node_id=play_id,
                        block=pre_task_block,
                        color=color,
                        current_counter=nb_pre_tasks,
                        play_vars=play_vars,
                        node_name_prefix="[pre_task] ")

                # loop through the roles
                self.display.v("Graphing roles...")
                role_number = 0
                for role in play.get_roles():
                    # Don't insert tasks from ``import/include_role``, preventing
                    # duplicate graphing
                    if role.from_include:
                        continue

                    role_number += 1

                    role_name = "[role] " + clean_name(role.get_name())

                    # the role object doesn't inherit the tags from the play. So we add it manually
                    role.tags = role.tags + play.tags

                    role_not_tagged = ""
                    if not role.evaluate_tags(only_tags=self.options.tags,
                                              skip_tags=self.options.skip_tags,
                                              all_vars=play_vars):
                        role_not_tagged = NOT_TAGGED

                    with self.graph.subgraph(name=role_name,
                                             node_attr={}) as role_subgraph:
                        current_counter = role_number + nb_pre_tasks
                        role_id = "role_" + str(uuid.uuid4()) + role_not_tagged
                        role_subgraph.node(role_name, id=role_id)

                        edge_id = "edge_" + str(uuid.uuid4()) + role_not_tagged

                        # edge from play to role
                        role_subgraph.edge(play_name,
                                           role_name,
                                           label=str(current_counter),
                                           color=color,
                                           fontcolor=color,
                                           id=edge_id)

                        self.graph_representation.add_link(play_id, edge_id)
                        self.graph_representation.add_link(edge_id, role_id)

                        # loop through the tasks of the roles
                        if self.options.include_role_tasks:
                            role_tasks_counter = 0
                            for block in role.compile(play):
                                role_tasks_counter = self._include_tasks_in_blocks(
                                    current_play=play,
                                    graph=role_subgraph,
                                    parent_node_name=role_name,
                                    parent_node_id=role_id,
                                    block=block,
                                    color=color,
                                    play_vars=play_vars,
                                    current_counter=role_tasks_counter,
                                    node_name_prefix="[task] ")
                                role_tasks_counter += 1
                self.display.v(
                    "{} roles added to the graph".format(role_number))

                # loop through the tasks
                self.display.v("Graphing tasks...")
                nb_tasks = 0
                for task_block in play.tasks:
                    nb_tasks = self._include_tasks_in_blocks(
                        current_play=play,
                        graph=play_subgraph,
                        parent_node_name=play_name,
                        parent_node_id=play_id,
                        block=task_block,
                        color=color,
                        current_counter=role_number + nb_pre_tasks,
                        play_vars=play_vars,
                        node_name_prefix="[task] ")

                # loop through the post_tasks
                self.display.v("Graphing post_tasks...")
                for post_task_block in play.post_tasks:
                    self._include_tasks_in_blocks(
                        current_play=play,
                        graph=play_subgraph,
                        parent_node_name=play_name,
                        parent_node_id=play_id,
                        block=post_task_block,
                        color=color,
                        current_counter=nb_tasks,
                        play_vars=play_vars,
                        node_name_prefix="[post_task] ")

            self.display.banner("Done graphing {}".format(play_name))
            self.display.display("")  # just an empty line
            # moving to the next play

    def render_graph(self):
        """
        Render the graph
        :return: The rendered file path
        :rtype: str
        """

        self.rendered_file_path = self.graph.render(
            cleanup=not self.options.save_dot_file,
            filename=self.options.output_filename)
        if self.options.save_dot_file:
            # the renderer leaves the dot source without an extension; give it ".dot"
            final_name = self.options.output_filename + ".dot"
            os.rename(self.options.output_filename, final_name)
            self.display.display(
                "Graphviz dot file has been exported to {}".format(final_name))

        return self.rendered_file_path

    def post_process_svg(self):
        """
        Post process the rendered svg
        :return: The post processed file path
        :rtype: str
        """
        post_processor = PostProcessor(svg_path=self.rendered_file_path)

        post_processor.post_process(
            graph_representation=self.graph_representation)

        post_processor.write()

        self.display.display("The graph has been exported to {}".format(
            self.rendered_file_path))

        return self.rendered_file_path

    def _include_tasks_in_blocks(self,
                                 current_play,
                                 graph,
                                 parent_node_name,
                                 parent_node_id,
                                 block,
                                 color,
                                 current_counter,
                                 play_vars=None,
                                 node_name_prefix=""):
        """
        Recursively read all the tasks of the block and add it to the graph
        FIXME: This function needs some refactoring. Thinking of a BlockGrapher to handle this
        :param current_play:
        :type current_play: ansible.playbook.play.Play
        :param graph:
        :type graph:
        :param parent_node_name:
        :type parent_node_name: str
        :param parent_node_id:
        :type parent_node_id: str
        :param block:
        :type block: Union[Block,TaskInclude]
        :param color:
        :type color: str
        :param current_counter:
        :type current_counter: int
        :param play_vars:
        :type play_vars: dict
        :param node_name_prefix:
        :type node_name_prefix: str
        :return: the updated task counter after processing this block
        :rtype: int
        """

        loop_counter = current_counter
        # loop through the tasks
        for counter, task_or_block in enumerate(block.block, 1):
            if isinstance(task_or_block, Block):
                # nested block: recurse and carry the counter forward
                loop_counter = self._include_tasks_in_blocks(
                    current_play=current_play,
                    graph=graph,
                    parent_node_name=parent_node_name,
                    parent_node_id=parent_node_id,
                    block=task_or_block,
                    color=color,
                    current_counter=loop_counter,
                    play_vars=play_vars,
                    node_name_prefix=node_name_prefix)
            elif isinstance(
                    task_or_block, TaskInclude
            ):  # include, include_tasks, include_role are dynamic
                # So we need to process it explicitly because Ansible does it during th execution of the playbook

                task_vars = self.variable_manager.get_vars(play=current_play,
                                                           task=task_or_block)

                if isinstance(task_or_block, IncludeRole):

                    self.display.v(
                        "An 'include_role' found. Including tasks from '{}'".
                        format(task_or_block.args["name"]))
                    # here we have an include_role. The class IncludeRole is a subclass of TaskInclude.
                    # We do this because the management of an include_role is different.
                    # See :func:`~ansible.playbook.included_file.IncludedFile.process_include_results` from line 155
                    my_blocks, _ = task_or_block.get_block_list(
                        play=current_play,
                        loader=self.data_loader,
                        variable_manager=self.variable_manager)
                else:
                    self.display.v(
                        "An 'include_tasks' found. Including tasks from '{}'".
                        format(task_or_block.get_name()))
                    templar = Templar(loader=self.data_loader,
                                      variables=task_vars)
                    try:
                        include_file = handle_include_path(
                            original_task=task_or_block,
                            loader=self.data_loader,
                            templar=templar)
                    except AnsibleUndefinedVariable as e:
                        # TODO: mark this task with some special shape or color
                        self.display.warning(
                            "Unable to translate the include task '{}' due to an undefined variable: {}. "
                            "Some variables are available only during the real execution."
                            .format(task_or_block.get_name(), str(e)))
                        loop_counter += 1
                        self._include_task(task_or_block, loop_counter,
                                           task_vars, graph, node_name_prefix,
                                           color, parent_node_id,
                                           parent_node_name)
                        continue

                    data = self.data_loader.load_from_file(include_file)
                    if data is None:
                        self.display.warning(
                            "file %s is empty and had no tasks to include" %
                            include_file)
                        continue
                    elif not isinstance(data, list):
                        raise AnsibleParserError(
                            "included task files must contain a list of tasks",
                            obj=data)

                    # get the blocks from the include_tasks
                    my_blocks = load_list_of_blocks(
                        data,
                        play=current_play,
                        variable_manager=self.variable_manager,
                        role=task_or_block._role,
                        loader=self.data_loader,
                        parent_block=task_or_block)

                for b in my_blocks:  # loop through the blocks inside the included tasks or role
                    loop_counter = self._include_tasks_in_blocks(
                        current_play=current_play,
                        graph=graph,
                        parent_node_name=parent_node_name,
                        parent_node_id=parent_node_id,
                        block=b,
                        color=color,
                        current_counter=loop_counter,
                        play_vars=task_vars,
                        node_name_prefix=node_name_prefix)
            else:
                # check if this task comes from a role and we dont want to include role's task
                if has_role_parent(
                        task_or_block) and not self.options.include_role_tasks:
                    # skip role's task
                    self.display.vv(
                        "The task '{}' has a role as parent and include_role_tasks is false. "
                        "It will be skipped.".format(task_or_block.get_name()))
                    continue

                self._include_task(task_or_block=task_or_block,
                                   loop_counter=loop_counter + 1,
                                   play_vars=play_vars,
                                   graph=graph,
                                   node_name_prefix=node_name_prefix,
                                   color=color,
                                   parent_node_id=parent_node_id,
                                   parent_node_name=parent_node_name)

                loop_counter += 1

        return loop_counter

    def _include_task(self, task_or_block, loop_counter, play_vars, graph,
                      node_name_prefix, color, parent_node_id,
                      parent_node_name):
        """
        Include the task in the graph: add a node for the task and an edge
        from its parent, recording both in the graph representation.
        :return:
        :rtype:
        """
        self.display.vv("Adding the task '{}' to the graph".format(
            task_or_block.get_name()))
        # check if the task should be included
        tagged = ''
        if not task_or_block.evaluate_tags(only_tags=self.options.tags,
                                           skip_tags=self.options.skip_tags,
                                           all_vars=play_vars):
            self.display.vv(
                "The task '{}' should not be executed. It will be marked as NOT_TAGGED"
                .format(task_or_block.get_name()))
            tagged = NOT_TAGGED

        task_edge_label = str(loop_counter)
        if len(task_or_block.when) > 0:
            when = "".join(map(str, task_or_block.when))
            task_edge_label += "  [when: " + when + "]"

        task_name = clean_name(
            node_name_prefix +
            self.template(task_or_block.get_name(), play_vars))
        # get prefix id from node_name
        id_prefix = node_name_prefix.replace("[",
                                             "").replace("]",
                                                         "").replace(" ", "_")
        task_id = id_prefix + str(uuid.uuid4()) + tagged
        edge_id = "edge_" + str(uuid.uuid4()) + tagged

        graph.node(task_name, shape="octagon", id=task_id)
        graph.edge(parent_node_name,
                   task_name,
                   label=task_edge_label,
                   color=color,
                   fontcolor=color,
                   style="bold",
                   id=edge_id)
        self.graph_representation.add_link(parent_node_id, edge_id)
        self.graph_representation.add_link(edge_id, task_id)
Esempio n. 20
0
class TaskExecutor:

    '''
    This is the main worker class for the executor pipeline, which
    handles loading an action plugin to actually dispatch the task to
    a given host. This class roughly corresponds to the old Runner()
    class.
    '''

    # Modules that we optimize by squashing loop items into a single call to
    # the module (see _squash_items(): the item list is joined into one
    # comma-separated 'name' argument).
    SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)

    def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj):
        """Store the injected collaborators for later use by run()/_execute()."""
        self._host = host
        self._task = task
        self._job_vars = job_vars
        self._play_context = play_context
        self._new_stdin = new_stdin
        self._loader = loader
        self._shared_loader_obj = shared_loader_obj

        # Reuse the display object owned by the command-line entry point when
        # one exists; otherwise create a private Display instance.
        try:
            from __main__ import display
        except ImportError:
            from ansible.utils.display import Display
            display = Display()
        self._display = display

    def run(self):
        '''
        The main executor entrypoint, where we determine if the specified
        task requires looping and either runs the task once per loop item
        or a single time for a plain task.
        '''

        self._display.debug("in run()")

        try:
            # lookup plugins need to know if this task is executing from
            # a role, so that it can properly find files/templates/etc.
            roledir = None
            if self._task._role:
                roledir = self._task._role._role_path
            self._job_vars['roledir'] = roledir

            items = self._get_loop_items()
            if items is None:
                # no loop: a single straight execution
                self._display.debug("calling self._execute()")
                res = self._execute()
                self._display.debug("_execute() done")
            elif not items:
                # an empty loop list skips the task entirely
                res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
            else:
                item_results = self._run_loop(items)

                # fold the per-item changed/failed flags into the overall result
                changed = any('changed' in item and item['changed'] for item in item_results)
                failed = any('failed' in item and item['failed'] for item in item_results)

                res = dict(results=item_results)
                if changed:
                    res['changed'] = True
                if failed:
                    res['failed'] = True
                    res['msg'] = 'One or more items failed'
                else:
                    res['msg'] = 'All items completed'

            # make sure changed is set in the result, if it's not present
            if 'changed' not in res:
                res['changed'] = False

            def _strip_unsafe(value):
                # recursively unwrap UnsafeProxy objects so the result is
                # plain, serializable data
                if isinstance(value, dict):
                    for key in value.keys():
                        value[key] = _strip_unsafe(value[key])
                elif isinstance(value, list):
                    for pos, element in enumerate(value):
                        value[pos] = _strip_unsafe(element)
                elif isinstance(value, UnsafeProxy):
                    return value._obj
                return value

            self._display.debug("dumping result to json")
            res = _strip_unsafe(res)
            self._display.debug("done dumping result, returning")
            return res
        except AnsibleError as e:
            return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
        finally:
            # best-effort cleanup: the connection may never have been created
            try:
                self._connection.close()
            except AttributeError:
                pass
            except Exception as e:
                self._display.debug("error closing connection: %s" % to_unicode(e))

    def _get_loop_items(self):
        '''
        Loads a lookup plugin to handle the with_* portion of a task (if specified),
        and returns the items result.
        '''

        # work on a copy so other parts of the code still see pristine job vars
        vars_copy = self._job_vars.copy()
        # fold the play context ("magic") variables into the copy
        self._play_context.update_vars(vars_copy)

        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=vars_copy)

        items = None
        if self._task.loop:
            if self._task.loop not in self._shared_loader_obj.lookup_loader:
                raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)

            # TODO: remove convert_bare=True and deprecate this in with_*
            try:
                loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
            except AnsibleUndefinedVariable as e:
                if 'has no attribute' not in str(e):
                    raise
                # undefined attribute is tolerated for now; see deprecation below
                loop_terms = []
                self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
            lookup = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar)
            items = lookup.run(terms=loop_terms, variables=vars_copy)

        if items:
            # wrap loop values so later templating treats them as unsafe input
            from ansible.vars.unsafe_proxy import UnsafeProxy
            for idx, item in enumerate(items):
                if item is not None and not isinstance(item, UnsafeProxy):
                    items[idx] = UnsafeProxy(item)
        return items

    def _run_loop(self, items):
        '''
        Runs the task with the loop items specified and collates the result
        into an array named 'results' which is inserted into the final result
        along with the item for which the loop ran.
        '''

        results = []

        # copy the job vars so the injected 'item' variable does not leak out
        task_vars = self._job_vars.copy()

        for item in self._squash_items(items, task_vars):
            task_vars['item'] = item

            try:
                task_copy = self._task.copy()
                play_context_copy = self._play_context.copy()
            except AnsibleParserError as e:
                results.append(dict(failed=True, msg=str(e)))
                continue

            # Temporarily swap the copies in so _execute() mutates throwaway
            # state, then swap back for a clean next iteration.
            (self._task, task_copy) = (task_copy, self._task)
            (self._play_context, play_context_copy) = (play_context_copy, self._play_context)
            res = self._execute(variables=task_vars)
            (self._task, task_copy) = (task_copy, self._task)
            (self._play_context, play_context_copy) = (play_context_copy, self._play_context)

            # tag the result with the item that produced it and collect it
            res['item'] = item
            results.append(res)

        return results

    def _squash_items(self, items, variables):
        '''
        Squash items down to a comma-separated list for certain modules which support it
        (typically package management modules).
        '''
        if not items or self._task.action not in self.SQUASH_ACTIONS:
            # nothing to squash; hand the items back untouched
            return items

        final_items = []
        # the package list may live under 'name' or the legacy 'pkg' key
        name = self._task.args.pop('name', None) or self._task.args.pop('pkg', None)
        for item in items:
            variables['item'] = item
            templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
            if self._task.evaluate_conditional(templar, variables):
                if templar._contains_vars(name):
                    final_items.append(templar.template(name))
                else:
                    final_items.append(item)
        joined_items = ",".join(final_items)
        self._task.args['name'] = joined_items
        return [joined_items]

    def _execute(self, variables=None):
        '''
        The primary workhorse of the executor system, this runs the task
        on the specified host (which may be the delegated_to host) and handles
        the retry/until and block rescue/always execution

        :kwarg variables: the variables to use for this execution; defaults to
            the job vars captured at construction time
        :returns: the task result dict (may carry 'failed', 'changed',
            'skipped', 'unreachable', registered facts, etc.)
        '''

        if variables is None:
            variables = self._job_vars

        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)

        context_validation_error = None
        try:
            # apply the given task's information to the connection info,
            # which may override some fields already set by the play or
            # the options specified on the command line
            self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)

            # fields set from the play/task may be based on variables, so we have to
            # do the same kind of post validation step on it here before we use it.
            self._play_context.post_validate(templar=templar)

            # We also add "magic" variables back into the variables dict to make sure
            # a certain subset of variables exist.
            self._play_context.update_vars(variables)
        except AnsibleError as e:
            # save the error, which we'll raise later if we don't end up
            # skipping this task during the conditional evaluation step
            context_validation_error = e

        # Evaluate the conditional (if any) for this task, which we do before running
        # the final task post-validation. We do this before the post validation due to
        # the fact that the conditional may specify that the task be skipped due to a
        # variable not being present which would otherwise cause validation to fail
        try:
            if not self._task.evaluate_conditional(templar, variables):
                self._display.debug("when evaluation failed, skipping this task")
                return dict(changed=False, skipped=True, skip_reason='Conditional check failed', _ansible_no_log=self._play_context.no_log)
        except AnsibleError:
            # skip conditional exception in the case of includes as the vars needed might not be available except in the included tasks or due to tags
            if self._task.action != 'include':
                raise

        # if we ran into an error while setting up the PlayContext, raise it now
        if context_validation_error is not None:
            raise context_validation_error

        # if this task is a TaskInclude, we just return now with a success code so the
        # main thread can expand the task list for the given host
        if self._task.action == 'include':
            include_variables = self._task.args.copy()
            include_file = include_variables.pop('_raw_params', None)
            if not include_file:
                return dict(failed=True, msg="No include file was specified to the include")

            include_file = templar.template(include_file)
            return dict(include=include_file, include_variables=include_variables)

        # Now we do final validation on the task, which sets all fields to their final values.
        self._task.post_validate(templar=templar)
        if '_variable_params' in self._task.args:
            variable_params = self._task.args.pop('_variable_params')
            if isinstance(variable_params, dict):
                self._display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
                variable_params.update(self._task.args)
                self._task.args = variable_params

        # get the connection and the handler for this execution
        self._connection = self._get_connection(variables=variables, templar=templar)
        self._connection.set_host_overrides(host=self._host)

        self._handler = self._get_action_handler(connection=self._connection, templar=templar)

        # And filter out any fields which were set to default(omit), and got the omit token value
        omit_token = variables.get('omit')
        if omit_token is not None:
            self._task.args = dict((i[0], i[1]) for i in iteritems(self._task.args) if i[1] != omit_token)

        # Read some values from the task, so that we can modify them if need be
        # (retries only takes effect when an 'until' condition is present)
        if self._task.until is not None:
            retries = self._task.retries
            if retries <= 0:
                retries = 1
        else:
            retries = 1

        delay = self._task.delay
        if delay < 0:
            delay = 1

        # make a copy of the job vars here, in case we need to update them
        # with the registered variable value later on when testing conditions
        vars_copy = variables.copy()

        self._display.debug("starting attempt loop")
        result = None
        for attempt in range(retries):
            if attempt > 0:
                self._display.display("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result), color="red")
                result['attempts'] = attempt + 1

            self._display.debug("running the handler")
            try:
                result = self._handler.run(task_vars=variables)
            except AnsibleConnectionFailure as e:
                return dict(unreachable=True, msg=str(e))
            self._display.debug("handler run complete")

            # NOTE: 'async' became a reserved keyword in Python 3.7; this
            # attribute access only parses on older interpreters (py2-era code).
            if self._task.async > 0:
                # the async_wrapper module returns dumped JSON via its stdout
                # response, so we parse it here and replace the result
                try:
                    result = json.loads(result.get('stdout'))
                except (TypeError, ValueError) as e:
                    return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))

                if self._task.poll > 0:
                    result = self._poll_async_result(result=result, templar=templar)

            # update the local copy of vars with the registered value, if specified,
            # or any facts which may have been generated by the module execution
            if self._task.register:
                vars_copy[self._task.register] = result

            if 'ansible_facts' in result:
                vars_copy.update(result['ansible_facts'])

            # create a conditional object to evaluate task conditions
            cond = Conditional(loader=self._loader)

            # the two closures below evaluate changed_when/failed_when against
            # the updated vars_copy and mutate 'result' in place
            def _evaluate_changed_when_result(result):
                if self._task.changed_when is not None:
                    cond.when = [ self._task.changed_when ]
                    result['changed'] = cond.evaluate_conditional(templar, vars_copy)

            def _evaluate_failed_when_result(result):
                if self._task.failed_when is not None:
                    cond.when = [ self._task.failed_when ]
                    failed_when_result = cond.evaluate_conditional(templar, vars_copy)
                    result['failed_when_result'] = result['failed'] = failed_when_result
                    return failed_when_result
                return False

            if self._task.until:
                cond.when = self._task.until
                if cond.evaluate_conditional(templar, vars_copy):
                    _evaluate_changed_when_result(result)
                    _evaluate_failed_when_result(result)
                    break
            elif (self._task.changed_when is not None or self._task.failed_when is not None) and 'skipped' not in result:
                    _evaluate_changed_when_result(result)
                    if _evaluate_failed_when_result(result):
                        break
            elif 'failed' not in result:
                if result.get('rc', 0) != 0:
                    result['failed'] = True
                else:
                    # if the result is not failed, stop trying
                    break

            if attempt < retries - 1:
                time.sleep(delay)
            else:
                # last attempt: still honor changed_when/failed_when overrides
                _evaluate_changed_when_result(result)
                _evaluate_failed_when_result(result)

        # do the final update of the local variables here, for both registered
        # values and any facts which may have been created
        if self._task.register:
            ### FIXME:
            # If we remove invocation, we should also be removing _ansible*
            # and maybe ansible_facts.
            # Remove invocation from registered vars
            #if 'invocation' in result:
            #    del result['invocation']
            variables[self._task.register] = result

        if 'ansible_facts' in result:
            variables.update(result['ansible_facts'])

        # save the notification target in the result, if it was specified, as
        # this task may be running in a loop in which case the notification
        # may be item-specific, ie. "notify: service {{item}}"
        if self._task.notify is not None:
            result['_ansible_notify'] = self._task.notify

        # preserve no_log setting
        result["_ansible_no_log"] = self._play_context.no_log

        # and return
        self._display.debug("attempt loop complete, returning result")
        return result
Esempio n. 21
0
class CLI(object):
    ''' code behind bin/ansible* programs '''

    VALID_ACTIONS = ['No Actions']

    # Patterns for module-documentation markup (I(), B(), M(), U(), C());
    # presumably converted for terminal display elsewhere in this class —
    # the usage site is not visible here.
    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD   = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL    = re.compile(r"U\(([^)]+)\)")
    _CONST  = re.compile(r"C\(([^)]+)\)")

    PAGER   = 'less'
    LESS_OPTS = 'FRSX'  # -F (quit-if-one-screen) -R (allow raw ansi control chars)
                        # -S (chop long lines) -X (disable termcap init and de-init)

    def __init__(self, args, display=None):
        """
        Base init method for all command line programs
        """

        self.args = args
        self.options = None
        self.parser = None
        self.action = None

        # reuse the caller-provided display when given, otherwise create one
        self.display = Display() if display is None else display

    def set_action(self):
        """
        Get the action the user wants to execute from the sys argv list.

        Scans ``self.args`` for the first entry found in ``VALID_ACTIONS``,
        stores it in ``self.action`` and removes it from ``self.args``.

        :raises AnsibleOptionsError: if no valid action was found
        """
        # enumerate instead of range(len(...)): we need both index and value,
        # and since we break on the first match, deleting by index is safe.
        for i, arg in enumerate(self.args):
            if arg in self.VALID_ACTIONS:
                self.action = arg
                del self.args[i]
                break

        if not self.action:
            raise AnsibleOptionsError("Missing required action")

    def execute(self):
        """
        Actually runs a child defined method using the execute_<action> pattern
        """
        # dispatch to the subclass hook named after the selected action
        handler = getattr(self, "execute_%s" % self.action)
        handler()

    def parse(self):
        # Abstract hook: concrete CLI subclasses build their option parser
        # and populate self.parser/self.options here.
        raise Exception("Need to implement!")

    def run(self):
        """Base run: when verbose, report which config file (if any) is in effect."""
        if self.options.verbosity <= 0:
            return

        if C.CONFIG_FILE:
            message = "Using %s as config file" % C.CONFIG_FILE
        else:
            message = "No config file found; using defaults"
        self.display.display(message)

    @staticmethod
    def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
        '''
        Prompt for vault password and/or password change.

        :kwarg ask_vault_pass: prompt for the current vault password
        :kwarg ask_new_vault_pass: prompt for a new vault password (rekey)
        :kwarg confirm_vault: prompt a second time and require both entries to match
        :kwarg confirm_new: same confirmation for the new password
        :returns: (vault_pass, new_vault_pass) tuple; an entry is None when it
            was not requested or input was aborted with EOF
        :raises AnsibleError: if a confirmation prompt does not match
        '''
        # NOTE(review): the prompt logic below was reconstructed — the original
        # source region was corrupted (string literals collapsed into '******'
        # redaction sequences). Verify against the upstream implementation.
        vault_pass = None
        new_vault_pass = None

        try:
            if ask_vault_pass:
                vault_pass = getpass.getpass(prompt="Vault password: ")

            if ask_vault_pass and confirm_vault:
                vault_pass2 = getpass.getpass(prompt="Confirm Vault password: ")
                if vault_pass != vault_pass2:
                    raise AnsibleError("Passwords do not match")

            if ask_new_vault_pass:
                new_vault_pass = getpass.getpass(prompt="New Vault password: ")

            if ask_new_vault_pass and confirm_new:
                new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
                if new_vault_pass != new_vault_pass2:
                    raise AnsibleError("Passwords do not match")
        except EOFError:
            # user aborted the prompt; fall through with whatever we collected
            pass

        # enforce no newline chars at the end of passwords
        if vault_pass:
            vault_pass = to_bytes(vault_pass, errors='strict', nonstring='simplerepr').strip()
        if new_vault_pass:
            new_vault_pass = to_bytes(new_vault_pass, errors='strict', nonstring='simplerepr').strip()

        return vault_pass, new_vault_pass


    def ask_passwords(self):
        '''
        Prompt for connection and become passwords if needed.

        :returns: (sshpass, becomepass) tuple; entries are None when the
            corresponding --ask-* option was not set or input hit EOF
        '''
        # NOTE(review): this method and the head of validate_conflicts() were
        # reconstructed — the original source region was corrupted (string
        # literals collapsed into '******' redaction sequences). Verify against
        # the upstream implementation.
        op = self.options
        sshpass = None
        becomepass = None
        become_prompt = ''

        try:
            if op.ask_pass:
                sshpass = getpass.getpass(prompt="SSH password: ")
                become_prompt = "%s password[defaults to SSH password]: " % op.become_method.upper()
                if sshpass:
                    sshpass = to_bytes(sshpass, errors='strict', nonstring='simplerepr')
            else:
                become_prompt = "%s password: " % op.become_method.upper()

            if op.become_ask_pass:
                becomepass = getpass.getpass(prompt=become_prompt)
                # an empty become password means "reuse the SSH password"
                if op.ask_pass and becomepass == '':
                    becomepass = sshpass
                if becomepass:
                    becomepass = to_bytes(becomepass)
        except EOFError:
            pass

        return (sshpass, becomepass)

    def validate_conflicts(self, vault_opts=False, runas_opts=False, fork_opts=False):
        '''
        Check for conflicting command-line options and abort via the
        parser's error() when an invalid combination is found.
        '''

        op = self.options

        if vault_opts:
            # Check for vault related conflicts
            if (op.ask_vault_pass and op.vault_password_file):
                self.parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")

        if runas_opts:
            # Check for privilege escalation conflicts
            if (op.su or op.su_user or op.ask_su_pass) and \
                        (op.sudo or op.sudo_user or op.ask_sudo_pass) or \
                (op.su or op.su_user or op.ask_su_pass) and \
                        (op.become or op.become_user or op.become_ask_pass) or \
                (op.sudo or op.sudo_user or op.ask_sudo_pass) and \
                        (op.become or op.become_user or op.become_ask_pass):

                self.parser.error("Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') "
                                  "and su arguments ('-su', '--su-user', and '--ask-su-pass') "
                                  "and become arguments ('--become', '--become-user', and '--ask-become-pass')"
                                  " are exclusive of each other")

        if fork_opts:
            if op.forks < 1:
                self.parser.error("The number of processes (--forks) must be >= 1")

    @staticmethod
    def expand_tilde(option, opt, value, parser):
        # optparse callback: store the option value with any leading '~'
        # expanded to the user's home directory.
        expanded = os.path.expanduser(value)
        setattr(parser.values, option.dest, expanded)

    @staticmethod
    def base_parser(usage="", output_opts=False, runas_opts=False, meta_opts=False, runtask_opts=False, vault_opts=False, module_opts=False,
        async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False):
        '''
        Create an options parser for most ansible scripts.

        Each boolean keyword argument enables a group of related command-line
        options; callers switch on only the groups their command supports.
        '''

        # FIXME: implement epilog parsing
        #OptionParser.format_epilog = lambda self, formatter: self.epilog

        # base opts (always present)
        parser = SortedOptParser(usage, version=CLI.version("%prog"))
        parser.add_option('-v','--verbose', dest='verbosity', default=0, action="count",
            help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")

        # host selection / inventory options
        if inventory_opts:
            parser.add_option('-i', '--inventory-file', dest='inventory',
                help="specify inventory host file (default=%s)" % C.DEFAULT_HOST_LIST,
                default=C.DEFAULT_HOST_LIST, action="callback", callback=CLI.expand_tilde, type=str)
            parser.add_option('--list-hosts', dest='listhosts', action='store_true',
                help='outputs a list of matching hosts; does not execute anything else')
            parser.add_option('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
                help='further limit selected hosts to an additional pattern')

        if module_opts:
            parser.add_option('-M', '--module-path', dest='module_path', default=None,
                help="specify path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH,
                action="callback", callback=CLI.expand_tilde, type=str)
        if runtask_opts:
            parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
                help="set additional variables as key=value or YAML/JSON", default=[])

        if fork_opts:
            parser.add_option('-f','--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
                help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)

        # vault password handling
        if vault_opts:
            parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
                help='ask for vault password')
            parser.add_option('--vault-password-file', default=C.DEFAULT_VAULT_PASSWORD_FILE,
                dest='vault_password_file', help="vault password file", action="callback",
                callback=CLI.expand_tilde, type=str)
            parser.add_option('--new-vault-password-file',
                dest='new_vault_password_file', help="new vault password file for rekey", action="callback",
                callback=CLI.expand_tilde, type=str)
            parser.add_option('--output', default=None, dest='output_file',
                help='output file name for encrypt or decrypt; use - for stdout')


        if subset_opts:
            parser.add_option('-t', '--tags', dest='tags', default='all',
                help="only run plays and tasks tagged with these values")
            parser.add_option('--skip-tags', dest='skip_tags',
                help="only run plays and tasks whose tags do not match these values")

        if output_opts:
            parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
                help='condense output')
            parser.add_option('-t', '--tree', dest='tree', default=None,
                help='log output to this directory')

        # privilege escalation: legacy sudo/su flags plus consolidated 'become'
        if runas_opts:
            # priv user defaults to root later on to enable detecting when this option was given here
            parser.add_option('-K', '--ask-sudo-pass', default=C.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true',
                help='ask for sudo password (deprecated, use become)')
            parser.add_option('--ask-su-pass', default=C.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true',
                help='ask for su password (deprecated, use become)')
            parser.add_option("-s", "--sudo", default=C.DEFAULT_SUDO, action="store_true", dest='sudo',
                help="run operations with sudo (nopasswd) (deprecated, use become)")
            parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
                              help='desired sudo user (default=root) (deprecated, use become)')
            parser.add_option('-S', '--su', default=C.DEFAULT_SU, action='store_true',
                help='run operations with su (deprecated, use become)')
            parser.add_option('-R', '--su-user', default=None,
                help='run operations with su as this user (default=%s) (deprecated, use become)' % C.DEFAULT_SU_USER)

            # consolidated privilege escalation (become)
            parser.add_option("-b", "--become", default=C.DEFAULT_BECOME, action="store_true", dest='become',
                help="run operations with become (nopasswd implied)")
            parser.add_option('--become-method', dest='become_method', default=C.DEFAULT_BECOME_METHOD, type='string',
                help="privilege escalation method to use (default=%s), valid choices: [ %s ]" % (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
            parser.add_option('--become-user', default=None, dest='become_user', type='string',
                help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
            parser.add_option('--ask-become-pass', default=False, dest='become_ask_pass', action='store_true',
                help='ask for privilege escalation password')


        if connect_opts:
            parser.add_option('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
                help='ask for connection password')
            parser.add_option('--private-key','--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
                help='use this file to authenticate the connection')
            parser.add_option('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
                help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
            parser.add_option('-c', '--connection', dest='connection', default=C.DEFAULT_TRANSPORT,
                help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
            parser.add_option('-T', '--timeout', default=C.DEFAULT_TIMEOUT, type='int', dest='timeout',
                help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
            parser.add_option('--ssh-common-args', default='', dest='ssh_common_args',
                help="specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)")
            parser.add_option('--sftp-extra-args', default='', dest='sftp_extra_args',
                help="specify extra arguments to pass to sftp only (e.g. -f, -l)")
            parser.add_option('--scp-extra-args', default='', dest='scp_extra_args',
                help="specify extra arguments to pass to scp only (e.g. -l)")
            parser.add_option('--ssh-extra-args', default='', dest='ssh_extra_args',
                help="specify extra arguments to pass to ssh only (e.g. -R)")

        if async_opts:
            parser.add_option('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type='int', dest='poll_interval',
                help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
            parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
                help='run asynchronously, failing after X seconds (default=N/A)')

        if check_opts:
            parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
                help="don't make any changes; instead, try to predict some of the changes that may occur")
            parser.add_option('--syntax-check', dest='syntax', action='store_true',
                help="perform a syntax check on the playbook, but do not execute it")
            parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
                help="when changing (small) files and templates, show the differences in those files; works great with --check")

        if meta_opts:
            parser.add_option('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true',
                help="run handlers even if a task fails")
            parser.add_option('--flush-cache', dest='flush_cache', action='store_true',
                help="clear the fact cache")

        return parser

    @staticmethod
    def version(prog):
        ''' return ansible version '''
        # assemble the version banner piece by piece, then join once
        parts = ["{0} {1}".format(prog, __version__)]
        gitinfo = CLI._gitinfo()
        if gitinfo:
            parts.append(" {0}".format(gitinfo))
        parts.append("\n  config file = %s" % C.CONFIG_FILE)
        parts.append("\n  configured module search path = %s" % C.DEFAULT_MODULE_PATH)
        return "".join(parts)

    @staticmethod
    def version_info(gitinfo=False):
        '''
        Return full ansible version info as a dict with 'string', 'full',
        'major', 'minor' and 'revision' keys.

        :kwarg gitinfo: also include git branch/commit details (expensive call,
            use with care)
        '''
        if gitinfo:
            # expensive call, use with care
            ansible_version_string = CLI.version('')
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split('.')
        # normalize each dotted component to an int where possible
        for counter, version_part in enumerate(ansible_versions):
            if version_part == "":
                ansible_versions[counter] = 0
            try:
                ansible_versions[counter] = int(ansible_versions[counter])
            except ValueError:
                # non-numeric component (e.g. a beta/rc suffix); keep as-is
                pass
        # pad so major/minor/revision always exist
        while len(ansible_versions) < 3:
            ansible_versions.append(0)
        return {'string':      ansible_version_string.strip(),
                'full':        ansible_version,
                'major':       ansible_versions[0],
                'minor':       ansible_versions[1],
                'revision':    ansible_versions[2]}

    @staticmethod
    def _git_repo_info(repo_path):
        ''' returns a string containing git branch, commit id and commit date '''
        result = None
        if os.path.exists(repo_path):
            # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
            if os.path.isfile(repo_path):
                try:
                    gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
                    # There is a possibility the .git file to have an absolute path.
                    if os.path.isabs(gitdir):
                        repo_path = gitdir
                    else:
                        repo_path = os.path.join(repo_path[:-4], gitdir)
                except (IOError, AttributeError):
                    return ''
            f = open(os.path.join(repo_path, "HEAD"))
            branch = f.readline().split('/')[-1].rstrip("\n")
            f.close()
            branch_path = os.path.join(repo_path, "refs", "heads", branch)
            if os.path.exists(branch_path):
                f = open(branch_path)
                commit = f.readline()[:10]
                f.close()
            else:
                # detached HEAD
                commit = branch[:10]
                branch = 'detached HEAD'
                branch_path = os.path.join(repo_path, "HEAD")

            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
                time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
        else:
            result = ''
        return result

    @staticmethod
    def _gitinfo():
        ''' Return git info for the ansible checkout, plus one line per submodule. '''
        basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
        repo_path = os.path.join(basedir, '.git')
        result = CLI._git_repo_info(repo_path)
        submodules = os.path.join(basedir, '.gitmodules')
        if not os.path.exists(submodules):
            return result
        # context manager guarantees the handle is closed even if a line raises
        with open(submodules) as f:
            for line in f:
                tokens = line.strip().split(' ')
                # .gitmodules entries look like: "path = <submodule_path>"
                if tokens[0] == 'path':
                    submodule_path = tokens[2]
                    submodule_info = CLI._git_repo_info(os.path.join(basedir, submodule_path, '.git'))
                    if not submodule_info:
                        submodule_info = ' not found - use git submodule update --init ' + submodule_path
                    result += "\n  {0}: {1}".format(submodule_path, submodule_info)
        return result


    def pager(self, text):
        ''' find reasonable way to display text '''
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            self.display.display(text)
            return
        if 'PAGER' in os.environ:
            # no pager pipe support on windows; fall back to plain display
            if sys.platform == 'win32':
                self.display.display(text)
            else:
                self.pager_pipe(text, os.environ['PAGER'])
            return
        # no PAGER configured: try 'less' if available, else plain display
        less_available = subprocess.call('(less --version) 2> /dev/null', shell=True) == 0
        if less_available:
            self.pager_pipe(text, 'less')
        else:
            self.display.display(text)

    @staticmethod
    def pager_pipe(text, cmd):
        ''' pipe text through a pager '''
        # make sure 'less' behaves sanely unless the user configured it already
        os.environ.setdefault('LESS', CLI.LESS_OPTS)
        try:
            pager_proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
            pager_proc.communicate(input=text.encode(sys.stdout.encoding))
        except (IOError, KeyboardInterrupt):
            # broken pipe / user interrupt while paging are not errors
            pass

    @classmethod
    def tty_ify(cls, text):
        ''' Convert doc markup macros (I/B/M/U/C) in *text* to plain tty-friendly text. '''
        # first parameter renamed self -> cls: this is a classmethod, so the
        # conventional name is cls (callers are unaffected).
        t = cls._ITALIC.sub("`" + r"\1" + "'", text)    # I(word) => `word'
        t = cls._BOLD.sub("*" + r"\1" + "*", t)         # B(word) => *word*
        t = cls._MODULE.sub("[" + r"\1" + "]", t)       # M(word) => [word]
        t = cls._URL.sub(r"\1", t)                      # U(word) => word
        t = cls._CONST.sub("`" + r"\1" + "'", t)        # C(word) => `word'

        return t

    @staticmethod
    def read_vault_password_file(vault_password_file, loader):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(os.path.expanduser(vault_password_file))
        if not os.path.exists(this_path):
            raise AnsibleError("The vault password file %s was not found" % this_path)

        if loader.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
            stdout, stderr = p.communicate()
            vault_pass = stdout.strip('\r\n')
        else:
            try:
                f = open(this_path, "rb")
                vault_pass=f.read().strip()
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))

        return vault_pass

    def get_opt(self, k, defval=""):
        """
        Returns an option from an Optparse values instance.

        :arg k: name of the option on the parsed options namespace
        :kwarg defval: value returned when the option is absent
        :returns: the option value; for "roles_path" only the first entry of a
            path-separated list is returned
        """
        try:
            data = getattr(self.options, k)
        except AttributeError:
            # option not present on the namespace; fall back to the default
            return defval
        if k == "roles_path":
            if os.pathsep in data:
                data = data.split(os.pathsep)[0]
        return data