Esempio n. 1
0
 def __init__(self):
     """
     Set up logging and route Ansible display output through it.

     Builds a ``Display`` whose ``display`` method is monkey-patched so
     that every message lands on the ``transport`` logger instead of
     being printed to the terminal.
     """
     screen = Display()
     self.log = logging.getLogger('transport')
     # TODO: Make verbosity more configurable
     # Crank Display verbosity up only when our logger is at DEBUG.
     screen.verbosity = 5 if logging.getLevelName(self.log.level) == 'DEBUG' else 1
     # replace Display's display method with our own log forwarder
     screen.display = lambda msg, *a, **k: self.log.info(msg)
     super(LogForward, self).__init__(screen)
Esempio n. 2
0
def pytest_configure(config):
    """Validate --ansible-* parameters and register the plugin.

    Registers the ``ansible`` marker, raises Ansible's verbosity to match
    pytest's ``-v`` count, and installs the PyTestAnsiblePlugin.
    """
    log.debug("pytest_configure() called")

    config.addinivalue_line("markers", "ansible(**kwargs): Ansible integration")

    # Enable connection debugging
    if config.option.verbose > 0:
        if hasattr(ansible.utils, 'VERBOSITY'):
            # Older Ansible releases expose a module-level verbosity knob.
            ansible.utils.VERBOSITY = int(config.option.verbose)
        else:
            # Newer releases configure verbosity on the Display singleton.
            from ansible.utils.display import Display
            display = Display()
            display.verbosity = int(config.option.verbose)

    # BUGFIX: this was `assert config.pluginmanager.register(...)`, which
    # silently skips the registration check under `python -O` (asserts are
    # stripped). Fail loudly instead.
    if not config.pluginmanager.register(PyTestAnsiblePlugin(config), "ansible"):
        raise RuntimeError("Failed to register the 'ansible' pytest plugin")
Esempio n. 3
0
    def find_plugin(self, name, suffixes=None):
        ''' Find a plugin named name '''

        # Default suffixes depend on what we are loading: classes live in
        # .py files only, modules may also have no extension at all.
        if not suffixes:
            if self.class_name:
                suffixes = ['.py']
            else:
                suffixes = ['.py', '']

        potential_names = frozenset('%s%s' % (name, s) for s in suffixes)
        for full_name in potential_names:
            if full_name in self._plugin_path_cache:
                return self._plugin_path_cache[full_name]

        # Index any paths not yet searched, caching every file we see.
        for path in [p for p in self._get_paths() if p not in self._searched_paths]:
            if os.path.isdir(path):
                try:
                    full_paths = (os.path.join(path, f) for f in os.listdir(path))
                except OSError as e:
                    d = Display()
                    d.warning("Error accessing plugin paths: %s" % str(e))
                    # BUGFIX: `full_paths` is unbound when listdir fails; the
                    # original fell through and raised NameError below. Mark
                    # the path searched and move on instead.
                    self._searched_paths.add(path)
                    continue
                for full_path in (f for f in full_paths if os.path.isfile(f)):
                    for suffix in suffixes:
                        if full_path.endswith(suffix):
                            full_name = os.path.basename(full_path)
                            break
                    else: # Yes, this is a for-else: http://bit.ly/1ElPkyg
                        continue

                    if full_name not in self._plugin_path_cache:
                        self._plugin_path_cache[full_name] = full_path

            self._searched_paths.add(path)
            for full_name in potential_names:
                if full_name in self._plugin_path_cache:
                    return self._plugin_path_cache[full_name]

        # if nothing is found, try finding alias/deprecated
        if not name.startswith('_'):
            for alias_name in ('_%s' % n for n in potential_names):
                # We've already cached all the paths at this point
                if alias_name in self._plugin_path_cache:
                    return self._plugin_path_cache[alias_name]

        return None
Esempio n. 4
0
class Config(object):
    """Holder for tool configuration read from a YAML config file."""

    def __init__(self, configfile):
        self.display = Display()      # user-facing error reporting
        self.configfile = configfile  # path to the YAML configuration file
        self.logfile = None
        self.loglevel = None

    @property
    def parse_configfile(self):
        """
        Retrieve configuration parameters from the config file
        """
        try:
            with open(self.configfile, "r") as f:
                # SECURITY: safe_load refuses to construct arbitrary Python
                # objects; yaml.load without a Loader is unsafe/deprecated.
                config = yaml.safe_load(f)
        except (IOError, OSError, yaml.YAMLError):
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit.
            self.display.error(
                "Can't read configuration file %s" % self.configfile
            )
            sys.exit(1)
        return config
Esempio n. 5
0
    def __init__(self, args, display=None):
        """
        Base init method for all command line programs
        """

        # Raw argv-style arguments; parsed state gets filled in later.
        self.args = args
        self.options = None
        self.parser = None
        self.action = None

        # Use the caller's Display when given, otherwise make our own.
        self.display = Display() if display is None else display
    def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj):
        """Capture everything needed to execute one task on one host."""
        self._host = host
        self._task = task
        self._job_vars = job_vars
        self._play_context = play_context
        self._new_stdin = new_stdin
        self._loader = loader
        self._shared_loader_obj = shared_loader_obj

        # Reuse the CLI-wide Display when running under bin/ansible*;
        # otherwise (tests, embedding) build a private instance.
        try:
            from __main__ import display
            self._display = display
        except ImportError:
            from ansible.utils.display import Display
            self._display = Display()
Esempio n. 7
0
    def find_plugin(self, name, suffixes=None):
        ''' Find a plugin named name '''

        # Default suffixes depend on what we are loading: classes live in
        # .py files only, modules may also have no extension at all.
        if not suffixes:
            if self.class_name:
                suffixes = ['.py']
            else:
                suffixes = ['.py', '']

        potential_names = frozenset('%s%s' % (name, s) for s in suffixes)
        for full_name in potential_names:
            if full_name in self._plugin_path_cache:
                return self._plugin_path_cache[full_name]

        # Index any paths not yet searched, caching every file we see.
        for path in [p for p in self._get_paths() if p not in self._searched_paths]:
            if os.path.isdir(path):
                try:
                    full_paths = (os.path.join(path, f) for f in os.listdir(path))
                except OSError as e:
                    d = Display()
                    d.warning("Error accessing plugin paths: %s" % str(e))
                    # BUGFIX: `full_paths` is unbound when listdir fails; the
                    # original fell through and raised NameError below. Mark
                    # the path searched and move on instead.
                    self._searched_paths.add(path)
                    continue
                for full_path in (f for f in full_paths if os.path.isfile(f)):
                    for suffix in suffixes:
                        if full_path.endswith(suffix):
                            full_name = os.path.basename(full_path)
                            break
                    else: # Yes, this is a for-else: http://bit.ly/1ElPkyg
                        continue

                    if full_name not in self._plugin_path_cache:
                        self._plugin_path_cache[full_name] = full_path

            self._searched_paths.add(path)
            for full_name in potential_names:
                if full_name in self._plugin_path_cache:
                    return self._plugin_path_cache[full_name]

        # if nothing is found, try finding alias/deprecated
        if not name.startswith('_'):
            for alias_name in ('_%s' % n for n in potential_names):
                # We've already cached all the paths at this point
                if alias_name in self._plugin_path_cache:
                    if not os.path.islink(self._plugin_path_cache[alias_name]):
                        d = Display()
                        d.warning('%s has been deprecated, which means '
                                  'it is kept for backwards compatibility '
                                  'but usage is discouraged. The module '
                                  'documentation details page may explain '
                                  'more about this rationale.' %
                                  name.lstrip('_'))
                    return self._plugin_path_cache[alias_name]

        return None
def main():
    """Entry point: build an ad-hoc role-assignment play and run it."""
    variable_manager = VariableManager()
    loader = DataLoader()
    passwd = None
    become_passwd = None
    display = Display()

    parser = argparse.ArgumentParser()
    prepare_parser(parser)
    args = parser.parse_args()
    if args.askpass:
        # NOTE(review): these two prompts were garbled ("******") in the
        # original source; reconstructed from the surviving fragments.
        passwd = getpass.getpass("SSH password:")
        become_passwd = getpass.getpass("BECOME password "
                                        "[defaults to SSH password]:")
        if become_passwd == "":
            become_passwd = passwd

    # Map CLI arguments onto the Options namedtuple TaskQueueManager expects.
    options = Options(
        connection=args.connection,
        module_path=args.module_path,
        forks=args.forks,
        become=args.become,
        become_method=args.become_method,
        become_user=args.become_user,
        check=args.check,
        remote_user=args.remote_user,
        private_key_file=args.private_key_file,
        ssh_common_args=None,
        sftp_extra_args=None,
        scp_extra_args=None,
        ssh_extra_args=None,
        verbosity=args.verbose
    )

    display.verbosity = args.verbose
    cb = CallbackModule(display)
    if not os.path.isfile(args.inventory):
        exit("ERROR! Can't open host list")

    inventory = Inventory(
        loader=loader,
        variable_manager=variable_manager,
        host_list=args.inventory
    )

    inventory.subset(args.subset)

    play_source = dict(
        name="Assign roles %s" % args.roles,
        hosts='all',
        gather_facts='no',
        roles=args.roles)

    variable_manager.set_inventory(inventory)
    play = Play().load(
        play_source,
        variable_manager=variable_manager,
        loader=loader
    )

    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords={'conn_pass': passwd, 'become_pass': become_passwd},
            stdout_callback=cb
        )
        tqm.run(play)
    finally:
        # Always release TQM worker processes, even if run() raised.
        if tqm is not None:
            tqm.cleanup()
def display(*args, **kwargs):
    """Forward a module-level display() call to a fresh Display instance."""
    Display().display(*args, **kwargs)
Esempio n. 10
0
class CLI(object):
    ''' code behind bin/ansible* programs '''

    # Sub-commands accepted by set_action(); subclasses override this list.
    VALID_ACTIONS = ['No Actions']

    # Regexes used by tty_ify() to strip module-doc markup down to plain text.
    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD   = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL    = re.compile(r"U\(([^)]+)\)")
    _CONST  = re.compile(r"C\(([^)]+)\)")

    PAGER   = 'less'
    LESS_OPTS = 'FRSX'  # -F (quit-if-one-screen) -R (allow raw ansi control chars)
                        # -S (chop long lines) -X (disable termcap init and de-init)

    def __init__(self, args, display=None):
        """
        Base init method for all command line programs
        """

        # Raw argv-style arguments; options/parser/action are populated
        # later by parse() and set_action().
        self.args = args
        self.options = None
        self.parser = None
        self.action = None

        if display is None:
            self.display = Display()
        else:
            self.display = display

    def set_action(self):
        """
        Get the action the user wants to execute from the sys argv list.
        """
        # Find the first recognized action keyword, record it, and strip
        # it out of the argument list so later parsing never sees it.
        for idx, candidate in enumerate(self.args):
            if candidate in self.VALID_ACTIONS:
                self.action = candidate
                del self.args[idx]
                break

        if not self.action:
            raise AnsibleOptionsError("Missing required action")

    def execute(self):
        """
        Actually runs a child defined method using the execute_<action> pattern
        """
        # Dispatch to e.g. execute_install when self.action == 'install'.
        getattr(self, "execute_%s" % self.action)()

    def parse(self):
        """Parse self.args into self.options; subclasses must override."""
        # NotImplementedError is the idiomatic marker for an abstract method
        # (and is still an Exception subclass, so existing handlers catch it).
        raise NotImplementedError("Need to implement!")

    def run(self):
        """Common pre-run output shared by all CLI subclasses."""

        # Quiet by default; any -v tells the user which config file applies.
        if self.options.verbosity <= 0:
            return
        if C.CONFIG_FILE:
            msg = "Using %s as config file" % C.CONFIG_FILE
        else:
            msg = "No config file found; using defaults"
        self.display.display(msg)

    @staticmethod
    def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
        ''' prompt for vault password and/or password change '''
        # NOTE(review): the prompt/confirm sequences below were garbled
        # ("******") in the original source; reconstructed from the
        # surviving prompt strings.

        vault_pass = None
        new_vault_pass = None

        try:
            if ask_vault_pass:
                vault_pass = getpass.getpass(prompt="Vault password: ")

            if ask_vault_pass and confirm_vault:
                vault_pass2 = getpass.getpass(prompt="Confirm Vault password: ")
                if vault_pass != vault_pass2:
                    raise AnsibleError("Passwords do not match")

            if ask_new_vault_pass:
                new_vault_pass = getpass.getpass(prompt="New Vault password: ")

            if ask_new_vault_pass and confirm_new:
                new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
                if new_vault_pass != new_vault_pass2:
                    raise AnsibleError("Passwords do not match")
        except EOFError:
            # stdin closed (e.g. piped input ran out): fall through with
            # whatever we collected so far.
            pass

        # enforce no newline chars at the end of passwords
        if vault_pass:
            vault_pass = to_bytes(vault_pass, errors='strict', nonstring='simplerepr').strip()
        if new_vault_pass:
            new_vault_pass = to_bytes(new_vault_pass, errors='strict', nonstring='simplerepr').strip()

        return vault_pass, new_vault_pass


    def ask_passwords(self):
        ''' prompt for connection and become passwords if needed '''
        # NOTE(review): the original text was garbled ("******") and had the
        # tail of this method fused with the head of validate_conflicts();
        # both are reconstructed below from the surviving fragments.

        op = self.options
        sshpass = None
        becomepass = None
        become_prompt = ''

        try:
            if op.ask_pass:
                sshpass = getpass.getpass(prompt="SSH password: ")
                become_prompt = "%s password[defaults to SSH password]: " % op.become_method.upper()
                if sshpass:
                    sshpass = to_bytes(sshpass, errors='strict', nonstring='simplerepr')
            else:
                become_prompt = "%s password: " % op.become_method.upper()

            if op.become_ask_pass:
                becomepass = getpass.getpass(prompt=become_prompt)
                # Empty become password defaults to the SSH password.
                if op.ask_pass and becomepass == '':
                    becomepass = sshpass
                if becomepass:
                    becomepass = to_bytes(becomepass)
        except EOFError:
            pass

        return sshpass, becomepass

    def validate_conflicts(self, vault_opts=False, runas_opts=False, fork_opts=False):
        ''' check for conflicting options '''

        op = self.options

        if vault_opts:
            # Check for vault related conflicts
            if op.ask_vault_pass and op.vault_password_file:
                self.parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")

        if runas_opts:
            # Check for privilege escalation conflicts
            if (op.su or op.su_user or op.ask_su_pass) and \
                        (op.sudo or op.sudo_user or op.ask_sudo_pass) or \
                (op.su or op.su_user or op.ask_su_pass) and \
                        (op.become or op.become_user or op.become_ask_pass) or \
                (op.sudo or op.sudo_user or op.ask_sudo_pass) and \
                        (op.become or op.become_user or op.become_ask_pass):

                self.parser.error("Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') "
                                  "and su arguments ('-su', '--su-user', and '--ask-su-pass') "
                                  "and become arguments ('--become', '--become-user', and '--ask-become-pass')"
                                  " are exclusive of each other")

        if fork_opts:
            if op.forks < 1:
                self.parser.error("The number of processes (--forks) must be >= 1")

    @staticmethod
    def expand_tilde(option, opt, value, parser):
        # optparse callback: store the option value with "~" expanded to $HOME.
        setattr(parser.values, option.dest, os.path.expanduser(value))

    @staticmethod
    def base_parser(usage="", output_opts=False, runas_opts=False, meta_opts=False, runtask_opts=False, vault_opts=False, module_opts=False,
        async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False):
        ''' create an options parser for most ansible scripts '''
        # Each *_opts flag switches on a group of related options so every
        # bin/ansible* tool can share this single parser builder.

        #FIXME: implement epilog parsing
        #OptionParser.format_epilog = lambda self, formatter: self.epilog

        # base opts
        parser = SortedOptParser(usage, version=CLI.version("%prog"))
        parser.add_option('-v','--verbose', dest='verbosity', default=0, action="count",
            help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")

        # inventory selection and host limiting
        if inventory_opts:
            parser.add_option('-i', '--inventory-file', dest='inventory',
                help="specify inventory host file (default=%s)" % C.DEFAULT_HOST_LIST,
                default=C.DEFAULT_HOST_LIST, action="callback", callback=CLI.expand_tilde, type=str)
            parser.add_option('--list-hosts', dest='listhosts', action='store_true',
                help='outputs a list of matching hosts; does not execute anything else')
            parser.add_option('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
                help='further limit selected hosts to an additional pattern')

        if module_opts:
            parser.add_option('-M', '--module-path', dest='module_path', default=None,
                help="specify path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH,
                action="callback", callback=CLI.expand_tilde, type=str)
        if runtask_opts:
            parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
                help="set additional variables as key=value or YAML/JSON", default=[])

        if fork_opts:
            parser.add_option('-f','--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
                help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)

        # vault password handling
        if vault_opts:
            parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
                help='ask for vault password')
            parser.add_option('--vault-password-file', default=C.DEFAULT_VAULT_PASSWORD_FILE,
                dest='vault_password_file', help="vault password file", action="callback",
                callback=CLI.expand_tilde, type=str)
            parser.add_option('--new-vault-password-file',
                dest='new_vault_password_file', help="new vault password file for rekey", action="callback",
                callback=CLI.expand_tilde, type=str)
            parser.add_option('--output', default=None, dest='output_file',
                help='output file name for encrypt or decrypt; use - for stdout')


        if subset_opts:
            parser.add_option('-t', '--tags', dest='tags', default='all',
                help="only run plays and tasks tagged with these values")
            parser.add_option('--skip-tags', dest='skip_tags',
                help="only run plays and tasks whose tags do not match these values")

        if output_opts:
            parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
                help='condense output')
            parser.add_option('-t', '--tree', dest='tree', default=None,
                help='log output to this directory')

        # privilege escalation: legacy sudo/su options plus consolidated become
        if runas_opts:
            # priv user defaults to root later on to enable detecting when this option was given here
            parser.add_option('-K', '--ask-sudo-pass', default=C.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true',
                help='ask for sudo password (deprecated, use become)')
            parser.add_option('--ask-su-pass', default=C.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true',
                help='ask for su password (deprecated, use become)')
            parser.add_option("-s", "--sudo", default=C.DEFAULT_SUDO, action="store_true", dest='sudo',
                help="run operations with sudo (nopasswd) (deprecated, use become)")
            parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
                              help='desired sudo user (default=root) (deprecated, use become)')
            parser.add_option('-S', '--su', default=C.DEFAULT_SU, action='store_true',
                help='run operations with su (deprecated, use become)')
            parser.add_option('-R', '--su-user', default=None,
                help='run operations with su as this user (default=%s) (deprecated, use become)' % C.DEFAULT_SU_USER)

            # consolidated privilege escalation (become)
            parser.add_option("-b", "--become", default=C.DEFAULT_BECOME, action="store_true", dest='become',
                help="run operations with become (nopasswd implied)")
            parser.add_option('--become-method', dest='become_method', default=C.DEFAULT_BECOME_METHOD, type='string',
                help="privilege escalation method to use (default=%s), valid choices: [ %s ]" % (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
            parser.add_option('--become-user', default=None, dest='become_user', type='string',
                help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
            parser.add_option('--ask-become-pass', default=False, dest='become_ask_pass', action='store_true',
                help='ask for privilege escalation password')


        # connection and transport tuning
        if connect_opts:
            parser.add_option('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
                help='ask for connection password')
            parser.add_option('--private-key','--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
                help='use this file to authenticate the connection')
            parser.add_option('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
                help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
            parser.add_option('-c', '--connection', dest='connection', default=C.DEFAULT_TRANSPORT,
                help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
            parser.add_option('-T', '--timeout', default=C.DEFAULT_TIMEOUT, type='int', dest='timeout',
                help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
            parser.add_option('--ssh-common-args', default='', dest='ssh_common_args',
                help="specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)")
            parser.add_option('--sftp-extra-args', default='', dest='sftp_extra_args',
                help="specify extra arguments to pass to sftp only (e.g. -f, -l)")
            parser.add_option('--scp-extra-args', default='', dest='scp_extra_args',
                help="specify extra arguments to pass to scp only (e.g. -l)")
            parser.add_option('--ssh-extra-args', default='', dest='ssh_extra_args',
                help="specify extra arguments to pass to ssh only (e.g. -R)")

        if async_opts:
            parser.add_option('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type='int', dest='poll_interval',
                help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
            parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
                help='run asynchronously, failing after X seconds (default=N/A)')

        if check_opts:
            parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
                help="don't make any changes; instead, try to predict some of the changes that may occur")
            parser.add_option('--syntax-check', dest='syntax', action='store_true',
                help="perform a syntax check on the playbook, but do not execute it")
            parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
                help="when changing (small) files and templates, show the differences in those files; works great with --check")

        if meta_opts:
            parser.add_option('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true',
                help="run handlers even if a task fails")
            parser.add_option('--flush-cache', dest='flush_cache', action='store_true',
                help="clear the fact cache")

        return parser

    @staticmethod
    def version(prog):
        ''' return ansible version '''
        # Assemble the version banner piece by piece, then join once.
        pieces = ["{0} {1}".format(prog, __version__)]
        gitinfo = CLI._gitinfo()
        if gitinfo:
            pieces.append(" {0}".format(gitinfo))
        pieces.append("\n  config file = %s" % C.CONFIG_FILE)
        pieces.append("\n  configured module search path = %s" % C.DEFAULT_MODULE_PATH)
        return "".join(pieces)

    @staticmethod
    def version_info(gitinfo=False):
        ''' return full ansible version info '''
        if gitinfo:
            # expensive call, use with care
            ansible_version_string = CLI.version('')
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split('.')
        for counter in range(len(ansible_versions)):
            if ansible_versions[counter] == "":
                ansible_versions[counter] = 0
            try:
                ansible_versions[counter] = int(ansible_versions[counter])
            except ValueError:
                # Narrowed from a bare `except:`; non-numeric components
                # (e.g. "0b1") are deliberately kept as strings.
                pass
        # pad so major/minor/revision always exist
        if len(ansible_versions) < 3:
            for counter in range(len(ansible_versions), 3):
                ansible_versions.append(0)
        return {'string':      ansible_version_string.strip(),
                'full':        ansible_version,
                'major':       ansible_versions[0],
                'minor':       ansible_versions[1],
                'revision':    ansible_versions[2]}

    @staticmethod
    def _git_repo_info(repo_path):
        ''' returns a string containing git branch, commit id and commit date '''
        result = None
        if os.path.exists(repo_path):
            # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
            if os.path.isfile(repo_path):
                try:
                    # `with` ensures the handle is closed (was leaked before).
                    with open(repo_path) as f:
                        gitdir = yaml.safe_load(f).get('gitdir')
                    # There is a possibility the .git file to have an absolute path.
                    if os.path.isabs(gitdir):
                        repo_path = gitdir
                    else:
                        repo_path = os.path.join(repo_path[:-4], gitdir)
                except (IOError, AttributeError):
                    return ''
            # HEAD holds either "ref: refs/heads/<branch>" or a bare sha.
            with open(os.path.join(repo_path, "HEAD")) as f:
                branch = f.readline().split('/')[-1].rstrip("\n")
            branch_path = os.path.join(repo_path, "refs", "heads", branch)
            if os.path.exists(branch_path):
                with open(branch_path) as f:
                    commit = f.readline()[:10]
            else:
                # detached HEAD
                commit = branch[:10]
                branch = 'detached HEAD'
                branch_path = os.path.join(repo_path, "HEAD")

            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
                time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
        else:
            result = ''
        return result

    @staticmethod
    def _gitinfo():
        """Collect git info for the ansible checkout and any submodules."""
        basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
        repo_path = os.path.join(basedir, '.git')
        result = CLI._git_repo_info(repo_path)
        submodules = os.path.join(basedir, '.gitmodules')
        if not os.path.exists(submodules):
            return result
        # .gitmodules entries look like "path = <submodule path>";
        # `with` guarantees the file is closed even if parsing raises.
        with open(submodules) as f:
            for line in f:
                tokens = line.strip().split(' ')
                if tokens[0] == 'path':
                    submodule_path = tokens[2]
                    submodule_info = CLI._git_repo_info(os.path.join(basedir, submodule_path, '.git'))
                    if not submodule_info:
                        submodule_info = ' not found - use git submodule update --init ' + submodule_path
                    result += "\n  {0}: {1}".format(submodule_path, submodule_info)
        return result


    def pager(self, text):
        ''' find reasonable way to display text '''
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            # not a terminal: paging makes no sense, print directly
            self.display.display(text)
            return
        if 'PAGER' in os.environ:
            # honour the user's pager, except on win32 where piping is unreliable
            if sys.platform == 'win32':
                self.display.display(text)
            else:
                self.pager_pipe(text, os.environ['PAGER'])
            return
        if subprocess.call('(less --version) 2> /dev/null', shell = True) == 0:
            self.pager_pipe(text, 'less')
        else:
            self.display.display(text)

    @staticmethod
    def pager_pipe(text, cmd):
        ''' pipe text through a pager '''
        # make sure less quits cleanly and passes raw control chars through
        os.environ.setdefault('LESS', CLI.LESS_OPTS)
        try:
            proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
            proc.communicate(input=text.encode(sys.stdout.encoding))
        except (IOError, KeyboardInterrupt):
            # broken pipe / user quit the pager: nothing to clean up
            pass

    @classmethod
    def tty_ify(cls, text):
        """Convert module-doc markup (I/B/M/U/C tags) to plain tty text."""
        text = cls._ITALIC.sub(r"`\1'", text)   # I(word) => `word'
        text = cls._BOLD.sub(r"*\1*", text)     # B(word) => *word*
        text = cls._MODULE.sub(r"[\1]", text)   # M(word) => [word]
        text = cls._URL.sub(r"\1", text)        # U(word) => word
        text = cls._CONST.sub(r"`\1'", text)    # C(word) => `word'
        return text

    @staticmethod
    def read_vault_password_file(vault_password_file, loader):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(os.path.expanduser(vault_password_file))
        if not os.path.exists(this_path):
            raise AnsibleError("The vault password file %s was not found" % this_path)

        if loader.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                # BUGFIX: was "' '.join(this_path)" which space-separated every
                # character of the path string in the error message.
                raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (this_path, e))
            stdout, stderr = p.communicate()
            vault_pass = stdout.strip('\r\n')
        else:
            try:
                # `with` closes the handle even when read() raises.
                with open(this_path, "rb") as f:
                    vault_pass = f.read().strip()
            except (OSError, IOError) as e:
                raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))

        return vault_pass

    def get_opt(self, k, defval=""):
        """
        Returns an option from an Optparse values instance.

        Falls back to *defval* when the option is absent. A "roles_path"
        value containing multiple path-separated entries is truncated to
        its first entry.
        """
        try:
            data = getattr(self.options, k)
        except AttributeError:
            # Narrowed from a bare `except:`: only a missing attribute
            # should fall back to the default.
            return defval
        if k == "roles_path":
            if os.pathsep in data:
                data = data.split(os.pathsep)[0]
        return data
Esempio n. 11
0
class ActionModule(ActionBase):  #pylint: disable=R0903
    """Action plugin that runs platform-specific parsers against device
    command output and returns the parsed structures as Ansible facts.
    """

    display = Display()

    def _get_network_os(self, task_vars):
        """Resolve the target network OS from the task args, the play
        context, or gathered facts (in that priority order).

        :raises AnsibleError: when no network OS can be determined
        """
        if 'network_os' in self._task.args and self._task.args['network_os']:
            self.display.vvvv('Getting network OS from task argument')
            network_os = self._task.args['network_os']
        elif self._play_context.network_os:
            self.display.vvvv('Getting network OS from inventory')
            network_os = self._play_context.network_os
        elif ('network_os' in task_vars.get('ansible_facts', {})
              and task_vars['ansible_facts']['network_os']):
            self.display.vvvv('Getting network OS from fact')
            network_os = task_vars['ansible_facts']['network_os']
        else:
            raise AnsibleError(
                'ansible_network_os must be specified on this host.')
        return network_os

    def _get_os_resource(self, network_os, resource):
        """Return the first parser matching (network_os, resource), or None
        (after emitting a warning) when no parser is available."""
        parsers = [
            p for p in parser_loader.all()
            if p.PARSER_METADATA['network_os'] == network_os
            and p.PARSER_METADATA['resource'] == resource
        ]
        if not parsers:
            self.display.warning(
                "No parser available for resource %s for network os %s" %
                (resource, network_os))
            return None
        return parsers[0]

    def _run_command(self, command, task_vars):
        """Run a single command over the persistent connection socket and
        return its output.

        :raises AnsibleError: when the connection reports an error
        """
        socket_path = getattr(self._connection,
                              'socket_path') or task_vars.get('ansible_socket')
        connection = Connection(socket_path)
        try:
            output = connection.get(command)
        except ConnectionError as exc:
            raise AnsibleError(to_text(exc))
        return output

    @staticmethod
    def _command_map():
        """Build a {resource: {network_os: [command lists]}} map from the
        metadata of every loadable parser."""
        command_map = {}
        parsers = [p.PARSER_METADATA for p in parser_loader.all()]
        for parser in parsers:
            if parser['resource'] not in command_map:
                command_map[parser['resource']] = {}
            # BUG FIX: the original tested `not os in command_map[...]` --
            # `os` there is the os *module*, which is never a dict key, so
            # the list was re-created on every iteration and commands from
            # earlier parsers with the same resource/network_os were lost.
            if parser['network_os'] not in command_map[parser['resource']]:
                command_map[parser['resource']][parser['network_os']] = []
            command_map[parser['resource']][parser['network_os']].append(
                parser['commands'])
        return command_map

    @staticmethod
    def _validate_args(args):
        """Reject any task argument outside the supported set.

        :raises AnsibleError: naming the unsupported arguments
        """
        provided = set(args.keys())
        valid_args = set(
            ['resources', 'update_facts', 'fact_key', '_return_command_map'])
        extras = provided - valid_args
        if extras:
            raise AnsibleError(
                "The following arguments are not supported: %s" %
                ','.join(extras))

    def run(self, tmp=None, task_vars=None):
        """Entry point: validate arguments, run the parsers for each
        requested resource, and return the parsed facts in the result."""
        self.display.verbosity = self._play_context.verbosity

        self._validate_args(self._task.args)
        result = super(ActionModule, self).run(tmp, task_vars)
        if '_return_command_map' in self._task.args and self._task.args[
                '_return_command_map']:
            result.update({'command_map': self._command_map()})
            return result

        network_os = self._get_network_os(task_vars)

        facts = {}
        if 'resources' in self._task.args and self._task.args['resources']:
            resources = self._task.args['resources']
        else:
            # Default to every resource that has a parser for this OS.
            resources = [
                p.PARSER_METADATA['resource'] for p in parser_loader.all()
                if p.PARSER_METADATA['network_os'] == network_os
            ]

        for resource in resources:
            # Entries may be plain names or dicts carrying 'name'/'output'.
            if 'name' in resource:
                resource_name = resource['name']
            else:
                resource_name = resource

            parser = self._get_os_resource(network_os, resource_name)
            if parser:
                if 'output' in resource:
                    # Pre-supplied output: skip running device commands.
                    outputs = resource['output']
                else:
                    outputs = []
                    for command in parser.PARSER_METADATA['commands']:
                        outputs.append(self._run_command(command, task_vars))
                objs = parser.parse(outputs)
                # Round-trip through JSON to normalize custom objects into
                # plain serializable data.
                facts.update(
                    json.loads(json.dumps(objs, sort_keys=True, cls=ToFacts)))

        if 'update_facts' in self._task.args:
            # NOTE(review): assumes 'fact_key' was also supplied; a missing
            # key raises KeyError here -- confirm against callers.
            result.update(
                {'ansible_facts': {
                    self._task.args['fact_key']: facts
                }})

        result.update({'results': facts})
        return result
Esempio n. 12
0
    def __init__(self,
                 username,
                 playbook,
                 private_key_file,
                 inventory_data,
                 extra_vars,
                 become_pass,
                 verbosity=0,
                 search_filter=None):
        """
        Wire up everything needed to run a playbook via the Ansible API:
        options, display, loader, variable manager, inventory and executor.

        Args:
          username: string, username of user running the playbook
          playbook: string, full playbook path eg. /tmp/my_pb.yml
          private_key_file: string, private key file
          inventory_data: dict, inventory data
          extra_vars: dict, Ansible extra vars, key = variable name
          become_pass: string, become password
          verbosity: integer, verbosity level
          search_filter: string, hosts/groups to match
        """

        self.playbook = playbook
        self.username = username
        self.inventory_data = inventory_data
        self.extra_vars = extra_vars
        self.search_filter = search_filter

        # Mimic the CLI option object that PlaybookExecutor expects.
        self.options = Options()
        self.options.private_key_file = private_key_file
        self.options.verbosity = verbosity
        self.options.connection = 'ssh'  # Need a connection type "smart" or "ssh"
        self.options.become = True
        self.options.become_method = 'sudo'
        self.options.become_user = '******'

        # Set global verbosity
        self.display = Display()
        self.display.verbosity = self.options.verbosity
        # Executor appears to have its own verbosity object/setting as well
        playbook_executor.verbosity = self.options.verbosity

        # Become Pass Needed if not logging in as user root
        passwords = {'become_pass': become_pass}

        # Gets data from YAML/JSON files
        self.loader = DataLoader()

        # NOTE(review): hard-coded vault password; the original intent was
        # to read it from the environment:
        #   self.loader.set_vault_password(os.environ['VAULT_PASS'])
        self.loader.set_vault_password('secret')

        # All the variables from all the various places
        self.variable_manager = VariableManager()

        # Set of hosts
        hosts = set()

        # Load group variables: every group except the '_meta' section
        # contributes its vars to each of its hosts.
        for group in self.inventory_data:
            if group != '_meta':
                for host in self.inventory_data[group]['hosts']:
                    host_obj = Host(host)
                    hosts.add(host)
                    for var in self.inventory_data[group]['vars']:
                        self.variable_manager.set_host_variable(
                            host_obj, var,
                            self.inventory_data[group]['vars'][var])

        # Load host variables from the '_meta' hostvars section.
        for host in self.inventory_data['_meta']['hostvars']:
            for var in self.inventory_data['_meta']['hostvars'][host]:
                host_obj = Host(host)
                self.variable_manager.set_host_variable(
                    host_obj, var,
                    self.inventory_data['_meta']['hostvars'][host][var])

        self.variable_manager.extra_vars = self.extra_vars

        # Set inventory, using most of above objects
        self.inventory = Inventory(loader=self.loader,
                                   variable_manager=self.variable_manager,
                                   host_list=list(hosts))
        self.variable_manager.set_inventory(self.inventory)

        # Setup playbook executor, but don't run until run() called
        self.pbex = playbook_executor.PlaybookExecutor(
            playbooks=[self.playbook],
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.options,
            passwords=passwords)
Esempio n. 13
0
 def __init__(self, configfile):
     """Remember the configuration file path and prepare display/logging state."""
     self.configfile = configfile
     # Logging destination and level are filled in later from the config.
     self.logfile = None
     self.loglevel = None
     self.display = Display()
Esempio n. 14
0
class TaskExecutor:

    '''
    This is the main worker class for the executor pipeline, which
    handles loading an action plugin to actually dispatch the task to
    a given host. This class roughly corresponds to the old Runner()
    class.
    '''

    # Modules that we optimize by squashing loop items into a single call to
    # the module
    SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)

    def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj):
        # Per-task execution context, supplied by the strategy layer.
        self._host              = host
        self._task              = task
        self._job_vars          = job_vars
        self._play_context      = play_context
        self._new_stdin         = new_stdin
        self._loader            = loader
        self._shared_loader_obj = shared_loader_obj

        # Reuse the CLI's global display when running under bin/ansible*,
        # otherwise fall back to a private Display instance.
        try:
            from __main__ import display
            self._display = display
        except ImportError:
            from ansible.utils.display import Display
            self._display = Display()

    def run(self):
        '''
        The main executor entrypoint, where we determine if the specified
        task requires looping and either runs the task once per loop item
        or executes it directly, returning the (cleaned) result dict.
        '''

        self._display.debug("in run()")

        try:
            # lookup plugins need to know if this task is executing from
            # a role, so that it can properly find files/templates/etc.
            roledir = None
            if self._task._role:
                roledir = self._task._role._role_path
            self._job_vars['roledir'] = roledir

            items = self._get_loop_items()
            if items is not None:
                if len(items) > 0:
                    item_results = self._run_loop(items)

                    # loop through the item results, and remember the changed/failed
                    # result flags based on any item there.
                    changed = False
                    failed  = False
                    for item in item_results:
                        if 'changed' in item and item['changed']:
                           changed = True
                        if 'failed' in item and item['failed']:
                           failed = True

                    # create the overall result item, and set the changed/failed
                    # flags there to reflect the overall result of the loop
                    res = dict(results=item_results)

                    if changed:
                        res['changed'] = True

                    if failed:
                        res['failed'] = True
                        res['msg'] = 'One or more items failed'
                    else:
                        res['msg'] = 'All items completed'
                else:
                    # An empty loop list means the task is skipped entirely.
                    res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
            else:
                self._display.debug("calling self._execute()")
                res = self._execute()
                self._display.debug("_execute() done")

            # make sure changed is set in the result, if it's not present
            if 'changed' not in res:
                res['changed'] = False

            # Recursively unwrap UnsafeProxy objects so the returned result
            # contains only plain, serializable data.
            def _clean_res(res):
                if isinstance(res, dict):
                    for k in res.keys():
                        res[k] = _clean_res(res[k])
                elif isinstance(res, list):
                    for idx,item in enumerate(res):
                        res[idx] = _clean_res(item)
                elif isinstance(res, UnsafeProxy):
                    return res._obj
                return res

            self._display.debug("dumping result to json")
            res = _clean_res(res)
            self._display.debug("done dumping result, returning")
            return res
        except AnsibleError as e:
            return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
        finally:
            # Always try to close the connection; it may never have been
            # opened, in which case the AttributeError is expected and ignored.
            try:
                self._connection.close()
            except AttributeError:
                pass
            except Exception as e:
                self._display.debug("error closing connection: %s" % to_unicode(e))

    def _get_loop_items(self):
        '''
        Loads a lookup plugin to handle the with_* portion of a task (if specified),
        and returns the items result.
        '''

        # create a copy of the job vars here so that we can modify
        # them temporarily without changing them too early for other
        # parts of the code that might still need a pristine version
        vars_copy = self._job_vars.copy()

        # now we update them with the play context vars
        self._play_context.update_vars(vars_copy)

        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=vars_copy)
        items = None
        if self._task.loop:
            if self._task.loop in self._shared_loader_obj.lookup_loader:
                #TODO: remove convert_bare true and deprecate this in with_ 
                try:
                    loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
                except AnsibleUndefinedVariable as e:
                    # An undefined *attribute* is tolerated (with a deprecation
                    # warning) by treating the loop as empty; any other
                    # undefined variable is fatal.
                    if 'has no attribute' in str(e):
                        loop_terms = []
                        self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
                    else:
                        raise
                items = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
            else:
                raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)

        # Wrap each item in UnsafeProxy so templating treats loop data as
        # untrusted; run() unwraps them again via _clean_res.
        if items:
            from ansible.vars.unsafe_proxy import UnsafeProxy
            for idx, item in enumerate(items):
                if item is not None and not isinstance(item, UnsafeProxy):
                    items[idx] = UnsafeProxy(item)
        return items

    def _run_loop(self, items):
        '''
        Runs the task with the loop items specified and collates the result
        into an array named 'results' which is inserted into the final result
        along with the item for which the loop ran.
        '''

        results = []

        # make copies of the job vars and task so we can add the item to
        # the variables and re-validate the task with the item variable
        task_vars = self._job_vars.copy()

        items = self._squash_items(items, task_vars)
        for item in items:
            task_vars['item'] = item

            try:
                tmp_task = self._task.copy()
                tmp_play_context = self._play_context.copy()
            except AnsibleParserError as e:
                results.append(dict(failed=True, msg=str(e)))
                continue

            # now we swap the internal task and play context with their copies,
            # execute, and swap them back so we can do the next iteration cleanly
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
            res = self._execute(variables=task_vars)
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)

            # now update the result with the item info, and append the result
            # to the list of results
            res['item'] = item
            results.append(res)

        return results

    def _squash_items(self, items, variables):
        '''
        Squash items down to a comma-separated list for certain modules which support it
        (typically package management modules).

        NOTE: when squashing applies, this rewrites self._task.args['name']
        in place with the joined list.
        '''
        if len(items) > 0 and self._task.action in self.SQUASH_ACTIONS:
            final_items = []
            name = self._task.args.pop('name', None) or self._task.args.pop('pkg', None)
            for item in items:
                variables['item'] = item
                templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
                # Only items whose conditional passes are included in the
                # squashed call.
                if self._task.evaluate_conditional(templar, variables):
                    if templar._contains_vars(name):
                        new_item = templar.template(name)
                        final_items.append(new_item)
                    else:
                        final_items.append(item)
            joined_items = ",".join(final_items)
            self._task.args['name'] = joined_items
            return [joined_items]
        else:
            return items

    def _execute(self, variables=None):
        '''
        The primary workhorse of the executor system, this runs the task
        on the specified host (which may be the delegated_to host) and handles
        the retry/until and block rescue/always execution
        '''

        if variables is None:
            variables = self._job_vars

        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)

        # apply the given task's information to the connection info,
        # which may override some fields already set by the play or
        # the options specified on the command line
        self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)

        # fields set from the play/task may be based on variables, so we have to
        # do the same kind of post validation step on it here before we use it.
        # We also add "magic" variables back into the variables dict to make sure
        # a certain subset of variables exist.
        self._play_context.update_vars(variables)
        self._play_context.post_validate(templar=templar)

        # Evaluate the conditional (if any) for this task, which we do before running
        # the final task post-validation. We do this before the post validation due to
        # the fact that the conditional may specify that the task be skipped due to a
        # variable not being present which would otherwise cause validation to fail
        if not self._task.evaluate_conditional(templar, variables):
            self._display.debug("when evaulation failed, skipping this task")
            return dict(changed=False, skipped=True, skip_reason='Conditional check failed', _ansible_no_log=self._play_context.no_log)

        # Now we do final validation on the task, which sets all fields to their final values.
        # In the case of debug tasks, we save any 'var' params and restore them after validating
        # so that variables are not replaced too early.
        prev_var = None
        if self._task.action == 'debug' and 'var' in self._task.args:
            prev_var = self._task.args.pop('var')

        original_args = self._task.args.copy()
        self._task.post_validate(templar=templar)
        if '_variable_params' in self._task.args:
            variable_params = self._task.args.pop('_variable_params')
            if isinstance(variable_params, dict):
                self._display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
                variable_params.update(self._task.args)
                self._task.args = variable_params

        if prev_var is not None:
            self._task.args['var'] = prev_var

        # if this task is a TaskInclude, we just return now with a success code so the
        # main thread can expand the task list for the given host
        if self._task.action == 'include':
            include_variables = original_args
            include_file = include_variables.get('_raw_params')
            del include_variables['_raw_params']
            return dict(include=include_file, include_variables=include_variables)

        # get the connection and the handler for this execution
        self._connection = self._get_connection(variables=variables, templar=templar)
        self._connection.set_host_overrides(host=self._host)

        self._handler = self._get_action_handler(connection=self._connection, templar=templar)

        # And filter out any fields which were set to default(omit), and got the omit token value
        omit_token = variables.get('omit')
        if omit_token is not None:
            self._task.args = dict((i[0], i[1]) for i in iteritems(self._task.args) if i[1] != omit_token)

        # Read some values from the task, so that we can modify them if need be
        # (retries is normalized so the attempt loop below runs at least once).
        retries = self._task.retries
        if retries <= 0:
            retries = 1

        delay = self._task.delay
        if delay < 0:
            delay = 1

        # make a copy of the job vars here, in case we need to update them
        # with the registered variable value later on when testing conditions
        vars_copy = variables.copy()

        self._display.debug("starting attempt loop")
        result = None
        for attempt in range(retries):
            if attempt > 0:
                # FIXME: this should use the self._display.callback/message passing mechanism
                self._display.display("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result), color="red")
                result['attempts'] = attempt + 1

            self._display.debug("running the handler")
            try:
                result = self._handler.run(task_vars=variables)
            except AnsibleConnectionFailure as e:
                return dict(unreachable=True, msg=str(e))
            self._display.debug("handler run complete")

            # NOTE(review): 'async' became a reserved keyword in Python 3.7;
            # this attribute access only parses on older interpreters.
            if self._task.async > 0:
                # the async_wrapper module returns dumped JSON via its stdout
                # response, so we parse it here and replace the result
                try:
                    result = json.loads(result.get('stdout'))
                except (TypeError, ValueError) as e:
                    return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))

                if self._task.poll > 0:
                    result = self._poll_async_result(result=result, templar=templar)

            # update the local copy of vars with the registered value, if specified,
            # or any facts which may have been generated by the module execution
            if self._task.register:
                vars_copy[self._task.register] = result 

            if 'ansible_facts' in result:
                vars_copy.update(result['ansible_facts'])

            # create a conditional object to evaluate task conditions
            cond = Conditional(loader=self._loader)

            def _evaluate_changed_when_result(result):
                if self._task.changed_when is not None:
                    cond.when = [ self._task.changed_when ]
                    result['changed'] = cond.evaluate_conditional(templar, vars_copy)

            def _evaluate_failed_when_result(result):
                if self._task.failed_when is not None:
                    cond.when = [ self._task.failed_when ]
                    failed_when_result = cond.evaluate_conditional(templar, vars_copy)
                    result['failed_when_result'] = result['failed'] = failed_when_result
                    return failed_when_result
                return False

            # Decide whether to retry: an 'until' condition that passes, a
            # failed_when that fires, or a clean result all break the loop.
            if self._task.until:
                cond.when = self._task.until
                if cond.evaluate_conditional(templar, vars_copy):
                    _evaluate_changed_when_result(result)
                    _evaluate_failed_when_result(result)
                    break
            elif (self._task.changed_when is not None or self._task.failed_when is not None) and 'skipped' not in result:
                    _evaluate_changed_when_result(result)
                    if _evaluate_failed_when_result(result):
                        break
            elif 'failed' not in result:
                if result.get('rc', 0) != 0:
                    result['failed'] = True
                else:
                    # if the result is not failed, stop trying
                    break

            if attempt < retries - 1:
                time.sleep(delay)
            else:
                # Last attempt: still apply changed_when/failed_when so the
                # final result carries the right flags.
                _evaluate_changed_when_result(result)
                _evaluate_failed_when_result(result)

        # do the final update of the local variables here, for both registered
        # values and any facts which may have been created
        if self._task.register:
            variables[self._task.register] = result

        if 'ansible_facts' in result:
            variables.update(result['ansible_facts'])

        # save the notification target in the result, if it was specified, as
        # this task may be running in a loop in which case the notification
        # may be item-specific, ie. "notify: service {{item}}"
        if self._task.notify is not None:
            result['_ansible_notify'] = self._task.notify

        # preserve no_log setting
        result["_ansible_no_log"] = self._play_context.no_log

        # and return
        self._display.debug("attempt loop complete, returning result")
        return result
Esempio n. 15
0
class Config(object):
    """Loads kubespray CLI configuration from a YAML file and normalizes it
    with command-line arguments and built-in defaults."""

    def __init__(self, configfile):
        """
        Args:
          configfile: string, path to the YAML configuration file
        """
        self.display = Display()
        self.configfile = configfile
        self.logfile = None
        self.loglevel = None

    @property
    def parse_configfile(self):
        """
        Retrieve configuration parameters from the config file.

        Exits the process with status 1 when the file cannot be read
        or parsed.
        """
        try:
            with open(self.configfile, "r") as f:
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary Python objects; switch to
                # yaml.safe_load if this file may come from an untrusted
                # source.
                config = yaml.load(f)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            self.display.error(
                "Can't read configuration file %s" % self.configfile
            )
            sys.exit(1)
        return config

    def default_values(self, args, config):
        """
        Overlay command-line arguments onto `config` and fill in defaults
        for anything still missing.

        Args:
          args: argparse.Namespace-like object (needs _get_kwargs and func)
          config: dict parsed from the config file; mutated in place
        Returns:
          the updated config dict
        """

        def _default(key, value):
            # Apply `value` only when the option is neither in the config
            # file nor supplied on the command line.
            if key not in config and getattr(args, key, None) is None:
                config[key] = value

        # Set kubespray_path first: later defaults are derived from it.
        _default('kubespray_path', os.path.join(os.path.expanduser("~"), '.kubespray'))
        # Command-line arguments override config-file values.
        for key, value in dict(args._get_kwargs()).items():
            if value is not None:
                config[key] = value
        # Set inventory_path
        _default('inventory_path',
                 os.path.join(config['kubespray_path'], 'inventory/inventory.cfg'))
        # Set logfile
        if 'logfile' not in config:
            config['logfile'] = os.path.join(config['kubespray_path'], 'kubespray.log')
        # Set default bool
        for key in ('use_private_ip', 'assign_public_ip'):
            _default(key, False)
        # Set default instances type per cloud provider
        provider = args.func.__name__
        if provider == "aws":
            _default('masters_instance_type', 't2.medium')
            _default('nodes_instance_type', 't2.large')
            _default('etcds_instance_type', 't2.small')
        # ----GCE
        if provider == "gce":
            _default('masters_machine_type', 'n1-standard-2')
            _default('nodes_machine_type', 'n1-standard-4')
            _default('etcds_machine_type', 'n1-standard-1')
        # security_group_name and security_group_id are mutually exclusive;
        # the one given on the command line wins.
        if provider == "aws":
            if args.security_group_name and 'security_group_id' in config:
                config.pop('security_group_id')
            elif args.security_group_id and 'security_group_name' in config:
                config.pop('security_group_name')
        # Set kubernetes 'kube' password, prompting only when requested
        if config.get('prompt_pwd') is True:
            pwd = read_password()
            config['k8s_passwd'] = pwd
        return config
Esempio n. 16
0
    def __init__(self,
                 playbook,
                 inventory,
                 run_data,
                 start_at_task,
                 step,
                 private_key_file,
                 become_pass,
                 verbosity=0):
        """
        Prepare a PlaybookExecutor for a playbook under /etc/ansible/playbooks.

        Args:
          playbook: string, playbook file name, resolved under /etc/ansible/playbooks
          inventory: string, inventory file name, resolved under /etc/ansible/inventory
          run_data: dict with 'extra_vars' and 'tags' keys, or falsy to skip both
          start_at_task: value passed through as the executor's start_at_task option
          step: value passed through as the executor's step option
          private_key_file: string, SSH private key file path
          become_pass: string, privilege-escalation password
          verbosity: integer, verbosity level
        """

        self.run_data = run_data

        # Mimic the CLI option object that PlaybookExecutor expects.
        self.options = Options()
        self.options.listtags = False
        self.options.listtasks = False
        self.options.listhosts = False
        self.options.syntax = False
        self.options.check = False
        self.options.diff = False
        self.options.start_at_task = start_at_task
        self.options.step = step

        self.options.private_key_file = private_key_file
        self.options.verbosity = verbosity
        self.options.connection = 'ssh'  # Need a connection type "smart" or "ssh"
        self.options.become = False
        self.options.become_method = 'sudo'
        self.options.become_user = '******'
        self.options.remote_user = '******'
        # Set global verbosity
        self.display = Display()
        self.display.verbosity = self.options.verbosity
        # Executor appears to have its own
        # verbosity object/setting as well
        #playbook_executor.verbosity = self.options.verbosity

        # Become Pass Needed if not logging in as user root
        passwords = {'become_pass': become_pass}

        # Gets data from YAML/JSON files
        self.loader = DataLoader()
        # self.loader.set_vault_password(os.environ['VAULT_PASS'])

        # All the variables from all the various places

        # Parse hosts, I haven't found a good way to
        # pass hosts in without using a parsed template :(
        # (Maybe you know how?)
        #         self.hosts = NamedTemporaryFile(delete=False)
        #         self.hosts.write("""[run_hosts]
        # %s
        # """ % hostnames)
        #         self.hosts.close()

        # This was my attempt to pass in hosts directly.
        #
        # Also Note: In py2.7, "isinstance(foo, str)" is valid for
        #            latin chars only. Luckily, hostnames are
        #            ascii-only, which overlaps latin charset
        ## if isinstance(hostnames, str):
        ##     hostnames = {"customers": {"hosts": [hostnames]}}

        # Set inventory, using most of above objects
        inventory_dir = '/etc/ansible/inventory'
        inventory_source = "%s/%s" % (inventory_dir, inventory)
        self.inventory = InventoryManager(loader=self.loader,
                                          sources=inventory_source)
        self.variable_manager = VariableManager(loader=self.loader,
                                                inventory=self.inventory)
        if self.run_data:
            self.variable_manager.extra_vars = self.run_data['extra_vars']
            self.options.tags = self.run_data['tags']

        # Playbook to run. Assumes it is
        # local to this python file
        pb_dir = '/etc/ansible/playbooks'
        playbook = "%s/%s" % (pb_dir, playbook)
        print(playbook)
        # Setup playbook executor, but don't run until run() called
        # NOTE(review): the get_config() call on the Options holder is
        # unusual -- confirm it exists on this project's Options class.
        self.pbex = playbook_executor.PlaybookExecutor(
            playbooks=[playbook],
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.options.get_config(),
            passwords=passwords)
 def display(*args, **kwargs):
     """Forward a message to a fresh Ansible v2 Display instance."""
     Display().display(*args, **kwargs)
Esempio n. 18
0
from ansible import constants as C
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.six import text_type
from ansible.parsing.ajson import AnsibleJSONEncoder
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.parsing.yaml.objects import AnsibleUnicode
from ansible.plugins import AnsiblePlugin, get_plugin_class
from ansible.utils.color import stringc
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, NativeJinjaUnsafeText
from ansible.vars.clean import strip_internal_keys, module_response_deepcopy

import yaml

# Module-wide Display instance for user-facing output from this module.
global_display = Display()


__all__ = ["CallbackBase"]


# Result keys allowed through in debug output. NOTE(review): inferred from the
# name only -- the consuming code is not visible in this chunk; confirm there.
_DEBUG_ALLOWED_KEYS = frozenset(('msg', 'exception', 'warnings', 'deprecations'))
# Text-like types (including the unsafe/tagged variants) treated as YAML text.
_YAML_TEXT_TYPES = (text_type, AnsibleUnicode, AnsibleUnsafeText, NativeJinjaUnsafeText)
# Characters that libyaml/pyyaml consider breaks
_YAML_BREAK_CHARS = '\n\x85\u2028\u2029'  # NL, NEL, LS, PS
# regex representation of libyaml/pyyaml of a space followed by a break character
_SPACE_BREAK_RE = re.compile(fr' +([{_YAML_BREAK_CHARS}])')


class _AnsibleCallbackDumper(AnsibleDumper):
    def __init__(self, lossy=False):
Esempio n. 19
0
class CLI(object):
    """ code behind bin/ansible* programs """

    VALID_ACTIONS = ["No Actions"]

    # Documentation markup tags handled by tty_ify():
    # I(italic), B(bold), M(module), U(url), C(constant)
    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL = re.compile(r"U\(([^)]+)\)")
    _CONST = re.compile(r"C\(([^)]+)\)")

    PAGER = "less"
    LESS_OPTS = "FRSX"  # -F (quit-if-one-screen) -R (allow raw ansi control chars)
    # -S (chop long lines) -X (disable termcap init and de-init)

    def __init__(self, args, display=None):
        """
        Base init method for all command line programs

        :param args: raw argv-style argument list for this program
        :param display: optional Display object; a new one is created when None
        """

        self.args = args
        self.options = None
        self.parser = None
        self.action = None

        if display is None:
            self.display = Display()
        else:
            self.display = display

    def set_action(self):
        """
        Get the action the user wants to execute from the sys argv list.

        :raises AnsibleOptionsError: when no argument matches VALID_ACTIONS
        """
        for i in range(0, len(self.args)):
            arg = self.args[i]
            if arg in self.VALID_ACTIONS:
                self.action = arg
                # remove the action token so option parsing only sees options
                del self.args[i]
                break

        if not self.action:
            raise AnsibleOptionsError("Missing required action")

    def execute(self):
        """
        Actually runs a child defined method using the execute_<action> pattern
        """
        fn = getattr(self, "execute_%s" % self.action)
        fn()

    def parse(self):
        """Parse self.args; concrete subclasses must override this."""
        # NotImplementedError is an Exception subclass, so any caller that
        # previously caught Exception still works.
        raise NotImplementedError("Need to implement!")

    def run(self):
        """Common pre-run bookkeeping shared by all subcommands."""
        if self.options.verbosity > 0:
            self.display.display("Using %s as config file" % C.CONFIG_FILE)

    @staticmethod
    def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
        """ prompt for vault password and/or password change

        :returns: (vault_pass, new_vault_pass) tuple; entries are None when
            the corresponding prompt was not requested
        :raises AnsibleOptionsError: when a confirmation prompt does not match
        """

        vault_pass = None
        new_vault_pass = None

        try:
            if ask_vault_pass:
                vault_pass = getpass.getpass(prompt="Vault password: ")

            if ask_vault_pass and confirm_vault:
                vault_pass2 = getpass.getpass(prompt="Confirm Vault password: ")
                if vault_pass != vault_pass2:
                    raise AnsibleOptionsError("Passwords do not match")

            if ask_new_vault_pass:
                new_vault_pass = getpass.getpass(prompt="New Vault password: ")

            if ask_new_vault_pass and confirm_new:
                new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
                if new_vault_pass != new_vault_pass2:
                    raise AnsibleOptionsError("Passwords do not match")
        except EOFError:
            # stdin was closed mid-prompt; return whatever was collected
            pass

        # enforce no newline chars at the end of passwords
        if vault_pass:
            vault_pass = to_bytes(vault_pass, errors="strict", nonstring="simplerepr").strip()
        if new_vault_pass:
            new_vault_pass = to_bytes(new_vault_pass, errors="strict", nonstring="simplerepr").strip()

        return vault_pass, new_vault_pass

    def ask_passwords(self):
        """ prompt for connection and become passwords if needed

        :returns: (sshpass, becomepass) tuple; entries are None when the
            matching --ask-* option was not given
        """

        op = self.options
        sshpass = None
        becomepass = None
        become_prompt = ""

        try:
            if op.ask_pass:
                sshpass = getpass.getpass(prompt="SSH password: ")
                become_prompt = "%s password[defaults to SSH password]: " % op.become_method.upper()
                if sshpass:
                    sshpass = to_bytes(sshpass, errors="strict", nonstring="simplerepr")
            else:
                become_prompt = "%s password: " % op.become_method.upper()

            if op.become_ask_pass:
                becomepass = getpass.getpass(prompt=become_prompt)
                # an empty response means "reuse the SSH password"
                if op.ask_pass and becomepass == "":
                    becomepass = sshpass
                if becomepass:
                    becomepass = to_bytes(becomepass)
        except EOFError:
            pass

        return (sshpass, becomepass)

    def normalize_become_options(self):
        """ this keeps backwards compatibility with sudo/su self.options """
        self.options.become_ask_pass = (
            self.options.become_ask_pass
            or self.options.ask_sudo_pass
            or self.options.ask_su_pass
            or C.DEFAULT_BECOME_ASK_PASS
        )
        self.options.become_user = (
            self.options.become_user or self.options.sudo_user or self.options.su_user or C.DEFAULT_BECOME_USER
        )

        # --become wins; otherwise map the deprecated flags onto become
        if self.options.become:
            pass
        elif self.options.sudo:
            self.options.become = True
            self.options.become_method = "sudo"
        elif self.options.su:
            self.options.become = True
            self.options.become_method = "su"

    def validate_conflicts(self, vault_opts=False, runas_opts=False):
        """ check for conflicting options

        :param vault_opts: also validate vault-related options
        :param runas_opts: also validate privilege-escalation options
        """

        op = self.options

        if vault_opts:
            # Check for vault related conflicts
            if op.ask_vault_pass and op.vault_password_file:
                self.parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")

        if runas_opts:
            # Check for privilege escalation conflicts: sudo, su and become
            # option families are pairwise exclusive.
            if (
                (op.su or op.su_user or op.ask_su_pass)
                and (op.sudo or op.sudo_user or op.ask_sudo_pass)
                or (op.su or op.su_user or op.ask_su_pass)
                and (op.become or op.become_user or op.become_ask_pass)
                or (op.sudo or op.sudo_user or op.ask_sudo_pass)
                and (op.become or op.become_user or op.become_ask_pass)
            ):

                self.parser.error(
                    "Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') "
                    "and su arguments ('-su', '--su-user', and '--ask-su-pass') "
                    "and become arguments ('--become', '--become-user', and '--ask-become-pass')"
                    " are exclusive of each other"
                )

    @staticmethod
    def expand_tilde(option, opt, value, parser):
        """optparse callback that expands ~ in path-valued options."""
        setattr(parser.values, option.dest, os.path.expanduser(value))

    @staticmethod
    def base_parser(
        usage="",
        output_opts=False,
        runas_opts=False,
        meta_opts=False,
        runtask_opts=False,
        vault_opts=False,
        async_opts=False,
        connect_opts=False,
        subset_opts=False,
        check_opts=False,
        diff_opts=False,
        epilog=None,
        fork_opts=False,
    ):
        """ create an options parser for most ansible scripts

        Each boolean flag enables a related group of command line options.
        """

        # FIXME: implement epilog parsing
        # OptionParser.format_epilog = lambda self, formatter: self.epilog

        # base opts
        parser = SortedOptParser(usage, version=CLI.version("%prog"))
        parser.add_option(
            "-v",
            "--verbose",
            dest="verbosity",
            default=0,
            action="count",
            help="verbose mode (-vvv for more, -vvvv to enable connection debugging)",
        )

        if runtask_opts:
            parser.add_option(
                "-i",
                "--inventory-file",
                dest="inventory",
                help="specify inventory host file (default=%s)" % C.DEFAULT_HOST_LIST,
                default=C.DEFAULT_HOST_LIST,
                action="callback",
                callback=CLI.expand_tilde,
                type=str,
            )
            parser.add_option(
                "--list-hosts",
                dest="listhosts",
                action="store_true",
                help="outputs a list of matching hosts; does not execute anything else",
            )
            parser.add_option(
                "-M",
                "--module-path",
                dest="module_path",
                help="specify path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH,
                default=None,
                action="callback",
                callback=CLI.expand_tilde,
                type=str,
            )
            parser.add_option(
                "-e",
                "--extra-vars",
                dest="extra_vars",
                action="append",
                help="set additional variables as key=value or YAML/JSON",
                default=[],
            )

        if fork_opts:
            parser.add_option(
                "-f",
                "--forks",
                dest="forks",
                default=C.DEFAULT_FORKS,
                type="int",
                help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS,
            )
            parser.add_option(
                "-l",
                "--limit",
                default=C.DEFAULT_SUBSET,
                dest="subset",
                help="further limit selected hosts to an additional pattern",
            )

        if vault_opts:
            parser.add_option(
                "--ask-vault-pass",
                default=False,
                dest="ask_vault_pass",
                action="store_true",
                help="ask for vault password",
            )
            parser.add_option(
                "--vault-password-file",
                default=C.DEFAULT_VAULT_PASSWORD_FILE,
                dest="vault_password_file",
                help="vault password file",
                action="callback",
                callback=CLI.expand_tilde,
                type=str,
            )

        if subset_opts:
            parser.add_option(
                "-t", "--tags", dest="tags", default="all", help="only run plays and tasks tagged with these values"
            )
            parser.add_option(
                "--skip-tags", dest="skip_tags", help="only run plays and tasks whose tags do not match these values"
            )

        if output_opts:
            parser.add_option("-o", "--one-line", dest="one_line", action="store_true", help="condense output")
            parser.add_option("-t", "--tree", dest="tree", default=None, help="log output to this directory")

        if runas_opts:
            # priv user defaults to root later on to enable detecting when this option was given here
            parser.add_option(
                "-K",
                "--ask-sudo-pass",
                default=C.DEFAULT_ASK_SUDO_PASS,
                dest="ask_sudo_pass",
                action="store_true",
                help="ask for sudo password (deprecated, use become)",
            )
            parser.add_option(
                "--ask-su-pass",
                default=C.DEFAULT_ASK_SU_PASS,
                dest="ask_su_pass",
                action="store_true",
                help="ask for su password (deprecated, use become)",
            )
            parser.add_option(
                "-s",
                "--sudo",
                default=C.DEFAULT_SUDO,
                action="store_true",
                dest="sudo",
                help="run operations with sudo (nopasswd) (deprecated, use become)",
            )
            parser.add_option(
                "-U",
                "--sudo-user",
                dest="sudo_user",
                default=None,
                help="desired sudo user (default=root) (deprecated, use become)",
            )
            parser.add_option(
                "-S",
                "--su",
                default=C.DEFAULT_SU,
                action="store_true",
                help="run operations with su (deprecated, use become)",
            )
            parser.add_option(
                "-R",
                "--su-user",
                default=None,
                help="run operations with su as this user (default=%s) (deprecated, use become)" % C.DEFAULT_SU_USER,
            )

            # consolidated privilege escalation (become)
            parser.add_option(
                "-b",
                "--become",
                default=C.DEFAULT_BECOME,
                action="store_true",
                dest="become",
                help="run operations with become (nopasswd implied)",
            )
            parser.add_option(
                "--become-method",
                dest="become_method",
                default=C.DEFAULT_BECOME_METHOD,
                type="string",
                help="privilege escalation method to use (default=%s), valid choices: [ %s ]"
                % (C.DEFAULT_BECOME_METHOD, " | ".join(C.BECOME_METHODS)),
            )
            parser.add_option(
                "--become-user",
                default=None,
                dest="become_user",
                type="string",
                help="run operations as this user (default=%s)" % C.DEFAULT_BECOME_USER,
            )
            parser.add_option(
                "--ask-become-pass",
                default=False,
                dest="become_ask_pass",
                action="store_true",
                help="ask for privilege escalation password",
            )

        if connect_opts:
            parser.add_option(
                "-k",
                "--ask-pass",
                default=False,
                dest="ask_pass",
                action="store_true",
                help="ask for connection password",
            )
            parser.add_option(
                "--private-key",
                "--key-file",
                default=C.DEFAULT_PRIVATE_KEY_FILE,
                dest="private_key_file",
                help="use this file to authenticate the connection",
            )
            parser.add_option(
                "-u",
                "--user",
                default=C.DEFAULT_REMOTE_USER,
                dest="remote_user",
                help="connect as this user (default=%s)" % C.DEFAULT_REMOTE_USER,
            )
            parser.add_option(
                "-c",
                "--connection",
                dest="connection",
                default=C.DEFAULT_TRANSPORT,
                help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT,
            )
            parser.add_option(
                "-T",
                "--timeout",
                default=C.DEFAULT_TIMEOUT,
                type="int",
                dest="timeout",
                help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT,
            )

        if async_opts:
            parser.add_option(
                "-P",
                "--poll",
                default=C.DEFAULT_POLL_INTERVAL,
                type="int",
                dest="poll_interval",
                help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL,
            )
            parser.add_option(
                "-B",
                "--background",
                dest="seconds",
                type="int",
                default=0,
                help="run asynchronously, failing after X seconds (default=N/A)",
            )

        if check_opts:
            parser.add_option(
                "-C",
                "--check",
                default=False,
                dest="check",
                action="store_true",
                help="don't make any changes; instead, try to predict some of the changes that may occur",
            )
            parser.add_option(
                "--syntax-check",
                dest="syntax",
                action="store_true",
                help="perform a syntax check on the playbook, but do not execute it",
            )

        if diff_opts:
            parser.add_option(
                "-D",
                "--diff",
                default=False,
                dest="diff",
                action="store_true",
                help="when changing (small) files and templates, show the differences in those files; works great with --check",
            )

        if meta_opts:
            parser.add_option(
                "--force-handlers", dest="force_handlers", action="store_true", help="run handlers even if a task fails"
            )
            parser.add_option("--flush-cache", dest="flush_cache", action="store_true", help="clear the fact cache")

        return parser

    @staticmethod
    def version(prog):
        """ return ansible version """
        result = "{0} {1}".format(prog, __version__)
        gitinfo = CLI._gitinfo()
        if gitinfo:
            result = result + " {0}".format(gitinfo)
        result = result + "\n  configured module search path = %s" % C.DEFAULT_MODULE_PATH
        return result

    @staticmethod
    def version_info(gitinfo=False):
        """ return full ansible version info """
        if gitinfo:
            # expensive call, use with care; CLI qualifier is required since
            # class attributes are not in scope inside a staticmethod body
            ansible_version_string = CLI.version("")
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split(".")
        for counter in range(len(ansible_versions)):
            if ansible_versions[counter] == "":
                ansible_versions[counter] = 0
            try:
                ansible_versions[counter] = int(ansible_versions[counter])
            except ValueError:
                # non-numeric component (e.g. "0b1"); keep it as a string
                pass
        if len(ansible_versions) < 3:
            for counter in range(len(ansible_versions), 3):
                ansible_versions.append(0)
        return {
            "string": ansible_version_string.strip(),
            "full": ansible_version,
            "major": ansible_versions[0],
            "minor": ansible_versions[1],
            "revision": ansible_versions[2],
        }

    @staticmethod
    def _git_repo_info(repo_path):
        """ returns a string containing git branch, commit id and commit date """
        result = None
        if os.path.exists(repo_path):
            # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
            if os.path.isfile(repo_path):
                try:
                    with open(repo_path) as f:
                        gitdir = yaml.safe_load(f).get("gitdir")
                    # There is a possibility the .git file to have an absolute path.
                    if os.path.isabs(gitdir):
                        repo_path = gitdir
                    else:
                        repo_path = os.path.join(repo_path[:-4], gitdir)
                except (IOError, AttributeError):
                    return ""
            with open(os.path.join(repo_path, "HEAD")) as f:
                branch = f.readline().split("/")[-1].rstrip("\n")
            branch_path = os.path.join(repo_path, "refs", "heads", branch)
            if os.path.exists(branch_path):
                with open(branch_path) as f:
                    commit = f.readline()[:10]
            else:
                # detached HEAD
                commit = branch[:10]
                branch = "detached HEAD"
                branch_path = os.path.join(repo_path, "HEAD")

            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(
                branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36)
            )
        else:
            result = ""
        return result

    @staticmethod
    def _gitinfo():
        """Return git info for the repo itself plus any git submodules."""
        basedir = os.path.join(os.path.dirname(__file__), "..", "..", "..")
        repo_path = os.path.join(basedir, ".git")
        result = CLI._git_repo_info(repo_path)
        submodules = os.path.join(basedir, ".gitmodules")
        if not os.path.exists(submodules):
            return result
        with open(submodules) as f:
            for line in f:
                tokens = line.strip().split(" ")
                if tokens[0] == "path":
                    submodule_path = tokens[2]
                    submodule_info = CLI._git_repo_info(os.path.join(basedir, submodule_path, ".git"))
                    if not submodule_info:
                        submodule_info = " not found - use git submodule update --init " + submodule_path
                    result += "\n  {0}: {1}".format(submodule_path, submodule_info)
        return result

    @staticmethod
    def pager(text):
        """ find reasonable way to display text """
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            print(text)
        elif "PAGER" in os.environ:
            if sys.platform == "win32":
                print(text)
            else:
                CLI.pager_pipe(text, os.environ["PAGER"])
        elif subprocess.call("(less --version) 2> /dev/null", shell=True) == 0:
            CLI.pager_pipe(text, "less")
        else:
            print(text)

    @staticmethod
    def pager_pipe(text, cmd):
        """ pipe text through a pager """
        if "LESS" not in os.environ:
            os.environ["LESS"] = CLI.LESS_OPTS
        try:
            cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
            # the pipe expects bytes on Python 3; passing str raised TypeError
            cmd.communicate(input=to_bytes(text))
        except IOError:
            pass
        except KeyboardInterrupt:
            pass

    @classmethod
    def tty_ify(cls, text):
        """Convert documentation markup tags into plain-tty decorations."""
        t = cls._ITALIC.sub("`" + r"\1" + "'", text)  # I(word) => `word'
        t = cls._BOLD.sub("*" + r"\1" + "*", t)  # B(word) => *word*
        t = cls._MODULE.sub("[" + r"\1" + "]", t)  # M(word) => [word]
        t = cls._URL.sub(r"\1", t)  # U(word) => word
        t = cls._CONST.sub("`" + r"\1" + "'", t)  # C(word) => `word'

        return t
Esempio n. 20
0
def main(argv=sys.argv[1:]):
    """Run a small embedded Ansible play (uname -a + debug) on the spark nodes.

    :param argv: command line arguments (currently unused; kept for symmetry)
    """
    display = Display()

    host_list = ['spark-node1', 'spark-node2', 'spark-node3']

    # initialize needed objects
    Options = namedtuple('Options', ['connection',
                                     'module_path',
                                     'log_path',
                                     'forks',
                                     'become',
                                     'become_method',
                                     'become_user',
                                     'verbosity',
                                     'check'])
    variable_manager = VariableManager()
    loader = DataLoader()
    options = Options(connection='ssh',
                      module_path='/path/to/mymodules',
                      log_path='./log',
                      forks=100,
                      become=None,
                      become_method=None,
                      become_user=None,
                      verbosity=None,
                      check=False)

    # create inventory and pass to var manager
    inventory = Inventory(loader=loader,
                          variable_manager=variable_manager,
                          host_list=host_list)

    variable_manager.set_inventory(inventory)

    display.warning('Running Ansible embedded')

    # create play with tasks: run `uname -a`, then echo its stdout
    play_source = dict(name="Ansible Play",
                       hosts=host_list,
                       gather_facts='no',
                       tasks=[
                           dict(action=dict(module='command',
                                args='uname -a'), register='shell_out'),
                           dict(action=dict(module='debug',
                                args=dict(msg='{{shell_out.stdout}}')))
                       ]
                       )

    play = Play().load(play_source,
                       variable_manager=variable_manager,
                       loader=loader)

    callback = ResultsCollector()

    # actually run it
    # tqm must exist before the try block: if the TaskQueueManager
    # constructor raises, the finally clause would otherwise hit a NameError.
    tqm = None
    try:
        tqm = TaskQueueManager(inventory=inventory,
                               variable_manager=variable_manager,
                               loader=loader,
                               options=options,
                               passwords={},
                               stdout_callback=callback,
                               )

        result = tqm.run(play)
        if result != 0:
            print("ERROR")
    finally:
        if tqm is not None:
            tqm.cleanup()

    # print()-style calls below work on both Python 2 and 3, unlike the
    # original print statements
    print("UP ***********")
    for host, result in callback.host_ok.items():
        print('{}: {}'.format(host, result._result['msg']))

    print("FAILED *******")
    for host, result in callback.host_failed.items():
        print('{}: {}'.format(host, result._result['msg']))

    print("DOWN *********")
    for host, result in callback.host_unreachable.items():
        print('{}: {}'.format(host, result._result['msg']))
Esempio n. 21
0
import hashlib
import os
import re
import string

from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import string_types
from ansible.template import Templar

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

_SAFE_GROUP = re.compile("[^A-Za-z0-9\_]")


class BaseInventoryPlugin(object):
    """ Parses an Inventory Source"""

    TYPE = 'generator'

    def __init__(self, cache=None):
        """Set up plugin state; an inventory is attached later by a parser."""
        # Cache backend supplied by the caller (may be None).
        self.cache = cache
        # Share the module-level display object for user-facing output.
        self.display = display
        # Filled in once an inventory source is actually parsed.
        self.inventory = None
Esempio n. 22
0
from ansible.module_utils.common.collections import is_sequence
from ansible.plugins.loader import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import wrap_var

# HACK: keep Python 2.6 controller tests happy in CI until they're properly split
try:
    from importlib import import_module
except ImportError:
    import_module = __import__

display = Display()


__all__ = ['Templar', 'generate_ansible_template_vars']

# A regex for checking to see if a variable we're trying to
# expand is just a single variable name.

# Primitive Types which we don't want Jinja to convert to strings.
NON_TEMPLATED_TYPES = (bool, Number)

JINJA2_OVERRIDE = '#jinja2:'

USE_JINJA2_NATIVE = False
if C.DEFAULT_JINJA2_NATIVE:
    try:
def display(*args, **kwargs):
    """Set up display function for Ansible v2"""
    # Delegate directly to a freshly constructed Display object.
    Display().display(*args, **kwargs)
Esempio n. 24
0
class TaskExecutor:
    '''
    This is the main worker class for the executor pipeline, which
    handles loading an action plugin to actually dispatch the task to
    a given host. This class roughly corresponds to the old Runner()
    class.
    '''

    # Modules that we optimize by squashing loop items into a single call to
    # the module
    SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)

    def __init__(self, host, task, job_vars, play_context, new_stdin, loader,
                 shared_loader_obj):
        """Store the per-task execution state and resolve a display object."""
        self._host = host
        self._task = task
        self._job_vars = job_vars
        self._play_context = play_context
        self._new_stdin = new_stdin
        self._loader = loader
        self._shared_loader_obj = shared_loader_obj

        # Prefer the CLI's shared display object; when not running under the
        # ansible entry point, fall back to a private Display instance.
        try:
            from __main__ import display
        except ImportError:
            from ansible.utils.display import Display
            display = Display()
        self._display = display

    def run(self):
        '''
        The main executor entrypoint: decide whether the task loops and
        either run it once per loop item or execute it directly, returning
        the final (cleaned) result dict.
        '''

        self._display.debug("in run()")

        try:
            # lookup plugins need to know if this task is executing from
            # a role, so that it can properly find files/templates/etc.
            roledir = None
            if self._task._role:
                roledir = self._task._role._role_path
            self._job_vars['roledir'] = roledir

            items = self._get_loop_items()
            if items is not None:
                if len(items) > 0:
                    item_results = self._run_loop(items)

                    # loop through the item results, and remember the changed/failed
                    # result flags based on any item there.
                    changed = False
                    failed = False
                    for item in item_results:
                        if 'changed' in item and item['changed']:
                            changed = True
                        if 'failed' in item and item['failed']:
                            failed = True

                    # create the overall result item, and set the changed/failed
                    # flags there to reflect the overall result of the loop
                    res = dict(results=item_results)

                    if changed:
                        res['changed'] = True

                    if failed:
                        res['failed'] = True
                        res['msg'] = 'One or more items failed'
                    else:
                        res['msg'] = 'All items completed'
                else:
                    # an empty loop list means the task is skipped outright
                    res = dict(changed=False,
                               skipped=True,
                               skipped_reason='No items in the list',
                               results=[])
            else:
                # no loop at all: execute the task exactly once
                self._display.debug("calling self._execute()")
                res = self._execute()
                self._display.debug("_execute() done")

            # make sure changed is set in the result, if it's not present
            if 'changed' not in res:
                res['changed'] = False

            # recursively unwrap UnsafeProxy values so the returned result
            # contains only plain objects (safe to hand back to the caller)
            def _clean_res(res):
                if isinstance(res, dict):
                    for k in res.keys():
                        res[k] = _clean_res(res[k])
                elif isinstance(res, list):
                    for idx, item in enumerate(res):
                        res[idx] = _clean_res(item)
                elif isinstance(res, UnsafeProxy):
                    return res._obj
                return res

            self._display.debug("dumping result to json")
            res = _clean_res(res)
            self._display.debug("done dumping result, returning")
            return res
        except AnsibleError as e:
            # task-level errors are reported as a failed result, not raised
            return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
        finally:
            # best-effort close of the connection; it may never have been set
            try:
                self._connection.close()
            except AttributeError:
                pass
            except Exception as e:
                self._display.debug("error closing connection: %s" %
                                    to_unicode(e))

    def _get_loop_items(self):
        '''
        Loads a lookup plugin to handle the with_* portion of a task (if specified),
        and returns the items result.

        Returns None when the task has no loop; otherwise the (possibly empty)
        list of items, each wrapped in UnsafeProxy.

        Raises AnsibleError when the named lookup plugin cannot be found.
        '''

        # create a copy of the job vars here so that we can modify
        # them temporarily without changing them too early for other
        # parts of the code that might still need a pristine version
        vars_copy = self._job_vars.copy()

        # now we update them with the play context vars
        self._play_context.update_vars(vars_copy)

        templar = Templar(loader=self._loader,
                          shared_loader_obj=self._shared_loader_obj,
                          variables=vars_copy)
        items = None
        if self._task.loop:
            if self._task.loop in self._shared_loader_obj.lookup_loader:
                #TODO: remove convert_bare true and deprecate this in with_
                try:
                    loop_terms = listify_lookup_plugin_terms(
                        terms=self._task.loop_args,
                        templar=templar,
                        loader=self._loader,
                        fail_on_undefined=True,
                        convert_bare=True)
                except AnsibleUndefinedVariable as e:
                    # an undefined *attribute* is tolerated (with a deprecation
                    # warning) so existing playbooks keep working; any other
                    # undefined-variable error is re-raised
                    if 'has no attribute' in str(e):
                        loop_terms = []
                        self._display.deprecated(
                            "Skipping task due to undefined attribute, in the future this will be a fatal error."
                        )
                    else:
                        raise
                items = self._shared_loader_obj.lookup_loader.get(
                    self._task.loop, loader=self._loader,
                    templar=templar).run(terms=loop_terms, variables=vars_copy)
            else:
                raise AnsibleError(
                    "Unexpected failure in finding the lookup named '%s' in the available lookup plugins"
                    % self._task.loop)

        if items:
            # wrap each item so downstream templating treats loop data as unsafe
            from ansible.vars.unsafe_proxy import UnsafeProxy
            for idx, item in enumerate(items):
                if item is not None and not isinstance(item, UnsafeProxy):
                    items[idx] = UnsafeProxy(item)
        return items

    def _run_loop(self, items):
        '''
        Runs the task with the loop items specified and collates the result
        into an array named 'results' which is inserted into the final result
        along with the item for which the loop ran.
        '''

        results = []

        # make copies of the job vars and task so we can add the item to
        # the variables and re-validate the task with the item variable
        task_vars = self._job_vars.copy()

        items = self._squash_items(items, task_vars)
        for item in items:
            task_vars['item'] = item

            try:
                tmp_task = self._task.copy()
                tmp_play_context = self._play_context.copy()
            except AnsibleParserError as e:
                # record the copy/parse failure for this item and keep looping
                results.append(dict(failed=True, msg=str(e)))
                continue

            # now we swap the internal task and play context with their copies,
            # execute, and swap them back so we can do the next iteration cleanly
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context,
                                                      self._play_context)
            res = self._execute(variables=task_vars)
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context,
                                                      self._play_context)

            # now update the result with the item info, and append the result
            # to the list of results
            res['item'] = item
            results.append(res)

        return results

    def _squash_items(self, items, variables):
        '''
        Squash items down to a comma-separated list for certain modules which
        support it (typically package management modules).

        :param items: loop items for the task
        :param variables: task vars used for templating and conditionals
        :returns: the original ``items`` list, or a single-element list
            containing the squashed item list when squashing applied
        '''
        # _task.action could contain templatable strings (via action: and
        # local_action:)  Template it before comparing.  If we don't end up
        # optimizing it here, the templatable string might use template vars
        # that aren't available until later (it could even use vars from the
        # with_items loop) so don't make the templated string permanent yet.
        templar = Templar(loader=self._loader,
                          shared_loader_obj=self._shared_loader_obj,
                          variables=variables)
        # BUGFIX: default to the raw action name. Previously task_action was
        # only assigned inside the _contains_vars() branch, so any plain
        # (non-templated) action string raised a NameError on the comparison
        # below.
        task_action = self._task.action
        if templar._contains_vars(task_action):
            task_action = templar.template(task_action,
                                           fail_on_undefined=False)

        if len(items) > 0 and task_action in self.SQUASH_ACTIONS:
            if all(isinstance(o, string_types) for o in items):
                final_items = []
                # 'name' (or legacy 'pkg') may itself be a template that uses
                # the loop item; pop it so it can be rebuilt squashed.
                name = self._task.args.pop(
                    'name', None) or self._task.args.pop('pkg', None)
                for item in items:
                    variables['item'] = item
                    # only include items whose conditional evaluates true
                    if self._task.evaluate_conditional(templar, variables):
                        if templar._contains_vars(name):
                            new_item = templar.template(name)
                            final_items.append(new_item)
                        else:
                            final_items.append(item)
                self._task.args['name'] = final_items
                return [final_items]
            #elif:
            # Right now we only optimize single entries.  In the future we
            # could optimize more types:
            # * lists can be squashed together
            # * dicts could squash entries that match in all cases except the
            #   name or pkg field.
            # Note: we really should be checking that the name or pkg field
            # contains a template that expands with our with_items values.
            # If it doesn't then we may break things
        return items

    def _execute(self, variables=None):
        '''
        The primary workhorse of the executor system, this runs the task
        on the specified host (which may be the delegated_to host) and handles
        the retry/until and block rescue/always execution

        :param variables: task vars to use; defaults to self._job_vars
        :returns: the final result dict for the task
        '''

        if variables is None:
            variables = self._job_vars

        templar = Templar(loader=self._loader,
                          shared_loader_obj=self._shared_loader_obj,
                          variables=variables)

        context_validation_error = None
        try:
            # apply the given task's information to the connection info,
            # which may override some fields already set by the play or
            # the options specified on the command line
            self._play_context = self._play_context.set_task_and_variable_override(
                task=self._task, variables=variables, templar=templar)

            # fields set from the play/task may be based on variables, so we have to
            # do the same kind of post validation step on it here before we use it.
            self._play_context.post_validate(templar=templar)

            # We also add "magic" variables back into the variables dict to make sure
            # a certain subset of variables exist.
            self._play_context.update_vars(variables)
        except AnsibleError as e:
            # save the error, which we'll raise later if we don't end up
            # skipping this task during the conditional evaluation step
            context_validation_error = e

        # Evaluate the conditional (if any) for this task, which we do before running
        # the final task post-validation. We do this before the post validation due to
        # the fact that the conditional may specify that the task be skipped due to a
        # variable not being present which would otherwise cause validation to fail
        try:
            if not self._task.evaluate_conditional(templar, variables):
                self._display.debug(
                    "when evaluation failed, skipping this task")
                return dict(changed=False,
                            skipped=True,
                            skip_reason='Conditional check failed',
                            _ansible_no_log=self._play_context.no_log)
        except AnsibleError:
            # skip conditional exception in the case of includes as the vars needed might not be avaiable except in the included tasks or due to tags
            if self._task.action != 'include':
                raise

        # if we ran into an error while setting up the PlayContext, raise it now
        if context_validation_error is not None:
            raise context_validation_error

        # if this task is a TaskInclude, we just return now with a success code so the
        # main thread can expand the task list for the given host
        if self._task.action == 'include':
            include_variables = self._task.args.copy()
            include_file = include_variables.pop('_raw_params', None)
            if not include_file:
                return dict(failed=True,
                            msg="No include file was specified to the include")

            include_file = templar.template(include_file)
            return dict(include=include_file,
                        include_variables=include_variables)

        # Now we do final validation on the task, which sets all fields to their final values.
        self._task.post_validate(templar=templar)
        if '_variable_params' in self._task.args:
            variable_params = self._task.args.pop('_variable_params')
            if isinstance(variable_params, dict):
                self._display.deprecated(
                    "Using variables for task params is unsafe, especially if the variables come from an external source like facts"
                )
                variable_params.update(self._task.args)
                self._task.args = variable_params

        # get the connection and the handler for this execution
        self._connection = self._get_connection(variables=variables,
                                                templar=templar)
        self._connection.set_host_overrides(host=self._host)

        self._handler = self._get_action_handler(connection=self._connection,
                                                 templar=templar)

        # And filter out any fields which were set to default(omit), and got the omit token value
        omit_token = variables.get('omit')
        if omit_token is not None:
            self._task.args = dict((i[0], i[1])
                                   for i in iteritems(self._task.args)
                                   if i[1] != omit_token)

        # Read some values from the task, so that we can modify them if need be
        if self._task.until is not None:
            retries = self._task.retries
            if retries <= 0:
                retries = 1
        else:
            retries = 1

        delay = self._task.delay
        if delay < 0:
            delay = 1

        # make a copy of the job vars here, in case we need to update them
        # with the registered variable value later on when testing conditions
        vars_copy = variables.copy()

        self._display.debug("starting attempt loop")
        result = None
        for attempt in range(retries):
            if attempt > 0:
                self._display.display(
                    "FAILED - RETRYING: %s (%d retries left). Result was: %s" %
                    (self._task, retries - attempt, result),
                    color="red")
                result['attempts'] = attempt + 1

            self._display.debug("running the handler")
            try:
                result = self._handler.run(task_vars=variables)
            except AnsibleConnectionFailure as e:
                return dict(unreachable=True, msg=str(e))
            self._display.debug("handler run complete")

            # BUGFIX: 'async' is a reserved keyword on Python 3.7+, so the
            # former attribute access "self._task. async" is a SyntaxError
            # there; getattr() reads the same attribute with unchanged
            # semantics (still raises AttributeError when absent).
            if getattr(self._task, 'async') > 0:
                # the async_wrapper module returns dumped JSON via its stdout
                # response, so we parse it here and replace the result
                try:
                    result = json.loads(result.get('stdout'))
                except (TypeError, ValueError) as e:
                    return dict(
                        failed=True,
                        msg="The async task did not return valid JSON: %s" %
                        str(e))

                if self._task.poll > 0:
                    result = self._poll_async_result(result=result,
                                                     templar=templar)

            # update the local copy of vars with the registered value, if specified,
            # or any facts which may have been generated by the module execution
            if self._task.register:
                vars_copy[self._task.register] = result

            if 'ansible_facts' in result:
                vars_copy.update(result['ansible_facts'])

            # create a conditional object to evaluate task conditions
            cond = Conditional(loader=self._loader)

            # helper: apply changed_when (if set) against the updated vars
            def _evaluate_changed_when_result(result):
                if self._task.changed_when is not None:
                    cond.when = [self._task.changed_when]
                    result['changed'] = cond.evaluate_conditional(
                        templar, vars_copy)

            # helper: apply failed_when (if set); returns the failure verdict
            def _evaluate_failed_when_result(result):
                if self._task.failed_when is not None:
                    cond.when = [self._task.failed_when]
                    failed_when_result = cond.evaluate_conditional(
                        templar, vars_copy)
                    result['failed_when_result'] = result[
                        'failed'] = failed_when_result
                    return failed_when_result
                return False

            if self._task.until:
                cond.when = self._task.until
                if cond.evaluate_conditional(templar, vars_copy):
                    _evaluate_changed_when_result(result)
                    _evaluate_failed_when_result(result)
                    break
            elif (self._task.changed_when is not None or self._task.failed_when
                  is not None) and 'skipped' not in result:
                _evaluate_changed_when_result(result)
                if _evaluate_failed_when_result(result):
                    break
            elif 'failed' not in result:
                if result.get('rc', 0) != 0:
                    result['failed'] = True
                else:
                    # if the result is not failed, stop trying
                    break

            if attempt < retries - 1:
                time.sleep(delay)
            else:
                _evaluate_changed_when_result(result)
                _evaluate_failed_when_result(result)

        # do the final update of the local variables here, for both registered
        # values and any facts which may have been created
        if self._task.register:
            ### FIXME:
            # If we remove invocation, we should also be removing _ansible*
            # and maybe ansible_facts.
            # Remove invocation from registered vars
            #if 'invocation' in result:
            #    del result['invocation']
            variables[self._task.register] = result

        if 'ansible_facts' in result:
            variables.update(result['ansible_facts'])

        # save the notification target in the result, if it was specified, as
        # this task may be running in a loop in which case the notification
        # may be item-specific, ie. "notify: service {{item}}"
        if self._task.notify is not None:
            result['_ansible_notify'] = self._task.notify

        # preserve no_log setting
        result["_ansible_no_log"] = self._play_context.no_log

        # and return
        self._display.debug("attempt loop complete, returning result")
        return result