def _execute_module(self, conn, tmp, module_name, args,
        async_jid=None, async_module=None, async_limit=None, inject=None, persist_files=False, complex_args=None):

        ''' runs a module that has already been transferred '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s" % (args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
            if 'port' not in args:
                args += " port=%s" % C.ZEROMQ_PORT

        (remote_module_path, is_new_style, shebang) = self._copy_module(conn, tmp, module_name, args, inject, complex_args)

        environment_string = self._compute_environment_string(inject)

        cmd_mod = ""
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd_chmod = "chmod a+r %s" % remote_module_path
            self._low_level_exec_command(conn, cmd_chmod, tmp, sudoable=False)

        cmd = ""
        if not is_new_style:
            if 'CHECKMODE=True' in args:
                # if module isn't using AnsibleModuleCommon infrastructure we can't be certain it knows how to
                # do --check mode, so to be safe we will not run it.
                return ReturnData(conn=conn, result=dict(skipped=True, msg="cannot run check mode against old-style modules"))

            args = utils.template(self.basedir, args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module]])

        if not shebang:
            raise errors.AnsibleError("module is missing interpreter line")

        cmd = " ".join([environment_string, shebang.replace("#!",""), cmd])
        if tmp.find("tmp") != -1 and C.DEFAULT_KEEP_REMOTE_FILES != '1' and not persist_files:
            cmd = cmd + "; rm -rf %s >/dev/null 2>&1" % tmp
        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        data = utils.parse_json(res['stdout'])
        if 'parsed' in data and data['parsed'] == False:
            data['msg'] += res['stderr']
        return ReturnData(conn=conn, result=data)
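A rough sketch (hypothetical paths and values, not taken from the example above) of the command strings the two branches assemble before handing them to _low_level_exec_command:

environment_string = "LANG=C"
shebang = "#!/usr/bin/python"
remote_module_path = "/home/deploy/.ansible/tmp/ansible-1234/copy"
argsfile = "/home/deploy/.ansible/tmp/ansible-1234/arguments"

# old-style module: interpreter + module path + separate arguments file
old_style = " ".join([environment_string, shebang.replace("#!", ""),
                      "%s %s" % (remote_module_path, argsfile)])
# new-style module: the arguments were already embedded by _copy_module
new_style = " ".join([environment_string, shebang.replace("#!", ""),
                      remote_module_path])
# old_style -> "LANG=C /usr/bin/python .../copy .../arguments"
# new_style -> "LANG=C /usr/bin/python .../copy"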
Example 2
    def _executor_internal(self, host):
        ''' executes any module one or more times '''

        host_variables = self.inventory.get_variables(host)
        port = host_variables.get('ansible_ssh_port', self.remote_port)

        inject = {}
        inject.update(host_variables)
        inject.update(self.module_vars)
        inject.update(self.setup_cache[host])
        inject['hostvars'] = self.setup_cache
        inject['group_names'] = host_variables.get('group_names', [])
        inject['groups'] = self.inventory.groups_list()

        # allow with_items to work in playbooks...
        # apt and yum are converted into a single call, others run in a loop

        items = self.module_vars.get('items', [])
        if isinstance(items, basestring) and items.startswith("$"):
            items = utils.varLookup(items, inject)
        if type(items) != list:
            raise errors.AnsibleError("with_items only takes a list: %s" % items)

        if len(items) and self.module_name in [ 'apt', 'yum' ]:
            # hack for apt and yum: with_items maps back into a single module call
            inject['item'] = ",".join(items)
            items = []

        # logic to decide how to run things depends on whether with_items is used

        if len(items) == 0:
            return self._executor_internal_inner(host, self.module_name, self.module_args, inject, port)
        else:
            # executing using with_items, so make multiple calls
            # TODO: refactor
            aggregrate = {}
            all_comm_ok = True
            all_changed = False
            all_failed = False
            results = []
            for x in items:
                inject['item'] = x
                result = self._executor_internal_inner(host, self.module_name, self.module_args, inject, port)
                results.append(result.result)
                if result.comm_ok == False:
                    all_comm_ok = False
                    break
                for x in results:
                    if x.get('changed') == True:
                        all_changed = True
                    if (x.get('failed') == True) or (('rc' in x) and (x['rc'] != 0)):
                        all_failed = True
                        break
            msg = 'All items succeeded'
            if all_failed:
                msg = "One or more items failed."
            rd_result = dict(failed=all_failed, changed=all_changed, results=results, msg=msg)
            if not all_failed:
                del rd_result['failed']
            return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)
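A small illustration (hypothetical item values) of the two with_items paths above: apt and yum collapse the list into one call, every other module loops:

items = ['vim', 'git', 'tmux']
inject = {}

# apt/yum: the whole list becomes a single comma-joined 'item' value
inject['item'] = ",".join(items)   # "vim,git,tmux" -> one module call

# any other module: one call per item
for x in items:
    inject['item'] = x             # "vim", then "git", then "tmux"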
Example 3
    def _execute_module(self, conn, tmp, module_name, args,
        async_jid=None, async_module=None, async_limit=None, inject=None):

        ''' runs a module that has already been transferred '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s" % (args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
            if 'port' not in args:
                args += " port=%s" % C.ZEROMQ_PORT

        (remote_module_path, is_new_style) = self._copy_module(conn, tmp, module_name, args, inject)
        cmd = "chmod u+x %s" % remote_module_path
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd = "chmod a+rx %s" % remote_module_path
        self._low_level_exec_command(conn, cmd, tmp)

        cmd = ""
        if not is_new_style:
            args = utils.template(self.basedir, args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module]])

        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        return ReturnData(conn=conn, result=res)
Example 4
    def _executor(self, host, new_stdin):
        ''' handler for multiprocessing library '''

        def get_flags():
            # flags are a way of passing arbitrary event information
            # back up the chain, since multiprocessing forks and doesn't
            # allow state exchange
            flags = []
            if template.Flags.LEGACY_TEMPLATE_WARNING:
                flags.append('LEGACY_TEMPLATE_WARNING')
            return flags

        try:
            if not new_stdin:
                self._new_stdin = os.fdopen(os.dup(sys.stdin.fileno()))
            else:
                self._new_stdin = new_stdin

            exec_rc = self._executor_internal(host, new_stdin)
            if type(exec_rc) != ReturnData:
                raise Exception("unexpected return type: %s" % type(exec_rc))
            exec_rc.flags = get_flags()
            # redundant, right?
            if not exec_rc.comm_ok:
                self.callbacks.on_unreachable(host, exec_rc.result)
            return exec_rc
        except errors.AnsibleError, ae:
            msg = str(ae)
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg), flags=get_flags())
Example 5
    def run(self):
        ''' xfer & run module on all matched hosts '''

        # find hosts that match the pattern
        hosts = self.inventory.list_hosts(self.pattern)
        if len(hosts) == 0:
            self.callbacks.on_no_hosts()
            return dict(contacted={}, dark={})

        hosts = [ (self,x) for x in hosts ]
        results = None

        # Check if this is an action plugin. Some of them are designed
        # to be run once per group of hosts. Example module: pause,
        # which runs once per host group rather than pausing once for
        # each host.
        p = self.action_plugins.get(self.module_name, None)
        if p and getattr(p, 'BYPASS_HOST_LOOP', None):
            # Expose the current hostgroup to the bypassing plugins
            self.host_set = hosts
            # We aren't iterating over all the hosts in this
            # group. So, just pick the first host in our group to
            # construct the conn object with.
            result_data = self._executor(hosts[0][1]).result
            # Create a ReturnData item for each host in this group
            # using the returned result. If we didn't do this we would
            # get false reports of dark hosts.
            results = [ ReturnData(host=h[1], result=result_data, comm_ok=True) \
                           for h in hosts ]
            del self.host_set
        elif self.forks > 1:
            results = self._parallel_exec(hosts)
        else:
            results = [ self._executor(h[1]) for h in hosts ]
        return self._partition_results(results)
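A minimal sketch of an action plugin that would hit the BYPASS_HOST_LOOP branch in run() above; the plugin body is hypothetical, only the BYPASS_HOST_LOOP attribute is taken from this code, and the run() signature follows how handlers are invoked elsewhere in these examples:

from ansible.runner.return_data import ReturnData  # import path assumed from the same codebase

class ActionModule(object):
    ''' pause-style plugin: executed once for the whole host group '''

    BYPASS_HOST_LOOP = True

    def __init__(self, runner):
        self.runner = runner

    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None):
        # self.runner.host_set holds the whole group while this runs (see run() above)
        return ReturnData(conn=conn, result=dict(changed=False))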
Example 6
    def _executor_internal_inner(self, host, inject, port, is_chained=False):
        ''' decides how to invoke a module '''

        # FIXME: temporary, need to refactor to pass as parameters versus reassigning
        prev_module_name = self.module_name
        prev_module_args = self.module_args

        # special non-user/non-fact variables:
        # 'groups' variable is a list of host names in each group
        # 'hostvars' variable contains variables for each host name
        #  ... and is set elsewhere
        # 'inventory_hostname' is also set elsewhere
        inject['groups'] = self.inventory.groups_list()
        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(self.module_args) == dict:
            for (k, v) in self.module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k, v)
            self.module_args = new_args
        self.module_args = utils.template(self.module_args, inject)

        def _check_conditional(conditional):
            def is_set(var):
                return not var.startswith("$")

            return eval(conditional)

        conditional = utils.template(self.conditional, inject)
        if not _check_conditional(conditional):
            result = utils.jsonify(dict(skipped=True))
            self.callbacks.on_skipped(host, inject.get('item', None))
            return ReturnData(host=host, result=result)

        conn = None
        actual_host = host
        try:
            delegate_to = inject.get('delegate_to', None)
            if delegate_to is not None:
                actual_host = delegate_to
            conn = self.connector.connect(actual_host, port)
            if delegate_to is not None:
                conn._delegate_for = host
        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)
Example 7
    def _executor_internal_inner(self,
                                 host,
                                 module_name,
                                 module_args,
                                 inject,
                                 port,
                                 is_chained=False):
        ''' decides how to invoke a module '''

        # special non-user/non-fact variables:
        # 'groups' variable is a list of host names in each group
        # 'hostvars' variable contains variables for each host name
        #  ... and is set elsewhere
        # 'inventory_hostname' is also set elsewhere
        inject['groups'] = self.inventory.groups_list()

        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(module_args) == dict:
            for (k, v) in module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k, v)
            module_args = new_args

        conditional = utils.template(self.basedir, self.conditional, inject)
        if not utils.check_conditional(conditional):
            result = utils.jsonify(dict(skipped=True))
            self.callbacks.on_skipped(host, inject.get('item', None))
            return ReturnData(host=host, result=result)

        conn = None
        actual_host = host
        try:
            delegate_to = inject.get('delegate_to', None)
            alternative_host = inject.get('ansible_ssh_host', None)
            if delegate_to is not None:
                actual_host = delegate_to
            elif alternative_host is not None:
                actual_host = alternative_host
            conn = self.connector.connect(actual_host, port)
            if delegate_to is not None or alternative_host is not None:
                conn._delegate_for = host
        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)
Example 8
    def _execute_module(self,
                        conn,
                        tmp,
                        module_name,
                        args,
                        async_jid=None,
                        async_module=None,
                        async_limit=None,
                        inject=None):
        ''' runs a module that has already been transferred '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s" % (
                args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
            if 'port' not in args:
                args += " port=%s" % C.ZEROMQ_PORT

        (remote_module_path, is_new_style,
         shebang) = self._copy_module(conn, tmp, module_name, args, inject)

        cmd_mod = ""
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd_chmod = "chmod a+r %s" % remote_module_path
            self._low_level_exec_command(conn, cmd_chmod, tmp, sudoable=False)

        cmd = ""
        if not is_new_style:
            args = utils.template(self.basedir, args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([
                    str(x) for x in [
                        remote_module_path, async_jid, async_limit,
                        async_module, argsfile
                    ]
                ])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([
                    str(x) for x in
                    [remote_module_path, async_jid, async_limit, async_module]
                ])

        if not shebang:
            raise errors.AnsibleError("module is missing interpreter line")

        cmd = shebang.replace("#!", "") + " " + cmd
        if tmp.find("tmp") != -1 and C.DEFAULT_KEEP_REMOTE_FILES != '1':
            cmd = cmd + "; rm -rf %s >/dev/null 2>&1" % tmp
        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        return ReturnData(conn=conn, result=res)
Example 9

    def _executor(self, host):
        ''' handler for multiprocessing library '''
        try:
            exec_rc = self._executor_internal(host)
            if type(exec_rc) != ReturnData:
                raise Exception("unexpected return type: %s" % type(exec_rc))
            # redundant, right?
            if not exec_rc.comm_ok:
                self.callbacks.on_unreachable(host, exec_rc.result)
            return exec_rc
        except errors.AnsibleError, ae:
            msg = str(ae)
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
Example 10
    def _execute_module(self,
                        conn,
                        tmp,
                        module_name,
                        args,
                        async_jid=None,
                        async_module=None,
                        async_limit=None,
                        inject=None):
        ''' runs a module that has already been transferred '''

        if type(args) == dict:
            args = utils.jsonify(args, format=True)

        (remote_module_path,
         is_new_style) = self._copy_module(conn, tmp, module_name, inject)
        cmd = "chmod u+x %s" % remote_module_path
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd = "chmod a+rx %s" % remote_module_path
        self._low_level_exec_command(conn, cmd, tmp)

        cmd = ""
        if not is_new_style:
            args = utils.template(args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([
                    str(x) for x in [
                        remote_module_path, async_jid, async_limit,
                        async_module, argsfile
                    ]
                ])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([
                    str(x) for x in
                    [remote_module_path, async_jid, async_limit, async_module]
                ])

        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        return ReturnData(conn=conn, result=res)
Example 11
    def run(self):
        ''' xfer & run module on all matched hosts '''

        # find hosts that match the pattern
        hosts = self.inventory.list_hosts(self.pattern)
        if len(hosts) == 0:
            self.callbacks.on_no_hosts()
            return dict(contacted={}, dark={})

        global multiprocessing_runner
        multiprocessing_runner = self
        results = None

        # Check if this is an action plugin. Some of them are designed
        # to be run once per group of hosts. Example module: pause,
        # which runs once per host group rather than pausing once for
        # each host.
        p = utils.plugins.action_loader.get(self.module_name, self)

        if self.forks == 0 or self.forks > len(hosts):
            self.forks = len(hosts)

        if p and getattr(p, 'BYPASS_HOST_LOOP', None):

            # Expose the current hostgroup to the bypassing plugins
            self.host_set = hosts
            # We aren't iterating over all the hosts in this
            # group. So, just pick the first host in our group to
            # construct the conn object with.
            result_data = self._executor(hosts[0], None).result
            # Create a ReturnData item for each host in this group
            # using the returned result. If we didn't do this we would
            # get false reports of dark hosts.
            results = [ ReturnData(host=h, result=result_data, comm_ok=True) \
                           for h in hosts ]
            del self.host_set

        elif self.forks > 1:
            try:
                results = self._parallel_exec(hosts)
            except IOError, ie:
                print ie.errno
                if ie.errno == 32:
                    # broken pipe from Ctrl+C
                    raise errors.AnsibleError("interrupted")
                raise
class Runner(object):
    ''' core API interface to ansible '''

    # see bin/ansible for how this is used...

    def __init__(
        self,
        host_list=C.DEFAULT_HOST_LIST,  # ex: /etc/ansible/hosts, legacy usage
        module_path=None,  # ex: /usr/share/ansible
        module_name=C.DEFAULT_MODULE_NAME,  # ex: copy
        module_args=C.DEFAULT_MODULE_ARGS,  # ex: "src=/tmp/a dest=/tmp/b"
        forks=C.DEFAULT_FORKS,  # parallelism level
        timeout=C.DEFAULT_TIMEOUT,  # SSH timeout
        pattern=C.DEFAULT_PATTERN,  # which hosts?  ex: 'all', 'acme.example.org'
        remote_user=C.DEFAULT_REMOTE_USER,  # ex: 'username'
        remote_pass=C.DEFAULT_REMOTE_PASS,  # ex: 'password123' or None if using key
        remote_port=None,  # if SSH on different ports
        private_key_file=C.DEFAULT_PRIVATE_KEY_FILE,  # if not using keys/passwords
        sudo_pass=C.DEFAULT_SUDO_PASS,  # ex: 'password123' or None
        background=0,  # async poll every X seconds, else 0 for non-async
        basedir=None,  # directory of playbook, if applicable
        setup_cache=None,  # used to share fact data w/ other tasks
        transport=C.DEFAULT_TRANSPORT,  # 'ssh', 'paramiko', 'local'
        conditional='True',  # run only if this fact expression evals to true
        callbacks=None,  # used for output
        sudo=False,  # whether to run sudo or not
        sudo_user=C.DEFAULT_SUDO_USER,  # ex: 'root'
        module_vars=None,  # a playbook's internals thing
        is_playbook=False,  # running from playbook or not?
        inventory=None,  # reference to Inventory object
        subset=None,  # subset pattern
        check=False,  # don't make any changes, just try to probe for potential changes
        diff=False,  # whether to show diffs for template files that change
        environment=None,  # environment variables (as dict) to use inside the command
        complex_args=None  # structured data in addition to module_args, must be a dict
    ):

        if not complex_args:
            complex_args = {}

        # storage & defaults
        self.check = check
        self.diff = diff
        self.setup_cache = utils.default(setup_cache,
                                         lambda: collections.defaultdict(dict))
        self.basedir = utils.default(basedir, lambda: os.getcwd())
        self.callbacks = utils.default(callbacks,
                                       lambda: DefaultRunnerCallbacks())
        self.generated_jid = str(random.randint(0, 999999999999))
        self.transport = transport
        self.inventory = utils.default(
            inventory, lambda: ansible.inventory.Inventory(host_list))

        self.module_vars = utils.default(module_vars, lambda: {})
        self.sudo_user = sudo_user
        self.connector = connection.Connection(self)
        self.conditional = conditional
        self.module_name = module_name
        self.forks = int(forks)
        self.pattern = pattern
        self.module_args = module_args
        self.timeout = timeout
        self.remote_user = remote_user
        self.remote_pass = remote_pass
        self.remote_port = remote_port
        self.private_key_file = private_key_file
        self.background = background
        self.sudo = sudo
        self.sudo_pass = sudo_pass
        self.is_playbook = is_playbook
        self.environment = environment
        self.complex_args = complex_args

        # misc housekeeping
        if subset and self.inventory._subset is None:
            # don't override subset when passed from playbook
            self.inventory.subset(subset)

        if self.transport == 'local':
            self.remote_user = pwd.getpwuid(os.geteuid())[0]

        if module_path is not None:
            for i in module_path.split(os.pathsep):
                utils.plugins.module_finder.add_directory(i)

        utils.plugins.push_basedir(self.basedir)

        # ensure we are using unique tmp paths
        random.seed()

    # *****************************************************

    def _complex_args_hack(self, complex_args, module_args):
        """
        ansible-playbook both allows specifying key=value string arguments and complex arguments
        however not all modules use our python common module system and cannot
        access these.  An example might be a Bash module.  This hack allows users to still pass "args"
        as a hash of simple scalars to those arguments and is short term.  We could technically
        just feed JSON to the module, but that makes it hard on Bash consumers.  The way this is implemented
        it does mean values in 'args' have LOWER priority than those on the key=value line, allowing
        args to provide yet another way to have pluggable defaults.
        """
        if complex_args is None:
            return module_args
        if type(complex_args) != dict:
            raise errors.AnsibleError(
                "complex arguments are not a dictionary: %s" % complex_args)
        for (k, v) in complex_args.iteritems():
            if isinstance(v, basestring):
                module_args = "%s=%s %s" % (k, pipes.quote(v), module_args)
        return module_args
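    # Illustration with assumed values (not from the source): complex_args of
    # {'dest': '/tmp/b', 'mode': '0644'} and module_args of "src=/tmp/a dest=/tmp/x"
    # are flattened by the loop above into something like
    #     "mode=0644 dest=/tmp/b src=/tmp/a dest=/tmp/x"
    # so a key repeated on the key=value line keeps the higher priority described
    # in the docstring.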

    # *****************************************************

    def _transfer_str(self, conn, tmp, name, data):
        ''' transfer string to remote file '''

        if type(data) == dict:
            data = utils.jsonify(data)

        afd, afile = tempfile.mkstemp()
        afo = os.fdopen(afd, 'w')
        try:
            if not isinstance(data, unicode):
                #ensure the data is valid UTF-8
                data.decode('utf-8')
            else:
                data = data.encode('utf-8')
            afo.write(data)
        except:
            raise errors.AnsibleError("failure encoding into utf-8")
        afo.flush()
        afo.close()

        remote = os.path.join(tmp, name)
        try:
            conn.put_file(afile, remote)
        finally:
            os.unlink(afile)
        return remote

    # *****************************************************

    def _compute_environment_string(self, inject=None):
        ''' what environment variables to use when running the command? '''

        if not self.environment:
            return ""
        enviro = utils.template(self.basedir, self.environment, inject)
        if type(enviro) != dict:
            raise errors.AnsibleError(
                "environment must be a dictionary, received %s" % enviro)
        result = ""
        for (k, v) in enviro.iteritems():
            result = "%s=%s %s" % (k, pipes.quote(str(v)), result)
        return result
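    # Illustration with assumed values: self.environment = {'LANG': 'C',
    # 'HTTP_PROXY': 'http://proxy:3128'} renders (iteration order aside) to the
    # prefix string "HTTP_PROXY=http://proxy:3128 LANG=C ", which _execute_module
    # prepends to the module command line.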

    # *****************************************************

    def _execute_module(self,
                        conn,
                        tmp,
                        module_name,
                        args,
                        async_jid=None,
                        async_module=None,
                        async_limit=None,
                        inject=None,
                        persist_files=False,
                        complex_args=None):
        ''' runs a module that has already been transferred '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s" % (
                args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
            if 'port' not in args:
                args += " port=%s" % C.ZEROMQ_PORT

        (remote_module_path, is_new_style,
         shebang) = self._copy_module(conn, tmp, module_name, args, inject,
                                      complex_args)

        environment_string = self._compute_environment_string(inject)

        cmd_mod = ""
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd_chmod = "chmod a+r %s" % remote_module_path
            self._low_level_exec_command(conn, cmd_chmod, tmp, sudoable=False)

        cmd = ""
        if not is_new_style:
            if 'CHECKMODE=True' in args:
                # if module isn't using AnsibleModuleCommon infrastructure we can't be certain it knows how to
                # do --check mode, so to be safe we will not run it.
                return ReturnData(
                    conn=conn,
                    result=dict(
                        skipped=True,
                        msg="cannot run check mode against old-style modules"))

            args = utils.template(self.basedir, args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([
                    str(x) for x in [
                        remote_module_path, async_jid, async_limit,
                        async_module, argsfile
                    ]
                ])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([
                    str(x) for x in
                    [remote_module_path, async_jid, async_limit, async_module]
                ])

        if not shebang:
            raise errors.AnsibleError("module is missing interpreter line")

        cmd = " ".join([environment_string, shebang.replace("#!", ""), cmd])
        if tmp.find("tmp") != -1 and C.DEFAULT_KEEP_REMOTE_FILES != '1' and not persist_files:
            cmd = cmd + "; rm -rf %s >/dev/null 2>&1" % tmp
        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        data = utils.parse_json(res['stdout'])
        if 'parsed' in data and data['parsed'] == False:
            data['msg'] += res['stderr']
        return ReturnData(conn=conn, result=data)

    # *****************************************************

    def _executor(self, host):
        ''' handler for multiprocessing library '''

        try:
            exec_rc = self._executor_internal(host)
            if type(exec_rc) != ReturnData:
                raise Exception("unexpected return type: %s" % type(exec_rc))
            # redundant, right?
            if not exec_rc.comm_ok:
                self.callbacks.on_unreachable(host, exec_rc.result)
            return exec_rc
        except errors.AnsibleError, ae:
            msg = str(ae)
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host,
                              comm_ok=False,
                              result=dict(failed=True, msg=msg))
        except Exception:
            msg = traceback.format_exc()
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host,
                              comm_ok=False,
                              result=dict(failed=True, msg=msg))
    def _executor_internal_inner(self,
                                 host,
                                 module_name,
                                 module_args,
                                 inject,
                                 port,
                                 is_chained=False,
                                 complex_args=None):
        ''' decides how to invoke a module '''

        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(module_args) == dict:
            for (k, v) in module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k, v)
            module_args = new_args

        module_name = utils.template(self.basedir, module_name, inject)
        module_args = utils.template(self.basedir, module_args, inject)
        complex_args = utils.template(self.basedir, complex_args, inject)

        if module_name in utils.plugins.action_loader:
            if self.background != 0:
                raise errors.AnsibleError(
                    "async mode is not supported with the %s module" %
                    module_name)
            handler = utils.plugins.action_loader.get(module_name, self)
        elif self.background == 0:
            handler = utils.plugins.action_loader.get('normal', self)
        else:
            handler = utils.plugins.action_loader.get('async', self)

        conditional = utils.template(self.basedir,
                                     self.conditional,
                                     inject,
                                     expand_lists=False)
        if not utils.check_conditional(conditional):
            result = utils.jsonify(dict(skipped=True))
            self.callbacks.on_skipped(host, inject.get('item', None))
            return ReturnData(host=host, result=result)

        conn = None
        actual_host = inject.get('ansible_ssh_host', host)
        actual_port = port
        actual_user = inject.get('ansible_ssh_user', self.remote_user)
        actual_pass = inject.get('ansible_ssh_pass', self.remote_pass)
        actual_transport = inject.get('ansible_connection', self.transport)
        if actual_transport in ['paramiko', 'ssh']:
            actual_port = inject.get('ansible_ssh_port', port)

        # the delegated host may have different SSH port configured, etc
        # and we need to transfer those, and only those, variables
        delegate_to = inject.get('delegate_to', None)
        if delegate_to is not None:
            delegate_to = utils.template(self.basedir, delegate_to, inject)
            inject = inject.copy()
            interpreters = []
            for i in inject:
                if i.startswith("ansible_") and i.endswith("_interpreter"):
                    interpreters.append(i)
            for i in interpreters:
                del inject[i]
            port = C.DEFAULT_REMOTE_PORT
            try:
                delegate_info = inject['hostvars'][delegate_to]
                actual_host = delegate_info.get('ansible_ssh_host',
                                                delegate_to)
                actual_port = delegate_info.get('ansible_ssh_port', port)
                actual_user = delegate_info.get('ansible_ssh_user',
                                                actual_user)
                actual_pass = delegate_info.get('ansible_ssh_pass',
                                                actual_pass)
                actual_transport = delegate_info.get('ansible_connection',
                                                     self.transport)
                for i in delegate_info:
                    if i.startswith("ansible_") and i.endswith("_interpreter"):
                        inject[i] = delegate_info[i]
            except errors.AnsibleError:
                actual_host = delegate_to
                actual_port = port

        actual_user = utils.template(self.basedir, actual_user, inject)
        actual_pass = utils.template(self.basedir, actual_pass, inject)

        try:
            if actual_port is not None:
                actual_port = int(actual_port)
        except ValueError, e:
            result = dict(failed=True,
                          msg="FAILED: Configured port \"%s\" is not a valid port, expected integer" % actual_port)
            return ReturnData(host=host, comm_ok=False, result=result)
    def _executor_internal(self, host):
        ''' executes any module one or more times '''

        host_variables = self.inventory.get_variables(host)
        host_connection = host_variables.get('ansible_connection',
                                             self.transport)
        if host_connection in ['paramiko', 'ssh']:
            port = host_variables.get('ansible_ssh_port', self.remote_port)
            if port is None:
                port = C.DEFAULT_REMOTE_PORT
        else:
            # fireball, local, etc
            port = self.remote_port

        inject = {}
        inject.update(host_variables)
        inject.update(self.module_vars)
        inject.update(self.setup_cache[host])
        inject['hostvars'] = HostVars(self.setup_cache, self.inventory)
        inject['group_names'] = host_variables.get('group_names', [])
        inject['groups'] = self.inventory.groups_list()
        inject['vars'] = self.module_vars
        inject['environment'] = self.environment
        if self.inventory.basedir() is not None:
            inject['inventory_dir'] = self.inventory.basedir()

        # allow with_foo to work in playbooks...
        items = None
        items_plugin = self.module_vars.get('items_lookup_plugin', None)
        if items_plugin is not None and items_plugin in utils.plugins.lookup_loader:
            items_terms = self.module_vars.get('items_lookup_terms', '')
            items_terms = utils.template(self.basedir, items_terms, inject)
            items = utils.plugins.lookup_loader.get(items_plugin,
                                                    runner=self,
                                                    basedir=self.basedir).run(
                                                        items_terms,
                                                        inject=inject)
            if type(items) != list:
                raise errors.AnsibleError(
                    "lookup plugins have to return a list: %r" % items)

            if len(items) and utils.is_list_of_strings(
                    items) and self.module_name in ['apt', 'yum']:
                # hack for apt and yum: with_items maps back into a single module call
                inject['item'] = ",".join(items)
                items = None

        # logic to decide how to run things depends on whether with_items is used

        if items is None:
            return self._executor_internal_inner(
                host,
                self.module_name,
                self.module_args,
                inject,
                port,
                complex_args=self.complex_args)
        elif len(items) > 0:
            # executing using with_items, so make multiple calls
            # TODO: refactor
            aggregrate = {}
            all_comm_ok = True
            all_changed = False
            all_failed = False
            results = []
            for x in items:
                inject['item'] = x
                result = self._executor_internal_inner(
                    host,
                    self.module_name,
                    self.module_args,
                    inject,
                    port,
                    complex_args=self.complex_args)
                results.append(result.result)
                if result.comm_ok == False:
                    all_comm_ok = False
                    all_failed = True
                    break
                for x in results:
                    if x.get('changed') == True:
                        all_changed = True
                    if (x.get('failed') == True) or (('rc' in x) and
                                                     (x['rc'] != 0)):
                        all_failed = True
                        break
            msg = 'All items completed'
            if all_failed:
                msg = "One or more items failed."
            rd_result = dict(failed=all_failed,
                             changed=all_changed,
                             results=results,
                             msg=msg)
            if not all_failed:
                del rd_result['failed']
            return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)
        else:
            self.callbacks.on_skipped(host, None)
            return ReturnData(host=host,
                              comm_ok=True,
                              result=dict(skipped=True))

        try:
            conn = self.connector.connect(actual_host, actual_port,
                                          actual_user, actual_pass,
                                          actual_transport)
            if delegate_to or host != actual_host:
                conn.delegate = host

        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)

        tmp = ''
        # all modules get a tempdir, action plugins get one unless they have NEEDS_TMPPATH set to False
        if getattr(handler, 'NEEDS_TMPPATH', True):
            tmp = self._make_tmp_path(conn)

        result = handler.run(conn, tmp, module_name, module_args, inject,
                             complex_args)

        conn.close()

        if not result.comm_ok:
            # connection or parsing errors...
            self.callbacks.on_unreachable(host, result.result)
        else:
Example 16
    def _executor_internal_inner(self, host, module_name, module_args, inject, port, is_chained=False, complex_args=None):
        ''' decides how to invoke a module '''

        # late processing of parameterized sudo_user (with_items,..)
        if self.sudo_user_var is not None:
            self.sudo_user = template.template(self.basedir, self.sudo_user_var, inject)

        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(module_args) == dict:
            for (k,v) in module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k,v)
            module_args = new_args

        # module_name may be dynamic (but cannot contain {{ ansible_ssh_user }})
        module_name  = template.template(self.basedir, module_name, inject)

        if module_name in utils.plugins.action_loader:
            if self.background != 0:
                raise errors.AnsibleError("async mode is not supported with the %s module" % module_name)
            handler = utils.plugins.action_loader.get(module_name, self)
        elif self.background == 0:
            handler = utils.plugins.action_loader.get('normal', self)
        else:
            handler = utils.plugins.action_loader.get('async', self)

        if type(self.conditional) != list:
            self.conditional = [ self.conditional ]

        for cond in self.conditional:

            if not utils.check_conditional(cond, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars):
                result = utils.jsonify(dict(changed=False, skipped=True))
                self.callbacks.on_skipped(host, inject.get('item',None))
                return ReturnData(host=host, result=result)

        if getattr(handler, 'setup', None) is not None:
            handler.setup(module_name, inject)
        conn = None
        actual_host = inject.get('ansible_ssh_host', host)
        # allow ansible_ssh_host to be templated
        actual_host = template.template(self.basedir, actual_host, inject, fail_on_undefined=True)
        actual_port = port
        actual_user = inject.get('ansible_ssh_user', self.remote_user)
        actual_pass = inject.get('ansible_ssh_pass', self.remote_pass)
        actual_transport = inject.get('ansible_connection', self.transport)
        actual_private_key_file = inject.get('ansible_ssh_private_key_file', self.private_key_file)

        if self.accelerate and actual_transport != 'local':
            # Fix to get the inventory name of the host to the accelerate plugin
            if inject.get('ansible_ssh_host', None):
                self.accelerate_inventory_host = host
            else:
                self.accelerate_inventory_host = None
            # if we're using accelerated mode, force the
            # transport to accelerate
            actual_transport = "accelerate"
            if not self.accelerate_port:
                self.accelerate_port = C.ACCELERATE_PORT

        if actual_transport in [ 'paramiko', 'ssh', 'accelerate' ]:
            actual_port = inject.get('ansible_ssh_port', port)

        # the delegated host may have different SSH port configured, etc
        # and we need to transfer those, and only those, variables
        delegate_to = inject.get('delegate_to', None)
        if delegate_to is not None:
            delegate_to = template.template(self.basedir, delegate_to, inject)
            inject = inject.copy()
            interpreters = []
            for i in inject:
                if i.startswith("ansible_") and i.endswith("_interpreter"):
                    interpreters.append(i)
            for i in interpreters:
                del inject[i]
            port = C.DEFAULT_REMOTE_PORT
            try:
                delegate_info = inject['hostvars'][delegate_to]
                actual_host = delegate_info.get('ansible_ssh_host', delegate_to)
                # allow ansible_ssh_host to be templated
                actual_host = template.template(self.basedir, actual_host, inject, fail_on_undefined=True)
                actual_port = delegate_info.get('ansible_ssh_port', port)
                actual_user = delegate_info.get('ansible_ssh_user', actual_user)
                actual_pass = delegate_info.get('ansible_ssh_pass', actual_pass)
                actual_private_key_file = delegate_info.get('ansible_ssh_private_key_file', self.private_key_file)
                actual_transport = delegate_info.get('ansible_connection', self.transport)
                for i in delegate_info:
                    if i.startswith("ansible_") and i.endswith("_interpreter"):
                        inject[i] = delegate_info[i]
            except errors.AnsibleError:
                actual_host = delegate_to
                actual_port = port

        # user/pass may still contain variables at this stage
        actual_user = template.template(self.basedir, actual_user, inject)
        actual_pass = template.template(self.basedir, actual_pass, inject)

        # make actual_user available as __magic__ ansible_ssh_user variable
        inject['ansible_ssh_user'] = actual_user

        try:
            if actual_transport == 'accelerate':
                # for accelerate, we stuff both ports into a single
                # variable so that we don't have to mangle other function
                # calls just to accommodate this one case
                actual_port = [actual_port, self.accelerate_port]
            elif actual_port is not None:
                actual_port = int(template.template(self.basedir, actual_port, inject))
        except ValueError, e:
            result = dict(failed=True, msg="FAILED: Configured port \"%s\" is not a valid port, expected integer" % actual_port)
            return ReturnData(host=host, comm_ok=False, result=result)
Example 17
    def _executor_internal(self, host, new_stdin):
        ''' executes any module one or more times '''

        host_variables = self.inventory.get_variables(host)
        host_connection = host_variables.get('ansible_connection', self.transport)
        if host_connection in [ 'paramiko', 'ssh', 'accelerate' ]:
            port = host_variables.get('ansible_ssh_port', self.remote_port)
            if port is None:
                port = C.DEFAULT_REMOTE_PORT
        else:
            # fireball, local, etc
            port = self.remote_port

        inject = {}
        inject = utils.combine_vars(inject, self.default_vars)
        inject = utils.combine_vars(inject, host_variables)
        inject = utils.combine_vars(inject, self.module_vars)
        inject = utils.combine_vars(inject, self.setup_cache[host])
        inject.setdefault('ansible_ssh_user', self.remote_user)
        inject['hostvars'] = HostVars(self.setup_cache, self.inventory)
        inject['group_names'] = host_variables.get('group_names', [])
        inject['groups']      = self.inventory.groups_list()
        inject['vars']        = self.module_vars
        inject['defaults']    = self.default_vars
        inject['environment'] = self.environment
        inject['playbook_dir'] = self.basedir

        if self.inventory.basedir() is not None:
            inject['inventory_dir'] = self.inventory.basedir()

        if self.inventory.src() is not None:
            inject['inventory_file'] = self.inventory.src()

        # allow with_foo to work in playbooks...
        items = None
        items_plugin = self.module_vars.get('items_lookup_plugin', None)

        if items_plugin is not None and items_plugin in utils.plugins.lookup_loader:

            basedir = self.basedir
            if '_original_file' in inject:
                basedir = os.path.dirname(inject['_original_file'])
                filesdir = os.path.join(basedir, '..', 'files')
                if os.path.exists(filesdir):
                    basedir = filesdir

            items_terms = self.module_vars.get('items_lookup_terms', '')
            items_terms = template.template(basedir, items_terms, inject)
            items = utils.plugins.lookup_loader.get(items_plugin, runner=self, basedir=basedir).run(items_terms, inject=inject)
            if type(items) != list:
                raise errors.AnsibleError("lookup plugins have to return a list: %r" % items)

            if len(items) and utils.is_list_of_strings(items) and self.module_name in [ 'apt', 'yum', 'pkgng' ]:
                # hack for apt, yum, and pkgng so that with_items maps back into a single module call
                use_these_items = []
                for x in items:
                    inject['item'] = x
                    if not self.conditional or utils.check_conditional(self.conditional, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars):
                        use_these_items.append(x)
                inject['item'] = ",".join(use_these_items)
                items = None

        # logic to replace complex args if possible
        complex_args = self.complex_args

        # logic to decide how to run things depends on whether with_items is used
        if items is None:
            if isinstance(complex_args, basestring):
                complex_args = template.template(self.basedir, complex_args, inject, convert_bare=True)
                complex_args = utils.safe_eval(complex_args)
                if type(complex_args) != dict:
                    raise errors.AnsibleError("args must be a dictionary, received %s" % complex_args)
            return self._executor_internal_inner(host, self.module_name, self.module_args, inject, port, complex_args=complex_args)
        elif len(items) > 0:

            # executing using with_items, so make multiple calls
            # TODO: refactor

            if self.background > 0:
                raise errors.AnsibleError("lookup plugins (with_*) cannot be used with async tasks")

            aggregrate = {}
            all_comm_ok = True
            all_changed = False
            all_failed = False
            results = []
            for x in items:
                inject['item'] = x

                # TODO: this idiom should be replaced with an up-conversion to a Jinja2 template evaluation
                if isinstance(self.complex_args, basestring):
                    complex_args = template.template(self.basedir, self.complex_args, inject, convert_bare=True)
                    complex_args = utils.safe_eval(complex_args)
                    if type(complex_args) != dict:
                        raise errors.AnsibleError("args must be a dictionary, received %s" % complex_args)
                result = self._executor_internal_inner(
                     host,
                     self.module_name,
                     self.module_args,
                     inject,
                     port,
                     complex_args=complex_args
                )
                results.append(result.result)
                if result.comm_ok == False:
                    all_comm_ok = False
                    all_failed = True
                    break
                for x in results:
                    if x.get('changed') == True:
                        all_changed = True
                    if (x.get('failed') == True) or ('failed_when_result' in x and [x['failed_when_result']] or [('rc' in x) and (x['rc'] != 0)])[0]:
                        all_failed = True
                        break
            msg = 'All items completed'
            if all_failed:
                msg = "One or more items failed."
            rd_result = dict(failed=all_failed, changed=all_changed, results=results, msg=msg)
            if not all_failed:
                del rd_result['failed']
            return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)
        else:
            self.callbacks.on_skipped(host, None)
            return ReturnData(host=host, comm_ok=True, result=dict(changed=False, skipped=True))
Example 18
    def _execute_module(self, conn, tmp, module_name, args,
        async_jid=None, async_module=None, async_limit=None, inject=None, persist_files=False, complex_args=None):

        ''' runs a module that has already been transferred '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s" % (args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
            if 'port' not in args:
                args += " port=%s" % C.ZEROMQ_PORT

        (remote_module_path, module_style, shebang) = self._copy_module(conn, tmp, module_name, args, inject, complex_args)

        environment_string = self._compute_environment_string(inject)

        cmd_mod = ""
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd_chmod = "chmod a+r %s" % remote_module_path
            self._low_level_exec_command(conn, cmd_chmod, tmp, sudoable=False)

        cmd = ""
        if module_style != 'new':
            if 'CHECKMODE=True' in args:
                # if module isn't using AnsibleModuleCommon infrastructure we can't be certain it knows how to
                # do --check mode, so to be safe we will not run it.
                return ReturnData(conn=conn, result=dict(skipped=True, msg="cannot yet run check mode against old-style modules"))

            args = template.template(self.basedir, args, inject)

            # decide whether we need to transfer JSON or key=value
            argsfile = None
            if module_style == 'non_native_want_json':
                if complex_args:
                    complex_args.update(utils.parse_kv(args))
                    argsfile = self._transfer_str(conn, tmp, 'arguments', utils.jsonify(complex_args))
                else:
                    argsfile = self._transfer_str(conn, tmp, 'arguments', utils.jsonify(utils.parse_kv(args)))

            else:
                argsfile = self._transfer_str(conn, tmp, 'arguments', args)
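            # Illustration with assumed values: for a 'non_native_want_json'
            # module, args of "src=/tmp/a dest=/tmp/b" plus complex_args of
            # {'mode': '0644'} are merged and shipped as a JSON arguments file,
            #     {"src": "/tmp/a", "dest": "/tmp/b", "mode": "0644"}
            # while other old-style modules still receive the raw key=value text.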

            if self.sudo and self.sudo_user != 'root':
                # deal with possible umask issues once sudo'ed to other user
                cmd_args_chmod = "chmod a+r %s" % argsfile
                self._low_level_exec_command(conn, cmd_args_chmod, tmp, sudoable=False)

            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module]])

        if not shebang:
            raise errors.AnsibleError("module is missing interpreter line")

        cmd = " ".join([environment_string.strip(), shebang.replace("#!","").strip(), cmd])
        cmd = cmd.strip()

        if tmp.find("tmp") != -1 and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files:
            if not self.sudo or self.sudo_user == 'root':
                # not sudoing or sudoing to root, so can cleanup files in the same step
                cmd = cmd + "; rm -rf %s >/dev/null 2>&1" % tmp

        sudoable = True
        if module_name == "accelerate":
            # always run the accelerate module as the user
            # specified in the play, not the sudo_user
            sudoable = False

        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=sudoable)

        if self.sudo and self.sudo_user != 'root':
            # not sudoing to root, so maybe can't delete files as that other user
            # have to clean up temp files as original user in a second step
            if tmp.find("tmp") != -1 and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files:
                cmd2 = "rm -rf %s >/dev/null 2>&1" % tmp
                self._low_level_exec_command(conn, cmd2, tmp, sudoable=False)

        data = utils.parse_json(res['stdout'])
        if 'parsed' in data and data['parsed'] == False:
            data['msg'] += res['stderr']
        return ReturnData(conn=conn, result=data)
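
A quick illustration of the command assembly above: a minimal sketch with made-up values
(the real environment_string, shebang, remote_module_path and tmp come from
_compute_environment_string(), _copy_module() and the connection setup):

environment_string = "LANG=C LC_CTYPE=C"                          # hypothetical
shebang            = "#!/usr/bin/python"                          # hypothetical
remote_module_path = "/home/user/.ansible/tmp/ansible-1234/ping"  # hypothetical
tmp                = "/home/user/.ansible/tmp/ansible-1234"       # hypothetical

cmd = " ".join([environment_string.strip(), shebang.replace("#!", "").strip(), remote_module_path]).strip()
cmd = cmd + "; rm -rf %s >/dev/null 2>&1" % tmp
# cmd is now:
# "LANG=C LC_CTYPE=C /usr/bin/python /home/user/.ansible/tmp/ansible-1234/ping; rm -rf /home/user/.ansible/tmp/ansible-1234 >/dev/null 2>&1"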
Example n. 19
class Runner(object):
    ''' core API interface to ansible '''

    # see bin/ansible for how this is used...

    def __init__(self,
        host_list=C.DEFAULT_HOST_LIST,      # ex: /etc/ansible/hosts, legacy usage
        module_path=C.DEFAULT_MODULE_PATH,  # ex: /usr/share/ansible
        module_name=C.DEFAULT_MODULE_NAME,  # ex: copy
        module_args=C.DEFAULT_MODULE_ARGS,  # ex: "src=/tmp/a dest=/tmp/b"
        forks=C.DEFAULT_FORKS,              # parallelism level
        timeout=C.DEFAULT_TIMEOUT,          # SSH timeout
        pattern=C.DEFAULT_PATTERN,          # which hosts?  ex: 'all', 'acme.example.org'
        remote_user=C.DEFAULT_REMOTE_USER,  # ex: 'username'
        remote_pass=C.DEFAULT_REMOTE_PASS,  # ex: 'password123' or None if using key
        remote_port=None,                   # if SSH on different ports
        private_key_file=C.DEFAULT_PRIVATE_KEY_FILE, # if not using keys/passwords
        sudo_pass=C.DEFAULT_SUDO_PASS,      # ex: 'password123' or None
        background=0,                       # async poll every X seconds, else 0 for non-async
        basedir=None,                       # directory of playbook, if applicable
        setup_cache=None,                   # used to share fact data w/ other tasks
        transport=C.DEFAULT_TRANSPORT,      # 'ssh', 'paramiko', 'local'
        conditional='True',                 # run only if this fact expression evals to true
        callbacks=None,                     # used for output
        sudo=False,                         # whether to run sudo or not
        sudo_user=C.DEFAULT_SUDO_USER,      # ex: 'root'
        module_vars=None,                   # a playbook internals thing
        is_playbook=False,                  # running from playbook or not?
        inventory=None,                     # reference to Inventory object
        subset=None                         # subset pattern
        ):

        # storage & defaults
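        # (utils.default(value, factory) keeps the supplied value unless it is None,
        #  in which case the factory callable provides the default)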
        self.setup_cache      = utils.default(setup_cache, lambda: collections.defaultdict(dict))
        self.basedir          = utils.default(basedir, lambda: os.getcwd())
        self.callbacks        = utils.default(callbacks, lambda: DefaultRunnerCallbacks())
        self.generated_jid    = str(random.randint(0, 999999999999))
        self.transport        = transport
        self.inventory        = utils.default(inventory, lambda: ansible.inventory.Inventory(host_list))

        self.module_vars      = utils.default(module_vars, lambda: {})
        self.sudo_user        = sudo_user
        self.connector        = connection.Connection(self)
        self.conditional      = conditional
        self.module_path      = module_path
        self.module_name      = module_name
        self.forks            = int(forks)
        self.pattern          = pattern
        self.module_args      = module_args
        self.timeout          = timeout
        self.remote_user      = remote_user
        self.remote_pass      = remote_pass
        self.remote_port      = remote_port
        self.private_key_file = private_key_file
        self.background       = background
        self.sudo             = sudo
        self.sudo_pass        = sudo_pass
        self.is_playbook      = is_playbook

        # misc housekeeping
        if subset and self.inventory._subset is None:
            # don't override subset when passed from playbook
            self.inventory.subset(subset)

        if self.transport == 'ssh' and remote_pass:
            raise errors.AnsibleError("SSH transport does not support passwords, only keys or agents")
        if self.transport == 'local':
            self.remote_user = pwd.getpwuid(os.geteuid())[0]

        # ensure we are using unique tmp paths
        random.seed()

        # instantiate plugin classes
        self.action_plugins = {}
        self.lookup_plugins = {}
        for (k,v) in action_plugin_list.iteritems():
            self.action_plugins[k] = v.ActionModule(self)
        for (k,v) in lookup_plugin_list.iteritems():
            self.lookup_plugins[k] = v.LookupModule(self)

    # *****************************************************

    def _delete_remote_files(self, conn, files):
        ''' deletes one or more remote files '''

        if os.getenv("ANSIBLE_KEEP_REMOTE_FILES","0") == "1":
            # ability to turn off temp file deletion for debug purposes
            return

        if type(files) in [ str, unicode ]:
            files = [ files ]
        for filename in files:
            if filename.find('/tmp/') == -1:
                raise Exception("safeguard deletion, removal of %s is not going to happen" % filename)
            self._low_level_exec_command(conn, "rm -rf %s" % filename, None)

    # *****************************************************

    def _transfer_str(self, conn, tmp, name, data):
        ''' transfer string to remote file '''

        if type(data) == dict:
            data = utils.jsonify(data)

        afd, afile = tempfile.mkstemp()
        afo = os.fdopen(afd, 'w')
        try:
            afo.write(data.encode('utf8'))
        except:
            raise errors.AnsibleError("failure encoding into utf-8")
        afo.flush()
        afo.close()

        remote = os.path.join(tmp, name)
        try:
            conn.put_file(afile, remote)
        finally:
            os.unlink(afile)
        return remote
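        # Illustration with hypothetical values: _transfer_str(conn,
        # '/home/user/.ansible/tmp/ansible-1234', 'arguments', 'dest=/etc/motd mode=0644')
        # writes the string to a local tempfile, uploads it via conn.put_file(), and returns
        # '/home/user/.ansible/tmp/ansible-1234/arguments' for use on the module command line.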

    # *****************************************************

    def _execute_module(self, conn, tmp, module_name, args,
        async_jid=None, async_module=None, async_limit=None, inject=None):

        ''' runs a module that has already been transferred '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s" % (args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
            if 'port' not in args:
                args += " port=%s" % C.ZEROMQ_PORT

        (remote_module_path, is_new_style) = self._copy_module(conn, tmp, module_name, args, inject)
        cmd = "chmod u+x %s" % remote_module_path
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd = "chmod a+rx %s" % remote_module_path
        self._low_level_exec_command(conn, cmd, tmp)

        cmd = ""
        if not is_new_style:
            args = utils.template(self.basedir, args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module]])

        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        return ReturnData(conn=conn, result=res)
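        # Resulting command lines (hypothetical paths): an old-style module is invoked with a
        # transferred arguments file, e.g. "/tmp/ansible-1234/command /tmp/ansible-1234/arguments",
        # while a new-style module has its arguments embedded at copy time and runs bare,
        # e.g. "/tmp/ansible-1234/ping". Async runs additionally pass the job id, time limit
        # and wrapped module path.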

    # *****************************************************

    def _executor(self, host):
        ''' handler for multiprocessing library '''

        try:
            exec_rc = self._executor_internal(host)
            if type(exec_rc) != ReturnData:
                raise Exception("unexpected return type: %s" % type(exec_rc))
            # redundant, right?
            if not exec_rc.comm_ok:
                self.callbacks.on_unreachable(host, exec_rc.result)
            return exec_rc
        except errors.AnsibleError, ae:
            msg = str(ae)
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
        except Exception:
            msg = traceback.format_exc()
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
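
For orientation, a minimal sketch of driving this class directly (all values hypothetical;
Runner.run() and the exact shape of its return value are outside this excerpt, so treat
them as assumptions rather than a documented contract):

import ansible.runner

runner = ansible.runner.Runner(
    pattern='webservers',   # hosts to target
    module_name='ping',     # module to execute
    module_args='',         # key=value arguments
    forks=10,               # parallelism level
    sudo=False,
)
results = runner.run()      # per-host results for reachable and unreachable hosts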
Example n. 20
    def _executor_internal_inner(self,
                                 host,
                                 module_name,
                                 module_args,
                                 inject,
                                 port,
                                 is_chained=False):
        ''' decides how to invoke a module '''

        # special non-user/non-fact variables:
        # 'groups' variable is a list of host names in each group
        # 'hostvars' variable contains variables for each host name
        #  ... and is set elsewhere
        # 'inventory_hostname' is also set elsewhere
        inject['groups'] = self.inventory.groups_list()

        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(module_args) == dict:
            for (k, v) in module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k, v)
            module_args = new_args

        conditional = utils.template(self.basedir, self.conditional, inject)
        if not utils.check_conditional(conditional):
            result = utils.jsonify(dict(skipped=True))
            self.callbacks.on_skipped(host, inject.get('item', None))
            return ReturnData(host=host, result=result)

        conn = None
        actual_host = inject.get('ansible_ssh_host', host)
        actual_port = port
        if self.transport in ['paramiko', 'ssh']:
            actual_port = inject.get('ansible_ssh_port', port)

        # the delegated host may have different SSH port configured, etc
        # and we need to transfer those, and only those, variables
        delegate_to = inject.get('delegate_to', None)
        if delegate_to is not None:
            delegate_to = utils.template(self.basedir, delegate_to, inject)
            inject = inject.copy()
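            # drop interpreter settings inherited from the original host; the delegate's
            # own ansible_*_interpreter values (if any) are copied back in from hostvars below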
            interpreters = []
            for i in inject:
                if i.startswith("ansible_") and i.endswith("_interpreter"):
                    interpreters.append(i)
            for i in interpreters:
                del inject[i]
            port = C.DEFAULT_REMOTE_PORT
            try:
                delegate_info = inject['hostvars'][delegate_to]
                actual_host = delegate_info.get('ansible_ssh_host',
                                                delegate_to)
                actual_port = delegate_info.get('ansible_ssh_port', port)
                for i in delegate_info:
                    if i.startswith("ansible_") and i.endswith("_interpreter"):
                        inject[i] = delegate_info[i]
            except errors.AnsibleError:
                actual_host = delegate_to
                actual_port = port

        try:
            if actual_port is not None:
                actual_port = int(actual_port)
            conn = self.connector.connect(actual_host, actual_port)
            if delegate_to or host != actual_host:
                conn.delegate = host

        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)
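
A condensed sketch of the delegate_to resolution above, using hypothetical inventory data
(host names, addresses and ports are made up):

hostvars = {'db1': {'ansible_ssh_host': '10.0.0.5', 'ansible_ssh_port': 2222}}
delegate_to = 'db1'
port = 22

delegate_info = hostvars[delegate_to]
actual_host = delegate_info.get('ansible_ssh_host', delegate_to)   # '10.0.0.5'
actual_port = delegate_info.get('ansible_ssh_port', port)          # 2222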
Example n. 21
    def _executor_internal(self, host):
        ''' executes any module one or more times '''

        host_variables = self.inventory.get_variables(host)
        if self.transport in [ 'paramiko', 'ssh' ]:
            port = host_variables.get('ansible_ssh_port', self.remote_port)
            if port is None:
                port = C.DEFAULT_REMOTE_PORT 
        else:
            # fireball, local, etc
            port = self.remote_port

        inject = {}
        inject.update(host_variables)
        inject.update(self.module_vars)
        inject.update(self.setup_cache[host])
        inject['hostvars'] = HostVars(self.setup_cache, self.inventory)
        inject['group_names'] = host_variables.get('group_names', [])
        inject['groups'] = self.inventory.groups_list()

        # allow with_items to work in playbooks...
        # apt and yum are converted into a single call, others run in a loop
        items = self.module_vars.get('items', [])
        if isinstance(items, basestring) and items.startswith("$"):
            items = utils.varReplaceWithItems(self.basedir, items, inject)

        # if we instead said 'with_foo' and there is a lookup module named foo...
        items_plugin = self.module_vars.get('items_lookup_plugin', None)
        if items_plugin is not None:
            items_terms = self.module_vars.get('items_lookup_terms', '')
            if items_plugin in self.lookup_plugins:
                items_terms = utils.template(self.basedir, items_terms, inject)
                items = self.lookup_plugins[items_plugin].run(items_terms)

        if type(items) != list:
            raise errors.AnsibleError("with_items only takes a list: %s" % items)

        if len(items) and self.module_name in [ 'apt', 'yum' ]:
            # hack for apt and soon yum, with_items maps back into a single module call
            inject['item'] = ",".join(items)
            items = []
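            # e.g. items ['vim', 'git'] collapses to inject['item'] = "vim,git" so the
            # package module receives a single call (example values are illustrative)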

        # logic to decide how to run things depends on whether with_items is used

        if len(items) == 0:
            return self._executor_internal_inner(host, self.module_name, self.module_args, inject, port)
        else:
            # executing using with_items, so make multiple calls
            # TODO: refactor
            aggregate = {}
            all_comm_ok = True
            all_changed = False
            all_failed = False
            results = []
            for x in items:
                inject['item'] = x
                result = self._executor_internal_inner(host, self.module_name, self.module_args, inject, port)
                results.append(result.result)
                if result.comm_ok == False:
                    all_comm_ok = False
                    all_failed = True
                    break
                for x in results:
                    if x.get('changed') == True:
                        all_changed = True
                    if (x.get('failed') == True) or (('rc' in x) and (x['rc'] != 0)):
                        all_failed = True
                        break
            msg = 'All items completed'
            if all_failed:
                msg = "One or more items failed."
            rd_result = dict(failed=all_failed, changed=all_changed, results=results, msg=msg)
            if not all_failed:
                del rd_result['failed']
            return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)
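            # Condensed illustration of the roll-up above (hypothetical per-item results):
            #   results     = [{'changed': True, 'rc': 0}, {'failed': True}]
            #   all_changed -> True   (some item reported changed)
            #   all_failed  -> True   (some item failed or returned a non-zero rc)
            #   msg         -> "One or more items failed."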

    # *****************************************************

    def _executor_internal_inner(self, host, module_name, module_args, inject, port, is_chained=False, complex_args=None):
        ''' decides how to invoke a module '''


        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(module_args) == dict:
            for (k,v) in module_args.iteritems():