Esempio n. 1
0
    def __init__(self, play, ds, module_vars=None):
        '''
        Constructor: loads a task (or handler) from its playbook
        datastructure.

        play        -- the Play object this task belongs to
        ds          -- the task/handler datastructure (a dict from YAML)
        module_vars -- task-specific variables made available for templating
        '''

        # TODO: more error handling
        # include task specific vars

        self.module_vars = module_vars

        self.play        = play
        self.name        = ds.get('name', None)
        self.action      = ds.get('action', '')
        # every task implicitly carries the 'all' tag
        self.tags        = [ 'all' ]

        # handlers that have flagged this task for notification
        self.notified_by = []

        # unnamed tasks are reported by their action string
        if self.name is None:
            self.name = self.action

        self.only_if = ds.get('only_if', 'True')
        self.async_seconds = int(ds.get('async', 0))  # not async by default
        self.async_poll_interval = int(ds.get('poll', 10))  # default poll = 10 seconds
        self.notify = ds.get('notify', [])
        # allow a single handler name to be given as a bare string
        if isinstance(self.notify, basestring):
            self.notify = [ self.notify ]

        # first token of the action is the module name, the rest its args
        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError("invalid/missing action in task")

        self.module_name = tokens[0]
        self.module_args = ''
        if len(tokens) > 1:
            self.module_args = tokens[1]

        import_tags = []
        if 'tags' in self.module_vars:
            import_tags = self.module_vars['tags'].split(",")

        self.name = utils.template(self.name, self.module_vars)
        # BUGFIX: template the action string, not the already-templated name
        self.action = utils.template(self.action, self.module_vars)

        if 'first_available_file' in ds:
            self.module_vars['first_available_file'] = ds.get('first_available_file')

        # tags allow certain parts of a playbook to be run without
        # running the whole playbook
        apply_tags = ds.get('tags', None)
        if apply_tags is not None:
            if type(apply_tags) in [ str, unicode ]:
                self.tags.append(apply_tags)
            elif type(apply_tags) == list:
                self.tags.extend(apply_tags)
        self.tags.extend(import_tags)
Esempio n. 2
0
    def run(self, conn, tmp, module_name, module_args, inject):
        '''
        Handler for fetch operations: pull a remote file down to
        dest/<hostname>/<source> on the control machine, transferring only
        when the md5 sums differ.  Returns a ReturnData result.
        '''

        # load up options
        options = utils.parse_kv(module_args)
        source = options.get('src', None)
        dest = options.get('dest', None)
        if source is None or dest is None:
            results = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, result=results)

        # apply templating to source argument
        source = utils.template(self.runner.basedir, source, inject)
        # apply templating to dest argument
        dest = utils.template(self.runner.basedir, dest, inject)

        # files are saved in dest dir, with a subdir for each host, then the filename
        dest   = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir, dest), conn.host, source)
        dest   = dest.replace("//","/")

        # calculate md5 sum for the remote file
        remote_md5 = self.runner._remote_md5(conn, tmp, source)

        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        # the remote md5 helper encodes errors as sentinel strings:
        # '0' = checksum failed, '1' = file missing, '2' = unreadable
        if remote_md5 == '0':
            result = dict(msg="unable to calculate the md5 sum of the remote file", file=source, changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '1':
            result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '2':
            result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
            return ReturnData(conn=conn, result=result)

        # calculate md5 sum for the local file
        local_md5 = utils.md5(dest)

        if remote_md5 != local_md5:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            conn.fetch_file(source, dest)
            # verify the transfer by re-checksumming the local copy
            new_md5 = utils.md5(dest)
            if new_md5 != remote_md5:
                result = dict(failed=True, md5sum=new_md5, msg="md5 mismatch", file=source)
                return ReturnData(conn=conn, result=result)
            result = dict(changed=True, md5sum=new_md5)
            return ReturnData(conn=conn, result=result)
        else:
            # local copy already matches the remote file; nothing to do
            result = dict(changed=False, md5sum=local_md5, file=source)
            return ReturnData(conn=conn, result=result)
Esempio n. 3
0
    def _parse_playbook(self, playbook):
        '''
        Load a playbook YAML file and return its datastructure, expanding
        task/handler include statements and with_items loops in place.
        '''

        dirname  = os.path.dirname(playbook)
        playbook = utils.parse_yaml_from_file(playbook)

        for play in playbook:
            tasks = play.get('tasks',[])
            handlers = play.get('handlers', [])

            # process tasks in this file as well as imported tasks
            new_tasks = []
            for task in tasks:
                if 'include' in task:
                    self._include_tasks(play, task, dirname, new_tasks)
                else:
                    new_tasks.append(task)

            # now new_tasks contains a list of tasks, but tasks may contain
            # lists of with_items to loop over.  Do that.
            # TODO: refactor into subfunction
            new_tasks2 = []
            for task in new_tasks:
                if 'with_items' in task:
                    for item in task['with_items']:
                        name    = task.get('name', task.get('action', 'unnamed task'))
                        action  = task.get('action', None)
                        only_if = task.get('only_if', None)
                        if action is None:
                            raise errors.AnsibleError('action is required')
                        # clone the task for each item, templating the
                        # item-sensitive fields
                        # (removed a dead 'produced_task = {}' that was
                        # immediately overwritten by task.copy())
                        produced_task = task.copy()
                        produced_task['action'] = utils.template(action, dict(item=item))
                        produced_task['name'] = utils.template(name, dict(item=item))
                        if only_if:
                            produced_task['only_if'] = utils.template(only_if, dict(item=item))
                        new_tasks2.append(produced_task)
                else:
                    new_tasks2.append(task)

            play['tasks'] = new_tasks2

            # process handlers as well as imported handlers
            new_handlers = []
            for handler in handlers:
                if 'include' in handler:
                    self._include_handlers(play, handler, dirname, new_handlers)
                else:
                    new_handlers.append(handler)
            play['handlers'] = new_handlers

        return playbook
Esempio n. 4
0
    def run(self, conn, tmp, module_name, module_args, inject):
        '''
        Handler for file transfer operations: stage the source file in the
        remote tmp dir when the md5 differs, then hand off to the copy
        module, daisychaining the file module afterwards.
        '''

        # load up options
        options = utils.parse_kv(module_args)
        source  = options.get('src', None)
        dest    = options.get('dest', None)
        if (source is None and not 'first_available_file' in inject) or dest is None:
            result=dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, result=result)

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in inject.get('first_available_file'):
                fn = utils.template(self.runner.basedir, fn, inject)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                results=dict(failed=True, msg="could not find src in first_available_file list")
                # BUGFIX: ReturnData takes 'result', not 'results' -- every
                # other return in this method uses the 'result' keyword
                return ReturnData(conn=conn, result=results)

        source = utils.template(self.runner.basedir, source, inject)
        source = utils.path_dwim(self.runner.basedir, source)

        local_md5 = utils.md5(source)
        if local_md5 is None:
            result=dict(failed=True, msg="could not find src=%s" % source)
            return ReturnData(conn=conn, result=result)

        remote_md5 = self.runner._remote_md5(conn, tmp, dest)

        if local_md5 != remote_md5:
            # transfer the file to a remote tmp location
            tmp_src = tmp + os.path.basename(source)
            conn.put_file(source, tmp_src)
            # fix file permissions when the copy is done as a different user
            if self.runner.sudo and self.runner.sudo_user != 'root':
                self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)

            # run the copy module
            module_args = "%s src=%s" % (module_args, tmp_src)
            return self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject).daisychain('file', module_args)

        else:
            # no need to transfer the file, already correct md5
            result = dict(changed=False, md5sum=remote_md5, transferred=False)
            return ReturnData(conn=conn, result=result).daisychain('file', module_args)
Esempio n. 5
0
File: play.py Project: Minione/iwct
    def __init__(self, playbook, ds, basedir):
        '''
        Constructor: loads a play from its playbook datastructure.
        NOTE: the assignment order below matters -- self.vars must be
        resolved before any of the templated fields that use it.
        '''

        # reject unknown top-level keywords early
        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter in an Ansible Playbook" % x)

        # TODO: more error handling

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            # a list of host patterns collapses to one ';'-separated pattern
            hosts = ';'.join(hosts)
        self._ds          = ds
        self.playbook     = playbook
        self.basedir      = basedir
        self.vars         = ds.get('vars', {})
        self.vars_files   = ds.get('vars_files', [])
        self.vars_prompt  = ds.get('vars_prompt', {})
        self.vars         = self._get_vars()
        self.hosts        = utils.template(basedir, hosts, self.vars)
        self.name         = ds.get('name', self.hosts)
        self._tasks       = ds.get('tasks', [])
        self._handlers    = ds.get('handlers', [])
        self.remote_user  = utils.template(basedir, ds.get('user', self.playbook.remote_user), self.vars)
        self.remote_port  = ds.get('port', self.playbook.remote_port)
        self.sudo         = ds.get('sudo', self.playbook.sudo)
        self.sudo_user    = utils.template(basedir, ds.get('sudo_user', self.playbook.sudo_user), self.vars)
        self.transport    = ds.get('connection', self.playbook.transport)
        self.tags         = ds.get('tags', None)
        self.gather_facts = ds.get('gather_facts', None)
        self.serial       = int(utils.template_ds(basedir, ds.get('serial', 0), self.vars))

        # the port may itself be a template expression
        if isinstance(self.remote_port, basestring):
            self.remote_port = utils.template(basedir, self.remote_port, self.vars)

        self._update_vars_files_for_host(None)

        self._tasks      = self._load_tasks(self._ds.get('tasks', []))
        self._handlers   = self._load_tasks(self._ds.get('handlers', []))

        # normalize tags to a list of strings
        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [ str, unicode ]:
            self.tags = [ self.tags ]
        elif type(self.tags) != list:
            self.tags = []

        # sudoing to any user other than root implies sudo mode
        if self.sudo_user != 'root':
            self.sudo = True
Esempio n. 6
0
    def _execute_template(self, conn, host, tmp):
        '''
        Handler for template operations: gather the variable namespace
        (from the setup cache, or by slurping the remote setup file when
        not running from a playbook) and render the local template.
        '''

        # load up options
        options  = utils.parse_kv(self.module_args)
        source   = options.get('src', None)
        dest     = options.get('dest', None)
        metadata = options.get('metadata', None)
        if source is None or dest is None:
            return (host, True, dict(failed=True, msg="src and dest are required"), '')

        # apply templating to source argument so vars can be used in the path
        inject = self.setup_cache.get(conn.host,{})
        source = utils.template(source, inject, self.setup_cache)

        (host, ok, data, err) = (None, None, None, None)

        if not self.is_playbook:

            # not running from a playbook so we have to fetch the remote
            # setup file contents before proceeding...
            if metadata is None:
                if self.remote_user == 'root':
                    metadata = '/etc/ansible/setup'
                else:
                    # path is expanded on remote side
                    metadata = "~/.ansible/setup"

            # install the template module
            slurp_module = self._transfer_module(conn, tmp, 'slurp')

            # run the slurp module to get the metadata file
            args = "src=%s" % metadata
            (result1, err, executed) = self._execute_module(conn, tmp, slurp_module, args)
            result1 = utils.json_loads(result1)
            # slurp returns the file base64-encoded under 'content'
            if not 'content' in result1 or result1.get('encoding','base64') != 'base64':
                result1['failed'] = True
                return self._return_from_module(conn, host, result1, err, executed)
            content = base64.b64decode(result1['content'])
            inject = utils.json_loads(content)

        # install the template module
        copy_module = self._transfer_module(conn, tmp, 'copy')

        # template the source data locally
        source_data = file(utils.path_dwim(self.basedir, source)).read()
        resultant = ''
        try:
            resultant = utils.template(source_data, inject, self.setup_cache)
        except Exception, e:
            # templating errors are reported back rather than raised
            return (host, False, dict(failed=True, msg=str(e)), '')
Esempio n. 7
0
    def _execute_copy(self, conn, tmp):
        '''
        Handler for file transfer operations: resolve the source (possibly
        via first_available_file), stage it in the remote tmp dir, then run
        the copy module against it.
        '''

        # load up options
        options = utils.parse_kv(self.module_args)
        source = options.get('src', None)
        dest   = options.get('dest', None)
        # src may be omitted only when first_available_file supplies it
        if (source is None and not 'first_available_file' in self.module_vars) or dest is None:
            result=dict(failed=True, msg="src and dest are required")
            return ReturnData(host=conn.host, result=result)

        # apply templating to source argument
        inject = self.setup_cache.get(conn.host,{})

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in self.module_vars:
            found = False
            for fn in self.module_vars.get('first_available_file'):
                fn = utils.template(fn, inject, self.setup_cache)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                results=dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(host=conn.host, is_error=True, results=results)

        # fold task-level variables into the templating namespace
        if self.module_vars is not None:
            inject.update(self.module_vars)

        source = utils.template(source, inject, self.setup_cache)

        # transfer the file to a remote tmp location
        tmp_src = tmp + source.split('/')[-1]
        conn.put_file(utils.path_dwim(self.basedir, source), tmp_src)

        # install the copy  module
        self.module_name = 'copy'
        module = self._transfer_module(conn, tmp, 'copy')

        # run the copy module
        args = "src=%s dest=%s" % (tmp_src, dest)
        exec_rc = self._execute_module(conn, tmp, module, args)

        if exec_rc.is_successful():
            return self._chain_file_module(conn, tmp, exec_rc, options)
        else:
            return exec_rc
Esempio n. 8
0
    def _load_tasks(self, ds, keyname):
        '''
        Handle task and handler include statements: expand 'include'
        entries (with optional with_items loops and key=value parameters)
        into Task objects, and propagate play-level tags onto every task.
        '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            if 'include' in x:
                task_vars = self.vars.copy()
                tokens = shlex.split(x['include'])
                # an include may loop via with_items; default to one pass
                if 'with_items' in x:
                    items = utils.varReplaceWithItems(self.basedir, x['with_items'], task_vars)
                else:
                    items = ['']
                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    # key=value tokens after the filename become task vars
                    for t in tokens[1:]:
                        (k,v) = t.split("=", 1)
                        mv[k] = utils.varReplaceWithItems(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file))
                    for y in data:
                        results.append(Task(self,y,module_vars=mv.copy()))
            elif type(x) == dict:
                task_vars = self.vars.copy()
                results.append(Task(self,x,module_vars=task_vars))
            else:
                # consistency fix: raise AnsibleError like the rest of the
                # codebase rather than a bare Exception
                raise errors.AnsibleError("unexpected task type")

        # apply play-level tags to every loaded task
        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Esempio n. 9
0
    def _execute_module(self, conn, tmp, remote_module_path, args, async_jid=None, async_module=None, async_limit=None):
        """
        Runs a module that has already been transferred.  Returns a tuple
        of (raw result, stderr, executed command string).
        """

        # build the templating namespace: setup facts, inventory vars,
        # then task-level module vars (later updates win)
        inject = self.setup_cache.get(conn.host, {}).copy()
        host_variables = self.inventory.get_variables(conn.host)
        inject.update(host_variables)
        inject.update(self.module_vars)

        # SECURITY NOTE: the templated conditional is evaluated with
        # eval(); playbook content must be trusted
        conditional = utils.double_template(self.conditional, inject, self.setup_cache)
        if not eval(conditional):
            return [utils.smjson(dict(skipped=True)), None, "skipped"]

        # the setup module gets extra variables and metadata injected
        if self.module_name == "setup":
            if not args:
                args = {}
            args = self._add_setup_vars(inject, args)
            args = self._add_setup_metadata(args)

        # serialize dict args to JSON, then template the arg string
        if type(args) == dict:
            args = utils.bigjson(args)
        args = utils.template(args, inject, self.setup_cache)

        module_name_tail = remote_module_path.split("/")[-1]

        # write the args to a remote file; async runs get the async
        # wrapper parameters (jid/limit/module) on the command line
        argsfile = self._transfer_str(conn, tmp, "arguments", args)
        if async_jid is None:
            cmd = "%s %s" % (remote_module_path, argsfile)
        else:
            cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])

        res, err = self._exec_command(conn, cmd, tmp, sudoable=True)
        client_executed_str = "%s %s" % (module_name_tail, args.strip())
        return (res, err, client_executed_str)
Esempio n. 10
0
    def run(self, conn, tmp, module_name, module_args, inject):
        ''' transfer a local script to the remote host and execute it there '''

        # FIXME: error handling
        pieces = shlex.split(module_args)
        extra_args = " ".join(pieces[1:])
        script = utils.path_dwim(
            self.runner.basedir,
            utils.template(self.runner.basedir, pieces[0], inject))

        # strip stray NUL bytes before the strings reach a shell command
        # (why does this happen here?)
        script = script.replace('\x00', '')
        extra_args = extra_args.replace('\x00', '')
        remote_script = os.path.join(tmp, os.path.basename(script)).replace('\x00', '')

        conn.put_file(script, remote_script)

        # fix file permissions when the copy is done as a different user
        if self.runner.sudo and self.runner.sudo_user != 'root':
            chmod = 'chmod a+rx %s' % remote_script
        else:
            chmod = 'chmod +x %s' % remote_script

        # chain the chmod and the script invocation into one ssh roundtrip
        command = "%s; %s %s" % (chmod, remote_script, extra_args)

        raw_handler = utils.plugins.action_loader.get('raw', self.runner)
        result = raw_handler.run(conn, tmp, 'raw', command, inject)

        # clean up the remote tmp path afterwards
        if tmp.find("tmp") != -1 and C.DEFAULT_KEEP_REMOTE_FILES != '1':
            self.runner._low_level_exec_command(conn, 'rm -rf %s >/dev/null 2>&1' % tmp, tmp)

        return result
Esempio n. 11
0
    def _run_task(self, play, task, is_handler):
        '''
        Run a single task in the playbook and recursively run any subtasks.
        Returns False when no hosts matched (so the caller can stop), True
        otherwise.
        '''

        self.callbacks.on_task_start(task.name, is_handler)

        # load up an appropriate ansible runner to run the task in parallel
        results = self._run_task_internal(task)

        # if no hosts are matched, carry on
        hosts_remaining = True
        if results is None:
            hosts_remaining = False
            results = {}

        contacted = results.get('contacted', {})
        self.stats.compute(results, ignore_errors=task.ignore_errors)

        # add facts to the global setup cache
        for host, result in contacted.iteritems():
            facts = result.get('ansible_facts', {})
            self.SETUP_CACHE[host].update(facts)
            if task.register:
                # registered results also get a convenience line-split stdout
                if 'stdout' in result:
                    result['stdout_lines'] = result['stdout'].splitlines()
                self.SETUP_CACHE[host][task.register] = result

        # flag which notify handlers need to be run
        # (loop variable renamed so it no longer shadows 'results', and the
        # precomputed 'contacted' dict is reused)
        if len(task.notify) > 0:
            for host, host_result in contacted.iteritems():
                if host_result.get('changed', False):
                    for handler_name in task.notify:
                        self._flag_handler(play.handlers(), utils.template(play.basedir, handler_name, task.module_vars), host)

        return hosts_remaining
Esempio n. 12
0
    def _execute_module(self, conn, tmp, remote_module_path, args, 
        async_jid=None, async_module=None, async_limit=None):
        '''
        Runs a module that has already been transferred.  Returns a tuple
        of (raw result, stderr, executed command string).
        '''

        # template the only_if conditional with the host's setup facts
        inject = self.setup_cache.get(conn.host,{})
        # SECURITY NOTE: the conditional is evaluated with eval();
        # playbook content must be trusted
        conditional = utils.double_template(self.conditional, inject)
        if not eval(conditional):
            return [ utils.smjson(dict(skipped=True)), None, 'skipped' ]

        # optionally pull in variables from an external inventory script
        if Runner._external_variable_script is not None:
            self._add_variables_from_script(conn, inject)
        # the setup module gets extra variables and metadata injected
        if self.module_name == 'setup':
            args = self._add_setup_vars(inject, args)
            args = self._add_setup_metadata(args)

        # serialize dict args to JSON, then template the arg string
        if type(args) == dict:
           args = utils.bigjson(args)
        args = utils.template(args, inject)

        module_name_tail = remote_module_path.split("/")[-1]

        # write args to a remote file; async runs get jid/limit/module prepended
        argsfile = self._transfer_str(conn, tmp, 'arguments', args)
        if async_jid is None:
            cmd = "%s %s" % (remote_module_path, argsfile)
        else:
            cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])

        res, err = self._exec_command(conn, cmd, tmp, sudoable=True)
        client_executed_str = "%s %s" % (module_name_tail, args.strip())
        return ( res, err, client_executed_str )
Esempio n. 13
0
    def _execute_copy(self, conn, host, tmp):
        ''' copy a local file to the remote host via the copy module '''

        # parse src=/dest= out of the raw module argument string
        opts = utils.parse_kv(self.module_args)
        src = opts.get('src', None)
        dst = opts.get('dest', None)
        if src is None or dst is None:
            return (host, True, dict(failed=True, msg="src and dest are required"), '')

        # template the source path with the host's setup variables
        host_vars = self.setup_cache.get(conn.host, {})
        src = utils.template(src, host_vars, self.setup_cache)

        # stage the file at a temporary remote location
        remote_src = tmp + src.split('/')[-1]
        conn.put_file(utils.path_dwim(self.basedir, src), remote_src)

        # install the copy module on the remote side
        self.module_name = 'copy'
        copy_module = self._transfer_module(conn, tmp, 'copy')

        # run the copy module against the staged file
        copy_args = "src=%s dest=%s" % (remote_src, dst)
        (raw_result, err, executed) = self._execute_module(conn, tmp, copy_module, copy_args)
        (host, ok, data, err) = self._return_from_module(conn, host, raw_result, err, executed)

        if not ok:
            return (host, ok, data, err)
        return self._chain_file_module(conn, tmp, data, err, opts, executed)
Esempio n. 14
0
    def _run_task(self, play, task, is_handler):
        ''' run a single task in the playbook and recursively run any subtasks.  '''

        self.callbacks.on_task_start(task.name, is_handler)

        # load up an appropriate ansible runner to run the task in parallel
        results = self._run_task_internal(task)

        # BUGFIX: the no-hosts-matched guard must come before results is
        # used; previously results['contacted'] was dereferenced first, so a
        # None result crashed before the (dead) check below was reached
        if results is None:
            results = {}

        # add facts to the global setup cache
        for host, result in results.get('contacted', {}).iteritems():
            if "ansible_facts" in result:
                for k,v in result['ansible_facts'].iteritems():
                    self.SETUP_CACHE[host][k]=v

        self.stats.compute(results)

        # flag which notify handlers need to be run
        # (loop variable renamed so it no longer shadows 'results')
        if len(task.notify) > 0:
            for host, host_result in results.get('contacted',{}).iteritems():
                if host_result.get('changed', False):
                    for handler_name in task.notify:
                        self._flag_handler(play.handlers(), utils.template(handler_name, task.module_vars), host)
Esempio n. 15
0
    def _execute_module(self, conn, tmp, remote_module_path, args, 
        async_jid=None, async_module=None, async_limit=None):

        '''
        Runs a module that has already been transferred.  Returns a
        ReturnData describing the execution.
        '''

        # build the templating namespace: setup facts, inventory vars,
        # then task-level module vars (later updates win)
        inject = self.setup_cache.get(conn.host,{}).copy()
        host_variables = self.inventory.get_variables(conn.host)
        inject.update(host_variables)
        inject.update(self.module_vars)

        # the setup module gets extra variables and metadata injected
        if self.module_name == 'setup':
            if not args:
                args = {}
            args = self._add_setup_vars(inject, args)
            args = self._add_setup_metadata(args)

        # serialize dict args to JSON, then template the arg string
        if type(args) == dict:
            args = utils.bigjson(args)
        args = utils.template(args, inject, self.setup_cache)

        module_name_tail = remote_module_path.split("/")[-1]

        # write args to a remote file; async runs get jid/limit/module prepended
        argsfile = self._transfer_str(conn, tmp, 'arguments', args)
        if async_jid is None:
            cmd = "%s %s" % (remote_module_path, argsfile)
        else:
            cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])

        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)

        executed_str = "%s %s" % (module_name_tail, args.strip())

        return ReturnData(host=conn.host, result=res, executed_str=executed_str)
Esempio n. 16
0
    def _execute_module(self, conn, tmp, remote_module_path, args, async_jid=None, async_module=None, async_limit=None):
        """
        Runs a module that has already been transferred.  Returns a list of
        [exec result, executed command string].
        """

        args = self._coerce_args_to_string(args, remote_module_path)
        inject = self.setup_cache.get(conn.host, {})
        # SECURITY NOTE: the templated conditional is evaluated with
        # eval(); playbook content must be trusted
        conditional = utils.double_template(self.conditional, inject)
        if not eval(conditional):
            return [utils.smjson(dict(skipped=True)), "skipped"]

        # optionally pull in variables from an external inventory script
        if Runner._external_variable_script is not None:
            self._add_variables_from_script(conn, inject)
        # the setup module gets extra variables and metadata injected
        if self.module_name == "setup":
            args = self._add_setup_vars(inject, args)
            args = self._add_setup_metadata(args)

        args = utils.template(args, inject)
        module_name_tail = remote_module_path.split("/")[-1]
        client_executed_str = "%s %s" % (module_name_tail, args.strip())

        # write args to a remote file; async runs get jid/limit/module prepended
        argsfile = self._transfer_argsfile(conn, tmp, args)
        if async_jid is None:
            cmd = "%s %s" % (remote_module_path, argsfile)
        else:
            cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
        return [self._exec_command(conn, cmd), client_executed_str]
Esempio n. 17
0
    def __init__(self, playbook, ds):
        '''
        Constructor: loads a play from its playbook datastructure.
        NOTE: the assignment order below matters -- self.vars must be
        resolved before _load_tasks runs.
        '''

        # TODO: more error handling


        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            # a list of host patterns collapses to one ';'-separated pattern
            hosts = ';'.join(hosts)
        # only extra_vars are available this early for templating the pattern
        hosts = utils.template(hosts, playbook.extra_vars, {})

        self._ds         = ds
        self.playbook    = playbook
        self.hosts       = hosts 
        self.name        = ds.get('name', self.hosts)
        self.vars        = ds.get('vars', {})
        self.vars_files  = ds.get('vars_files', [])
        self.vars_prompt = ds.get('vars_prompt', {})
        self.vars        = self._get_vars(self.playbook.basedir)
        self._tasks      = ds.get('tasks', [])
        self._handlers   = ds.get('handlers', [])
        self.remote_user = ds.get('user', self.playbook.remote_user)
        self.remote_port = ds.get('port', self.playbook.remote_port)
        self.sudo        = ds.get('sudo', self.playbook.sudo)
        self.sudo_user   = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport   = ds.get('connection', self.playbook.transport)
        self._tasks      = self._load_tasks(self._ds, 'tasks')
        self._handlers   = self._load_tasks(self._ds, 'handlers')

        # sudoing to any user other than root implies sudo mode
        if self.sudo_user != 'root':
            self.sudo = True
Esempio n. 18
0
    def _execute_module(self, conn, tmp, module_name, args,
        async_jid=None, async_module=None, async_limit=None, inject=None):

        ''' runs a module that has already been transferred '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s port=%s" % (args, base64.b64encode(str(utils.key_for_hostname(conn.host))), C.ZEROMQ_PORT)

        (module_path, is_new_style) = self._copy_module(conn, tmp, module_name, args, inject)

        # make the module executable; when sudoing to a non-root user the
        # permissions must be opened up so that user can read and run it
        if self.sudo and self.sudo_user != 'root':
            chmod_cmd = "chmod a+rx %s" % module_path
        else:
            chmod_cmd = "chmod u+x %s" % module_path
        self._low_level_exec_command(conn, chmod_cmd, tmp)

        # assemble the remote command line; old-style modules take their
        # arguments from a transferred args file, new-style ones do not,
        # and async runs insert jid/limit/module after the module path
        if is_new_style:
            parts = [module_path]
        else:
            args = utils.template(self.basedir, args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            parts = [module_path, argsfile]
        if async_jid is not None:
            parts = parts[:1] + [async_jid, async_limit, async_module] + parts[1:]
        cmd = " ".join([str(x) for x in parts])

        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        return ReturnData(conn=conn, result=res)
Esempio n. 19
0
    def run(
        self,
        conn,
        tmp,
        module_name,
        inject,
        ):
        ''' generates params and passes them on to the rsync module '''

        options = utils.parse_kv(self.runner.module_args)
        source = utils.template(options.get('src', None), inject)
        dest = utils.template(options.get('dest', None), inject)
        # pass along a custom rsync path if the inventory defines one
        try:
            options['rsync_path'] = inject['ansible_rsync_path']
        except KeyError:
            pass
        if not self.runner.transport == 'local':
            options['private_key'] = self.runner.private_key_file
            options['tmp_dir'] = tmp
            # BUGFIX: 'delegate' was left unbound (NameError) when
            # 'delegate_to' was absent -- the old except KeyError: pass
            # skipped the assignment entirely; default to None instead
            delegate = inject.get('delegate_to', None)
            if not delegate:
                delegate = 'localhost'
            inv_hostname = inject['inventory_hostname']
            # in pull mode the delegate and the target swap roles
            if options.get('mode', 'push') == 'pull':
                (delegate, inv_hostname) = (inv_hostname, delegate)
            source = self._process_origin(delegate, source)
            dest = self._process_origin(inv_hostname, dest)
        else:
            source = self.rsync_path(source)
            dest = self.rsync_path(dest)

        options['src'] = source
        options['dest'] = dest
        # 'mode' is only meaningful to this plugin, not to the rsync module
        try:
            del options['mode']
        except KeyError:
            pass

        # run the rsync module

        self.runner.module_args = ' '.join(['%s=%s' % (k, v) for (k,
                v) in options.items()])
        return self.runner._execute_module(conn, tmp, 'rsync',
                self.runner.module_args, inject=inject)
Esempio n. 20
0
    def _execute_fetch(self, conn, tmp):
        ''' handler for fetch operations '''

        # load up options
        options = utils.parse_kv(self.module_args)
        source = options.get('src', None)
        dest = options.get('dest', None)
        if source is None or dest is None:
            results = dict(failed=True, msg="src and dest are required")
            return ReturnData(host=conn.host, result=results)

        # build the templating context for this host.
        # BUGFIX: copy the cached dict before merging module_vars so the
        # shared per-host setup cache is not polluted with task-level vars.
        inject = self.setup_cache.get(conn.host,{}).copy()
        if self.module_vars is not None:
            inject.update(self.module_vars)

        # apply templating to source argument
        source = utils.template(source, inject, self.setup_cache)

        # apply templating to dest argument
        dest = utils.template(dest, inject, self.setup_cache)

        # files are saved in dest dir, with a subdir for each host, then the filename
        dest   = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), conn.host, source)
        dest   = dest.replace("//","/")

        # compare old and new md5 for support of change hooks
        local_md5 = utils.md5(dest)
        remote_md5 = self._remote_md5(conn, tmp, source)

        if remote_md5 == '0':
            # remote side reported the file as missing
            result = dict(msg="missing remote file", changed=False)
            return ReturnData(host=conn.host, result=result)
        elif remote_md5 != local_md5:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            conn.fetch_file(source, dest)
            new_md5 = utils.md5(dest)
            if new_md5 != remote_md5:
                result = dict(failed=True, msg="md5 mismatch", md5sum=new_md5)
                return ReturnData(host=conn.host, result=result)
            result = dict(changed=True, md5sum=new_md5)
            return ReturnData(host=conn.host, result=result)
        else:
            # local copy already matches the remote file; nothing to do
            result = dict(changed=False, md5sum=local_md5)
            return ReturnData(host=conn.host, result=result)
Esempio n. 21
0
    def _do_conditional_imports(self, vars_files, pattern=None):
        ''' handle the vars_files section, which can contain variables '''

        # FIXME: save parsed variable results in memory to avoid excessive re-reading/parsing
        # FIXME: currently parses imports for hosts not in the pattern, that is not wrong, but it's 
        #        not super optimized yet either, because we wouldn't have hit them, ergo
        #        it will raise false errors if there is no defaults variable file without any $vars
        #        in it, which could happen on uncontacted hosts.

        if type(vars_files) != list:
            raise errors.AnsibleError("vars_files must be a list")

        # only hosts matching the pattern that have not already failed or
        # been unreachable ("dark") during this run
        host_list = [ h for h in self.inventory.list_hosts(pattern)
                        if not (h in self.stats.failures or h in self.stats.dark) ]

        for host in host_list:
            # ensure the host has a setup-cache entry to merge results into
            cache_vars = SETUP_CACHE.get(host,{})
            SETUP_CACHE[host] = cache_vars
            for filename in vars_files:
                if type(filename) == list:
                    # loop over all filenames, loading the first one, and failing if # none found
                    found = False
                    sequence = []
                    for real_filename in filename:
                        # filenames themselves may contain host variables
                        filename2 = utils.path_dwim(self.basedir, utils.template(real_filename, cache_vars, SETUP_CACHE))
                        sequence.append(filename2)
                        if os.path.exists(filename2):
                            found = True
                            data = utils.parse_yaml_from_file(filename2)
                            SETUP_CACHE[host].update(data)
                            self.callbacks.on_import_for_host(host, filename2)
                            break
                        else:
                            self.callbacks.on_not_import_for_host(host, filename2)
                    if not found:
                        raise errors.AnsibleError(
                            "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence)
                        )

                else:
                    # single filename: once templated, the file must exist
                    filename2 = utils.path_dwim(self.basedir, utils.template(filename, cache_vars, SETUP_CACHE))
                    if not os.path.exists(filename2):
                        raise errors.AnsibleError("no file matched for vars_file import: %s" % filename2)
                    data = utils.parse_yaml_from_file(filename2)
                    SETUP_CACHE[host].update(data)
                    self.callbacks.on_import_for_host(host, filename2)
Esempio n. 22
0
    def _execute_copy(self, conn, host, tmp):
        """ handler for file transfer operations """

        # load up options
        options = utils.parse_kv(self.module_args)
        source = options.get("src", None)
        dest = options.get("dest", None)
        # src may be omitted only when first_available_file supplies candidates
        if (source is None and not "first_available_file" in self.module_vars) or dest is None:
            return (host, True, dict(failed=True, msg="src and dest are required"), "")

        # apply templating to source argument
        inject = self.setup_cache.get(conn.host, {})

        # FIXME: break duplicate code up into subfunction
        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if "first_available_file" in self.module_vars:
            found = False
            for fn in self.module_vars.get("first_available_file"):
                # candidate paths may themselves contain variables
                fn = utils.template(fn, inject, self.setup_cache)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                return (host, True, dict(failed=True, msg="could not find src in first_available_file list"), "")

        source = utils.template(source, inject, self.setup_cache)

        # transfer the file to a remote tmp location
        tmp_src = tmp + source.split("/")[-1]
        conn.put_file(utils.path_dwim(self.basedir, source), tmp_src)

        # install the copy  module
        self.module_name = "copy"
        module = self._transfer_module(conn, tmp, "copy")

        # run the copy module
        args = "src=%s dest=%s" % (tmp_src, dest)
        (result1, err, executed) = self._execute_module(conn, tmp, module, args)
        (host, ok, data, err) = self._return_from_module(conn, host, result1, err, executed)

        # on success, chain into the file module to apply ownership/permissions
        if ok:
            return self._chain_file_module(conn, tmp, data, err, options, executed)
        else:
            return (host, ok, data, err)
Esempio n. 23
0
 def _include_handlers(self, play, handler, dirname, new_handlers):
     ''' load handlers from external files '''
     # variables available when templating the include path and file body
     template_vars = self._get_vars(play, dirname)
     include_path = utils.path_dwim(
         dirname,
         utils.template(handler['include'], template_vars, SETUP_CACHE),
     )
     # render the included file, parse it as YAML, and collect every entry
     rendered = utils.template_from_file(include_path, template_vars, SETUP_CACHE)
     new_handlers.extend(utils.parse_yaml(rendered))
Esempio n. 24
0
    def run(self, conn, tmp, module_name, module_args, inject):
        ''' handler for template operations '''

        # templating needs playbook context (module_vars, etc.)
        if not self.runner.is_playbook:
            raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks")

        # load up options
        options  = utils.parse_kv(module_args)
        source   = options.get('src', None)
        dest     = options.get('dest', None)

        # src may be omitted only when first_available_file supplies candidates
        if (source is None and 'first_available_file' not in inject) or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in self.runner.module_vars.get('first_available_file'):
                # template the candidate, then resolve it relative to basedir
                fnt = utils.template(self.runner.basedir, fn, inject)
                fnd = utils.path_dwim(self.runner.basedir, fnt)
                if os.path.exists(fnd):
                    source = fnt
                    found = True
                    break
            if not found:
                result = dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, comm_ok=False, result=result)
        else:
            source = utils.template(self.runner.basedir, source, inject)

        # a trailing slash on dest means "directory": keep the source basename
        if dest.endswith("/"):
            base = os.path.basename(source)
            dest = os.path.join(dest, base)

        # template the source data locally & transfer
        try:
            resultant = utils.template_from_file(self.runner.basedir, source, inject)
        except Exception, e:
            result = dict(failed=True, msg=str(e))
            return ReturnData(conn=conn, comm_ok=False, result=result)
Esempio n. 25
0
File: play.py Progetto: Minione/iwct
    def _load_tasks(self, tasks, vars={}, additional_conditions=[]):
        ''' handle task and handler include statements '''

        results = []
        if tasks is None:
            # support empty handler files, and the like.
            tasks = []

        for x in tasks:
            task_vars = self.vars.copy()
            task_vars.update(vars)
            if 'include' in x:
                tokens = shlex.split(x['include'])
                items = ['']
                included_additional_conditions = list(additional_conditions)
                for k in x:
                    if k.startswith("with_"):
                        plugin_name = k[5:]
                        if plugin_name not in utils.plugins.lookup_loader:
                            raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
                        terms = utils.template_ds(self.basedir, x[k], task_vars)
                        items = utils.plugins.lookup_loader.get(plugin_name, basedir=self.basedir, runner=None).run(terms, inject=task_vars)
                    elif k.startswith("when_"):
                        included_additional_conditions.append(utils.compile_when_to_only_if("%s %s" % (k[5:], x[k])))
                    elif k in ("include", "vars", "only_if"):
                        pass
                    else:
                        raise errors.AnsibleError("parse error: task includes cannot be used with other directives: %s" % k)

                if 'vars' in x:
                    task_vars.update(x['vars'])
                if 'only_if' in x:
                    included_additional_conditions.append(x['only_if'])

                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k,v) = t.split("=", 1)
                        mv[k] = utils.template_ds(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file))
                    results += self._load_tasks(data, mv, included_additional_conditions)
            elif type(x) == dict:
                results.append(Task(self,x,module_vars=task_vars, additional_conditions=additional_conditions))
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Esempio n. 26
0
    def run(self, conn, tmp, module_name, module_args, inject):
        ''' distribute hosts into inventory groups computed from a key expression '''
        args = parse_kv(self.runner.module_args)
        if not 'key' in args:
            raise ae("'key' is a required argument.")

        vv("created 'group_by' ActionModule: key=%s"%(args['key']))

        inventory = self.runner.inventory
        result = {'changed': False}

        ### find all groups
        groups = {}
        for hostname in self.runner.host_set:
            host_data = inject['hostvars'][hostname]
            # skip hosts whose conditional evaluates to false
            if not check_conditional(template(self.runner.basedir, self.runner.conditional, host_data)):
                continue
            key_value = template(self.runner.basedir, args['key'], host_data).replace(' ','-')
            groups.setdefault(key_value, []).append(hostname)

        result['groups'] = groups

        ### add to inventory
        for group_name, members in groups.items():
            inv_group = inventory.get_group(group_name)
            if not inv_group:
                inv_group = ansible.inventory.Group(name=group_name)
                inventory.add_group(inv_group)
            for member in members:
                inv_host = inventory.get_host(member)
                if not inv_host:
                    inv_host = ansible.inventory.Host(name=member)
                # only report a change when membership actually grows
                if inv_group not in inv_host.get_groups():
                    result['changed'] = True
                    inv_group.add_host(inv_host)

        return ReturnData(conn=conn, comm_ok=True, result=result)
Esempio n. 27
0
    def _update_vars_files_for_host(self, host):
        ''' load this play's vars_files into the setup cache entry for one host '''

        # failed or unreachable hosts never made it into the cache, so
        # there is nothing to update for them
        if not host in self.playbook.SETUP_CACHE:
            return

        host_cache = self.playbook.SETUP_CACHE[host]

        for filename in self.vars_files:

            if type(filename) == list:
                # a list entry means "use the first file that exists"
                found = False
                attempted = []
                for candidate in filename:
                    # template against host facts first, then play vars
                    rendered = utils.template(candidate, host_cache)
                    rendered = utils.template(rendered, self.vars)
                    full_path = utils.path_dwim(self.playbook.basedir, rendered)
                    attempted.append(full_path)
                    if not os.path.exists(full_path):
                        self.playbook.callbacks.on_not_import_for_host(host, full_path)
                        continue
                    found = True
                    host_cache.update(utils.parse_yaml_from_file(full_path))
                    self.playbook.callbacks.on_import_for_host(host, full_path)
                    break
                if not found:
                    raise errors.AnsibleError(
                        "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, attempted)
                    )

            else:

                # single filename: merge any variables it defines
                rendered = utils.template(filename, host_cache)
                rendered = utils.template(rendered, self.vars)
                full_path = utils.path_dwim(self.playbook.basedir, rendered)
                loaded = utils.parse_yaml_from_file(full_path)
                if loaded:
                    host_cache.update(loaded)
Esempio n. 28
0
    def _execute_fetch(self, conn, host, tmp):
        """ handler for fetch operations """

        # load up options
        options = utils.parse_kv(self.module_args)
        source = options.get("src", None)
        dest = options.get("dest", None)
        if source is None or dest is None:
            return (host, True, dict(failed=True, msg="src and dest are required"), "")

        # apply templating to source argument
        inject = self.setup_cache.get(conn.host, {})
        source = utils.template(source, inject, self.setup_cache)

        # apply templating to dest argument
        dest = utils.template(dest, inject, self.setup_cache)

        # files are saved in dest dir, with a subdir for each host, then the filename
        dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), host, source)
        dest = dest.replace("//", "/")

        # compare old and new md5 for support of change hooks
        local_md5 = None
        if os.path.exists(dest):
            local_md5 = os.popen("md5sum %s" % dest).read().split()[0]
        remote_md5 = self._exec_command(conn, "md5sum %s" % source, tmp, True)[0].split()[0]

        if remote_md5 != local_md5:
            # create the containing directories, if needed
            os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            conn.fetch_file(source, dest)
            new_md5 = os.popen("md5sum %s" % dest).read().split()[0]
            if new_md5 != remote_md5:
                return (host, True, dict(failed=True, msg="md5 mismatch", md5sum=new_md5), "")
            return (host, True, dict(changed=True, md5sum=new_md5), "")
        else:
            return (host, True, dict(changed=False, md5sum=local_md5), "")
Esempio n. 29
0
File: task.py Progetto: rjtg/ansible
    def __init__(self, play, ds, module_vars=None):
        """ constructor loads from a task or handler datastructure """

        # TODO: more error handling
        # include task specific vars

        self.module_vars = module_vars

        self.play = play
        self.name = ds.get("name", None)
        self.action = ds.get("action", "")
        self.notified_by = []

        if self.name is None:
            self.name = self.action

        self.only_if = ds.get("only_if", "True")
        self.async_seconds = int(ds.get("async", 0))  # not async by default
        self.async_poll_interval = int(ds.get("poll", 10))  # default poll = 10 seconds
        self.notify = ds.get("notify", [])
        if isinstance(self.notify, basestring):
            self.notify = [self.notify]

        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError("invalid/missing action in task")

        self.module_name = tokens[0]
        self.module_args = ""
        if len(tokens) > 1:
            self.module_args = tokens[1]

        self.name = utils.template(self.name, self.module_vars)
        self.action = utils.template(self.name, self.module_vars)

        if "first_available_file" in ds:
            self.module_vars["first_available_file"] = ds.get("first_available_file")
Esempio n. 30
0
    def _flag_handler(self, play, handler_name, host):
        '''
        if a task has any notify elements, flag handlers for run
        at end of execution cycle for hosts that have indicated
        changes have been made
        '''

        matched = False
        for candidate in play.handlers():
            # handler names may themselves contain variables
            templated = utils.template(play.basedir, candidate.name, candidate.module_vars)
            if templated != handler_name:
                continue
            matched = True
            self.callbacks.on_notify(host, candidate.name)
            candidate.notified_by.append(host)
        if not matched:
            raise errors.AnsibleError("change handler (%s) is not defined" % handler_name)
Esempio n. 31
0
    def _load_tasks(self, tasks, vars={}, additional_conditions=[]):
        ''' handle task and handler include statements '''
        # NOTE(review): the mutable default arguments ({} / []) are shared
        # across calls; safe here because 'vars' is only read and
        # 'additional_conditions' only copied, but None sentinels would be
        # more robust.

        results = []
        if tasks is None:
            # support empty handler files, and the like.
            tasks = []

        for x in tasks:
            # per-task variable context: play vars overridden by include vars
            task_vars = self.vars.copy()
            task_vars.update(vars)
            if 'include' in x:
                tokens = shlex.split(x['include'])
                items = ['']
                included_additional_conditions = list(additional_conditions)
                for k in x:
                    if k.startswith("with_"):
                        # an include combined with a lookup-plugin loop
                        plugin_name = k[5:]
                        if plugin_name not in utils.plugins.lookup_loader:
                            raise errors.AnsibleError(
                                "cannot find lookup plugin named %s for usage in with_%s"
                                % (plugin_name, plugin_name))
                        terms = utils.template_ds(self.basedir, x[k],
                                                  task_vars)
                        items = utils.plugins.lookup_loader.get(
                            plugin_name, basedir=self.basedir,
                            runner=None).run(terms, inject=task_vars)
                    elif k.startswith("when_"):
                        included_additional_conditions.append(
                            utils.compile_when_to_only_if("%s %s" %
                                                          (k[5:], x[k])))
                    elif k in ("include", "vars", "only_if"):
                        pass
                    else:
                        raise errors.AnsibleError(
                            "parse error: task includes cannot be used with other directives: %s"
                            % k)

                if 'vars' in x:
                    task_vars.update(x['vars'])
                if 'only_if' in x:
                    included_additional_conditions.append(x['only_if'])

                for item in items:
                    # expand key=value parameters given on the include line
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k, v) = t.split("=", 1)
                        mv[k] = utils.template_ds(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(
                        utils.path_dwim(self.basedir, include_file))
                    results += self._load_tasks(
                        data, mv, included_additional_conditions)
            elif type(x) == dict:
                results.append(
                    Task(self,
                         x,
                         module_vars=task_vars,
                         additional_conditions=additional_conditions))
            else:
                raise Exception("unexpected task type")

        # propagate play-level tags onto every loaded task
        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Esempio n. 32
0
            if not found:
                results=dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, result=results)
        elif content is not None:
            fd, tmp_content = tempfile.mkstemp()
            f = os.fdopen(fd, 'w')
            try:
                f.write(content)
            except Exception, err:
                os.remove(tmp_content)
                result = dict(failed=True, msg="could not write content temp file: %s" % err)
                return ReturnData(conn=conn, result=result)
            f.close()
            source = tmp_content
        else:
            source = utils.template(self.runner.basedir, source, inject)
            source = utils.path_dwim(self.runner.basedir, source)

        local_md5 = utils.md5(source)
        if local_md5 is None:
            result=dict(failed=True, msg="could not find src=%s" % source)
            return ReturnData(conn=conn, result=result)

        if dest.endswith("/"):
            base = os.path.basename(source)
            dest = os.path.join(dest, base)

        remote_md5 = self.runner._remote_md5(conn, tmp, dest)
        if remote_md5 == '3':
            # Destination is a directory
            if content is not None:
Esempio n. 33
0
            return [host, False, msg]

    def _executor_internal(self, host):
        ''' callback executed in parallel for each host. returns (hostname, connected_ok, extra) '''

        host_variables = self.inventory.get_variables(host)
        # per-host ssh port override, falling back to the runner default
        port = host_variables.get('ansible_ssh_port', self.remote_port)

        conn = None
        try:
            conn = self.connector.connect(host, port)
        except errors.AnsibleConnectionFailed, e:
            # connection failures are reported, not raised
            return [ host, False, "FAILED: %s" % str(e), None ]

        # the module name itself may contain variables (e.g. $module)
        cache = self.setup_cache.get(host, {})
        module_name = utils.template(self.module_name, cache, self.setup_cache)

        tmp = self._get_tmp_path(conn)
        result = None

        # copy/fetch/template have dedicated handlers; everything else goes
        # through the generic module path, async if background was requested
        if self.module_name == 'copy':
            result = self._execute_copy(conn, host, tmp)
        elif self.module_name == 'fetch':
            result = self._execute_fetch(conn, host, tmp)
        elif self.module_name == 'template':
            result = self._execute_template(conn, host, tmp)
        else:
            if self.background == 0:
                result = self._execute_normal_module(conn, host, tmp, module_name)
            else:
                result = self._execute_async_module(conn, host, tmp, module_name)
Esempio n. 34
0
    def _execute_copy(self, conn, tmp):
        ''' handler for file transfer operations '''

        # load up options
        options = utils.parse_kv(self.module_args)
        source = options.get('src', None)
        dest = options.get('dest', None)
        # src may be omitted only when first_available_file supplies candidates
        if (source is None and not 'first_available_file' in self.module_vars
            ) or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(host=conn.host, result=result)

        # apply templating to source argument.
        # BUGFIX: copy the cached dict so the later module_vars merge does
        # not pollute the shared per-host setup cache.
        inject = self.setup_cache.get(conn.host, {}).copy()

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in self.module_vars:
            found = False
            for fn in self.module_vars.get('first_available_file'):
                fn = utils.template(fn, inject, self.setup_cache)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                results = dict(
                    failed=True,
                    msg="could not find src in first_available_file list")
                # BUGFIX: ReturnData takes 'result', not 'results'; the old
                # keyword raised a TypeError instead of reporting the error.
                return ReturnData(host=conn.host, result=results)

        if self.module_vars is not None:
            inject.update(self.module_vars)

        source = utils.template(source, inject, self.setup_cache)
        source = utils.path_dwim(self.basedir, source)

        # compare md5s so unchanged files are not re-transferred
        local_md5 = utils.md5(source)
        if local_md5 is None:
            result = dict(failed=True, msg="could not find src=%s" % source)
            return ReturnData(host=conn.host, result=result)

        remote_md5 = self._remote_md5(conn, tmp, dest)

        exec_rc = None
        if local_md5 != remote_md5:
            # transfer the file to a remote tmp location
            tmp_src = tmp + source.split('/')[-1]
            conn.put_file(source, tmp_src)

            # install the copy  module
            self.module_name = 'copy'
            module = self._transfer_module(conn, tmp, 'copy')

            # run the copy module
            args = "src=%s dest=%s" % (tmp_src, dest)
            exec_rc = self._execute_module(conn, tmp, module, args)
        else:
            # no need to transfer the file, already correct md5
            result = dict(changed=False, md5sum=remote_md5, transferred=False)
            exec_rc = ReturnData(host=conn.host, result=result)

        # on success, chain into the file module for ownership/permissions
        if exec_rc.is_successful():
            return self._chain_file_module(conn, tmp, exec_rc, options)
        else:
            return exec_rc
Esempio n. 35
0
    def _executor_internal(self, host):
        ''' executes any module one or more times '''

        host_variables = self.inventory.get_variables(host)
        if self.transport in [ 'paramiko', 'ssh' ]:
            # ssh-like transports honor a per-host port override
            port = host_variables.get('ansible_ssh_port', self.remote_port)
            if port is None:
                port = C.DEFAULT_REMOTE_PORT 
        else:
            # fireball, local, etc
            port = self.remote_port

        # build the variable context, later sources overriding earlier ones
        inject = {}
        inject.update(host_variables)
        inject.update(self.module_vars)
        inject.update(self.setup_cache[host])
        inject['hostvars'] = HostVars(self.setup_cache, self.inventory)
        inject['group_names'] = host_variables.get('group_names', [])
        inject['groups'] = self.inventory.groups_list()

        # allow with_items to work in playbooks...
        # apt and yum are converted into a single call, others run in a loop
        items = self.module_vars.get('items', [])
        if isinstance(items, basestring) and items.startswith("$"):
            items = utils.varReplaceWithItems(self.basedir, items, inject)

        # if we instead said 'with_foo' and there is a lookup module named foo...
        items_plugin = self.module_vars.get('items_lookup_plugin', None)
        if items_plugin is not None:
            items_terms = self.module_vars.get('items_lookup_terms', '')
            if items_plugin in self.lookup_plugins:
                items_terms = utils.template(self.basedir, items_terms, inject)
                items = self.lookup_plugins[items_plugin].run(items_terms)

        if type(items) != list:
            raise errors.AnsibleError("with_items only takes a list: %s" % items)

        if len(items) and self.module_name in [ 'apt', 'yum' ]:
            # hack for apt and soon yum, with_items maps back into a single module call
            inject['item'] = ",".join(items)
            items = []

        # logic to decide how to run things depends on whether with_items is used

        if len(items) == 0:
            return self._executor_internal_inner(host, self.module_name, self.module_args, inject, port)
        else:
            # executing using with_items, so make multiple calls
            # TODO: refactor
            aggregrate = {}
            all_comm_ok = True
            all_changed = False
            all_failed = False
            results = []
            for x in items:
                inject['item'] = x
                result = self._executor_internal_inner(host, self.module_name, self.module_args, inject, port)
                results.append(result.result)
                if result.comm_ok == False:
                    # a communication failure aborts the whole item loop
                    all_comm_ok = False
                    all_failed = True
                    break
                # NOTE(review): this rescans all accumulated results on each
                # iteration (quadratic, and it rebinds the loop variable 'x');
                # acceptable for typical small item lists
                for x in results:
                    if x.get('changed') == True:
                        all_changed = True
                    if (x.get('failed') == True) or (('rc' in x) and (x['rc'] != 0)):
                        all_failed = True
                        break
            msg = 'All items completed'
            if all_failed:
                msg = "One or more items failed."
            # 'failed' is only reported when something actually failed
            rd_result = dict(failed=all_failed, changed=all_changed, results=results, msg=msg)
            if not all_failed:
                del rd_result['failed']
            return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)
Esempio n. 36
0
    def _update_vars_files_for_host(self, host):
        ''' load this play's vars_files: host-specific data goes into the
        per-host SETUP_CACHE, play-wide data into self.vars; also loads
        group_vars/ and host_vars/ files for the given host '''

        # vars_files may be given as a single filename; normalize to a list
        if type(self.vars_files) != list:
            self.vars_files = [self.vars_files]

        if (host is not None):
            # seed the host's cache with the play vars so the group/host
            # files loaded below can override them
            self.playbook.SETUP_CACHE[host].update(self.vars)

            inventory = self.playbook.inventory
            hostrec = inventory.get_host(host)
            # walk groups shallowest-first so deeper (more specific) groups win
            groupz = sorted(inventory.groups_for_host(host),
                            key=lambda g: g.depth)
            groups = [g.name for g in groupz]
            basedir = inventory.basedir()
            if basedir is not None:
                for x in groups:
                    path = os.path.join(basedir, "group_vars/%s" % x)
                    if os.path.exists(path):
                        data = utils.parse_yaml_from_file(path)
                        if type(data) != dict:
                            raise errors.AnsibleError(
                                "%s must be stored as a dictionary/hash" %
                                path)
                        self.playbook.SETUP_CACHE[host].update(data)
                # host_vars/<hostname> is loaded last so it beats group vars
                path = os.path.join(basedir, "host_vars/%s" % hostrec.name)
                if os.path.exists(path):
                    data = utils.parse_yaml_from_file(path)
                    if type(data) != dict:
                        raise errors.AnsibleError(
                            "%s must be stored as a dictionary/hash" % path)
                    self.playbook.SETUP_CACHE[host].update(data)

        for filename in self.vars_files:

            if type(filename) == list:

                # loop over all filenames, loading the first one, and failing if # none found
                found = False
                sequence = []
                for real_filename in filename:
                    # pass 1: expand play-level vars in the path
                    filename2 = utils.template(self.basedir, real_filename,
                                               self.vars)
                    filename3 = filename2
                    if host is not None:
                        # pass 2: expand host vars/facts in the path
                        filename3 = utils.template(
                            self.basedir, filename2,
                            self.playbook.SETUP_CACHE[host])
                    filename4 = utils.path_dwim(self.basedir, filename3)
                    sequence.append(filename4)
                    if os.path.exists(filename4):
                        found = True
                        data = utils.parse_yaml_from_file(filename4)
                        if type(data) != dict:
                            raise errors.AnsibleError(
                                "%s must be stored as a dictionary/hash" %
                                filename4)
                        if host is not None:
                            if self._has_vars_in(
                                    filename2
                            ) and not self._has_vars_in(filename3):
                                # this filename has variables in it that were fact specific
                                # so it needs to be loaded into the per host SETUP_CACHE
                                self.playbook.SETUP_CACHE[host].update(data)
                                self.playbook.callbacks.on_import_for_host(
                                    host, filename4)
                        elif not self._has_vars_in(filename4):
                            # found a non-host specific variable, load into vars and NOT
                            # the setup cache
                            self.vars.update(data)
                    elif host is not None:
                        self.playbook.callbacks.on_not_import_for_host(
                            host, filename4)
                    if found:
                        break
                if not found:
                    raise errors.AnsibleError(
                        "%s: FATAL, no files matched for vars_files import sequence: %s"
                        % (host, sequence))

            else:
                # just one filename supplied, load it!

                filename2 = utils.template(self.basedir, filename, self.vars)
                filename3 = filename2
                if host is not None:
                    filename3 = utils.template(self.basedir, filename2,
                                               self.playbook.SETUP_CACHE[host])
                filename4 = utils.path_dwim(self.basedir, filename3)
                if self._has_vars_in(filename4):
                    # path still contains unresolved variables: skip this entry
                    # but keep processing the rest.
                    # BUGFIX: was 'return', which silently dropped every
                    # remaining vars_files entry after the first unresolved one
                    continue
                new_vars = utils.parse_yaml_from_file(filename4)
                if new_vars:
                    if type(new_vars) != dict:
                        # BUGFIX: the format string has two placeholders but was
                        # given a single bare argument, raising TypeError
                        # instead of the intended AnsibleError
                        raise errors.AnsibleError(
                            "%s must be stored as dictonary/hash: %s" %
                            (filename4, type(new_vars)))
                    if host is not None and self._has_vars_in(
                            filename2) and not self._has_vars_in(filename3):
                        # running a host specific pass and has host specific variables
                        # load into setup cache
                        self.playbook.SETUP_CACHE[host].update(new_vars)
                    elif host is None:
                        # running a non-host specific pass and we can update the global vars instead
                        self.vars.update(new_vars)
Esempio n. 37
0
            return ReturnData(host=host, result=result)

        conn = None
        actual_host = host
        try:
            delegate_to = inject.get('delegate_to', None)
            if delegate_to is not None:
                actual_host = delegate_to
            conn = self.connector.connect(actual_host, port)
            if delegate_to is not None:
                conn._delegate_for = host
        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)

        module_name = utils.template(self.module_name, inject)

        tmp = ''
        if self.module_name != 'raw':
            tmp = self._make_tmp_path(conn)
        result = None

        handler = self.action_plugins.get(self.module_name, None)
        if handler:
            result = handler.run(conn, tmp, module_name, inject)
        else:
            if self.background == 0:
                result = self.action_plugins['normal'].run(
                    conn, tmp, module_name, inject)
            else:
                result = self.action_plugins['async'].run(
    def _execute_template(self, conn, tmp):
        ''' handler for template operations: renders a local template file
        with the host's variables, for transfer via the copy module.
        NOTE(review): this snippet appears truncated after the local
        templating step -- confirm against the full source. '''

        # load up options
        options  = utils.parse_kv(self.module_args)
        source   = options.get('src', None)
        dest     = options.get('dest', None)
        # metadata: remote path of the setup/facts file (non-playbook mode only)
        metadata = options.get('metadata', None)
        if (source is None and 'first_available_file' not in self.module_vars) or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(host=conn.host, comm_ok=False, result=result)

        # apply templating to source argument so vars can be used in the path
        inject = self.setup_cache.get(conn.host,{})

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in self.module_vars:
            found = False
            for fn in self.module_vars.get('first_available_file'):
                # NOTE(review): unlike the copy plugin, fn is not passed through
                # path_dwim here -- paths are checked as given; confirm intended
                fn = utils.template(fn, inject, self.setup_cache)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                result = dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(host=conn.host, comm_ok=False, result=result)


        # task-level vars take precedence over cached host facts
        if self.module_vars is not None:
            inject.update(self.module_vars)

        source = utils.template(source, inject, self.setup_cache)

        #(host, ok, data, err) = (None, None, None, None)

        if not self.is_playbook:

            # not running from a playbook so we have to fetch the remote
            # setup file contents before proceeding...
            if metadata is None:
                if self.remote_user == 'root':
                    metadata = '/etc/ansible/setup'
                else:
                    # path is expanded on remote side
                    metadata = "~/.ansible/setup"

            # install the slurp module so we can read the remote setup file
            slurp_module = self._transfer_module(conn, tmp, 'slurp')

            # run the slurp module to get the metadata file
            args = "src=%s" % metadata
            result1  = self._execute_module(conn, tmp, slurp_module, args)
            if not 'content' in result1.result or result1.result.get('encoding','base64') != 'base64':
                # slurp did not return base64 content; surface as a failure
                result1.result['failed'] = True
                return result1
            content = base64.b64decode(result1.result['content'])
            # replace the template context with the remote host's facts
            inject = utils.json_loads(content)


        # install the template module
        copy_module = self._transfer_module(conn, tmp, 'copy')

        # template the source data locally
        try:
            resultant = utils.template_from_file(utils.path_dwim(self.basedir, source),
                                                 inject, self.setup_cache, no_engine=False)
        except Exception, e:
            # any templating error is reported back rather than raised
            result = dict(failed=True, msg=str(e))
            return ReturnData(host=conn.host, comm_ok=False, result=result)
        inject.update(self.module_vars)

        conditional = utils.double_template(self.conditional, inject, self.setup_cache)
        if not eval(conditional):
            result = utils.smjson(dict(skipped=True))
            self.callbacks.on_skipped(host)
            return ReturnData(host=host, result=result)

        conn = None
        try:
            conn = self.connector.connect(host, port)
        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)

        module_name = utils.template(self.module_name, inject, self.setup_cache)

        tmp = self._make_tmp_path(conn)
        result = None

        if self.module_name == 'copy':
            result = self._execute_copy(conn, tmp)
        elif self.module_name == 'fetch':
            result = self._execute_fetch(conn, tmp)
        elif self.module_name == 'template':
            result = self._execute_template(conn, tmp)
        elif self.module_name == 'raw':
            result = self._execute_raw(conn, tmp)
        elif self.module_name == 'assemble':
            result = self._execute_assemble(conn, tmp)
        else:
Esempio n. 40
0
    def __init__(self, play, ds, module_vars=None):
        ''' constructor loads from a task or handler datastructure '''

        # reject any key that is not a recognized task/handler parameter
        for key in ds.keys():
            if key not in Task.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter in an Ansible task or handler"
                    % key)

        self.module_vars = module_vars
        self.play = play

        # basic attributes
        self.name = ds.get('name', None)
        self.tags = ['all']
        self.register = ds.get('register', None)

        # exactly one of action / local_action must be present
        has_action = 'action' in ds
        has_local_action = 'local_action' in ds
        if has_action and has_local_action:
            raise errors.AnsibleError(
                "the 'action' and 'local_action' attributes can not be used together"
            )
        if not has_action and not has_local_action:
            raise errors.AnsibleError("task missing an 'action' attribute")
        if has_local_action:
            # local_action always runs on the control machine
            self.action = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
            self.transport = 'local'
        else:
            self.action = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport = ds.get('transport', play.transport)

        # hosts that need to run a notified handler are collected here
        self.notified_by = []

        # fall back to the action line when no explicit name was given
        if self.name is None:
            self.name = self.action

        # execution options
        self.only_if = ds.get('only_if', 'True')
        self.async_seconds = int(ds.get('async', 0))        # 0 == run synchronously
        self.async_poll_interval = int(ds.get('poll', 10))  # seconds between async polls
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)
        self.with_items = ds.get('with_items', None)

        self.ignore_errors = ds.get('ignore_errors', False)

        # notify may be given as a scalar; normalize to a list
        if isinstance(self.notify, basestring):
            self.notify = [self.notify]

        # the action line is "<module_name> [<module_args>]"
        parts = self.action.split(None, 1)
        if not parts:
            raise errors.AnsibleError(
                "invalid/missing action in task. name: %s" % self.name)
        self.module_name = parts[0]
        self.module_args = parts[1] if len(parts) > 1 else ''

        # tags inherited from an enclosing include; may be comma separated
        inherited_tags = self.module_vars.get('tags', [])
        if type(inherited_tags) in [str, unicode]:
            inherited_tags = inherited_tags.split(",")

        # expand any variables used in the name/action lines
        self.name = utils.template(None, self.name, self.module_vars)
        self.action = utils.template(None, self.action, self.module_vars)

        # these two looping mechanisms can not be combined
        if self.with_items is not None and self.first_available_file is not None:
            raise errors.AnsibleError(
                "with_items and first_available_file are mutually incompatible in a single task"
            )

        # expose first_available_file to Runner code
        if self.first_available_file:
            self.module_vars[
                'first_available_file'] = self.first_available_file

        # Runner iterates module_vars['items']; default to an empty list
        if self.with_items is None:
            self.with_items = []
        self.module_vars['items'] = self.with_items

        # allow runner to see delegate_to option
        self.module_vars['delegate_to'] = self.delegate_to

        # make ignore_errors accessable to Runner code
        self.module_vars['ignore_errors'] = self.ignore_errors

        # per-task tags may be a scalar or a list; merge with inherited ones
        tag_spec = ds.get('tags', None)
        if type(tag_spec) in [str, unicode]:
            self.tags.append(tag_spec)
        elif type(tag_spec) == list:
            self.tags.extend(tag_spec)
        self.tags.extend(inherited_tags)
Esempio n. 41
0
    def __init__(self, play, ds, module_vars=None):
        ''' constructor loads from a task or handler datastructure '''

        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in play.playbook.modules_list:
                ds['action'] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_") and x != 'with_items':
                plugin_name = x.replace("with_","")
                if plugin_name in play.playbook.lookup_plugins_list:
                    ds['items_lookup_plugin'] = plugin_name
                    ds['items_lookup_terms'] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))

            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter in an Ansible task or handler" % x)

        self.module_vars = module_vars
        self.play        = play

        # load various attributes
        self.name         = ds.get('name', None)
        self.tags         = [ 'all' ]
        self.register     = ds.get('register', None)
        self.sudo         = ds.get('sudo', play.sudo)
        # NOTE(review): 'is True' matches only the literal boolean; confirm the
        # YAML layer always yields a bool here (a truthy string would skip this)
        if self.sudo is True:
            self.sudo_user    = ds.get('sudo_user', play.sudo_user)
            self.sudo_pass    = ds.get('sudo_pass', play.playbook.sudo_pass)
        else:
            self.sudo_user    = None
            self.sudo_pass    = None

        # Both are defined
        if ('action' in ds) and ('local_action' in ds):
            raise errors.AnsibleError("the 'action' and 'local_action' attributes can not be used together")
        # Both are NOT defined
        elif (not 'action' in ds) and (not 'local_action' in ds):
            raise errors.AnsibleError("task missing an 'action' attribute")
        # Only one of them is defined
        elif 'local_action' in ds:
            self.action      = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
        else:
            self.action      = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport   = ds.get('transport', play.transport)

        # delegate_to can use variables
        if not (self.delegate_to is None):
            # BUGFIX: this block was indented with tab characters while the rest
            # of the file uses spaces; re-indented with spaces so the file also
            # tokenizes under python -tt / Python 3
            self.delegate_to = utils.template(None, self.delegate_to, self.module_vars)
            # delegate_to: localhost should use local transport
            if self.delegate_to in ['127.0.0.1', 'localhost']:
                self.transport   = 'local'

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.only_if = ds.get('only_if', 'True')
        self.async_seconds = int(ds.get('async', 0))  # not async by default
        self.async_poll_interval = int(ds.get('poll', 10))  # default poll = 10 seconds
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)
        self.with_items = ds.get('with_items', None)

        # alternative looping mechanism: a named lookup plugin plus its terms
        self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
        self.items_lookup_terms  = ds.get('items_lookup_terms', None)

        self.ignore_errors = ds.get('ignore_errors', False)

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [ self.notify ]

        # split the action line into a module name + arguments
        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError("invalid/missing action in task. name: %s" % self.name)
        self.module_name = tokens[0]
        self.module_args = ''
        if len(tokens) > 1:
            self.module_args = tokens[1]

        import_tags = self.module_vars.get('tags',[])
        if type(import_tags) in [str,unicode]:
            # allow the user to list comma delimited tags
            import_tags = import_tags.split(",")

        # expand any variables used in the name/action lines
        self.name = utils.template(None, self.name, self.module_vars)
        self.action = utils.template(None, self.action, self.module_vars)

        # handle mutually incompatible options
        incompatibles = [ x for x in [ self.with_items, self.first_available_file, self.items_lookup_plugin ] if x is not None ]
        if len(incompatibles) > 1:
            raise errors.AnsibleError("with_items, with_(plugin), and first_available_file are mutually incompatible in a single task")

        # make first_available_file accessable to Runner code
        if self.first_available_file:
            self.module_vars['first_available_file'] = self.first_available_file

        # process with_items so it can be used by Runner code
        if self.with_items is None:
            self.with_items = [ ]
        self.module_vars['items'] = self.with_items

        # expose the lookup-plugin loop configuration to Runner code
        if self.items_lookup_plugin is not None:
            self.module_vars['items_lookup_plugin'] = self.items_lookup_plugin
            self.module_vars['items_lookup_terms'] = self.items_lookup_terms

        # allow runner to see delegate_to option
        self.module_vars['delegate_to'] = self.delegate_to

        # make ignore_errors accessable to Runner code
        self.module_vars['ignore_errors'] = self.ignore_errors

        # tags allow certain parts of a playbook to be run without running the whole playbook
        apply_tags = ds.get('tags', None)
        if apply_tags is not None:
            if type(apply_tags) in [ str, unicode ]:
                self.tags.append(apply_tags)
            elif type(apply_tags) == list:
                self.tags.extend(apply_tags)
        self.tags.extend(import_tags)
Esempio n. 42
0
    def run(self, conn, tmp, module_name, module_args, inject):
        ''' handler for file transfer operations '''

        # load up options
        options = utils.parse_kv(module_args)
        source  = options.get('src', None)
        dest    = options.get('dest', None)

        # BUGFIX: validate before dereferencing -- the original called
        # dest.endswith() / os.path.basename(source) first, raising
        # AttributeError when dest was missing and TypeError in the legitimate
        # first_available_file case (where src is absent)
        if (source is None and 'first_available_file' not in inject) or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, result=result)

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in inject.get('first_available_file'):
                fn = utils.template(self.runner.basedir, fn, inject)
                fn = utils.path_dwim(self.runner.basedir, fn)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                results = dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, result=results)
        else:
            source = utils.template(self.runner.basedir, source, inject)
            source = utils.path_dwim(self.runner.basedir, source)

        # a trailing-slash dest means "directory": compare against the file that
        # would land under it.  BUGFIX: moved after source resolution so the
        # templated/located path is used and source is guaranteed non-None.
        # NOTE: module_args still carries the original dest, so this join only
        # affects the local/remote md5 comparison below.
        if dest.endswith("/"):
            base = os.path.basename(source)
            dest = os.path.join(dest, base)

        local_md5 = utils.md5(source)
        if local_md5 is None:
            result = dict(failed=True, msg="could not find src=%s" % source)
            return ReturnData(conn=conn, result=result)

        remote_md5 = self.runner._remote_md5(conn, tmp, dest)

        if local_md5 != remote_md5:
            # transfer the file to a remote tmp location
            tmp_src = tmp + os.path.basename(source)
            conn.put_file(source, tmp_src)
            # fix file permissions when the copy is done as a different user
            if self.runner.sudo and self.runner.sudo_user != 'root':
                self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)

            # run the copy module
            module_args = "%s src=%s" % (module_args, tmp_src)
            return self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject)

        else:
            # no need to transfer the file, already correct md5, but still need to call
            # the file module in case we want to change attributes
            tmp_src = tmp + os.path.basename(source)
            module_args = "%s src=%s" % (module_args, tmp_src)
            return self.runner._execute_module(conn, tmp, 'file', module_args, inject=inject)
Esempio n. 43
0
    def _update_vars_files_for_host(self, host):
        ''' load this play's vars_files: host-specific data (paths built from
        facts/host vars) goes into the per-host SETUP_CACHE, play-wide data
        into self.vars '''

        # vars_files may be a single filename; normalize to a list
        if type(self.vars_files) != list:
            self.vars_files = [self.vars_files]

        if host is not None:
            # template context for host-specific path expansion: inventory
            # variables overlaid with already-gathered facts for this host
            inject = {}
            inject.update(self.playbook.inventory.get_variables(host))
            inject.update(self.playbook.SETUP_CACHE[host])

        for filename in self.vars_files:

            if type(filename) == list:

                # loop over all filenames, loading the first one, and failing if # none found
                found = False
                sequence = []
                for real_filename in filename:
                    # pass 1: expand play-level vars in the path
                    filename2 = utils.template(self.basedir, real_filename,
                                               self.vars)
                    filename3 = filename2
                    if host is not None:
                        # pass 2: expand host vars/facts in the path
                        filename3 = utils.template(self.basedir, filename2,
                                                   inject)
                    filename4 = utils.path_dwim(self.basedir, filename3)
                    sequence.append(filename4)
                    if os.path.exists(filename4):
                        found = True
                        data = utils.parse_yaml_from_file(filename4)
                        if type(data) != dict:
                            raise errors.AnsibleError(
                                "%s must be stored as a dictionary/hash" %
                                filename4)
                        if host is not None:
                            if self._has_vars_in(
                                    filename2
                            ) and not self._has_vars_in(filename3):
                                # this filename has variables in it that were fact specific
                                # so it needs to be loaded into the per host SETUP_CACHE
                                self.playbook.SETUP_CACHE[host].update(data)
                                self.playbook.callbacks.on_import_for_host(
                                    host, filename4)
                        elif not self._has_vars_in(filename4):
                            # found a non-host specific variable, load into vars and NOT
                            # the setup cache
                            self.vars.update(data)
                    elif host is not None:
                        self.playbook.callbacks.on_not_import_for_host(
                            host, filename4)
                    if found:
                        break
                # only fatal during a host-specific pass; the host-less pass may
                # legitimately be unable to resolve fact-dependent paths yet
                if not found and host is not None:
                    raise errors.AnsibleError(
                        "%s: FATAL, no files matched for vars_files import sequence: %s"
                        % (host, sequence))

            else:
                # just one filename supplied, load it!

                filename2 = utils.template(self.basedir, filename, self.vars)
                filename3 = filename2
                if host is not None:
                    filename3 = utils.template(self.basedir, filename2, inject)
                filename4 = utils.path_dwim(self.basedir, filename3)
                if self._has_vars_in(filename4):
                    # path still contains unresolved variables; skip this entry
                    # but keep processing the remaining vars_files
                    continue
                new_vars = utils.parse_yaml_from_file(filename4)
                if new_vars:
                    if type(new_vars) != dict:
                        raise errors.AnsibleError(
                            "%s must be stored as dictonary/hash: %s" %
                            (filename4, type(new_vars)))
                    if host is not None and self._has_vars_in(
                            filename2) and not self._has_vars_in(filename3):
                        # running a host specific pass and has host specific variables
                        # load into setup cache
                        self.playbook.SETUP_CACHE[host].update(new_vars)
                    elif host is None:
                        # running a non-host specific pass and we can update the global vars instead
                        self.vars.update(new_vars)
Esempio n. 44
0
    def _execute_module(self,
                        conn,
                        tmp,
                        module_name,
                        args,
                        async_jid=None,
                        async_module=None,
                        async_limit=None,
                        inject=None,
                        persist_files=False,
                        complex_args=None):
        ''' runs a module that has already been transferred to the remote host.

        Builds the remote command line (old-style modules read their args from
        a transferred file; new-style modules have args baked in by
        _copy_module), executes it, and parses the JSON result. '''

        # hack to support fireball mode
        if module_name == 'fireball':
            args = "%s password=%s" % (
                args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
            if 'port' not in args:
                args += " port=%s" % C.ZEROMQ_PORT

        (remote_module_path, is_new_style,
         shebang) = self._copy_module(conn, tmp, module_name, args, inject,
                                      complex_args)

        environment_string = self._compute_environment_string(inject)

        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd_chmod = "chmod a+r %s" % remote_module_path
            self._low_level_exec_command(conn, cmd_chmod, tmp, sudoable=False)

        cmd = ""
        if not is_new_style:
            if 'CHECKMODE=True' in args:
                # if module isn't using AnsibleModuleCommon infrastructure we can't be certain it knows how to
                # do --check mode, so to be safe we will not run it.
                # BUGFIX: result key was misspelled 'skippped'; the rest of the
                # codebase sets/reads 'skipped' (cf. dict(skipped=True))
                return ReturnData(
                    conn=conn,
                    result=dict(
                        skipped=True,
                        msg="cannot run check mode against old-style modules"))

            # old-style modules read their arguments from a file on the remote side
            args = utils.template(self.basedir, args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([
                    str(x) for x in [
                        remote_module_path, async_jid, async_limit,
                        async_module, argsfile
                    ]
                ])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([
                    str(x) for x in
                    [remote_module_path, async_jid, async_limit, async_module]
                ])

        if not shebang:
            raise errors.AnsibleError("module is missing interpreter line")

        cmd = " ".join([environment_string, shebang.replace("#!", ""), cmd])
        # clean up the remote tmp dir after the run unless the user asked to
        # keep remote files or the caller needs them to persist
        if tmp.find(
                "tmp"
        ) != -1 and C.DEFAULT_KEEP_REMOTE_FILES != '1' and not persist_files:
            cmd = cmd + "; rm -rf %s >/dev/null 2>&1" % tmp
        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        data = utils.parse_json(res['stdout'])
        # when the module output could not be parsed as JSON, append stderr so
        # the user sees the underlying failure
        if 'parsed' in data and data['parsed'] == False:
            data['msg'] += res['stderr']
        return ReturnData(conn=conn, result=data)
Esempio n. 45
0
    def _get_vars(self):
        ''' load the vars section from a play, accounting for all sorts of variable features
        including loading from yaml files, prompting, and conditional includes of the first
        file found in a list. '''

        if self.vars is None:
            self.vars = {}

        if type(self.vars) not in [dict, list]:
            raise errors.AnsibleError(
                "'vars' section must contain only key/value pairs")

        # BUGFIX: work on a copy -- the original aliased the shared
        # playbook.global_vars dict, so every update below leaked this play's
        # vars and prompt answers into all subsequent plays
        vars = self.playbook.global_vars.copy()

        # translate a list of vars into a dict
        if type(self.vars) == list:
            for item in self.vars:
                if getattr(item, 'items', None) is None:
                    raise errors.AnsibleError(
                        "expecting a key-value pair in 'vars' section")
                k, v = item.items()[0]
                vars[k] = v
        else:
            vars.update(self.vars)

        if type(self.vars_prompt) == list:
            # long form: list of dicts with name/prompt/private/encrypt/... keys
            for var in self.vars_prompt:
                if not 'name' in var:
                    raise errors.AnsibleError(
                        "'vars_prompt' item is missing 'name:'")

                vname = var['name']
                prompt = utils.template(None,
                                        "%s: " % var.get("prompt", vname),
                                        self.vars)
                private = var.get("private", True)

                confirm = var.get("confirm", False)
                encrypt = var.get("encrypt", None)
                salt_size = var.get("salt_size", None)
                salt = var.get("salt", None)
                conditional = var.get("only_if", 'True')

                # only prompt when the item's only_if expression holds
                if utils.check_conditional(conditional):
                    vars[vname] = self.playbook.callbacks.on_vars_prompt(
                        vname, private, prompt, encrypt, confirm, salt_size,
                        salt)

        elif type(self.vars_prompt) == dict:
            # short form: {varname: prompt}; input is echoed, never encrypted
            for (vname, prompt) in self.vars_prompt.iteritems():
                prompt_msg = "%s: " % prompt
                vars[vname] = self.playbook.callbacks.on_vars_prompt(
                    varname=vname, private=False, prompt=prompt_msg)

        else:
            # NOTE(review): assumes self.vars_prompt is initialized to a dict or
            # list elsewhere; a None default would always trip this error -- confirm
            raise errors.AnsibleError(
                "'vars_prompt' section is malformed, see docs")

        # start from the command-line extra_vars, then let the play's vars
        # (computed above) override them
        results = self.playbook.extra_vars.copy()
        results.update(vars)
        return results
Esempio n. 46
0
    def _executor_internal_inner(self,
                                 host,
                                 module_name,
                                 module_args,
                                 inject,
                                 port,
                                 is_chained=False,
                                 complex_args=None):
        ''' decides how to invoke a module

        Templates the module name/args, selects the handler (action plugin,
        'normal', or 'async'), evaluates the task conditional, and resolves
        the actual connection parameters (host/port/user/pass/transport),
        honouring delegate_to overrides.  Returns a ReturnData early on
        skip or on an invalid port value.
        '''

        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(module_args) == dict:
            # NOTE(review): naive quoting -- a value containing a single
            # quote would produce a malformed k='v' string; confirm inputs
            for (k, v) in module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k, v)
            module_args = new_args

        # module name, args and complex args may all contain templates
        module_name = utils.template(self.basedir, module_name, inject)
        module_args = utils.template(self.basedir, module_args, inject)
        complex_args = utils.template(self.basedir, complex_args, inject)

        # action plugins (copy, template, fetch, ...) take priority over
        # plain module execution; they do not support async mode
        if module_name in utils.plugins.action_loader:
            if self.background != 0:
                raise errors.AnsibleError(
                    "async mode is not supported with the %s module" %
                    module_name)
            handler = utils.plugins.action_loader.get(module_name, self)
        elif self.background == 0:
            handler = utils.plugins.action_loader.get('normal', self)
        else:
            # background > 0 means run the task asynchronously
            handler = utils.plugins.action_loader.get('async', self)

        # evaluate the task conditional; handlers flagged BYPASS_HOST_LOOP
        # always run regardless of the per-host condition
        conditional = utils.template(self.basedir,
                                     self.conditional,
                                     inject,
                                     expand_lists=False)
        if not getattr(handler, 'BYPASS_HOST_LOOP',
                       False) and not utils.check_conditional(conditional):
            result = utils.jsonify(dict(skipped=True))
            self.callbacks.on_skipped(host, inject.get('item', None))
            return ReturnData(host=host, result=result)

        # per-host inventory variables may override the connection details
        conn = None
        actual_host = inject.get('ansible_ssh_host', host)
        actual_port = port
        actual_user = inject.get('ansible_ssh_user', self.remote_user)
        actual_pass = inject.get('ansible_ssh_pass', self.remote_pass)
        actual_transport = inject.get('ansible_connection', self.transport)
        if actual_transport in ['paramiko', 'ssh']:
            # ansible_ssh_port only applies to SSH-like transports
            actual_port = inject.get('ansible_ssh_port', port)

        # the delegated host may have different SSH port configured, etc
        # and we need to transfer those, and only those, variables
        delegate_to = inject.get('delegate_to', None)
        if delegate_to is not None:
            delegate_to = utils.template(self.basedir, delegate_to, inject)
            inject = inject.copy()
            # drop the original host's ansible_*_interpreter settings; the
            # delegate's own ones (if any) are copied back in below
            interpreters = []
            for i in inject:
                if i.startswith("ansible_") and i.endswith("_interpreter"):
                    interpreters.append(i)
            for i in interpreters:
                del inject[i]
            port = C.DEFAULT_REMOTE_PORT
            try:
                delegate_info = inject['hostvars'][delegate_to]
                actual_host = delegate_info.get('ansible_ssh_host',
                                                delegate_to)
                actual_port = delegate_info.get('ansible_ssh_port', port)
                actual_user = delegate_info.get('ansible_ssh_user',
                                                actual_user)
                actual_pass = delegate_info.get('ansible_ssh_pass',
                                                actual_pass)
                actual_transport = delegate_info.get('ansible_connection',
                                                     self.transport)
                for i in delegate_info:
                    if i.startswith("ansible_") and i.endswith("_interpreter"):
                        inject[i] = delegate_info[i]
            except errors.AnsibleError:
                # delegate not found in inventory: treat it as a raw
                # hostname with default connection settings
                actual_host = delegate_to
                actual_port = port

        # user and password may themselves be template expressions
        actual_user = utils.template(self.basedir, actual_user, inject)
        actual_pass = utils.template(self.basedir, actual_pass, inject)

        try:
            if actual_port is not None:
                actual_port = int(actual_port)
        except ValueError, e:  # Python 2 except syntax; 'e' is unused
            result = dict(
                failed=True,
                msg=
                "FAILED: Configured port \"%s\" is not a valid port, expected integer"
                % actual_port)
            return ReturnData(host=host, comm_ok=False, result=result)
Esempio n. 47
0
    def _run_play(self, pg):
        ''' run a list of tasks for a given pattern, in order '''

        # 'hosts' is mandatory: it selects which machines the play targets
        hosts_spec = pg.get('hosts')
        if hosts_spec is None:
            raise errors.AnsibleError('hosts declaration is required')
        if isinstance(hosts_spec, list):
            hosts_spec = ';'.join(hosts_spec)
        pattern = utils.template(hosts_spec, self.extra_vars, {})
        play_name = pg.get('name', pattern)

        # play-level settings, each falling back to the runner defaults
        play_vars = self._get_vars(pg, self.basedir)
        vars_files = pg.get('vars_files', {})
        task_list = pg.get('tasks', [])
        handler_list = pg.get('handlers', [])
        remote_user = pg.get('user', self.remote_user)
        remote_port = pg.get('port', self.remote_port)
        use_sudo = pg.get('sudo', self.sudo)
        sudo_as = pg.get('sudo_user', self.sudo_user)
        conn_type = pg.get('connection', self.transport)

        # the default sudo user is root, so if you change it, sudo is implied
        if sudo_as != 'root':
            use_sudo = True

        self.callbacks.on_play_start(play_name)

        # push variables down to the hosts and pull facts/ohai/other data back up
        self._do_setup_step(pattern, play_vars, remote_user, remote_port,
                            use_sudo, sudo_as, conn_type, None)

        # with that data available, handle conditional vars_files imports
        if len(vars_files) > 0:
            self._do_setup_step(pattern, play_vars, remote_user, remote_port,
                                use_sudo, sudo_as, conn_type, vars_files)

        # the main task list runs on every targeted node
        for task in task_list:
            self._run_task(pattern=pattern,
                           task=task,
                           handlers=handler_list,
                           remote_user=remote_user,
                           sudo=use_sudo,
                           sudo_user=sudo_as,
                           transport=conn_type,
                           port=remote_port)

        # handlers run only on the nodes flagged by _flag_handlers, and at
        # most once each -- e.g. several changed config files can all ask
        # for an Apache restart, but Apache restarts only once at the end
        for task in handler_list:
            flagged_hosts = task.get('run', None)
            if type(flagged_hosts) == list:
                self.inventory.restrict_to(flagged_hosts)
                self._run_task(pattern=pattern,
                               task=task,
                               handlers=[],
                               conditional=True,
                               remote_user=remote_user,
                               sudo=use_sudo,
                               sudo_user=sudo_as,
                               transport=conn_type,
                               port=remote_port)
                self.inventory.lift_restriction()
Esempio n. 48
0
    def __init__(self, play, ds, module_vars=None, additional_conditions=None):
        ''' constructor loads from a task or handler datastructure

        play                  -- the Play object this task belongs to
        ds                    -- task/handler datastructure (dict parsed from YAML);
                                 note: it is normalized (mutated) in place below
        module_vars           -- variables propagated from the including play/file
        additional_conditions -- extra only_if expressions AND-ed onto this task
        '''

        # first pass: normalize shorthand keys in ds before reading attributes
        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in utils.plugins.module_finder:
                if 'action' in ds:
                    raise errors.AnsibleError(
                        "multiple actions specified in task %s" %
                        (ds.get('name', ds['action'])))
                if isinstance(ds[x], dict):
                    # "modulename: {k: v}" form -- becomes structured args
                    if 'args' in ds:
                        raise errors.AnsibleError(
                            "can't combine args: and a dict for %s: in task %s"
                            % (x, ds.get('name', "%s: %s" % (x, ds[x]))))
                    ds['args'] = ds[x]
                    ds[x] = ''
                elif ds[x] is None:
                    ds[x] = ''
                if not isinstance(ds[x], basestring):
                    raise errors.AnsibleError(
                        "action specified for task %s has invalid type %s" %
                        (ds.get('name', "%s: %s" % (x, ds[x])), type(ds[x])))
                # rewrite shorthand into the canonical "action: module args" form
                ds['action'] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_"):
                plugin_name = x.replace("with_", "")
                if plugin_name in utils.plugins.lookup_loader:
                    ds['items_lookup_plugin'] = plugin_name
                    ds['items_lookup_terms'] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError(
                        "cannot find lookup plugin named %s for usage in with_%s"
                        % (plugin_name, plugin_name))

            # "when_<test>: expr" becomes "when: <test> expr"
            elif x.startswith("when_"):
                if 'when' in ds:
                    raise errors.AnsibleError(
                        "multiple when_* statements specified in task %s" %
                        (ds.get('name', ds['action'])))
                when_name = x.replace("when_", "")
                ds['when'] = "%s %s" % (when_name, ds[x])
                ds.pop(x)

            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter in an Ansible task or handler"
                    % x)

        self.module_vars = module_vars
        self.play = play

        # load various attributes
        self.name = ds.get('name', None)
        self.tags = ['all']
        self.register = ds.get('register', None)
        self.sudo = utils.boolean(ds.get('sudo', play.sudo))
        self.environment = ds.get('environment', {})

        # rather than simple key=value args on the options line, these represent structured data and the values
        # can be hashes and lists, not just scalars
        self.args = ds.get('args', {})

        if self.sudo:
            # sudo_user may be a template expression, resolved against play vars
            self.sudo_user = utils.template(
                play.basedir, ds.get('sudo_user', play.sudo_user), play.vars)
            self.sudo_pass = ds.get('sudo_pass', play.playbook.sudo_pass)
        else:
            self.sudo_user = None
            self.sudo_pass = None

        # Both are defined
        if ('action' in ds) and ('local_action' in ds):
            raise errors.AnsibleError(
                "the 'action' and 'local_action' attributes can not be used together"
            )
        # Both are NOT defined
        elif (not 'action' in ds) and (not 'local_action' in ds):
            raise errors.AnsibleError(
                "'action' or 'local_action' attribute missing in task \"%s\"" %
                ds.get('name', '<Unnamed>'))
        # Only one of them is defined
        elif 'local_action' in ds:
            # local_action implies delegation to the control machine
            self.action = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
        else:
            self.action = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport = ds.get('connection',
                                    ds.get('transport', play.transport))

        # dict-form action: {module: name, k: v, ...} -- split module out,
        # the rest becomes structured args
        if isinstance(self.action, dict):
            if 'module' not in self.action:
                raise errors.AnsibleError(
                    "'module' attribute missing from action in task \"%s\"" %
                    ds.get('name', '%s' % self.action))
            if self.args:
                raise errors.AnsibleError(
                    "'args' cannot be combined with dict 'action' in task \"%s\""
                    % ds.get('name', '%s' % self.action))
            self.args = self.action
            self.action = self.args.pop('module')

        # delegate_to can use variables
        if not (self.delegate_to is None):
            # delegate_to: localhost should use local transport
            if self.delegate_to in ['127.0.0.1', 'localhost']:
                self.transport = 'local'

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.only_if = ds.get('only_if', 'True')
        self.when = ds.get('when', None)

        self.async_seconds = int(ds.get('async', 0))  # not async by default
        self.async_poll_interval = int(ds.get('poll',
                                              10))  # default poll = 10 seconds
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)

        # with_* lookup plugin settings extracted during normalization above
        self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
        self.items_lookup_terms = ds.get('items_lookup_terms', None)

        self.ignore_errors = ds.get('ignore_errors', False)

        # action should be a string
        if not isinstance(self.action, basestring):
            raise errors.AnsibleError(
                "action is of type '%s' and not a string in task. name: %s" %
                (type(self.action).__name__, self.name))

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [self.notify]

        # split the action line into a module name + arguments
        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError(
                "invalid/missing action in task. name: %s" % self.name)
        self.module_name = tokens[0]
        self.module_args = ''
        if len(tokens) > 1:
            self.module_args = tokens[1]

        import_tags = self.module_vars.get('tags', [])
        if type(import_tags) in [str, unicode]:
            # allow the user to list comma delimited tags
            import_tags = import_tags.split(",")

        # handle mutually incompatible options
        incompatibles = [
            x for x in [self.first_available_file, self.items_lookup_plugin]
            if x is not None
        ]
        if len(incompatibles) > 1:
            raise errors.AnsibleError(
                "with_(plugin), and first_available_file are mutually incompatible in a single task"
            )

        # make first_available_file accessable to Runner code
        if self.first_available_file:
            self.module_vars[
                'first_available_file'] = self.first_available_file

        # make the lookup plugin settings visible to Runner code too
        if self.items_lookup_plugin is not None:
            self.module_vars['items_lookup_plugin'] = self.items_lookup_plugin
            self.module_vars['items_lookup_terms'] = self.items_lookup_terms

        # allow runner to see delegate_to option
        self.module_vars['delegate_to'] = self.delegate_to

        # make ignore_errors accessable to Runner code
        self.module_vars['ignore_errors'] = self.ignore_errors

        # tags allow certain parts of a playbook to be run without running the whole playbook
        apply_tags = ds.get('tags', None)
        if apply_tags is not None:
            if type(apply_tags) in [str, unicode]:
                self.tags.append(apply_tags)
            elif type(apply_tags) == list:
                self.tags.extend(apply_tags)
        self.tags.extend(import_tags)

        # 'when' is the newer syntax and is compiled down to an only_if
        # expression; specifying both is an error
        if self.when is not None:
            if self.only_if != 'True':
                raise errors.AnsibleError(
                    'when obsoletes only_if, only use one or the other')
            self.only_if = utils.compile_when_to_only_if(self.when)

        # AND any externally supplied conditions onto this task's condition
        if additional_conditions:
            self.only_if = '(' + self.only_if + ') and (' + ' ) and ('.join(
                additional_conditions) + ')'
Esempio n. 49
0
    def _executor_internal(self, host):
        ''' executes any module one or more times

        Builds the template namespace ("inject") for the host, resolves
        with_* lookup items, and dispatches to _executor_internal_inner
        either once or once per item.  Returns a ReturnData aggregating
        the per-item results when with_items is in play.
        '''

        host_variables = self.inventory.get_variables(host)
        host_connection = host_variables.get('ansible_connection',
                                             self.transport)
        if host_connection in ['paramiko', 'ssh']:
            port = host_variables.get('ansible_ssh_port', self.remote_port)
            if port is None:
                port = C.DEFAULT_REMOTE_PORT
        else:
            # fireball, local, etc
            port = self.remote_port

        # build the variable namespace visible to templates; later updates
        # take precedence over earlier ones
        inject = {}
        inject.update(host_variables)
        inject.update(self.module_vars)
        inject.update(self.setup_cache[host])
        inject['hostvars'] = HostVars(self.setup_cache, self.inventory)
        inject['group_names'] = host_variables.get('group_names', [])
        inject['groups'] = self.inventory.groups_list()
        if self.inventory.basedir() is not None:
            inject['inventory_dir'] = self.inventory.basedir()

        # allow with_foo to work in playbooks...
        items = None
        items_plugin = self.module_vars.get('items_lookup_plugin', None)
        if items_plugin is not None and items_plugin in utils.plugins.lookup_loader:
            items_terms = self.module_vars.get('items_lookup_terms', '')
            items_terms = utils.template(self.basedir, items_terms, inject)
            items = utils.plugins.lookup_loader.get(items_plugin,
                                                    runner=self,
                                                    basedir=self.basedir).run(
                                                        items_terms,
                                                        inject=inject)
            if type(items) != list:
                raise errors.AnsibleError(
                    "lookup plugins have to return a list: %r" % items)

            if len(items) and utils.is_list_of_strings(
                    items) and self.module_name in ['apt', 'yum']:
                # hack for apt and soon yum, with_items maps back into a single module call
                inject['item'] = ",".join(items)
                items = None

        # logic to decide how to run things depends on whether with_items is used

        if items is None:
            return self._executor_internal_inner(
                host,
                self.module_name,
                self.module_args,
                inject,
                port,
                complex_args=self.complex_args)
        elif len(items) > 0:
            # executing using with_items, so make multiple calls
            all_comm_ok = True
            all_changed = False
            all_failed = False
            results = []
            for item in items:
                inject['item'] = item
                result = self._executor_internal_inner(
                    host,
                    self.module_name,
                    self.module_args,
                    inject,
                    port,
                    complex_args=self.complex_args)
                results.append(result.result)
                if result.comm_ok == False:
                    # a communication failure aborts the remaining items
                    all_comm_ok = False
                    all_failed = True
                    break
                # inspect only the result just produced; the previous code
                # re-scanned the whole results list every iteration (O(n^2))
                # and shadowed the loop variable in the process -- the
                # aggregate flags computed here are identical
                res = result.result
                if res.get('changed') == True:
                    all_changed = True
                if (res.get('failed') == True) or (('rc' in res) and
                                                   (res['rc'] != 0)):
                    all_failed = True
            msg = 'All items completed'
            if all_failed:
                msg = "One or more items failed."
            rd_result = dict(failed=all_failed,
                             changed=all_changed,
                             results=results,
                             msg=msg)
            if not all_failed:
                # only expose 'failed' when something actually failed
                del rd_result['failed']
            return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)
        else:
            # the lookup returned an empty list: nothing to do, mark skipped
            self.callbacks.on_skipped(host, None)
            return ReturnData(host=host,
                              comm_ok=True,
                              result=dict(skipped=True))
Esempio n. 50
0
    def _executor_internal_inner(self,
                                 host,
                                 module_name,
                                 module_args,
                                 inject,
                                 port,
                                 is_chained=False):
        ''' decides how to invoke a module

        Evaluates the task conditional, resolves the real connection
        endpoint (honouring ansible_ssh_host/port and delegate_to), and
        opens the connection.  Returns a ReturnData early on skip or on
        connection failure.
        '''

        # special non-user/non-fact variables:
        # 'groups' variable is a list of host name in each group
        # 'hostvars' variable contains variables for each host name
        #  ... and is set elsewhere
        # 'inventory_hostname' is also set elsewhere
        inject['groups'] = self.inventory.groups_list()

        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(module_args) == dict:
            # NOTE(review): naive quoting -- values containing single quotes
            # would yield a malformed k='v' string; confirm upstream input
            for (k, v) in module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k, v)
            module_args = new_args

        # skip this host entirely when the task conditional is false
        conditional = utils.template(self.basedir, self.conditional, inject)
        if not utils.check_conditional(conditional):
            result = utils.jsonify(dict(skipped=True))
            self.callbacks.on_skipped(host, inject.get('item', None))
            return ReturnData(host=host, result=result)

        # inventory vars may point the connection at a different address/port
        conn = None
        actual_host = inject.get('ansible_ssh_host', host)
        actual_port = port
        if self.transport in ['paramiko', 'ssh']:
            # ansible_ssh_port only applies to SSH-like transports
            actual_port = inject.get('ansible_ssh_port', port)

        # the delegated host may have different SSH port configured, etc
        # and we need to transfer those, and only those, variables
        delegate_to = inject.get('delegate_to', None)
        if delegate_to is not None:
            delegate_to = utils.template(self.basedir, delegate_to, inject)
            inject = inject.copy()
            # strip the original host's ansible_*_interpreter settings; the
            # delegate's own ones (if any) are copied back in below
            interpreters = []
            for i in inject:
                if i.startswith("ansible_") and i.endswith("_interpreter"):
                    interpreters.append(i)
            for i in interpreters:
                del inject[i]
            port = C.DEFAULT_REMOTE_PORT
            try:
                delegate_info = inject['hostvars'][delegate_to]
                actual_host = delegate_info.get('ansible_ssh_host',
                                                delegate_to)
                actual_port = delegate_info.get('ansible_ssh_port', port)
                for i in delegate_info:
                    if i.startswith("ansible_") and i.endswith("_interpreter"):
                        inject[i] = delegate_info[i]
            except errors.AnsibleError:
                # delegate not found in inventory: treat it as a raw
                # hostname with the default port
                actual_host = delegate_to
                actual_port = port

        try:
            if actual_port is not None:
                actual_port = int(actual_port)
            conn = self.connector.connect(actual_host, actual_port)
            if delegate_to or host != actual_host:
                # remember which inventory host this connection stands in for
                conn.delegate = host

        except errors.AnsibleConnectionFailed, e:  # Python 2 except syntax
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)
Esempio n. 51
0
            except errors.AnsibleError:
                actual_host = delegate_to
                actual_port = port

        try:
            if actual_port is not None:
                actual_port = int(actual_port)
            conn = self.connector.connect(actual_host, actual_port)
            if delegate_to or host != actual_host:
                conn.delegate = host

        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)

        module_name = utils.template(self.basedir, module_name, inject)
        module_args = utils.template(self.basedir,
                                     module_args,
                                     inject,
                                     expand_lists=True)

        tmp = ''
        if self.module_name != 'raw':
            tmp = self._make_tmp_path(conn)
        result = None

        if module_name in utils.plugins.action_loader:
            if self.background != 0:
                raise errors.AnsibleError(
                    "async mode is not supported with the %s module" %
                    module_name)
Esempio n. 52
0
    def run(self, conn, tmp, module_name, module_args, inject):
        ''' handler for fetch operations '''

        # load up options
        options = utils.parse_kv(module_args)
        source = options.get('src', None)
        dest = options.get('dest', None)
        if source is None or dest is None:
            results = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, result=results)

        # apply templating to source argument
        source = utils.template(self.runner.basedir, source, inject)
        # apply templating to dest argument
        dest = utils.template(self.runner.basedir, dest, inject)

        # files are saved in dest dir, with a subdir for each host, then the filename
        dest = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir,
                                             dest), conn.host, source)
        dest = dest.replace("//", "/")

        # calculate md5 sum for the remote file
        remote_md5 = self.runner._remote_md5(conn, tmp, source)

        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_md5 == '0':
            result = dict(
                msg="unable to calculate the md5 sum of the remote file",
                file=source,
                changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '1':
            result = dict(
                msg="the remote file does not exist, not transferring, ignored",
                file=source,
                changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '2':
            result = dict(
                msg=
                "no read permission on remote file, not transferring, ignored",
                file=source,
                changed=False)
            return ReturnData(conn=conn, result=result)

        # calculate md5 sum for the local file
        local_md5 = utils.md5(dest)

        if remote_md5 != local_md5:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            conn.fetch_file(source, dest)
            new_md5 = utils.md5(dest)
            if new_md5 != remote_md5:
                result = dict(failed=True,
                              md5sum=new_md5,
                              msg="md5 mismatch",
                              file=source)
                return ReturnData(conn=conn, result=result)
            result = dict(changed=True, md5sum=new_md5)
            return ReturnData(conn=conn, result=result)
        else:
            result = dict(changed=False, md5sum=local_md5, file=source)
            return ReturnData(conn=conn, result=result)
Esempio n. 53
0
        actual_host = host
        try:
            delegate_to = inject.get('delegate_to', None)
            alternative_host = inject.get('ansible_ssh_host', None)
            if delegate_to is not None:
                actual_host = delegate_to
            elif alternative_host is not None:
                actual_host = alternative_host
            conn = self.connector.connect(actual_host, port)
            if delegate_to is not None or alternative_host is not None:
                conn._delegate_for = host
        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)

        module_name = utils.template(self.basedir, module_name, inject)
        module_args = utils.template(self.basedir, module_args, inject)

        tmp = ''
        if self.module_name != 'raw':
            tmp = self._make_tmp_path(conn)
        result = None

        handler = self.action_plugins.get(module_name, None)
        if handler:
            if self.background != 0:
                raise errors.AnsibleError("async mode is not supported with the %s module" % module_name)
            result = handler.run(conn, tmp, module_name, module_args, inject)
        else:
            if self.background == 0:
                result = self.action_plugins['normal'].run(conn, tmp, module_name, module_args, inject)
        inject['group_names'] = host_variables.get('group_names', [])
        inject['groups'] = self.inventory.groups_list()
<<<<<<< HEAD
        inject['vars'] = self.module_vars
        inject['environment'] = self.environment
=======
        if self.inventory.basedir() is not None:
            inject['inventory_dir'] = self.inventory.basedir()
>>>>>>> remote

        # allow with_foo to work in playbooks...
        items = None
        items_plugin = self.module_vars.get('items_lookup_plugin', None)
        if items_plugin is not None and items_plugin in utils.plugins.lookup_loader:
            items_terms = self.module_vars.get('items_lookup_terms', '')
            items_terms = utils.template(self.basedir, items_terms, inject)
            items = utils.plugins.lookup_loader.get(items_plugin, runner=self, basedir=self.basedir).run(items_terms, inject=inject)
            if type(items) != list:
                raise errors.AnsibleError("lookup plugins have to return a list: %r" % items)

            if len(items) and utils.is_list_of_strings(items) and self.module_name in [ 'apt', 'yum' ]:
                # hack for apt and soon yum, with_items maps back into a single module call
                inject['item'] = ",".join(items)
                items = None

        # logic to decide how to run things depends on whether with_items is used

        if items is None:
            return self._executor_internal_inner(host, self.module_name, self.module_args, inject, port, complex_args=self.complex_args)
        elif len(items) > 0:
            # executing using with_items, so make multiple calls
Esempio n. 55
0
        inject = self.setup_cache.get(conn.host,{})
        conditional = utils.double_template(self.conditional, inject)
        if not eval(conditional):
            return [ utils.smjson(dict(skipped=True)), None, 'skipped' ]

        host_variables = self.inventory.get_variables(conn.host, self.extra_vars)
        inject.update(host_variables)

        if self.module_name == 'setup':
            args = self._add_setup_vars(inject, args)
            args = self._add_setup_metadata(args)

        if type(args) == dict:
           args = utils.bigjson(args)
        args = utils.template(args, inject)

        module_name_tail = remote_module_path.split("/")[-1]

        argsfile = self._transfer_str(conn, tmp, 'arguments', args)
        if async_jid is None:
            cmd = "%s %s" % (remote_module_path, argsfile)
        else:
            cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])

        res, err = self._exec_command(conn, cmd, tmp, sudoable=True)
        client_executed_str = "%s %s" % (module_name_tail, args.strip())
        return ( res, err, client_executed_str )

    # *****************************************************
Esempio n. 56
0
    def _load_playbook_from_file(self, path, vars=None):
        '''
        Run top-level error checking on a playbook file and allow it to
        include other playbooks (optionally repeated through with_*
        lookup plugins on the include).

        path -- filesystem path of the playbook YAML file
        vars -- variables made available for templating inside includes;
                a fresh dict is used when omitted (the former mutable
                default {} was shared across calls)

        Returns a tuple (accumulated_plays, play_basedirs): two parallel
        lists pairing each loaded play with the basedir it came from.

        Raises errors.AnsibleError on malformed playbook structure or an
        unknown lookup plugin.
        '''

        # was `vars={}`: a mutable default is evaluated once at def time
        # and mutations would leak between calls
        if vars is None:
            vars = {}

        playbook_data = utils.parse_yaml_from_file(path)
        accumulated_plays = []
        play_basedirs = []

        if type(playbook_data) != list:
            raise errors.AnsibleError(
                "parse error: playbooks must be formatted as a YAML list")

        basedir = os.path.dirname(path) or '.'
        utils.plugins.push_basedir(basedir)
        for play in playbook_data:
            if type(play) != dict:
                raise errors.AnsibleError(
                    "parse error: each play in a playbook must be a YAML dictionary (hash), received: %s"
                    % play)
            if 'include' in play:
                tokens = shlex.split(play['include'])

                # a single empty item means "include once"; a with_*
                # lookup plugin below replaces this with the real items
                items = ['']
                for k in play.keys():
                    if not k.startswith("with_"):
                        # these are the only keys allowed to be mixed
                        # with playbook includes
                        if k in ("include", "vars"):
                            continue
                        else:
                            raise errors.AnsibleError(
                                "parse error: playbook includes cannot be used with other directives: %s"
                                % play)
                    plugin_name = k[5:]
                    if plugin_name not in utils.plugins.lookup_loader:
                        raise errors.AnsibleError(
                            "cannot find lookup plugin named %s for usage in with_%s"
                            % (plugin_name, plugin_name))
                    terms = utils.template(basedir, play[k], vars)
                    items = utils.plugins.lookup_loader.get(plugin_name,
                                                            basedir=basedir,
                                                            runner=None).run(
                                                                terms,
                                                                inject=vars)

                for item in items:
                    incvars = vars.copy()
                    incvars['item'] = item
                    # merge play-level vars (dict or list-of-dicts form)
                    if 'vars' in play:
                        if isinstance(play['vars'], dict):
                            incvars.update(play['vars'])
                        elif isinstance(play['vars'], list):
                            for v in play['vars']:
                                incvars.update(v)
                    # key=value pairs on the include line override play
                    # vars, templated against what we have so far
                    for t in tokens[1:]:
                        (k, v) = t.split("=", 1)
                        incvars[k] = utils.template(basedir, v, incvars)
                    included_path = utils.path_dwim(basedir, tokens[0])
                    (plays, basedirs) = self._load_playbook_from_file(
                        included_path, incvars)
                    # push include-time vars down into each included play
                    # so they are visible during play execution
                    for p in plays:
                        if 'vars' not in p:
                            p['vars'] = {}
                        if isinstance(p['vars'], dict):
                            p['vars'].update(incvars)
                        elif isinstance(p['vars'], list):
                            # was `dict(k=v)`, which built {'k': v} --
                            # every variable got the literal key 'k'
                            p['vars'].extend(
                                [{k: v} for (k, v) in incvars.iteritems()])
                    accumulated_plays.extend(plays)
                    play_basedirs.extend(basedirs)
            else:
                accumulated_plays.append(play)
                play_basedirs.append(basedir)

        return (accumulated_plays, play_basedirs)