Ejemplo n.º 1
0
    def run(self,
            conn,
            tmp_path,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        ''' handler for file transfer operations '''
        # NOTE(review): this snippet is truncated -- the function continues
        # beyond what is visible here.

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source = options.get('src', None)
        content = options.get('content', None)
        dest = options.get('dest', None)
        raw = utils.boolean(options.get('raw', 'no'))
        force = utils.boolean(options.get('force', 'yes'))

        # validation: either src/content (or a first_available_file list in
        # the injected vars) plus dest must be supplied, and src/content may
        # not both be given
        if (source is None and content is None
                and not 'first_available_file' in inject) or dest is None:
            result = dict(failed=True,
                          msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)
        elif (source is not None
              or 'first_available_file' in inject) and content is not None:
            result = dict(failed=True,
                          msg="src and content are mutually exclusive")
            return ReturnData(conn=conn, result=result)

        # Check if the source ends with a "/"
        # (presumably rsync-like directory-contents semantics -- confirm in
        # the truncated remainder of the function)
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if type(content) is dict:
                    content_tempfile = self._create_content_tempfile(
                        json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception, err:
                result = dict(failed=True,
                              msg="could not write content temp file: %s" %
                              err)
                return ReturnData(conn=conn, result=result)
Ejemplo n.º 2
0
Archivo: copy.py Proyecto: jmjf/ansible
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''
        # NOTE(review): snippet appears truncated; the function continues
        # beyond this excerpt.

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        content = options.get('content', None)
        dest    = options.get('dest', None)
        raw     = utils.boolean(options.get('raw', 'no'))
        force   = utils.boolean(options.get('force', 'yes'))

        # validation: need src/content (or first_available_file) plus dest,
        # and src/content are mutually exclusive
        if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
            result=dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)
        elif (source is not None or 'first_available_file' in inject) and content is not None:
            result=dict(failed=True, msg="src and content are mutually exclusive")
            return ReturnData(conn=conn, result=result)

        # remember whether src carried a trailing slash
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in inject.get('first_available_file'):
                fn_orig = fn
                # template the candidate filename, then resolve it relative
                # to the play dir, falling back to the role's files/ dir
                fnt = template.template(self.runner.basedir, fn, inject)
                fnd = utils.path_dwim(self.runner.basedir, fnt)
                if not os.path.exists(fnd) and '_original_file' in inject:
                    fnd = utils.path_dwim_relative(inject['_original_file'], 'files', fnt, self.runner.basedir, check=False)
                if os.path.exists(fnd):
                    source = fnd
                    found = True
                    break
            if not found:
                results=dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, result=results)
        elif content is not None:
            # inline content: spill it to a local temp file and treat that
            # file as the source from here on
            fd, tmp_content = tempfile.mkstemp()
            f = os.fdopen(fd, 'w')
            try:
                f.write(content)
            except Exception, err:
                os.remove(tmp_content)
                result = dict(failed=True, msg="could not write content temp file: %s" % err)
                return ReturnData(conn=conn, result=result)
            f.close()
            source = tmp_content
Ejemplo n.º 3
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''
        # NOTE(review): this excerpt is cut off before the function ends.

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        content = options.get('content', None)
        dest    = options.get('dest', None)
        raw     = utils.boolean(options.get('raw', 'no'))
        force   = utils.boolean(options.get('force', 'yes'))

        # src/content (or a first_available_file list) and dest are
        # required; src and content cannot be combined
        if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
            result=dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)
        elif (source is not None or 'first_available_file' in inject) and content is not None:
            result=dict(failed=True, msg="src and content are mutually exclusive")
            return ReturnData(conn=conn, result=result)

        # note whether the source path ends in "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in inject.get('first_available_file'):
                fn_orig = fn
                # render the name through the template engine and resolve it
                # against the play dir (then the role's files/ dir)
                fnt = template.template(self.runner.basedir, fn, inject)
                fnd = utils.path_dwim(self.runner.basedir, fnt)
                if not os.path.exists(fnd) and '_original_file' in inject:
                    fnd = utils.path_dwim_relative(inject['_original_file'], 'files', fnt, self.runner.basedir, check=False)
                if os.path.exists(fnd):
                    source = fnd
                    found = True
                    break
            if not found:
                results=dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, result=results)
        elif content is not None:
            # write inline content to a local temp file and use it as src
            fd, tmp_content = tempfile.mkstemp()
            f = os.fdopen(fd, 'w')
            try:
                f.write(content)
            except Exception, err:
                os.remove(tmp_content)
                result = dict(failed=True, msg="could not write content temp file: %s" % err)
                return ReturnData(conn=conn, result=result)
            f.close()
            source = tmp_content
Ejemplo n.º 4
0
    def run(self, conn, tmp_path, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''
        # NOTE(review): snippet is truncated; the function continues past
        # this excerpt.

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        content = options.get('content', None)
        dest    = options.get('dest', None)
        raw     = utils.boolean(options.get('raw', 'no'))
        force   = utils.boolean(options.get('force', 'yes'))

        # content with newlines is going to be escaped to safely load in yaml
        # now we need to unescape it so that the newlines are evaluated properly
        # when writing the file to disk
        if content:
            if isinstance(content, unicode):
                try:
                    content = content.decode('unicode-escape')
                except UnicodeDecodeError:
                    pass

        # validation: src/content (or first_available_file) plus dest are
        # required, and src/content are mutually exclusive
        if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
            result=dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)
        elif (source is not None or 'first_available_file' in inject) and content is not None:
            result=dict(failed=True, msg="src and content are mutually exclusive")
            return ReturnData(conn=conn, result=result)

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if type(content) is dict:
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception, err:
                result = dict(failed=True, msg="could not write content temp file: %s" % err)
                return ReturnData(conn=conn, result=result)
Ejemplo n.º 5
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        """Debug action: echo a message or render a variable for display."""
        args = {}
        if complex_args:
            args.update(complex_args)

        # Strip the spaces inside {{ }} so an uninterpolated variable does
        # not produce a confusing message.
        module_args = module_args.replace("{{ ", "{{").replace(" }}", "}}")
        args.update(utils.parse_kv(module_args))

        if 'msg' not in args and 'var' not in args:
            args['msg'] = 'Hello world!'

        result = {}
        if 'msg' in args:
            should_fail = 'fail' in args and utils.boolean(args['fail'])
            result = dict(failed=True, msg=args['msg']) if should_fail else dict(msg=args['msg'])
        elif 'var' in args:
            rendered = template.template(None, "{{ %s }}" % args['var'], inject)
            result[args['var']] = rendered

        # debug output must always be shown, regardless of verbosity
        result['verbose_always'] = True

        return ReturnData(conn=conn, result=result)
Ejemplo n.º 6
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' Handler for file load and template operations. '''

        options = self._load_options(module_args, complex_args)
        source = options.get('src', None)
        remote_src = utils.boolean(options.get('remote_src', False))

        if not remote_src:
            # Local source: .j2 files get rendered through the template
            # engine, anything else is read verbatim.
            is_template = source.endswith('.j2')
            subdir = 'templates' if is_template else 'files'
            filepath = self._resolve_file_path(source, subdir, inject)
            if is_template:
                content = template.template_from_file(
                    self.runner.basedir, filepath, inject,
                    vault_password=self.runner.vault_pass)
            else:
                with open(filepath, 'r') as fh:
                    content = fh.read()

            # hand the resolved content to the module as a quoted argument
            module_args = "%s content=%s" % (module_args, pipes.quote(content))

        # propagate checkmode to module
        if self.runner.noop_on_check(inject):
            module_args += " CHECKMODE=True"

        return self.runner._execute_module(
            conn, tmp, 'postgresql_exec', module_args,
            inject=inject, complex_args=complex_args)
Ejemplo n.º 7
0
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        """Debug action: report a message, optionally marking the task failed."""
        args = {}
        if complex_args:
            args.update(complex_args)

        # collapse "{{ var }}" to "{{var}}" so a failed interpolation is
        # less confusing in the output
        module_args = module_args.replace("{{ ", "{{").replace(" }}", "}}")
        args.update(utils.parse_kv(module_args))

        if 'msg' not in args:
            args['msg'] = 'Hello world!'

        wants_failure = 'fail' in args and utils.boolean(args['fail'])
        if wants_failure:
            result = dict(failed=True, msg=args['msg'])
        else:
            result = dict(msg=args['msg'])

        return ReturnData(conn=conn, result=result)
Ejemplo n.º 8
0
    def run(self, terms, inject=None, **kwargs):
        """Lookup: return the first file from *terms* that exists on disk.

        Terms may be bare filenames or dicts carrying 'files', 'paths' and
        'skip' keys.  Returns [path] for the first candidate that exists,
        [] when nothing matched and skip was requested, else [None].
        """
        terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

        anydict = False
        skip = False

        # detect whether any term uses the dict form
        for term in terms:
            if isinstance(term, dict):
                anydict = True

        # BUGFIX: the accumulator must be created once, before the loop --
        # previously it was re-created inside each dict term (dropping the
        # candidates of earlier terms), and a bare term mixed in with dict
        # terms overwrote the list instead of being appended.
        total_search = []
        if anydict:
            for term in terms:
                if isinstance(term, dict):
                    files = term.get('files', [])
                    paths = term.get('paths', [])
                    skip  = utils.boolean(term.get('skip', False))

                    # 'files' may be a comma/semicolon separated string
                    filelist = files
                    if isinstance(files, basestring):
                        files = files.replace(',', ' ')
                        files = files.replace(';', ' ')
                        filelist = files.split(' ')

                    # 'paths' additionally treats ':' as a separator
                    pathlist = paths
                    if paths:
                        if isinstance(paths, basestring):
                            paths = paths.replace(',', ' ')
                            paths = paths.replace(':', ' ')
                            paths = paths.replace(';', ' ')
                            pathlist = paths.split(' ')

                    if not pathlist:
                        total_search = filelist
                    else:
                        for path in pathlist:
                            for fn in filelist:
                                total_search.append(os.path.join(path, fn))
                else:
                    # a bare filename mixed with dict terms is a candidate too
                    total_search.append(term)
        else:
            total_search = terms

        # return the first candidate that resolves to an existing path
        for fn in total_search:
            path = utils.path_dwim(self.basedir, fn)
            if os.path.exists(path):
                return [path]

        # nothing found
        if skip:
            return []
        return [None]
Ejemplo n.º 9
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' Handler for file load and template operations. '''

        options = self._load_options(module_args, complex_args)
        source = options.get('src', None)
        remote_src = utils.boolean(options.get('remote_src', False))

        if not remote_src:
            if source.endswith('.j2'):
                # Jinja2 template: render it with the runner's variables
                path = self._resolve_file_path(source, 'templates', inject)
                content = template.template_from_file(
                    self.runner.basedir, path, inject,
                    vault_password=self.runner.vault_pass)
            else:
                # plain file: read it verbatim
                path = self._resolve_file_path(source, 'files', inject)
                with open(path, 'r') as src_file:
                    content = src_file.read()

            # pass the content along as a safely quoted module argument
            module_args = "%s content=%s" % (module_args, pipes.quote(content))

        # propagate checkmode to module
        if self.runner.noop_on_check(inject):
            module_args += " CHECKMODE=True"

        return self.runner._execute_module(
            conn, tmp, 'postgresql_exec', module_args,
            inject=inject, complex_args=complex_args)
Ejemplo n.º 10
0
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        """Action handler for 'patch': ship a local patch file to the remote
        host when needed, then invoke the patch module there."""
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        # dest is accepted here but consumed by the module itself
        dest = options.get('dest', None)
        remote_src = utils.boolean(options.get('remote_src', 'no'))

        if src is None:
            return ReturnData(conn=conn, comm_ok=False,
                              result=dict(failed=True, msg="src is required"))

        # the patch already lives on the remote side: nothing to transfer
        if remote_src:
            return self.runner._execute_module(
                conn, tmp, 'patch', module_args,
                inject=inject, complex_args=complex_args)

        # Source is local: resolve it relative to the role/play directory.
        if '_original_file' in inject:
            src = utils.path_dwim_relative(
                inject['_original_file'], 'files', src, self.runner.basedir)
        else:
            src = utils.path_dwim(self.runner.basedir, src)

        if tmp is None or "-tmp-" not in tmp:
            tmp = self.runner._make_tmp_path(conn)

        # copy the patch file into the remote temp directory
        tmp_src = conn.shell.join_path(tmp, os.path.basename(src))
        conn.put_file(src, tmp_src)

        # make the upload readable when escalating to a non-root user
        if self.runner.become and self.runner.become_user != 'root':
            if not self.runner.noop_on_check(inject):
                self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)

        new_module_args = dict(src=tmp_src)
        if self.runner.noop_on_check(inject):
            new_module_args['CHECKMODE'] = True

        module_args = utils.merge_module_args(module_args, new_module_args)

        return self.runner._execute_module(
            conn, tmp, 'patch', module_args,
            inject=inject, complex_args=complex_args)
Ejemplo n.º 11
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        """Debug action: print a message, or template and show a bare variable."""
        args = {}
        if complex_args:
            args.update(complex_args)

        # tighten "{{ x }}" to "{{x}}" so uninterpolated variables read better
        module_args = module_args.replace("{{ ", "{{").replace(" }}", "}}")
        args.update(utils.parse_kv(module_args))

        if 'msg' not in args and 'var' not in args:
            args['msg'] = 'Hello world!'

        result = {}
        if 'msg' in args:
            failing = 'fail' in args and utils.boolean(args['fail'])
            result = dict(failed=True, msg=args['msg']) if failing else dict(msg=args['msg'])
        elif 'var' in args and not utils.LOOKUP_REGEX.search(args['var']):
            # only template plain variables; lookup expressions are skipped
            value = template.template(self.basedir, args['var'], inject, convert_bare=True)
            result[args['var']] = value

        # debug output is always shown, regardless of verbosity level
        result['verbose_always'] = True

        return ReturnData(conn=conn, result=result)
Ejemplo n.º 12
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        """Action handler for 'assemble': concatenate local fragments and
        push the result to the remote host when its checksum differs.

        Returns the ReturnData produced by the copy/file module run.
        """
        # load up options
        options  = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)
        delimiter = options.get('delimiter', None)
        remote_src = utils.boolean(options.get('remote_src', 'yes'))

        if src is None or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        if remote_src:
            # fragments already live on the target -- run the module there
            return self.runner._execute_module(conn, tmp, 'assemble', module_args, inject=inject, complex_args=complex_args)
        elif '_original_file' in inject:
            src = utils.path_dwim_relative(inject['_original_file'], 'files', src, self.runner.basedir)
        else:
            # the source is local, so expand it here
            src = os.path.expanduser(src)

        # Does all work assembling the file
        path = self._assemble_from_fragments(src, delimiter)

        pathmd5 = utils.md5s(path)
        remote_md5 = self.runner._remote_md5(conn, tmp, dest)

        if pathmd5 != remote_md5:
            # Content differs: transfer the assembled file and run 'copy'.
            # BUGFIX: use a context manager instead of file(path).read(),
            # which leaked the file handle.
            with open(path) as assembled:
                resultant = assembled.read()
            if self.runner.diff:
                dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
                if 'content' in dest_result.result:
                    dest_contents = dest_result.result['content']
                    if dest_result.result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise Exception("unknown encoding, failed: %s" % dest_result.result)
                    # NOTE(review): dest_contents is never used afterwards --
                    # presumably it was meant to appear in the diff below as
                    # 'before'; confirm against the copy action plugin.
            xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)

            # fix file permissions when the copy is done as a different user
            if self.runner.sudo and self.runner.sudo_user != 'root':
                self.runner._low_level_exec_command(conn, "chmod a+r %s" % xfered, tmp)

            # run the copy module
            module_args = "%s src=%s dest=%s original_basename=%s" % (module_args, pipes.quote(xfered), pipes.quote(dest), pipes.quote(os.path.basename(src)))

            if self.runner.noop_on_check(inject):
                return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=src, after=resultant))
            else:
                res = self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject)
                res.diff = dict(after=resultant)
                return res
        else:
            # Checksums match: only run the 'file' module to enforce
            # attributes.  BUGFIX: this branch referenced 'xfered', which is
            # only assigned in the branch above, raising a NameError whenever
            # the content was already up to date; use the locally assembled
            # path instead.
            module_args = "%s src=%s dest=%s original_basename=%s" % (module_args, pipes.quote(path), pipes.quote(dest), pipes.quote(os.path.basename(src)))
            return self.runner._execute_module(conn, tmp, 'file', module_args, inject=inject)
Ejemplo n.º 13
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        dest    = options.get('dest', None)
        copy    = utils.boolean(options.get('copy', 'yes'))

        # both the archive (src) and the destination directory are required
        if source is None or dest is None:
            result = dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)

        dest = os.path.expanduser(dest) # CCTODO: Fix path for Windows hosts.
        source = template.template(self.runner.basedir, os.path.expanduser(source), inject)
        if copy:
            # resolve the local archive path relative to the role/play dir
            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # the error message implies the md5 sentinel '3' marks "dest is a
        # directory" in runner._remote_md5 -- TODO confirm against its code
        remote_md5 = self.runner._remote_md5(conn, tmp, dest)
        if remote_md5 != '3':
            result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
            return ReturnData(conn=conn, result=result)

        if copy:
            # transfer the file to a remote tmp location
            # (assumes tmp already ends with a path separator -- confirm)
            tmp_src = tmp + 'source'
            conn.put_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side
        # fix file permissions when the copy is done as a different user
        if copy:
            if self.runner.sudo and self.runner.sudo_user != 'root' or self.runner.su and self.runner.su_user != 'root':
                if not self.runner.noop_on_check(inject):
                    self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)
            # Build temporary module_args.
            new_module_args = dict(
                src=tmp_src,
                original_basename=os.path.basename(source),
            )

            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True

            module_args = utils.merge_module_args(module_args, new_module_args)
        else:
            # no local copy step: point the module at the (remote) source
            module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source)))
            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                module_args += " CHECKMODE=True"
        return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
Ejemplo n.º 14
0
    def run(self, terms, inject=None, **kwargs):
        """Lookup: return the first existing file from the candidate terms.

        Terms may be bare filenames or dicts carrying 'files', 'paths' and
        'skip'.  Returns [path] on the first hit, [] when nothing matched
        and skip was requested, else [None].
        """
        terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

        result = None
        anydict = False
        skip = False

        # detect whether any term uses the dict form
        for term in terms:
            if isinstance(term, dict):
                anydict = True

        total_search = []
        if anydict:
            for term in terms:
                if isinstance(term, dict):
                    files = term.get('files', [])
                    paths = term.get('paths', [])
                    skip  = utils.boolean(term.get('skip', False))

                    # 'files' may be a comma/semicolon separated string
                    filelist = files
                    if isinstance(files, basestring):
                        files = files.replace(',', ' ')
                        files = files.replace(';', ' ')
                        filelist = files.split(' ')

                    # 'paths' additionally treats ':' as a separator
                    pathlist = paths
                    if paths:
                        if isinstance(paths, basestring):
                            paths = paths.replace(',', ' ')
                            paths = paths.replace(':', ' ')
                            paths = paths.replace(';', ' ')
                            pathlist = paths.split(' ')

                    # NOTE(review): when no paths are given this *overwrites*
                    # the accumulated candidate list rather than extending
                    # it -- confirm whether that is intended
                    if not pathlist:
                        total_search = filelist
                    else:
                        for path in pathlist:
                            for fn in filelist:
                                f = os.path.join(path, fn)
                                total_search.append(f)
                else:
                    total_search.append(term)
        else:
            total_search = terms

        result = None
        for fn in total_search:
            path = utils.path_dwim(self.basedir, fn)
            if os.path.exists(path):
                return [path]


        # nothing matched: honour 'skip' from the last dict term seen
        if not result:
            if skip:
                return []
            else:
                return [None]
Ejemplo n.º 15
0
def workaround_enabled(workarounds, *bugs, **kw):
    """Return True only when every bug id in *bugs* is enabled in the
    workarounds mapping; anything that is not a dict disables them all."""
    if not isinstance(workarounds, dict):
        return False

    # every requested bug must be explicitly switched on
    return all(
        utils.boolean(workarounds.get(bug_id, {}).get('enabled', False))
        for bug_id in bugs
    )
Ejemplo n.º 16
0
 def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
     """Recursively resolve role dependencies onto dep_stack.

     NOTE(review): passed_vars uses a mutable default ({}); it appears to
     be only read here, but confirm before relying on that.
     """
     # this number is arbitrary, but it seems sane
     if level > 20:
         raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
     for role in roles:
         role_path,role_vars = self._get_role_path(role)
         role_vars = utils.combine_vars(role_vars, passed_vars)
         # merge the role's vars/main (if present) into its variables
         vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
         vars_data = {}
         if os.path.isfile(vars):
             vars_data = utils.parse_yaml_from_file(vars)
             if vars_data:
                 role_vars = utils.combine_vars(vars_data, role_vars)
         # defaults/main supplies the role's default variables
         defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
         defaults_data = {}
         if os.path.isfile(defaults):
             defaults_data = utils.parse_yaml_from_file(defaults)
         # the meta directory contains the yaml that should
         # hold the list of dependencies (if any)
         meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
         if os.path.isfile(meta):
             data = utils.parse_yaml_from_file(meta)
             if data:
                 dependencies = data.get('dependencies',[])
                 for dep in dependencies:
                     (dep_path,dep_vars) = self._get_role_path(dep)
                     # skip dependencies already included, unless the
                     # dependency's own meta sets allow_duplicates
                     meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                     if os.path.isfile(meta):
                         meta_data = utils.parse_yaml_from_file(meta)
                         if meta_data:
                             allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))
                             if not allow_dupes:
                                 if dep in self.included_roles:
                                     continue
                                 else:
                                     self.included_roles.append(dep)
                     # combine passed vars, the parent role's vars and the
                     # dependency's own vars/main + defaults
                     dep_vars = utils.combine_vars(passed_vars, dep_vars)
                     dep_vars = utils.combine_vars(role_vars, dep_vars)
                     vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                     vars_data = {}
                     if os.path.isfile(vars):
                         vars_data = utils.parse_yaml_from_file(vars)
                         if vars_data:
                             dep_vars = utils.combine_vars(vars_data, dep_vars)
                     defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                     dep_defaults_data = {}
                     if os.path.isfile(defaults):
                         dep_defaults_data = utils.parse_yaml_from_file(defaults)
                     if 'role' in dep_vars:
                         del dep_vars['role']
                     # recurse into the dependency's own dependencies first
                     self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
                     dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])
         # only add the current role when we're at the top level,
         # otherwise we'll end up in a recursive loop 
         if level == 0:
             dep_stack.append([role,role_path,role_vars,defaults_data])
     return dep_stack
Ejemplo n.º 17
0
def workaround_enabled(workarounds, *bugs, **kw):
    """Return True only when every bug id in *bugs* has an enabled workaround.

    ``workarounds`` must be a dict mapping bug ids to option dicts; any
    other value (including None) disables everything and yields False.
    With no bug ids given, the answer is vacuously True.
    """
    if not isinstance(workarounds, dict):
        return False
    # A single missing or falsy 'enabled' flag vetoes the whole set.
    return all(
        utils.boolean(workarounds.get(bug_id, {}).get('enabled', False))
        for bug_id in bugs
    )
Ejemplo n.º 18
0
    def run(self, conn, tmp, module_name, module_args, inject):
        '''Emit a debug message, optionally flagging the task as failed.'''
        params = utils.parse_kv(module_args)
        # Default greeting when the caller supplied no msg= argument.
        params.setdefault('msg', 'Hello world!')

        # A truthy fail= flag turns the message into a task failure.
        wants_failure = 'fail' in params and utils.boolean(params['fail'])
        if wants_failure:
            payload = dict(failed=True, msg=params['msg'])
        else:
            payload = dict(msg=str(params['msg']))

        return ReturnData(conn=conn, result=payload)
Ejemplo n.º 19
0
    def run(self, conn, tmp, module_name, module_args, inject):
        '''Return a message to the user; fail the task when fail= is truthy.'''
        args = utils.parse_kv(module_args)
        if 'msg' not in args:
            # no message supplied -- fall back to a friendly default
            args['msg'] = 'Hello world!'

        failed = 'fail' in args and utils.boolean(args['fail'])
        result = (dict(failed=True, msg=args['msg'])
                  if failed
                  else dict(msg=str(args['msg'])))

        return ReturnData(conn=conn, result=result)
Ejemplo n.º 20
0
    def run(self, terms, variables=None, **kwargs):
        '''Return the first file from *terms* found via the lookup paths.

        Terms may be plain names or dicts carrying "files"/"paths"
        (comma/semicolon/colon separated strings allowed) plus a "skip"
        flag. Returns [match] on success, [] when skip is truthy,
        otherwise [None].
        '''
        skip = False
        has_dict_term = any(isinstance(t, dict) for t in terms)

        if not has_dict_term:
            total_search = terms
        else:
            total_search = []
            for term in terms:
                if not isinstance(term, dict):
                    total_search.append(term)
                    continue
                files = term.get("files", [])
                paths = term.get("paths", [])
                skip = utils.boolean(term.get("skip", False))

                # separator-delimited strings become lists of filenames
                filelist = files
                if isinstance(files, basestring):
                    files = files.replace(",", " ").replace(";", " ")
                    filelist = files.split(" ")

                pathlist = paths
                if paths and isinstance(paths, basestring):
                    paths = paths.replace(",", " ").replace(":", " ").replace(";", " ")
                    pathlist = paths.split(" ")

                if not pathlist:
                    # no directories given: search the bare filenames
                    total_search = filelist
                else:
                    for directory in pathlist:
                        for name in filelist:
                            total_search.append(os.path.join(directory, name))

        lookup_dirs = self.get_paths(variables)
        for candidate in total_search:
            for directory in lookup_dirs:
                full_path = os.path.join(directory, candidate)
                if os.path.exists(full_path):
                    return [full_path]
        # nothing matched anywhere
        return [] if skip else [None]
Ejemplo n.º 21
0
    def run(self, terms, variables=None, **kwargs):
        '''Locate the first existing file described by *terms*.

        Plain string terms are used as-is; dict terms are expanded from
        their 'files'/'paths' entries, with 'skip' suppressing the
        [None] fallback when nothing at all is found.
        '''

        def _listify(text, separators):
            # turn a separator-delimited string into a list of tokens
            for sep in separators:
                text = text.replace(sep, ' ')
            return text.split(' ')

        skip = False
        dict_seen = False
        for entry in terms:
            if isinstance(entry, dict):
                dict_seen = True
                total_search = []

        if dict_seen:
            for entry in terms:
                if not isinstance(entry, dict):
                    total_search.append(entry)
                    continue
                files = entry.get('files', [])
                paths = entry.get('paths', [])
                skip = utils.boolean(entry.get('skip', False))

                filelist = _listify(files, (',', ';')) if isinstance(files, str) else files

                pathlist = paths
                if paths and isinstance(paths, str):
                    pathlist = _listify(paths, (',', ':', ';'))

                if not pathlist:
                    total_search = filelist
                else:
                    # every directory/file combination, in order
                    total_search.extend(
                        os.path.join(p, f) for p in pathlist for f in filelist)
        else:
            total_search = terms

        lookup_dirs = self.get_paths(variables)
        for candidate in total_search:
            for d in lookup_dirs:
                located = os.path.join(d, candidate)
                if os.path.exists(located):
                    return [located]
        return [] if skip else [None]
Ejemplo n.º 22
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''
        # Unarchive action plugin: validates that dest is an existing remote
        # directory, optionally uploads the archive from the controller
        # (copy=yes, the default), then delegates to the 'unarchive' module.

        # load up options
        # merge structured args with parsed k=v args; k=v wins on collisions
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        dest    = options.get('dest', None)
        copy    = utils.boolean(options.get('copy', 'yes'))  # copy=no: archive is already on the target

        if source is None or dest is None:
            result = dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)

        dest = os.path.expanduser(dest) # CCTODO: Fix path for Windows hosts.
        source = template.template(self.runner.basedir, os.path.expanduser(source), inject)
        if copy:
            # resolve the local source against the including role's files/ dir, if any
            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # _remote_md5 returns the sentinel string '3' when dest is a
        # directory (per the error message below); anything else fails.
        remote_md5 = self.runner._remote_md5(conn, tmp, dest)
        if remote_md5 != '3':
            result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
            return ReturnData(conn=conn, result=result)

        if copy:
            # transfer the file to a remote tmp location
            tmp_src = tmp + 'source'
            conn.put_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side
        # fix file permissions when the copy is done as a different user
        if copy:
            # escalating to a non-root user: staged file must be world-readable
            if self.runner.sudo and self.runner.sudo_user != 'root' or self.runner.su and self.runner.su_user != 'root':
                self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)
            # Build temporary module_args.
            new_module_args = dict(
                src=tmp_src,
                original_basename=os.path.basename(source),
            )
            module_args = utils.merge_module_args(module_args, new_module_args)
        else:
            # remote-side archive: only pass the basename hint along
            module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source)))
        return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
Ejemplo n.º 23
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        '''Ship a local patch file to the target and run the 'patch' module.'''

        # Merge structured args with the parsed k=v argument string.
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)  # parsed for parity; consumed by the module via module_args
        remote_src = utils.boolean(options.get('remote_src', 'no'))

        if src is None:
            return ReturnData(conn=conn, comm_ok=False,
                              result=dict(failed=True, msg="src is required"))

        # The patch already lives on the managed host: no transfer needed.
        if remote_src:
            return self.runner._execute_module(conn, tmp, 'patch', module_args,
                                               inject=inject, complex_args=complex_args)

        # Source is local: resolve it relative to the playbook/role layout.
        if '_original_file' in inject:
            src = utils.path_dwim_relative(inject['_original_file'], 'files', src,
                                           self.runner.basedir)
        else:
            src = utils.path_dwim(self.runner.basedir, src)

        staging_dir = self.runner._make_tmp_path(conn)
        staged_src = staging_dir + 'patch_source'
        conn.put_file(src, staged_src)

        # Escalating to a non-root user needs a world-readable staged file.
        become_nonroot = self.runner.become and self.runner.become_user != 'root'
        if become_nonroot and not self.runner.noop_on_check(inject):
            self.runner._remote_chmod(conn, 'a+r', staged_src, staging_dir)

        new_module_args = dict(src=staged_src)
        if self.runner.noop_on_check(inject):
            new_module_args['CHECKMODE'] = True
        module_args = utils.merge_module_args(module_args, new_module_args)

        data = self.runner._execute_module(conn, tmp, 'patch', module_args,
                                           inject=inject, complex_args=complex_args)
        if not C.DEFAULT_KEEP_REMOTE_FILES:
            self.runner._remove_tmp_path(conn, staging_dir)
        return data
Ejemplo n.º 24
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        '''Stage a local patch file on the target (unless remote_src=yes)
        and delegate the actual work to the 'patch' module.'''

        # merge structured args with parsed k=v args; k=v wins on collisions
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get("src", None)
        dest = options.get("dest", None)  # parsed but unused here; dest reaches the module inside module_args
        remote_src = utils.boolean(options.get("remote_src", "no"))

        if src is None:
            result = dict(failed=True, msg="src is required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        # patch already exists on the managed host: run the module as-is
        if remote_src:
            return self.runner._execute_module(
                conn, tmp, "patch", module_args, inject=inject, complex_args=complex_args
            )

        # Source is local
        # resolve relative to the including role's files/ dir when present
        if "_original_file" in inject:
            src = utils.path_dwim_relative(inject["_original_file"], "files", src, self.runner.basedir)
        else:
            src = utils.path_dwim(self.runner.basedir, src)

        # reuse the runner-provided tmp dir when it looks valid, else make one
        if tmp is None or "-tmp-" not in tmp:
            tmp = self.runner._make_tmp_path(conn)

        tmp_src = conn.shell.join_path(tmp, os.path.basename(src))
        conn.put_file(src, tmp_src)

        # when becoming an unprivileged user, the staged file must be readable by it
        if self.runner.become and self.runner.become_user != "root":
            if not self.runner.noop_on_check(inject):
                self.runner._remote_chmod(conn, "a+r", tmp_src, tmp)

        new_module_args = dict(src=tmp_src)

        # propagate check mode down to the module
        if self.runner.noop_on_check(inject):
            new_module_args["CHECKMODE"] = True

        module_args = utils.merge_module_args(module_args, new_module_args)

        return self.runner._execute_module(conn, tmp, "patch", module_args, inject=inject, complex_args=complex_args)
Ejemplo n.º 25
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''
        # Older unarchive action variant: same flow as the merge_module_args
        # version, but builds the final module_args as a plain k=v string.

        # load up options
        # merge structured args with parsed k=v args; k=v wins on collisions
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        dest    = options.get('dest', None)
        copy    = utils.boolean(options.get('copy', 'yes'))  # copy=no: archive is already on the target

        if source is None or dest is None:
            result = dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)

        dest = os.path.expanduser(dest)
        source = template.template(self.runner.basedir, os.path.expanduser(source), inject)
        if copy:
            # resolve the local source against the including role's files/ dir, if any
            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # _remote_md5 returns the sentinel string '3' when dest is a
        # directory (per the error message below); anything else fails.
        remote_md5 = self.runner._remote_md5(conn, tmp, dest)
        if remote_md5 != '3':
            result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
            return ReturnData(conn=conn, result=result)

        if copy:
            # transfer the file to a remote tmp location
            tmp_src = tmp + 'source'
            conn.put_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side
        # fix file permissions when the copy is done as a different user
        if copy:
            # sudo to a non-root user: staged file must be world-readable
            if self.runner.sudo and self.runner.sudo_user != 'root':
                self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)
            module_args = "%s src=%s original_basename=%s" % (module_args, pipes.quote(tmp_src), pipes.quote(os.path.basename(source)))
        else:
            module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source)))
        return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
Ejemplo n.º 26
0
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        '''Render a debug message, or evaluate a variable expression via var=.'''
        args = {}
        if complex_args:
            args.update(complex_args)

        # attempt to prevent confusing messages when the variable didn't interpolate
        module_args = module_args.replace("{{ ", "{{").replace(" }}", "}}")

        args.update(utils.parse_kv(module_args))

        if 'msg' not in args and 'var' not in args:
            # neither msg= nor var= supplied: fall back to a greeting
            args['msg'] = 'Hello world!'

        result = {}
        if 'msg' in args:
            if 'fail' in args and utils.boolean(args['fail']):
                result = dict(failed=True, msg=args['msg'])
            else:
                result = dict(msg=args['msg'])
        elif 'var' in args:
            # safe_eval with include_exceptions returns (value, exception)
            evaluated, exc = utils.safe_eval(args['var'],
                                             inject,
                                             include_exceptions=True,
                                             template_call=True)
            if exc is not None:
                evaluated = "failed to evaluate: %s" % str(exc)
            result[args['var']] = evaluated

        # force flag to make debug output module always verbose
        result['verbose_always'] = True

        return ReturnData(conn=conn, result=result)
Ejemplo n.º 27
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for running operations on master '''

        # start from the structured (complex) args, if any
        options = {}
        if complex_args:
            options.update(complex_args)

        # parse the k=v arguments and convert any special boolean
        # strings into proper booleans (issue #8629)
        parsed_args = utils.parse_kv(module_args)
        boolish = ('true', 'false', 'yes', 'no')
        for key, value in parsed_args.iteritems():
            if isinstance(value, basestring) and value.lower() in boolish:
                parsed_args[key] = utils.boolean(value)

        # parsed/normalized k=v values win over complex args
        options.update(parsed_args)

        return ReturnData(conn=conn, result=dict(ansible_facts=options))
Ejemplo n.º 28
0
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        '''Return a (possibly failing) debug message to the callback layer.'''
        merged = {}
        if complex_args:
            merged.update(complex_args)
        merged.update(utils.parse_kv(module_args))
        # default greeting when no msg= was given
        merged.setdefault('msg', 'Hello world!')

        if 'fail' in merged and utils.boolean(merged['fail']):
            outcome = dict(failed=True, msg=merged['msg'])
        else:
            outcome = dict(msg=str(merged['msg']))

        return ReturnData(conn=conn, result=outcome)
Ejemplo n.º 29
0
    def confirm(self, module, module_args):
        if not self.options.step:
            return True

        # print hosts
        callbacks.display("HOSTS:","bright blue")
        for host in self.selected:
            hostname = host.name if isinstance(host, Host) else host
            callbacks.display("\t%s" % hostname,"green")

        callbacks.display("\nSUMMARY: host_num[%d] module[%s] module_args[%s] options[%s]\n" % (len(self.selected), module, module_args, self.options),"bright blue")

        answer=False
        try:
            print "Do you confirm to execute?[y/N]:(default=No) ",
            # cmd module use raw_input to read user command by default, to avoid our answer here 'logged' into history,
            # use sys.stdin.readline instead of raw_input, see more at http://docs.python.org/2/library/cmd.html#cmd.Cmd.use_rawinput
            answer = sys.stdin.readline()[:-1]
        except:
            answer = False
        return utils.boolean(answer)
Ejemplo n.º 30
0
    def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
        '''Recursively resolve role dependencies into dep_stack.

        Walks each role's meta/main dependency list, merging vars/defaults
        with the precedence passed_vars < role vars < dep vars, and
        propagating tags and 'when' conditionals down to dependencies.
        Each dep_stack entry is [name, path, vars, defaults].
        NOTE(review): passed_vars has a mutable default; it looks read-only
        here (combine_vars returns new dicts) -- confirm before refactoring.
        '''
        # this number is arbitrary, but it seems sane
        if level > 20:
            raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
        for role in roles:
            role_path,role_vars = self._get_role_path(role)
            # caller-supplied vars lose to the role's own vars (merged below)
            role_vars = utils.combine_vars(passed_vars, role_vars)
            vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
            vars_data = {}
            if os.path.isfile(vars):
                vars_data = utils.parse_yaml_from_file(vars, vault_password=self.vault_password)
                if vars_data:
                    if not isinstance(vars_data, dict):
                        raise errors.AnsibleError("vars from '%s' are not a dict" % vars)
                    role_vars = utils.combine_vars(vars_data, role_vars)
            defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
            defaults_data = {}
            if os.path.isfile(defaults):
                defaults_data = utils.parse_yaml_from_file(defaults, vault_password=self.vault_password)
            # the meta directory contains the yaml that should
            # hold the list of dependencies (if any)
            meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
            if os.path.isfile(meta):
                data = utils.parse_yaml_from_file(meta, vault_password=self.vault_password)
                if data:
                    dependencies = data.get('dependencies',[])
                    if dependencies is None:
                        dependencies = []
                    for dep in dependencies:
                        allow_dupes = False
                        (dep_path,dep_vars) = self._get_role_path(dep)
                        meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                        if os.path.isfile(meta):
                            meta_data = utils.parse_yaml_from_file(meta, vault_password=self.vault_password)
                            if meta_data:
                                # a dependency may opt in to being listed more than once
                                allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))

                        # if any tags were specified as role/dep variables, merge
                        # them into the current dep_vars so they're passed on to any
                        # further dependencies too, and so we only have one place
                        # (dep_vars) to look for tags going forward
                        def __merge_tags(var_obj):
                            # union the dep's current tags with tags on var_obj
                            # (either side may be a bare string or a list)
                            old_tags = dep_vars.get('tags', [])
                            if isinstance(old_tags, basestring):
                                old_tags = [old_tags, ]
                            if isinstance(var_obj, dict):
                                new_tags = var_obj.get('tags', [])
                                if isinstance(new_tags, basestring):
                                    new_tags = [new_tags, ]
                            else:
                                new_tags = []
                            return list(set(old_tags).union(set(new_tags)))

                        # second call sees the first call's result via dep_vars,
                        # so the net effect is the union of all three tag sources
                        dep_vars['tags'] = __merge_tags(role_vars)
                        dep_vars['tags'] = __merge_tags(passed_vars)

                        # if tags are set from this role, merge them
                        # into the tags list for the dependent role
                        if "tags" in passed_vars:
                            for included_role_dep in dep_stack:
                                included_dep_name = included_role_dep[0]
                                included_dep_vars = included_role_dep[2]
                                if included_dep_name == dep:
                                    if "tags" in included_dep_vars:
                                        included_dep_vars["tags"] = list(set(included_dep_vars["tags"]).union(set(passed_vars["tags"])))
                                    else:
                                        included_dep_vars["tags"] = passed_vars["tags"][:]

                        # precedence: passed_vars < role_vars < dep's own vars files
                        dep_vars = utils.combine_vars(passed_vars, dep_vars)
                        dep_vars = utils.combine_vars(role_vars, dep_vars)
                        vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                        vars_data = {}
                        if os.path.isfile(vars):
                            vars_data = utils.parse_yaml_from_file(vars, vault_password=self.vault_password)
                            if vars_data:
                                dep_vars = utils.combine_vars(vars_data, dep_vars)
                        defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                        dep_defaults_data = {}
                        if os.path.isfile(defaults):
                            dep_defaults_data = utils.parse_yaml_from_file(defaults, vault_password=self.vault_password)
                        # 'role' names the dep itself; it must not leak in as a variable
                        if 'role' in dep_vars:
                            del dep_vars['role']

                        if not allow_dupes:
                            if dep in self.included_roles:
                                # skip back to the top, since we don't want to
                                # do anything else with this role
                                continue
                            else:
                                self.included_roles.append(dep)

                        # normalise a 'when' value (string/bool or list) into cur_conditionals
                        def _merge_conditional(cur_conditionals, new_conditionals):
                            if isinstance(new_conditionals, (basestring, bool)):
                                cur_conditionals.append(new_conditionals)
                            elif isinstance(new_conditionals, list):
                                cur_conditionals.extend(new_conditionals)

                        # pass along conditionals from roles to dep roles
                        passed_when = passed_vars.get('when')
                        role_when = role_vars.get('when')
                        dep_when = dep_vars.get('when')

                        tmpcond = []
                        _merge_conditional(tmpcond, passed_when)
                        _merge_conditional(tmpcond, role_when)
                        _merge_conditional(tmpcond, dep_when)

                        if len(tmpcond) > 0:
                            dep_vars['when'] = tmpcond

                        # depth-first: resolve this dep's own deps before stacking it
                        self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
                        dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])

            # only add the current role when we're at the top level,
            # otherwise we'll end up in a recursive loop
            if level == 0:
                self.included_roles.append(role)
                dep_stack.append([role,role_path,role_vars,defaults_data])
        return dep_stack
Ejemplo n.º 31
0
    def __init__(self, play, ds, module_vars=None, default_vars=None, additional_conditions=None, role_name=None):
        ''' constructor loads from a task or handler datastructure '''

        # meta directives are used to tell things like ansible/playbook to run
        # operations like handler execution.  Meta tasks are not executed
        # normally.
        if 'meta' in ds:
            self.meta = ds['meta']
            self.tags = []
            return
        else:
            self.meta = None


        library = os.path.join(play.basedir, 'library')
        if os.path.exists(library):
            utils.plugins.module_finder.add_directory(library)

        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in utils.plugins.module_finder:

                if 'action' in ds:
                    raise errors.AnsibleError("multiple actions specified in task: '%s' and '%s'" % (x, ds.get('name', ds['action'])))
                if isinstance(ds[x], dict):
                    if 'args' in ds:
                        raise errors.AnsibleError("can't combine args: and a dict for %s: in task %s" % (x, ds.get('name', "%s: %s" % (x, ds[x]))))
                    ds['args'] = ds[x]
                    ds[x] = ''
                elif ds[x] is None:
                    ds[x] = ''
                if not isinstance(ds[x], basestring):
                    raise errors.AnsibleError("action specified for task %s has invalid type %s" % (ds.get('name', "%s: %s" % (x, ds[x])), type(ds[x])))
                ds['action'] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_"):

                if isinstance(ds[x], basestring) and ds[x].lstrip().startswith("{{"):
                    utils.warning("It is unnecessary to use '{{' in loops, leave variables in loop expressions bare.")

                plugin_name = x.replace("with_","")
                if plugin_name in utils.plugins.lookup_loader:
                    ds['items_lookup_plugin'] = plugin_name
                    ds['items_lookup_terms'] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))

            elif x in [ 'changed_when', 'failed_when', 'when']:
                if isinstance(ds[x], basestring) and ds[x].lstrip().startswith("{{"):
                    utils.warning("It is unnecessary to use '{{' in conditionals, leave variables in loop expressions bare.")
            elif x.startswith("when_"):
                utils.deprecated("The 'when_' conditional has been removed. Switch to using the regular unified 'when' statements as described on docs.ansible.com.","1.5", removed=True)

                if 'when' in ds:
                    raise errors.AnsibleError("multiple when_* statements specified in task %s" % (ds.get('name', ds['action'])))
                when_name = x.replace("when_","")
                ds['when'] = "%s %s" % (when_name, ds[x])
                ds.pop(x)
            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter in an Ansible task or handler" % x)

        self.module_vars  = module_vars
        self.default_vars = default_vars
        self.play         = play

        # load various attributes
        self.name         = ds.get('name', None)
        self.tags         = [ 'all' ]
        self.register     = ds.get('register', None)
        self.sudo         = utils.boolean(ds.get('sudo', play.sudo))
        self.su           = utils.boolean(ds.get('su', play.su))
        self.environment  = ds.get('environment', {})
        self.role_name    = role_name
        self.no_log       = utils.boolean(ds.get('no_log', "false"))
        self.run_once     = utils.boolean(ds.get('run_once', 'false'))

        #Code to allow do until feature in a Task 
        if 'until' in ds:
            if not ds.get('register'):
                raise errors.AnsibleError("register keyword is mandatory when using do until feature")
            self.module_vars['delay']     = ds.get('delay', 5)
            self.module_vars['retries']   = ds.get('retries', 3)
            self.module_vars['register']  = ds.get('register', None)
            self.until                    = ds.get('until')
            self.module_vars['until']     = self.until

        # rather than simple key=value args on the options line, these represent structured data and the values
        # can be hashes and lists, not just scalars
        self.args         = ds.get('args', {})

        # get remote_user for task, then play, then playbook
        if ds.get('remote_user') is not None:
            self.remote_user      = ds.get('remote_user')
        elif ds.get('remote_user', play.remote_user) is not None:
            self.remote_user      = ds.get('remote_user', play.remote_user)
        else:
            self.remote_user      = ds.get('remote_user', play.playbook.remote_user)

        self.sudo_user    = None
        self.sudo_pass    = None
        self.su_user      = None
        self.su_pass      = None

        if self.sudo:
            self.sudo_user    = ds.get('sudo_user', play.sudo_user)
            self.sudo_pass    = ds.get('sudo_pass', play.playbook.sudo_pass)
        elif self.su:
            self.su_user      = ds.get('su_user', play.su_user)
            self.su_pass      = ds.get('su_pass', play.playbook.su_pass)

        # Fail out if user specifies a sudo param with a su param in a given play
        if (ds.get('sudo') or ds.get('sudo_user') or ds.get('sudo_pass')) and \
                (ds.get('su') or ds.get('su_user') or ds.get('su_pass')):
            raise errors.AnsibleError('sudo params ("sudo", "sudo_user", "sudo_pass") '
                                      'and su params "su", "su_user", "su_pass") '
                                      'cannot be used together')

        # Both are defined
        if ('action' in ds) and ('local_action' in ds):
            raise errors.AnsibleError("the 'action' and 'local_action' attributes can not be used together")
        # Both are NOT defined
        elif (not 'action' in ds) and (not 'local_action' in ds):
            raise errors.AnsibleError("'action' or 'local_action' attribute missing in task \"%s\"" % ds.get('name', '<Unnamed>'))
        # Only one of them is defined
        elif 'local_action' in ds:
            self.action      = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
        else:
            self.action      = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport   = ds.get('connection', ds.get('transport', play.transport))

        if isinstance(self.action, dict):
            if 'module' not in self.action:
                raise errors.AnsibleError("'module' attribute missing from action in task \"%s\"" % ds.get('name', '%s' % self.action))
            if self.args:
                raise errors.AnsibleError("'args' cannot be combined with dict 'action' in task \"%s\"" % ds.get('name', '%s' % self.action))
            self.args = self.action
            self.action = self.args.pop('module')

        # delegate_to can use variables
        if not (self.delegate_to is None):
            # delegate_to: localhost should use local transport
            if self.delegate_to in ['127.0.0.1', 'localhost']:
                self.transport   = 'local'

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.when    = ds.get('when', None)
        self.changed_when = ds.get('changed_when', None)
        self.failed_when = ds.get('failed_when', None)

        # combine the default and module vars here for use in templating
        all_vars = self.default_vars.copy()
        all_vars = utils.combine_vars(all_vars, self.module_vars)

        self.async_seconds = ds.get('async', 0)  # not async by default
        self.async_seconds = template.template_from_string(play.basedir, self.async_seconds, all_vars)
        self.async_seconds = int(self.async_seconds)
        self.async_poll_interval = ds.get('poll', 10)  # default poll = 10 seconds
        self.async_poll_interval = template.template_from_string(play.basedir, self.async_poll_interval, all_vars)
        self.async_poll_interval = int(self.async_poll_interval)
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)

        self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
        self.items_lookup_terms  = ds.get('items_lookup_terms', None)
     

        self.ignore_errors = ds.get('ignore_errors', False)
        self.any_errors_fatal = ds.get('any_errors_fatal', play.any_errors_fatal)

        self.always_run = ds.get('always_run', False)

        # action should be a string
        if not isinstance(self.action, basestring):
            raise errors.AnsibleError("action is of type '%s' and not a string in task. name: %s" % (type(self.action).__name__, self.name))

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [ self.notify ]

        # split the action line into a module name + arguments
        try:
            tokens = split_args(self.action)
        except Exception, e:
            if "unbalanced" in str(e):
                raise errors.AnsibleError("There was an error while parsing the task %s.\n" % repr(self.action) + \
                                          "Make sure quotes are matched or escaped properly")
            else:
                raise
Ejemplo n.º 32
0
Archivo: task.py Proyecto: Minione/iwct
    def __init__(self, play, ds, module_vars=None, additional_conditions=None):
        """ constructor loads from a task or handler datastructure

        play        -- the Play object this task/handler belongs to
        ds          -- raw task datastructure (dict parsed from YAML); it is
                       normalized IN PLACE below (shorthand keys rewritten)
        module_vars -- vars exposed to the module; mutated below to pass
                       lookup/delegate/ignore_errors info through to the Runner
        additional_conditions -- extra condition strings ANDed into only_if
        """

        # first pass: rewrite shorthand keys into their canonical form,
        # mutating ds as we go (ds.keys() is a snapshot list in Python 2,
        # so popping/inserting keys during iteration is safe here)
        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in utils.plugins.module_finder:
                if "action" in ds:
                    raise errors.AnsibleError("multiple actions specified in task %s" % (ds.get("name", ds["action"])))
                ds["action"] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_"):
                plugin_name = x.replace("with_", "")
                if plugin_name in utils.plugins.lookup_loader:
                    ds["items_lookup_plugin"] = plugin_name
                    ds["items_lookup_terms"] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError(
                        "cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name)
                    )

            # "when_<test>: expr" becomes the unified "when: <test> expr"
            elif x.startswith("when_"):
                when_name = x.replace("when_", "")
                ds["when"] = "%s %s" % (when_name, ds[x])
                ds.pop(x)

            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter in an Ansible task or handler" % x)

        self.module_vars = module_vars
        self.play = play

        # load various attributes
        self.name = ds.get("name", None)
        # every task implicitly carries the 'all' tag
        self.tags = ["all"]
        self.register = ds.get("register", None)
        self.sudo = utils.boolean(ds.get("sudo", play.sudo))

        # sudo credentials fall back task -> play -> playbook
        if self.sudo:
            self.sudo_user = ds.get("sudo_user", play.sudo_user)
            self.sudo_pass = ds.get("sudo_pass", play.playbook.sudo_pass)
        else:
            self.sudo_user = None
            self.sudo_pass = None

        # Both are defined
        if ("action" in ds) and ("local_action" in ds):
            raise errors.AnsibleError("the 'action' and 'local_action' attributes can not be used together")
        # Both are NOT defined
        elif (not "action" in ds) and (not "local_action" in ds):
            raise errors.AnsibleError(
                "'action' or 'local_action' attribute missing in task \"%s\"" % ds.get("name", "<Unnamed>")
            )
        # Only one of them is defined
        elif "local_action" in ds:
            self.action = ds.get("local_action", "")
            self.delegate_to = "127.0.0.1"
        else:
            self.action = ds.get("action", "")
            self.delegate_to = ds.get("delegate_to", None)
            self.transport = ds.get("connection", ds.get("transport", play.transport))

        # delegate_to can use variables
        if not (self.delegate_to is None):
            self.delegate_to = utils.template(None, self.delegate_to, self.module_vars)
            # delegate_to: localhost should use local transport
            if self.delegate_to in ["127.0.0.1", "localhost"]:
                self.transport = "local"

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.only_if = ds.get("only_if", "True")
        self.when = ds.get("when", None)

        self.async_seconds = int(ds.get("async", 0))  # not async by default
        self.async_poll_interval = int(ds.get("poll", 10))  # default poll = 10 seconds
        self.notify = ds.get("notify", [])
        self.first_available_file = ds.get("first_available_file", None)

        # populated earlier in this constructor when a with_* key was seen
        self.items_lookup_plugin = ds.get("items_lookup_plugin", None)
        self.items_lookup_terms = ds.get("items_lookup_terms", None)

        self.ignore_errors = ds.get("ignore_errors", False)

        # action should be a string
        if not isinstance(self.action, basestring):
            raise errors.AnsibleError(
                "action is of type '%s' and not a string in task. name: %s" % (type(self.action).__name__, self.name)
            )

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [self.notify]

        # split the action line into a module name + arguments
        # (the action line has the form "<module_name> <raw argument string>")
        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError("invalid/missing action in task. name: %s" % self.name)
        self.module_name = tokens[0]
        self.module_args = ""
        if len(tokens) > 1:
            self.module_args = tokens[1]

        # tags inherited from an including play/role arrive via module_vars
        import_tags = self.module_vars.get("tags", [])
        if type(import_tags) in [str, unicode]:
            # allow the user to list comma delimited tags
            import_tags = import_tags.split(",")

        # handle mutually incompatible options
        incompatibles = [x for x in [self.first_available_file, self.items_lookup_plugin] if x is not None]
        if len(incompatibles) > 1:
            raise errors.AnsibleError(
                "with_(plugin), and first_available_file are mutually incompatible in a single task"
            )

        # make first_available_file accessable to Runner code
        if self.first_available_file:
            self.module_vars["first_available_file"] = self.first_available_file

        if self.items_lookup_plugin is not None:
            self.module_vars["items_lookup_plugin"] = self.items_lookup_plugin
            self.module_vars["items_lookup_terms"] = self.items_lookup_terms

        # allow runner to see delegate_to option
        self.module_vars["delegate_to"] = self.delegate_to

        # make ignore_errors accessable to Runner code
        self.module_vars["ignore_errors"] = self.ignore_errors

        # tags allow certain parts of a playbook to be run without running the whole playbook
        apply_tags = ds.get("tags", None)
        if apply_tags is not None:
            if type(apply_tags) in [str, unicode]:
                self.tags.append(apply_tags)
            elif type(apply_tags) == list:
                self.tags.extend(apply_tags)
        self.tags.extend(import_tags)

        # 'when' is the newer syntax; it is compiled down into the legacy
        # only_if expression, and the two cannot be mixed on one task
        if self.when is not None:
            if self.only_if != "True":
                raise errors.AnsibleError("when obsoletes only_if, only use one or the other")
            self.only_if = utils.compile_when_to_only_if(self.when)

        # conditions pushed down from includes are ANDed with the task's own
        if additional_conditions:
            self.only_if = "(" + self.only_if + ") and (" + " ) and (".join(additional_conditions) + ")"
Ejemplo n.º 33
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for fetch operations: pull a single remote file down to
        <dest>/<hostname>/<source> on the control machine and report
        changed/md5sum status via ReturnData '''

        # fetch has both remote and local side effects, so it cannot dry-run
        if self.runner.check:
            return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not (yet) supported for this module'))

        # load up options: structured complex_args first, then key=value pairs
        # parsed from the action line (the latter win on key conflicts)
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source = options.get('src', None)
        dest = options.get('dest', None)
        fail_on_missing = options.get('fail_on_missing', False)
        fail_on_missing = utils.boolean(fail_on_missing)
        if source is None or dest is None:
            results = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, result=results)

        # files are saved in dest dir, with a subdir for each host, then the filename
        dest   = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir, dest), conn.host, source)
        dest   = dest.replace("//","/")

        # calculate md5 sum for the remote file; the helper returns the
        # sentinel strings '0'/'1'/'2' for error conditions (see below)
        remote_md5 = self.runner._remote_md5(conn, tmp, source)

        # use slurp if sudo and permissions are lacking
        remote_data = None
        if remote_md5 in ('1', '2') and self.runner.sudo:
            slurpres = self.runner._execute_module(conn, tmp, 'slurp', 'src=%s' % source, inject=inject)
            if slurpres.is_successful():
                if slurpres.result['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres.result['content'])
                if remote_data is not None:
                    remote_md5 = utils.md5s(remote_data)

        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_md5 == '0':
            result = dict(msg="unable to calculate the md5 sum of the remote file", file=source, changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '1':
            if fail_on_missing:
                result = dict(failed=True, msg="the remote file does not exist", file=source)
            else:
                result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '2':
            result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
            return ReturnData(conn=conn, result=result)

        # calculate md5 sum for the local file
        local_md5 = utils.md5(dest)

        if remote_md5 != local_md5:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                conn.fetch_file(source, dest)
            else:
                # write in binary mode so newline translation cannot corrupt
                # the slurped (possibly binary) payload, and use a context
                # manager so the handle is closed even if the write raises
                with open(dest, 'wb') as f:
                    f.write(remote_data)
            new_md5 = utils.md5(dest)
            if new_md5 != remote_md5:
                result = dict(failed=True, md5sum=new_md5, msg="md5 mismatch", file=source, dest=dest)
                return ReturnData(conn=conn, result=result)
            result = dict(changed=True, md5sum=new_md5, dest=dest)
            return ReturnData(conn=conn, result=result)
        else:
            result = dict(changed=False, md5sum=local_md5, file=source, dest=dest)
            return ReturnData(conn=conn, result=result)
Ejemplo n.º 34
0
    def _build_role_dependencies(self, roles, dep_stack, passed_vars=None, level=0):
        ''' recursively resolve each role's meta/main dependencies, appending
        [name, path, vars, defaults] entries onto dep_stack (dependencies
        first, then -- at the top level only -- the roles themselves) '''
        # avoid the shared-mutable-default pitfall; callers may still pass a dict
        if passed_vars is None:
            passed_vars = {}
        # this number is arbitrary, but it seems sane
        if level > 20:
            raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
        for role in roles:
            role_path,role_vars = self._get_role_path(role)
            role_vars = utils.combine_vars(passed_vars, role_vars)
            vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
            vars_data = {}
            if os.path.isfile(vars):
                vars_data = utils.parse_yaml_from_file(vars)
                if vars_data:
                    if not isinstance(vars_data, dict):
                        raise errors.AnsibleError("vars from '%s' are not a dict" % vars)
                    role_vars = utils.combine_vars(vars_data, role_vars)
            defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
            defaults_data = {}
            if os.path.isfile(defaults):
                defaults_data = utils.parse_yaml_from_file(defaults)
            # the meta directory contains the yaml that should
            # hold the list of dependencies (if any)
            meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
            if os.path.isfile(meta):
                data = utils.parse_yaml_from_file(meta)
                if data:
                    dependencies = data.get('dependencies',[])
                    if dependencies is None:
                        dependencies = []
                    for dep in dependencies:
                        allow_dupes = False
                        (dep_path,dep_vars) = self._get_role_path(dep)
                        meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                        if os.path.isfile(meta):
                            meta_data = utils.parse_yaml_from_file(meta)
                            if meta_data:
                                allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))

                        # if tags are set from this role, merge them
                        # into the tags list for the dependent role
                        if "tags" in passed_vars:
                            for included_role_dep in dep_stack:
                                included_dep_name = included_role_dep[0]
                                included_dep_vars = included_role_dep[2]
                                if included_dep_name == dep:
                                    if "tags" in included_dep_vars:
                                        included_dep_vars["tags"] = list(set(included_dep_vars["tags"] + passed_vars["tags"]))
                                    else:
                                        # BUGFIX: tags is a list (see the set() merge above);
                                        # Python 2 lists have no .copy(), so slice-copy instead
                                        included_dep_vars["tags"] = passed_vars["tags"][:]

                        dep_vars = utils.combine_vars(passed_vars, dep_vars)
                        dep_vars = utils.combine_vars(role_vars, dep_vars)
                        vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                        vars_data = {}
                        if os.path.isfile(vars):
                            vars_data = utils.parse_yaml_from_file(vars)
                            if vars_data:
                                dep_vars = utils.combine_vars(vars_data, dep_vars)
                        defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                        dep_defaults_data = {}
                        if os.path.isfile(defaults):
                            dep_defaults_data = utils.parse_yaml_from_file(defaults)
                        if 'role' in dep_vars:
                            del dep_vars['role']

                        if "tags" in passed_vars:
                            if not self._is_valid_tag(passed_vars["tags"]):
                                # one of the tags specified for this role was in the
                                # skip list, or we're limiting the tags and it didn't 
                                # match one, so we just skip it completely
                                continue

                        if not allow_dupes:
                            if dep in self.included_roles:
                                # skip back to the top, since we don't want to
                                # do anything else with this role
                                continue
                            else:
                                self.included_roles.append(dep)

                        # pass along conditionals from roles to dep roles
                        if type(role) is dict:
                            if 'when' in passed_vars:
                                if 'when' in dep_vars:
                                    tmpcond = []

                                    if type(passed_vars['when']) is str:
                                        tmpcond.append(passed_vars['when'])
                                    elif type(passed_vars['when']) is list:
                                        # BUGFIX: was tmpcond.join(...) -- lists have no
                                        # join(); extend mirrors the += branch below
                                        tmpcond.extend(passed_vars['when'])

                                    if type(dep_vars['when']) is str:
                                        tmpcond.append(dep_vars['when'])
                                    elif type(dep_vars['when']) is list:
                                        tmpcond += dep_vars['when']

                                    if len(tmpcond) > 0:
                                        dep_vars['when'] = tmpcond
                                else:
                                    dep_vars['when'] = passed_vars['when']

                        self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
                        dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])

            # only add the current role when we're at the top level,
            # otherwise we'll end up in a recursive loop 
            if level == 0:
                self.included_roles.append(role)
                dep_stack.append([role,role_path,role_vars,defaults_data])
        return dep_stack
Ejemplo n.º 35
0
    def __init__(self,
                 play,
                 ds,
                 module_vars=None,
                 play_vars=None,
                 play_file_vars=None,
                 role_vars=None,
                 role_params=None,
                 default_vars=None,
                 additional_conditions=None,
                 role_name=None):
        ''' constructor loads from a task or handler datastructure

        play        -- the Play this task/handler belongs to
        ds          -- raw task datastructure (dict parsed from YAML);
                       normalized IN PLACE below (shorthand keys rewritten)
        module_vars -- vars exposed to the module; mutated below for the
                       do-until feature
        play_vars/play_file_vars/role_vars/role_params/default_vars
                    -- variable scopes combined for templating async/poll
        role_name   -- name of the role this task came from, if any
        '''

        # meta directives are used to tell things like ansible/playbook to run
        # operations like handler execution.  Meta tasks are not executed
        # normally.
        if 'meta' in ds:
            self.meta = ds['meta']
            self.tags = []
            return
        else:
            self.meta = None

        # register the play-adjacent ./library directory so its modules are
        # recognized by the shorthand "modulename: args" check below
        library = os.path.join(play.basedir, 'library')
        if os.path.exists(library):
            utils.plugins.module_finder.add_directory(library)

        # first pass: rewrite shorthand keys into canonical form, mutating ds
        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in utils.plugins.module_finder:

                if 'action' in ds:
                    raise errors.AnsibleError(
                        "multiple actions specified in task: '%s' and '%s'" %
                        (x, ds.get('name', ds['action'])))
                # a dict argument block becomes structured 'args'; the action
                # line itself is reduced to just the module name
                if isinstance(ds[x], dict):
                    if 'args' in ds:
                        raise errors.AnsibleError(
                            "can't combine args: and a dict for %s: in task %s"
                            % (x, ds.get('name', "%s: %s" % (x, ds[x]))))
                    ds['args'] = ds[x]
                    ds[x] = ''
                elif ds[x] is None:
                    ds[x] = ''
                if not isinstance(ds[x], basestring):
                    raise errors.AnsibleError(
                        "action specified for task %s has invalid type %s" %
                        (ds.get('name', "%s: %s" % (x, ds[x])), type(ds[x])))
                ds['action'] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_"):
                if isinstance(ds[x], basestring):
                    param = ds[x].strip()
                    # Only a variable, no logic
                    if (param.startswith('{{')
                            and param.find('}}') == len(ds[x]) - 2
                            and param.find('|') == -1):
                        utils.warning(
                            "It is unnecessary to use '{{' in loops, leave variables in loop expressions bare."
                        )

                plugin_name = x.replace("with_", "")
                if plugin_name in utils.plugins.lookup_loader:
                    ds['items_lookup_plugin'] = plugin_name
                    ds['items_lookup_terms'] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError(
                        "cannot find lookup plugin named %s for usage in with_%s"
                        % (plugin_name, plugin_name))

            # conditionals are implicitly templated; warn about redundant {{ }}
            elif x in ['changed_when', 'failed_when', 'when']:
                if isinstance(ds[x], basestring):
                    param = ds[x].strip()
                    # Only a variable, no logic
                    if (param.startswith('{{')
                            and param.find('}}') == len(ds[x]) - 2
                            and param.find('|') == -1):
                        utils.warning(
                            "It is unnecessary to use '{{' in conditionals, leave variables in loop expressions bare."
                        )
            elif x.startswith("when_"):
                utils.deprecated(
                    "The 'when_' conditional has been removed. Switch to using the regular unified 'when' statements as described on docs.ansible.com.",
                    "1.5",
                    removed=True)

                if 'when' in ds:
                    raise errors.AnsibleError(
                        "multiple when_* statements specified in task %s" %
                        (ds.get('name', ds['action'])))
                when_name = x.replace("when_", "")
                ds['when'] = "%s %s" % (when_name, ds[x])
                ds.pop(x)
            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter in an Ansible task or handler"
                    % x)

        self.module_vars = module_vars
        self.play_vars = play_vars
        self.play_file_vars = play_file_vars
        self.role_vars = role_vars
        self.role_params = role_params
        self.default_vars = default_vars
        self.play = play

        # load various attributes
        self.name = ds.get('name', None)
        # every task implicitly carries the 'all' tag
        self.tags = ['all']
        self.register = ds.get('register', None)
        self.sudo = utils.boolean(ds.get('sudo', play.sudo))
        self.su = utils.boolean(ds.get('su', play.su))
        self.environment = ds.get('environment', {})
        self.role_name = role_name
        # no_log and run_once default off at task level; no_log also inherits
        # from the play
        self.no_log = utils.boolean(ds.get('no_log',
                                           "false")) or self.play.no_log
        self.run_once = utils.boolean(ds.get('run_once', 'false'))

        #Code to allow do until feature in a Task
        if 'until' in ds:
            if not ds.get('register'):
                raise errors.AnsibleError(
                    "register keyword is mandatory when using do until feature"
                )
            self.module_vars['delay'] = ds.get('delay', 5)
            self.module_vars['retries'] = ds.get('retries', 3)
            self.module_vars['register'] = ds.get('register', None)
            self.until = ds.get('until')
            self.module_vars['until'] = self.until

        # rather than simple key=value args on the options line, these represent structured data and the values
        # can be hashes and lists, not just scalars
        self.args = ds.get('args', {})

        # get remote_user for task, then play, then playbook
        if ds.get('remote_user') is not None:
            self.remote_user = ds.get('remote_user')
        elif ds.get('remote_user', play.remote_user) is not None:
            self.remote_user = ds.get('remote_user', play.remote_user)
        else:
            self.remote_user = ds.get('remote_user', play.playbook.remote_user)

        self.sudo_user = None
        self.sudo_pass = None
        self.su_user = None
        self.su_pass = None

        # escalation credentials fall back task -> play -> playbook
        if self.sudo:
            self.sudo_user = ds.get('sudo_user', play.sudo_user)
            self.sudo_pass = ds.get('sudo_pass', play.playbook.sudo_pass)
        elif self.su:
            self.su_user = ds.get('su_user', play.su_user)
            self.su_pass = ds.get('su_pass', play.playbook.su_pass)

        # Fail out if user specifies a sudo param with a su param in a given play
        if (ds.get('sudo') or ds.get('sudo_user') or ds.get('sudo_pass')) and \
                (ds.get('su') or ds.get('su_user') or ds.get('su_pass')):
            raise errors.AnsibleError(
                'sudo params ("sudo", "sudo_user", "sudo_pass") '
                'and su params "su", "su_user", "su_pass") '
                'cannot be used together')

        # Both are defined
        if ('action' in ds) and ('local_action' in ds):
            raise errors.AnsibleError(
                "the 'action' and 'local_action' attributes can not be used together"
            )
        # Both are NOT defined
        elif (not 'action' in ds) and (not 'local_action' in ds):
            raise errors.AnsibleError(
                "'action' or 'local_action' attribute missing in task \"%s\"" %
                ds.get('name', '<Unnamed>'))
        # Only one of them is defined
        elif 'local_action' in ds:
            self.action = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
        else:
            self.action = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport = ds.get('connection',
                                    ds.get('transport', play.transport))

        # dict-form action (from the 'args' normalization above) is flattened
        # back into a module name plus structured args
        if isinstance(self.action, dict):
            if 'module' not in self.action:
                raise errors.AnsibleError(
                    "'module' attribute missing from action in task \"%s\"" %
                    ds.get('name', '%s' % self.action))
            if self.args:
                raise errors.AnsibleError(
                    "'args' cannot be combined with dict 'action' in task \"%s\""
                    % ds.get('name', '%s' % self.action))
            self.args = self.action
            self.action = self.args.pop('module')

        # delegate_to can use variables
        if not (self.delegate_to is None):
            # delegate_to: localhost should use local transport
            if self.delegate_to in ['127.0.0.1', 'localhost']:
                self.transport = 'local'

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.when = ds.get('when', None)
        self.changed_when = ds.get('changed_when', None)
        self.failed_when = ds.get('failed_when', None)

        # combine the default and module vars here for use in templating
        all_vars = self.default_vars.copy()
        all_vars = utils.combine_vars(all_vars, self.play_vars)
        all_vars = utils.combine_vars(all_vars, self.play_file_vars)
        all_vars = utils.combine_vars(all_vars, self.role_vars)
        all_vars = utils.combine_vars(all_vars, self.module_vars)
        all_vars = utils.combine_vars(all_vars, self.role_params)

        # async/poll values may themselves be template expressions, so
        # template them before converting to int
        self.async_seconds = ds.get('async', 0)  # not async by default
        self.async_seconds = template.template_from_string(
            play.basedir, self.async_seconds, all_vars)
        self.async_seconds = int(self.async_seconds)
        self.async_poll_interval = ds.get('poll',
                                          10)  # default poll = 10 seconds
        self.async_poll_interval = template.template_from_string(
            play.basedir, self.async_poll_interval, all_vars)
        self.async_poll_interval = int(self.async_poll_interval)
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)

        # populated earlier in this constructor when a with_* key was seen
        self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
        self.items_lookup_terms = ds.get('items_lookup_terms', None)

        self.ignore_errors = ds.get('ignore_errors', False)
        self.any_errors_fatal = ds.get('any_errors_fatal',
                                       play.any_errors_fatal)

        self.always_run = ds.get('always_run', False)

        # action should be a string
        if not isinstance(self.action, basestring):
            raise errors.AnsibleError(
                "action is of type '%s' and not a string in task. name: %s" %
                (type(self.action).__name__, self.name))

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [self.notify]

        # split the action line into a module name + arguments
        try:
            tokens = split_args(self.action)
        except Exception, e:
            # split_args reports quoting problems with "unbalanced" in the
            # message; rewrap those as a friendlier task-level error
            if "unbalanced" in str(e):
                raise errors.AnsibleError("There was an error while parsing the task %s.\n" % repr(self.action) + \
                                          "Make sure quotes are matched or escaped properly")
            else:
                raise
Ejemplo n.º 36
0
    def _build_role_dependencies(self,
                                 roles,
                                 dep_stack,
                                 passed_vars=None,
                                 level=0):
        '''
        Recursively resolve the dependencies declared in each role's
        meta/main.yml, appending [name, path, vars, defaults] entries to
        dep_stack so dependencies appear before the roles requiring them.

        passed_vars holds variables inherited from the calling role and
        is merged (lowest precedence first) into each dependency's vars.
        Returns dep_stack.
        '''
        # default to None instead of {} to avoid sharing one mutable
        # dict across calls (the mutable-default-argument pitfall)
        if passed_vars is None:
            passed_vars = {}
        # this number is arbitrary, but it seems sane
        if level > 20:
            raise errors.AnsibleError(
                "too many levels of recursion while resolving role dependencies"
            )
        for role in roles:
            role_path, role_vars = self._get_role_path(role)
            role_vars = utils.combine_vars(passed_vars, role_vars)
            # vars/main.yml for this role ('vars_path' so we don't
            # shadow the vars() builtin)
            vars_path = self._resolve_main(
                utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
            vars_data = {}
            if os.path.isfile(vars_path):
                vars_data = utils.parse_yaml_from_file(
                    vars_path, vault_password=self.vault_password)
                if vars_data:
                    if not isinstance(vars_data, dict):
                        raise errors.AnsibleError(
                            "vars from '%s' are not a dict" % vars_path)
                    role_vars = utils.combine_vars(vars_data, role_vars)
            # defaults/main.yml for this role (lowest-precedence vars)
            defaults = self._resolve_main(
                utils.path_dwim(self.basedir,
                                os.path.join(role_path, 'defaults')))
            defaults_data = {}
            if os.path.isfile(defaults):
                defaults_data = utils.parse_yaml_from_file(
                    defaults, vault_password=self.vault_password)
            # the meta directory contains the yaml that should
            # hold the list of dependencies (if any)
            meta = self._resolve_main(
                utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
            if os.path.isfile(meta):
                data = utils.parse_yaml_from_file(
                    meta, vault_password=self.vault_password)
                if data:
                    dependencies = data.get('dependencies', [])
                    if dependencies is None:
                        dependencies = []
                    for dep in dependencies:
                        allow_dupes = False
                        (dep_path, dep_vars) = self._get_role_path(dep)
                        # the dependency's own meta/main.yml may opt in
                        # to being included more than once
                        meta = self._resolve_main(
                            utils.path_dwim(self.basedir,
                                            os.path.join(dep_path, 'meta')))
                        if os.path.isfile(meta):
                            meta_data = utils.parse_yaml_from_file(
                                meta, vault_password=self.vault_password)
                            if meta_data:
                                allow_dupes = utils.boolean(
                                    meta_data.get('allow_duplicates', ''))

                        # if any tags were specified as role/dep variables, merge
                        # them into the current dep_vars so they're passed on to any
                        # further dependencies too, and so we only have one place
                        # (dep_vars) to look for tags going forward
                        def __merge_tags(var_obj):
                            old_tags = dep_vars.get('tags', [])
                            if isinstance(old_tags, basestring):
                                old_tags = [
                                    old_tags,
                                ]
                            if isinstance(var_obj, dict):
                                new_tags = var_obj.get('tags', [])
                                if isinstance(new_tags, basestring):
                                    new_tags = [
                                        new_tags,
                                    ]
                            else:
                                new_tags = []
                            return list(set(old_tags).union(set(new_tags)))

                        # called twice so tags from both the role itself
                        # and its caller accumulate into dep_vars['tags']
                        dep_vars['tags'] = __merge_tags(role_vars)
                        dep_vars['tags'] = __merge_tags(passed_vars)

                        # if tags are set from this role, merge them
                        # into the tags list for the dependent role
                        if "tags" in passed_vars:
                            for included_role_dep in dep_stack:
                                included_dep_name = included_role_dep[0]
                                included_dep_vars = included_role_dep[2]
                                if included_dep_name == dep:
                                    if "tags" in included_dep_vars:
                                        included_dep_vars["tags"] = list(
                                            set(included_dep_vars["tags"]).
                                            union(set(passed_vars["tags"])))
                                    else:
                                        included_dep_vars[
                                            "tags"] = passed_vars["tags"][:]

                        dep_vars = utils.combine_vars(passed_vars, dep_vars)
                        dep_vars = utils.combine_vars(role_vars, dep_vars)
                        # the dependency's own vars/main.yml, if present
                        vars_path = self._resolve_main(
                            utils.path_dwim(self.basedir,
                                            os.path.join(dep_path, 'vars')))
                        vars_data = {}
                        if os.path.isfile(vars_path):
                            vars_data = utils.parse_yaml_from_file(
                                vars_path, vault_password=self.vault_password)
                            if vars_data:
                                dep_vars = utils.combine_vars(
                                    vars_data, dep_vars)
                        # the dependency's own defaults/main.yml
                        defaults = self._resolve_main(
                            utils.path_dwim(self.basedir,
                                            os.path.join(dep_path,
                                                         'defaults')))
                        dep_defaults_data = {}
                        if os.path.isfile(defaults):
                            dep_defaults_data = utils.parse_yaml_from_file(
                                defaults, vault_password=self.vault_password)
                        if 'role' in dep_vars:
                            del dep_vars['role']

                        if not allow_dupes:
                            if dep in self.included_roles:
                                # skip back to the top, since we don't want to
                                # do anything else with this role
                                continue
                            else:
                                self.included_roles.append(dep)

                        # normalize a scalar or list 'when' into the
                        # accumulator list
                        def _merge_conditional(cur_conditionals,
                                               new_conditionals):
                            if isinstance(new_conditionals,
                                          (basestring, bool)):
                                cur_conditionals.append(new_conditionals)
                            elif isinstance(new_conditionals, list):
                                cur_conditionals.extend(new_conditionals)

                        # pass along conditionals from roles to dep roles
                        passed_when = passed_vars.get('when')
                        role_when = role_vars.get('when')
                        dep_when = dep_vars.get('when')

                        tmpcond = []
                        _merge_conditional(tmpcond, passed_when)
                        _merge_conditional(tmpcond, role_when)
                        _merge_conditional(tmpcond, dep_when)

                        if len(tmpcond) > 0:
                            dep_vars['when'] = tmpcond

                        # depth-first: resolve this dependency's own
                        # dependencies before pushing it on the stack
                        self._build_role_dependencies([dep],
                                                      dep_stack,
                                                      passed_vars=dep_vars,
                                                      level=level + 1)
                        dep_stack.append(
                            [dep, dep_path, dep_vars, dep_defaults_data])

            # only add the current role when we're at the top level,
            # otherwise we'll end up in a recursive loop
            if level == 0:
                self.included_roles.append(role)
                dep_stack.append([role, role_path, role_vars, defaults_data])
        return dep_stack
Ejemplo n.º 37
0
class Play(object):
    '''
    One play from a playbook: binds a host pattern to roles, tasks and
    handlers, together with the connection, privilege-escalation and
    fact-gathering settings used to run them.
    '''

    # __slots__ saves memory on a frequently-created object and makes a
    # typo'd attribute assignment raise instead of silently succeeding
    __slots__ = [
        'hosts',
        'name',
        'vars',
        'vars_file_vars',
        'role_vars',
        'default_vars',
        'vars_prompt',
        'vars_files',
        'handlers',
        'remote_user',
        'remote_port',
        'included_roles',
        'accelerate',
        'accelerate_port',
        'accelerate_ipv6',
        'sudo',
        'sudo_user',
        'transport',
        'playbook',
        'tags',
        'gather_facts',
        'serial',
        '_ds',
        '_handlers',
        '_tasks',
        'basedir',
        'any_errors_fatal',
        'roles',
        'max_fail_pct',
        '_play_hosts',
        'su',
        'su_user',
        'vault_password',
        'no_log',
        'environment',
    ]

    # to catch typos and so forth -- these are userland names
    # and don't line up 1:1 with how they are stored
    VALID_KEYS = [
        'hosts',
        'name',
        'vars',
        'vars_prompt',
        'vars_files',
        'tasks',
        'handlers',
        'remote_user',
        'user',
        'port',
        'include',
        'accelerate',
        'accelerate_port',
        'accelerate_ipv6',
        'sudo',
        'sudo_user',
        'connection',
        'tags',
        'gather_facts',
        'serial',
        'any_errors_fatal',
        'roles',
        'role_names',
        'pre_tasks',
        'post_tasks',
        'max_fail_percentage',
        'su',
        'su_user',
        'vault_password',
        'no_log',
        'environment',
    ]

    # *************************************************

    def __init__(self, playbook, ds, basedir, vault_password=None):
        '''
        Load this play from the datastructure `ds` (one parsed YAML
        play), resolving vars files and roles relative to `basedir`.
        Raises errors.AnsibleError for unknown keys, a missing hosts
        declaration, non-list vars_files, or mixed sudo/su settings.
        '''

        # reject any top-level key we don't recognize (typo guard)
        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter at this level in an Ansible Playbook"
                    % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars = ds.get('vars', {})
        self.vars_prompt = ds.get('vars_prompt', {})
        self.playbook = playbook
        self.vars = self._get_vars()
        self.vars_file_vars = dict()  # these are vars read in from vars_files:
        self.role_vars = dict(
        )  # these are vars read in from vars/main.yml files in roles
        self.basedir = basedir
        self.roles = ds.get('roles', None)
        self.tags = ds.get('tags', None)
        self.vault_password = vault_password
        self.environment = ds.get('environment', {})

        # tags may come in as None, a comma-separated string, or a list;
        # anything else is silently treated as "no tags"
        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [str, unicode]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # make sure we have some special internal variables set, which
        # we use later when loading tasks and handlers
        load_vars = dict()
        load_vars['playbook_dir'] = os.path.abspath(self.basedir)
        if self.playbook.inventory.basedir() is not None:
            load_vars['inventory_dir'] = self.playbook.inventory.basedir()
        if self.playbook.inventory.src() is not None:
            load_vars['inventory_file'] = self.playbook.inventory.src()

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        processed_vars_files = self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have been updated
        # by the included roles, but exclude any which have been processed
        self.vars_files = utils.list_difference(ds.get('vars_files', []),
                                                processed_vars_files)
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')

        self._update_vars_files_for_host(None)

        # template everything to be efficient, but do not pre-mature template
        # tasks/handlers as they may have inventory scope overrides. We also
        # create a set of temporary variables for templating, so we don't
        # trample on the existing vars structures
        _tasks = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])

        temp_vars = utils.merge_hash(self.vars, self.vars_file_vars)
        temp_vars = utils.merge_hash(temp_vars, self.playbook.extra_vars)

        ds = template(basedir, ds, temp_vars)
        # restore the untemplated tasks/handlers popped off above
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            # a list of host patterns collapses into one ';'-separated
            # pattern string
            try:
                hosts = ';'.join(hosts)
            except TypeError, e:
                raise errors.AnsibleError('improper host declaration: %s' %
                                          str(e))

        self.serial = str(ds.get('serial', 0))
        self.hosts = hosts
        self.name = ds.get('name', self.hosts)
        self._tasks = ds.get('tasks', [])
        self._handlers = ds.get('handlers', [])
        # 'user' is the legacy alias for 'remote_user'
        self.remote_user = ds.get('remote_user',
                                  ds.get('user', self.playbook.remote_user))
        self.remote_port = ds.get('port', self.playbook.remote_port)
        self.sudo = ds.get('sudo', self.playbook.sudo)
        self.sudo_user = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport = ds.get('connection', self.playbook.transport)
        # NOTE(review): self-assignment is a no-op; remote_port was
        # already set above -- likely leftover, confirm before removing
        self.remote_port = self.remote_port
        self.any_errors_fatal = utils.boolean(
            ds.get('any_errors_fatal', 'false'))
        self.accelerate = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port = ds.get('accelerate_port', None)
        self.accelerate_ipv6 = ds.get('accelerate_ipv6', False)
        self.max_fail_pct = int(ds.get('max_fail_percentage', 100))
        self.su = ds.get('su', self.playbook.su)
        self.su_user = ds.get('su_user', self.playbook.su_user)
        self.no_log = utils.boolean(ds.get('no_log', 'false'))

        # gather_facts is not a simple boolean, as None means  that a 'smart'
        # fact gathering mode will be used, so we need to be careful here as
        # calling utils.boolean(None) returns False
        self.gather_facts = ds.get('gather_facts', None)
        if self.gather_facts:
            self.gather_facts = utils.boolean(self.gather_facts)

        # Fail out if user specifies a sudo param with a su param in a given play
        if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su')
                                                        or ds.get('su_user')):
            raise errors.AnsibleError(
                'sudo params ("sudo", "sudo_user") and su params '
                '("su", "su_user") cannot be used together')

        load_vars['role_names'] = ds.get('role_names', [])

        self._tasks = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers = self._load_tasks(self._ds.get('handlers', []),
                                          load_vars)

        # apply any missing tags to role tasks
        self._late_merge_role_tags()

        # sudo'ing to a non-root user implies sudo must be enabled
        if self.sudo_user != 'root':
            self.sudo = True

        # place holder for the discovered hosts to be used in this play
        self._play_hosts = None
Ejemplo n.º 38
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        '''
        Handler for fetch operations: pull a remote file down to the
        local machine, comparing md5 sums so an unchanged file is not
        re-transferred.

        Options (parsed from module_args / complex_args):
          src             -- remote path to fetch (required)
          dest            -- local destination path (required)
          flat            -- if true, do not create the per-host
                             directory tree under dest
          fail_on_missing -- if true, a missing remote file is a failure
        '''

        # fetch has no check-mode implementation; skip cleanly
        if self.runner.noop_on_check(inject):
            return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not (yet) supported for this module'))

        # load up options (complex_args first, k=v args override)
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source = options.get('src', None)
        dest = options.get('dest', None)
        flat = utils.boolean(options.get('flat', False))
        fail_on_missing = utils.boolean(options.get('fail_on_missing', False))
        if source is None or dest is None:
            results = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, result=results)

        if flat:
            if dest.endswith("/"):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = utils.path_dwim(self.runner.basedir, dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            dest = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir, dest), conn.host, source)
        dest = dest.replace("//", "/")

        # calculate md5 sum for the remote file; values '0'/'1'/'2' are
        # sentinel codes, not real checksums (see messages below)
        remote_md5 = self.runner._remote_md5(conn, tmp, source)

        # use slurp if sudo and permissions are lacking
        remote_data = None
        if remote_md5 in ('1', '2') or self.runner.sudo:
            slurpres = self.runner._execute_module(conn, tmp, 'slurp', 'src=%s' % source, inject=inject)
            if slurpres.is_successful():
                if slurpres.result['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres.result['content'])
                if remote_data is not None:
                    remote_md5 = utils.md5s(remote_data)

        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_md5 == '0':
            result = dict(msg="unable to calculate the md5 sum of the remote file", file=source, changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '1':
            if fail_on_missing:
                result = dict(failed=True, msg="the remote file does not exist", file=source)
            else:
                result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
            return ReturnData(conn=conn, result=result)
        if remote_md5 == '2':
            result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
            return ReturnData(conn=conn, result=result)

        # calculate md5 sum for the local file
        local_md5 = utils.md5(dest)

        if remote_md5 != local_md5:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                conn.fetch_file(source, dest)
            else:
                # BUGFIX: write in binary mode ('wb') so non-text
                # payloads are not mangled by newline translation, and
                # use a context manager so the handle is closed even if
                # the write raises
                with open(dest, 'wb') as f:
                    f.write(remote_data)
            new_md5 = utils.md5(dest)
            if new_md5 != remote_md5:
                result = dict(failed=True, md5sum=new_md5, msg="md5 mismatch", file=source, dest=dest)
                return ReturnData(conn=conn, result=result)
            result = dict(changed=True, md5sum=new_md5, dest=dest)
            return ReturnData(conn=conn, result=result)
        else:
            result = dict(changed=False, md5sum=local_md5, file=source, dest=dest)
            return ReturnData(conn=conn, result=result)
Ejemplo n.º 39
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        '''
        Handler for unarchive transfer operations: optionally copy the
        archive to the remote host, then delegate extraction to the
        'unarchive' module.

        Options: src/dest (required), copy (default yes: transfer the
        archive from the control machine), creates (skip if the given
        remote path already exists).
        '''

        # load up options (complex_args first, k=v args override)
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        dest    = options.get('dest', None)
        copy    = utils.boolean(options.get('copy', 'yes'))
        creates = options.get('creates', None)

        if source is None or dest is None:
            result = dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)

        if creates:
            # do not run the command if the line contains creates=filename
            # and the filename already exists. This allows idempotence
            # of command executions.
            module_args_tmp = ""
            complex_args_tmp = dict(path=creates, get_md5=False, get_checksum=False)
            module_return = self.runner._execute_module(conn, tmp, 'stat', module_args_tmp, inject=inject,
                                                        complex_args=complex_args_tmp, persist_files=True)
            stat = module_return.result.get('stat', None)
            if stat and stat.get('exists', False):
                # BUGFIX: this return previously mixed a tab with spaces
                # (inconsistent indentation -- a hard error on Python 3);
                # re-indented with spaces only
                return ReturnData(
                    conn=conn,
                    comm_ok=True,
                    result=dict(
                        skipped=True,
                        changed=False,
                        msg=("skipped, since %s exists" % creates)
                    )
                )

        dest = self.runner._remote_expand_user(conn, dest, tmp)  # CCTODO: Fix path for Windows hosts.
        source = template.template(self.runner.basedir, os.path.expanduser(source), inject)
        if copy:
            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # sentinel codes from _remote_checksum: '4' = no python on the
        # remote side, '3' = dest is an existing directory (required)
        remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)
        if remote_checksum == '4':
            result = dict(failed=True, msg="python isn't present on the system.  Unable to compute checksum")
            return ReturnData(conn=conn, result=result)
        if remote_checksum != '3':
            result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
            return ReturnData(conn=conn, result=result)

        if copy:
            # transfer the file to a remote tmp location
            tmp_src = tmp + 'source'
            conn.put_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side
        # fix file permissions when the copy is done as a different user
        if copy:
            if self.runner.sudo and self.runner.sudo_user != 'root' or self.runner.su and self.runner.su_user != 'root':
                if not self.runner.noop_on_check(inject):
                    self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)
            # Build temporary module_args.
            new_module_args = dict(
                src=tmp_src,
                original_basename=os.path.basename(source),
            )

            # make sure checkmode is passed on correctly
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True

            module_args = utils.merge_module_args(module_args, new_module_args)
        else:
            module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source)))
            # make sure checkmode is passed on correctly
            if self.runner.noop_on_check(inject):
                module_args += " CHECKMODE=True"
        return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
Ejemplo n.º 40
0
    def run(self, conn, tmp, module_name, module_args,
        inject, complex_args=None, **kwargs):
        '''
        Ensure a GPG key is listed/present/absent/signed in (or copied
        from) a keyring on the target, driven by module args.
        '''

        # Auxiliary return data functions
        def rd(comm_ok=True, skipped=False, changed=False, **kwargs):
            result = kwargs
            result['changed'] = changed
            if skipped:
                result['skipped'] = True
            return ReturnData(conn=conn, comm_ok=comm_ok, result=result)

        # Combine args: template both arg forms, then merge with k=v
        # args taking precedence over complex args
        _module_args = template.template(self.runner.basedir,
                module_args, inject)
        _complex_args = template.template(self.runner.basedir,
                complex_args, inject)
        args = {}
        if _complex_args:
            args.update(_complex_args)
        args.update(utils.parse_kv(_module_args))

        # shorthand for a failed result
        def ae(msg, **kwargs):
            return rd(failed=True, msg=msg, **kwargs)

        if not args:
            return ae("No arguments provided")

        try:
            connection = Connection(conn, tmp, inject, self.runner)
            check_mode = utils.boolean(args.get('CHECKMODE', 'No'))

            if 'secret' in args:
                secret = utils.boolean(args['secret'])
            else:
                secret = DEFAULT_SECRET

            gpg_bin = args.get('gpg_bin', DEFAULT_GPG_BIN)
            source_gpg_bin = args.get('source_gpg_bin', DEFAULT_GPG_BIN)

            if 'use_agent' in args:
                use_agent = utils.boolean(args['use_agent'])
            else:
                use_agent = DEFAULT_USE_AGENT

            passphrase = args['passphrase'] if 'passphrase' in args else None
            _keyring = args['keyring'] if 'keyring' in args else DEFAULT_KEYRING
            keyring = PluginKeyring.build(connection, location=_keyring,
                    secret=secret, gpg_bin=gpg_bin, use_agent=use_agent)

            # optional second keyring to export keys from
            if 'source_keyring' in args:
                source_keyring = PluginKeyring.build(connection,
                        location=args['source_keyring'], secret=secret, gpg_bin=source_gpg_bin)
            else:
                source_keyring = None

            if 'list_keys' in args:
                result = keyring.list_keys()
                return rd(**result)

            elif 'state' in args:
                state = args['state']
                if not state in VALID_STATES:
                    return ae("invalid state '%s' must be in %s" % (state, ",".join(VALID_STATES)))
            else:
                state = DEFAULT_STATE

            if state == 'copied':
                if not 'key_id' in args:
                    return ae("'key_id' is required if you want it copied")
                key_id = args['key_id']
                if not keyring.has_key(key_id):
                    return ae("'%s' not found in keyring" % key_id)
                result = keyring.export_key(key_id)
                return rd(**result)

            # BUGFIX: was "'key' in 'key' in args" -- a chained-comparison
            # typo ("('key' in 'key') and ('key' in args)") that only
            # worked by accident; the intent is simply to require either
            # 'key' or 'key_id'.  Also fixed the message typo "is require".
            if not ('key_id' in args or 'key' in args):
                return ae("A key or a key_id is required to ensure %s" % state)
            if 'key' in args and not 'key_id' in args:
                key_id = get_key_id_from_key(args['key'])
            else:
                key_id = args['key_id']

            if state == 'absent':
                if keyring.has_key(key_id):
                    if not check_mode:
                        result = keyring.remove_key(key_id)
                        if keyring.has_key(key_id):
                            return ae("Failed to remove key", **result)
                    return rd(changed=True)
                else:
                    return rd(changed=False)

            if state in ('present', 'signed'):
                changed = False
                if not keyring.has_key(key_id):
                    changed = True
                    # obtain the key material: inline > source keyring
                    if 'key' in args:
                        key = args['key']
                    elif not source_keyring is None:
                        result = source_keyring.export_key(key_id)
                        if ('failed' in result and result['failed']) or not 'key' in result:
                            return ae("Failed to export key", **result)
                        key = result['key']
                    else:
                        return ae("Source keyring is required if no key is provided")
                    if not check_mode:
                        result = keyring.import_key(key)
                        if 'failed' in result and result['failed']:
                            return ae("Failed to import key for signing", **result)
                        if state == 'present':
                            return rd(**result)
                if state == 'present':
                    return rd(changed=changed)

            if state == 'signed':
                key = keyring.search_key(key_id)
                signing_key = args['signing_key'] if 'signing_key' in args else None
                # NOTE(review): 'changed' is set when check_signature()
                # is truthy -- confirm whether this condition should be
                # inverted (i.e. only changed when NOT already signed)
                if key.check_signature(signing_key):
                    changed = True
                if check_mode:
                    return rd(changed=changed)
                result = keyring.sign_key(key_id, passphrase,
                        signing_keyid=signing_key)
                if 'signed' in result and not result['signed']:
                    return ae("Failed to sign key", **result)
                return rd(**result)

            return ae("Unsupported state '%s'" % state)

        except ConnectionError as rme:
            return rme.return_data
        except GPGAnsibleError as e:
            return ae(e.msg, **e.kwargs)
Ejemplo n.º 41
0
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        ''' handler for fetch operations '''

        if self.runner.noop_on_check(inject):
            return ReturnData(
                conn=conn,
                comm_ok=True,
                result=dict(
                    skipped=True,
                    msg='check mode not (yet) supported for this module'))

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source = options.get('src', None)
        dest = options.get('dest', None)
        flat = options.get('flat', False)
        flat = utils.boolean(flat)
        fail_on_missing = options.get('fail_on_missing', False)
        fail_on_missing = utils.boolean(fail_on_missing)
        validate_checksum = options.get('validate_checksum', None)
        if validate_checksum is not None:
            validate_checksum = utils.boolean(validate_checksum)
        # Alias for validate_checksum (old way of specifying it)
        validate_md5 = options.get('validate_md5', None)
        if validate_md5 is not None:
            validate_md5 = utils.boolean(validate_md5)
        if validate_md5 is None and validate_checksum is None:
            # Default
            validate_checksum = True
        elif validate_checksum is None:
            validate_checksum = validate_md5
        elif validate_md5 is not None and validate_checksum is not None:
            results = dict(
                failed=True,
                msg=
                "validate_checksum and validate_md5 cannot both be specified")
            return ReturnData(conn, result=results)

        if source is None or dest is None:
            results = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, result=results)

        source = conn.shell.join_path(source)
        source = self.runner._remote_expand_user(conn, source, tmp)

        # calculate checksum for the remote file
        remote_checksum = self.runner._remote_checksum(conn, tmp, source,
                                                       inject)

        # use slurp if sudo and permissions are lacking
        remote_data = None
        if remote_checksum in ('1', '2') or self.runner.sudo:
            slurpres = self.runner._execute_module(conn,
                                                   tmp,
                                                   'slurp',
                                                   'src=%s' % source,
                                                   inject=inject)
            if slurpres.is_successful():
                if slurpres.result['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres.result['content'])
                if remote_data is not None:
                    remote_checksum = utils.checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.result.get('source')
                if remote_source and remote_source != source:
                    source = remote_source

        # calculate the destination name
        if os.path.sep not in conn.shell.join_path('a', ''):
            source_local = source.replace('\\', '/')
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith("/"):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = utils.path_dwim(self.runner.basedir, dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            dest = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir, dest),
                                 inject['inventory_hostname'], source_local)

        dest = dest.replace("//", "/")

        if remote_checksum in ('0', '1', '2', '3', '4'):
            # these don't fail because you may want to transfer a log file that possibly MAY exist
            # but keep going to fetch other log files
            if remote_checksum == '0':
                result = dict(
                    msg="unable to calculate the checksum of the remote file",
                    file=source,
                    changed=False)
            elif remote_checksum == '1':
                if fail_on_missing:
                    result = dict(failed=True,
                                  msg="the remote file does not exist",
                                  file=source)
                else:
                    result = dict(
                        msg=
                        "the remote file does not exist, not transferring, ignored",
                        file=source,
                        changed=False)
            elif remote_checksum == '2':
                result = dict(
                    msg=
                    "no read permission on remote file, not transferring, ignored",
                    file=source,
                    changed=False)
            elif remote_checksum == '3':
                result = dict(
                    msg=
                    "remote file is a directory, fetch cannot work on directories",
                    file=source,
                    changed=False)
            elif remote_checksum == '4':
                result = dict(
                    msg=
                    "python isn't present on the system.  Unable to compute checksum",
                    file=source,
                    changed=False)
            return ReturnData(conn=conn, result=result)

        # calculate checksum for the local file
        local_checksum = utils.checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                conn.fetch_file(source, dest)
            else:
                f = open(dest, 'w')
                f.write(remote_data)
                f.close()
            new_checksum = utils.secure_hash(dest)
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                new_md5 = utils.md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                result = dict(failed=True,
                              md5sum=new_md5,
                              msg="checksum mismatch",
                              file=source,
                              dest=dest,
                              remote_md5sum=None,
                              checksum=new_checksum,
                              remote_checksum=remote_checksum)
                return ReturnData(conn=conn, result=result)
            result = dict(changed=True,
                          md5sum=new_md5,
                          dest=dest,
                          remote_md5sum=None,
                          checksum=new_checksum,
                          remote_checksum=remote_checksum)
            return ReturnData(conn=conn, result=result)
        else:
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                local_md5 = utils.md5(dest)
            except ValueError:
                local_md5 = None

            result = dict(changed=False,
                          md5sum=local_md5,
                          file=source,
                          dest=dest,
                          checksum=local_checksum)
            return ReturnData(conn=conn, result=result)
Ejemplo n.º 42
0
    def __init__(self, play, ds, module_vars=None):
        ''' constructor loads from a task or handler datastructure

        play        -- the Play object this task/handler belongs to
        ds          -- raw task datastructure (dict parsed from playbook YAML);
                       mutated in place while shorthand keys are normalized
        module_vars -- variables inherited from the including context; also
                       mutated to expose per-task settings to Runner code
        '''

        # First pass: normalize shorthand keys in the raw datastructure and
        # reject anything that is not a known task keyword.
        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in utils.plugins.module_finder:
                ds['action'] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_"):
                plugin_name = x.replace("with_", "")
                if plugin_name in utils.plugins.lookup_loader:
                    ds['items_lookup_plugin'] = plugin_name
                    ds['items_lookup_terms'] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError(
                        "cannot find lookup plugin named %s for usage in with_%s"
                        % (plugin_name, plugin_name))

            # legacy "when_<test>: expr" form becomes "when: <test> expr"
            elif x.startswith("when_"):
                when_name = x.replace("when_", "")
                ds['when'] = "%s %s" % (when_name, ds[x])
                ds.pop(x)

            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter in an Ansible task or handler"
                    % x)

        self.module_vars = module_vars
        self.play = play

        # load various attributes
        self.name = ds.get('name', None)
        self.tags = ['all']  # every task implicitly carries the 'all' tag
        self.register = ds.get('register', None)
        self.sudo = utils.boolean(ds.get('sudo', play.sudo))

        # sudo credentials fall back to the play, then the playbook
        if self.sudo:
            self.sudo_user = ds.get('sudo_user', play.sudo_user)
            self.sudo_pass = ds.get('sudo_pass', play.playbook.sudo_pass)
        else:
            self.sudo_user = None
            self.sudo_pass = None

        # Both are defined
        if ('action' in ds) and ('local_action' in ds):
            raise errors.AnsibleError(
                "the 'action' and 'local_action' attributes can not be used together"
            )
        # Both are NOT defined
        elif (not 'action' in ds) and (not 'local_action' in ds):
            raise errors.AnsibleError(
                "'action' or 'local_action' attribute missing in task \"%s\"" %
                ds.get('name', '<Unnamed>'))
        # Only one of them is defined
        elif 'local_action' in ds:
            # local_action implies running on the control machine
            self.action = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
        else:
            self.action = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport = ds.get('transport', play.transport)

        # delegate_to can use variables
        if not (self.delegate_to is None):
            self.delegate_to = utils.template(None, self.delegate_to,
                                              self.module_vars)
            # delegate_to: localhost should use local transport
            if self.delegate_to in ['127.0.0.1', 'localhost']:
                self.transport = 'local'

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.only_if = ds.get('only_if', 'True')
        self.when = ds.get('when', None)

        self.async_seconds = int(ds.get('async', 0))  # not async by default
        self.async_poll_interval = int(ds.get('poll',
                                              10))  # default poll = 10 seconds
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)

        self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
        self.items_lookup_terms = ds.get('items_lookup_terms', None)

        self.ignore_errors = ds.get('ignore_errors', False)

        # action should be a string
        if not isinstance(self.action, basestring):
            raise errors.AnsibleError(
                "action is of type '%s' and not a string in task. name: %s" %
                (type(self.action).__name__, self.name))

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [self.notify]

        # split the action line into a module name + arguments
        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError(
                "invalid/missing action in task. name: %s" % self.name)
        self.module_name = tokens[0]
        self.module_args = ''
        if len(tokens) > 1:
            self.module_args = tokens[1]

        # tags inherited from an include/role come in through module_vars
        import_tags = self.module_vars.get('tags', [])
        if type(import_tags) in [str, unicode]:
            # allow the user to list comma delimited tags
            import_tags = import_tags.split(",")

        # template name/action now; module args are templated later per-host
        self.name = utils.template(None, self.name, self.module_vars)
        self.action = utils.template(None, self.action, self.module_vars)

        # handle mutually incompatible options
        incompatibles = [
            x for x in [self.first_available_file, self.items_lookup_plugin]
            if x is not None
        ]
        if len(incompatibles) > 1:
            raise errors.AnsibleError(
                "with_(plugin), and first_available_file are mutually incompatible in a single task"
            )

        # make first_available_file accessable to Runner code
        if self.first_available_file:
            self.module_vars[
                'first_available_file'] = self.first_available_file

        if self.items_lookup_plugin is not None:
            self.module_vars['items_lookup_plugin'] = self.items_lookup_plugin
            self.module_vars['items_lookup_terms'] = self.items_lookup_terms

        # allow runner to see delegate_to option
        self.module_vars['delegate_to'] = self.delegate_to

        # make ignore_errors accessable to Runner code
        self.module_vars['ignore_errors'] = self.ignore_errors

        # tags allow certain parts of a playbook to be run without running the whole playbook
        apply_tags = ds.get('tags', None)
        if apply_tags is not None:
            if type(apply_tags) in [str, unicode]:
                self.tags.append(apply_tags)
            elif type(apply_tags) == list:
                self.tags.extend(apply_tags)
        self.tags.extend(import_tags)

        # 'when' is compiled down onto the older only_if mechanism; the two
        # cannot be combined on one task
        if self.when is not None:
            if self.only_if != 'True':
                raise errors.AnsibleError(
                    'when obsoletes only_if, only use one or the other')
            self.only_if = self.compile_when_to_only_if(self.when)
Ejemplo n.º 43
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for the assemble action: concatenate local fragments
        from src and deliver the result to dest on the remote host.

        Returns a ReturnData wrapping the result of the remote copy/file/
        assemble module invocation.
        '''

        # load up options (complex args first, then key=value overrides)
        options  = {}
        if complex_args:
            options.update(complex_args)

        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)
        delimiter = options.get('delimiter', None)
        remote_src = utils.boolean(options.get('remote_src', 'yes'))
        regexp = options.get('regexp', None)


        if src is None or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        if remote_src:
            # fragments already live on the target: run the module remotely
            return self.runner._execute_module(conn, tmp, 'assemble', module_args, inject=inject, complex_args=complex_args)
        elif '_original_file' in inject:
            src = utils.path_dwim_relative(inject['_original_file'], 'files', src, self.runner.basedir)
        else:
            # the source is local, so expand it here
            src = os.path.expanduser(src)

        _re = None
        if regexp is not None:
            _re = re.compile(regexp)

        # Does all work assembling the file
        path = self._assemble_from_fragments(src, delimiter, _re)

        path_checksum = utils.checksum_s(path)
        dest = self.runner._remote_expand_user(conn, dest, tmp)
        remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)

        if path_checksum != remote_checksum:
            # content differs: transfer the assembled file and run 'copy'.
            # use a context manager so the fragment file is not leaked
            # (original used bare file(path).read()).
            with open(path) as assembled:
                resultant = assembled.read()
            if self.runner.diff:
                dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
                if 'content' in dest_result.result:
                    dest_contents = dest_result.result['content']
                    if dest_result.result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise Exception("unknown encoding, failed: %s" % dest_result.result)
            xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)

            # fix file permissions when the copy is done as a different user
            if self.runner.become and self.runner.become_user != 'root':
                self.runner._remote_chmod(conn, 'a+r', xfered, tmp)

            # run the copy module
            new_module_args = dict(
                src=xfered,
                dest=dest,
                original_basename=os.path.basename(src),
            )
            module_args_tmp = utils.merge_module_args(module_args, new_module_args)

            if self.runner.noop_on_check(inject):
                return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=src, after=resultant))
            else:
                res = self.runner._execute_module(conn, tmp, 'copy', module_args_tmp, inject=inject)
                res.diff = dict(after=resultant)
                return res
        else:
            # checksums match: nothing was transferred, so no remote temp file
            # exists.  BUGFIX: the original passed src=xfered here, but xfered
            # is only assigned in the branch above, making this path raise
            # NameError.  The 'file' module only needs dest metadata to fix up
            # ownership/permissions.
            new_module_args = dict(
                dest=dest,
                original_basename=os.path.basename(src),
            )

            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True

            module_args_tmp = utils.merge_module_args(module_args, new_module_args)

            return self.runner._execute_module(conn, tmp, 'file', module_args_tmp, inject=inject)
Ejemplo n.º 44
0
    def __init__(self, playbook, ds, basedir, vault_password=None):
        ''' constructor loads from a play datastructure

        playbook       -- owning PlayBook object (supplies defaults/inventory)
        ds             -- raw play datastructure (dict parsed from YAML)
        basedir        -- directory the playbook file lives in
        vault_password -- optional password for vault-encrypted vars files
        '''

        # reject unknown top-level play keywords early
        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter at this level in an Ansible Playbook" % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars             = ds.get('vars', {})
        self.vars_prompt      = ds.get('vars_prompt', {})
        self.playbook         = playbook
        self.vars             = self._get_vars()
        self.basedir          = basedir
        self.roles            = ds.get('roles', None)
        self.tags             = ds.get('tags', None)
        self.vault_password   = vault_password

        # normalize tags to a list (comma-delimited string allowed)
        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [ str, unicode ]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # make sure we have some special internal variables set
        self.vars['playbook_dir'] = os.path.abspath(self.basedir)
        if self.playbook.inventory.basedir() is not None:
            self.vars['inventory_dir'] = self.playbook.inventory.basedir()
        if self.playbook.inventory.src() is not None:
            self.vars['inventory_file'] = self.playbook.inventory.src()

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        processed_vars_files = self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have been updated
        # by the included roles, but exclude any which have been processed
        self.vars_files = utils.list_difference(ds.get('vars_files', []), processed_vars_files)
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')

        self._update_vars_files_for_host(None)

        # apply any extra_vars specified on the command line now
        if type(self.playbook.extra_vars) == dict:
            self.vars = utils.combine_vars(self.vars, self.playbook.extra_vars)

        # template everything to be efficient, but do not pre-mature template
        # tasks/handlers as they may have inventory scope overrides
        _tasks    = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])
        ds = template(basedir, ds, self.vars)
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            # a list of host patterns is joined into one pattern string
            hosts = ';'.join(hosts)
        self.serial           = str(ds.get('serial', 0))
        self.hosts            = hosts
        self.name             = ds.get('name', self.hosts)
        self._tasks           = ds.get('tasks', [])
        self._handlers        = ds.get('handlers', [])
        self.remote_user      = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
        self.remote_port      = ds.get('port', self.playbook.remote_port)
        self.sudo             = ds.get('sudo', self.playbook.sudo)
        self.sudo_user        = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport        = ds.get('connection', self.playbook.transport)
        self.remote_port      = self.remote_port
        self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
        self.accelerate       = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port  = ds.get('accelerate_port', None)
        self.accelerate_ipv6  = ds.get('accelerate_ipv6', False)
        self.max_fail_pct     = int(ds.get('max_fail_percentage', 100))
        self.su               = ds.get('su', self.playbook.su)
        self.su_user          = ds.get('su_user', self.playbook.su_user)
        self.no_log           = utils.boolean(ds.get('no_log', 'false'))

        # gather_facts is not a simple boolean, as None means  that a 'smart'
        # fact gathering mode will be used, so we need to be careful here as
        # calling utils.boolean(None) returns False
        self.gather_facts = ds.get('gather_facts', None)
        if self.gather_facts:
            self.gather_facts = utils.boolean(self.gather_facts)

        # Fail out if user specifies a sudo param with a su param in a given play
        if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su') or ds.get('su_user')):
            raise errors.AnsibleError('sudo params ("sudo", "sudo_user") and su params '
                                      '("su", "su_user") cannot be used together')

        # role_names is made visible to the tasks being loaded below
        load_vars = {}
        load_vars['role_names'] = ds.get('role_names',[])

        self._tasks      = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers   = self._load_tasks(self._ds.get('handlers', []), load_vars)

        # apply any missing tags to role tasks
        self._late_merge_role_tags()

        # a non-root sudo_user implies sudo even if not explicitly requested
        if self.sudo_user != 'root':
            self.sudo = True

        # place holder for the discovered hosts to be used in this play
        self._play_hosts = None
Ejemplo n.º 45
0
        elif isinstance(hosts, list):
            try:
                hosts = ';'.join(hosts)
            except TypeError,e:
                raise errors.AnsibleError('improper host declaration: %s' % str(e))

        self.serial           = str(ds.get('serial', 0))
        self.hosts            = hosts
        self.name             = ds.get('name', self.hosts)
        self._tasks           = ds.get('tasks', [])
        self._handlers        = ds.get('handlers', [])
        self.remote_user      = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
        self.remote_port      = ds.get('port', self.playbook.remote_port)
        self.transport        = ds.get('connection', self.playbook.transport)
        self.remote_port      = self.remote_port
        self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
        self.accelerate       = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port  = ds.get('accelerate_port', None)
        self.accelerate_ipv6  = ds.get('accelerate_ipv6', False)
        self.max_fail_pct     = int(ds.get('max_fail_percentage', 100))
        self.no_log           = utils.boolean(ds.get('no_log', 'false'))
        self.force_handlers   = utils.boolean(ds.get('force_handlers', self.playbook.force_handlers))

        # Fail out if user specifies conflicting privilege escalations
        if (ds.get('become') or ds.get('become_user')) and (ds.get('sudo') or ds.get('sudo_user')):
            raise errors.AnsibleError('sudo params ("become", "become_user") and su params ("sudo", "sudo_user") cannot be used together')
        if (ds.get('become') or ds.get('become_user')) and (ds.get('su') or ds.get('su_user')):
            raise errors.AnsibleError('sudo params ("become", "become_user") and su params ("su", "su_user") cannot be used together')
        if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su') or ds.get('su_user')):
            raise errors.AnsibleError('sudo params ("sudo", "sudo_user") and su params ("su", "su_user") cannot be used together')
Ejemplo n.º 46
0
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        ''' handler for file transfer operations '''

        # merge complex args with key=value args (key=value wins)
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        source = options.get('src', None)
        dest = options.get('dest', None)
        copy = utils.boolean(options.get('copy', 'yes'))
        creates = options.get('creates', None)

        # both endpoints are mandatory
        if source is None or dest is None:
            failure = dict(failed=True,
                           msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=failure)

        if creates:
            # do not run the command if the line contains creates=filename
            # and the filename already exists. This allows idempotence
            # of command executions.
            stat_res = self.runner._execute_module(
                conn,
                tmp,
                'stat',
                "path=%s" % creates,
                inject=inject,
                complex_args=complex_args,
                persist_files=True)
            stat = stat_res.result.get('stat', None)
            if stat and stat.get('exists', False):
                skipped = dict(skipped=True,
                               changed=False,
                               msg=("skipped, since %s exists" % creates))
                return ReturnData(conn=conn, comm_ok=True, result=skipped)

        dest = self.runner._remote_expand_user(
            conn, dest, tmp)  # CCTODO: Fix path for Windows hosts.
        source = template.template(self.runner.basedir,
                                   os.path.expanduser(source), inject)
        if copy:
            # resolve the local archive path relative to the playbook/files dir
            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'],
                                                  'files', source,
                                                  self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # the destination must be an existing directory ('3') on the remote
        remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)
        if remote_checksum == '4':
            no_python = dict(
                failed=True,
                msg=
                "python isn't present on the system.  Unable to compute checksum"
            )
            return ReturnData(conn=conn, result=no_python)
        if remote_checksum != '3':
            bad_dest = dict(failed=True,
                            msg="dest '%s' must be an existing dir" % dest)
            return ReturnData(conn=conn, result=bad_dest)

        if copy:
            # transfer the archive to a temporary remote location, then fix
            # permissions when the copy is done as a different user
            tmp_src = tmp + 'source'
            conn.put_file(source, tmp_src)

            escalated = (self.runner.sudo and self.runner.sudo_user != 'root'
                         or self.runner.su and self.runner.su_user != 'root')
            if escalated and not self.runner.noop_on_check(inject):
                self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)

            # Build temporary module_args.
            new_module_args = dict(
                src=tmp_src,
                original_basename=os.path.basename(source),
            )
            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True
            module_args = utils.merge_module_args(module_args, new_module_args)
        else:
            module_args = "%s original_basename=%s" % (
                module_args, pipes.quote(os.path.basename(source)))
            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                module_args += " CHECKMODE=True"

        return self.runner._execute_module(conn,
                                           tmp,
                                           'unarchive',
                                           module_args,
                                           inject=inject,
                                           complex_args=complex_args)
Ejemplo n.º 47
0
    def _build_role_dependencies(self, roles, dep_stack, passed_vars=None, level=0):
        ''' recursively resolve role dependencies onto dep_stack.

        roles       -- list of role names/dicts to resolve
        dep_stack   -- accumulator list of [name, path, vars, defaults] entries
        passed_vars -- vars inherited from the including role (BUGFIX: was a
                       mutable default argument {}, shared across calls)
        level       -- current recursion depth; bounded to catch cycles
        '''
        if passed_vars is None:
            passed_vars = {}
        # this number is arbitrary, but it seems sane
        if level > 20:
            raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
        for role in roles:
            role_path,role_vars = self._get_role_path(role)
            role_vars = utils.combine_vars(passed_vars, role_vars)
            # load the role's vars/main and defaults/main, if present
            vars_file = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
            vars_data = {}
            if os.path.isfile(vars_file):
                vars_data = utils.parse_yaml_from_file(vars_file)
                if vars_data:
                    role_vars = utils.combine_vars(vars_data, role_vars)
            defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
            defaults_data = {}
            if os.path.isfile(defaults):
                defaults_data = utils.parse_yaml_from_file(defaults)
            # the meta directory contains the yaml that should
            # hold the list of dependencies (if any)
            meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
            if os.path.isfile(meta):
                data = utils.parse_yaml_from_file(meta)
                if data:
                    dependencies = data.get('dependencies',[])
                    for dep in dependencies:
                        allow_dupes = False
                        (dep_path,dep_vars) = self._get_role_path(dep)
                        meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                        if os.path.isfile(meta):
                            meta_data = utils.parse_yaml_from_file(meta)
                            if meta_data:
                                allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))

                        # if tags are set from this role, merge them
                        # into the tags list for the dependent role
                        if "tags" in passed_vars:
                            for included_role_dep in dep_stack:
                                included_dep_name = included_role_dep[0]
                                included_dep_vars = included_role_dep[2]
                                if included_dep_name == dep:
                                    if "tags" in included_dep_vars:
                                        included_dep_vars["tags"] = list(set(included_dep_vars["tags"] + passed_vars["tags"]))
                                    else:
                                        # BUGFIX: list has no .copy() on Python 2;
                                        # use list() to take a shallow copy
                                        included_dep_vars["tags"] = list(passed_vars["tags"])

                        dep_vars = utils.combine_vars(passed_vars, dep_vars)
                        dep_vars = utils.combine_vars(role_vars, dep_vars)
                        dep_vars_file = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                        dep_vars_data = {}
                        if os.path.isfile(dep_vars_file):
                            dep_vars_data = utils.parse_yaml_from_file(dep_vars_file)
                            if dep_vars_data:
                                dep_vars = utils.combine_vars(dep_vars_data, dep_vars)
                        defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                        dep_defaults_data = {}
                        if os.path.isfile(defaults):
                            dep_defaults_data = utils.parse_yaml_from_file(defaults)
                        if 'role' in dep_vars:
                            del dep_vars['role']

                        if "tags" in passed_vars:
                            if not self._is_valid_tag(passed_vars["tags"]):
                                # one of the tags specified for this role was in the
                                # skip list, or we're limiting the tags and it didn't
                                # match one, so we just skip it completely
                                continue

                        if not allow_dupes:
                            if dep in self.included_roles:
                                # skip back to the top, since we don't want to
                                # do anything else with this role
                                continue
                            else:
                                self.included_roles.append(dep)

                        # pass along conditionals from roles to dep roles
                        if type(role) is dict:
                            if 'when' in passed_vars:
                                if 'when' in dep_vars:
                                    tmpcond = []

                                    if type(passed_vars['when']) is str:
                                        tmpcond.append(passed_vars['when'])
                                    elif type(passed_vars['when']) is list:
                                        # BUGFIX: original called tmpcond.join(...),
                                        # which raises AttributeError (lists have
                                        # no join); extend is what was intended
                                        tmpcond.extend(passed_vars['when'])

                                    if type(dep_vars['when']) is str:
                                        tmpcond.append(dep_vars['when'])
                                    elif type(dep_vars['when']) is list:
                                        tmpcond += dep_vars['when']

                                    if len(tmpcond) > 0:
                                        dep_vars['when'] = tmpcond
                                else:
                                    dep_vars['when'] = passed_vars['when']

                        self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
                        dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])

            # only add the current role when we're at the top level,
            # otherwise we'll end up in a recursive loop
            if level == 0:
                self.included_roles.append(role)
                dep_stack.append([role,role_path,role_vars,defaults_data])
        return dep_stack
Ejemplo n.º 48
0
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):
        ''' handler for the assemble action: concatenates local file
        fragments and pushes the result to the remote host via the copy
        module, or delegates entirely to the remote assemble module when
        remote_src is set (the default).

        conn         -- connection object for the target host
        tmp          -- remote temp directory path
        module_args  -- raw k=v argument string
        inject       -- variable namespace for this host/task
        complex_args -- structured (dict) arguments, merged under k=v args
        Returns a ReturnData describing the result.
        '''

        # load up options; k=v module_args override complex_args
        options = {}
        if complex_args:
            options.update(complex_args)

        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)
        delimiter = options.get('delimiter', None)
        remote_src = utils.boolean(options.get('remote_src', 'yes'))
        regexp = options.get('regexp', None)

        if src is None or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        if remote_src:
            # fragments already live on the remote node; let the remote
            # assemble module do all the work
            return self.runner._execute_module(conn,
                                               tmp,
                                               'assemble',
                                               module_args,
                                               inject=inject,
                                               complex_args=complex_args)
        elif '_original_file' in inject:
            # running from a role/include: resolve src relative to its files/ dir
            src = utils.path_dwim_relative(inject['_original_file'], 'files',
                                           src, self.runner.basedir)
        else:
            # the source is local, so expand it here
            src = os.path.expanduser(src)

        _re = None
        if regexp is not None:
            # fragments whose names do not match the regexp are skipped
            _re = re.compile(regexp)

        # Does all work assembling the file; returns a local temp file path
        path = self._assemble_from_fragments(src, delimiter, _re)

        path_checksum = utils.checksum_s(path)
        dest = self.runner._remote_expand_user(conn, dest, tmp)
        remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)

        if path_checksum != remote_checksum:
            # content differs: transfer the assembled file and copy it in place
            resultant = file(path).read()
            if self.runner.diff:
                dest_result = self.runner._execute_module(conn,
                                                          tmp,
                                                          'slurp',
                                                          "path=%s" % dest,
                                                          inject=inject,
                                                          persist_files=True)
                if 'content' in dest_result.result:
                    dest_contents = dest_result.result['content']
                    if dest_result.result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise Exception("unknown encoding, failed: %s" %
                                        dest_result.result)
                    # NOTE(review): dest_contents is decoded here but never
                    # folded into the returned diff -- confirm intent
            xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)

            # fix file permissions when the copy is done as a different user
            if self.runner.sudo and self.runner.sudo_user != 'root' or self.runner.su and self.runner.su_user != 'root':
                self.runner._remote_chmod(conn, 'a+r', xfered, tmp)

            # run the copy module
            new_module_args = dict(
                src=xfered,
                dest=dest,
                original_basename=os.path.basename(src),
            )
            module_args_tmp = utils.merge_module_args(module_args,
                                                      new_module_args)

            if self.runner.noop_on_check(inject):
                return ReturnData(conn=conn,
                                  comm_ok=True,
                                  result=dict(changed=True),
                                  diff=dict(before_header=dest,
                                            after_header=src,
                                            after=resultant))
            else:
                res = self.runner._execute_module(conn,
                                                  tmp,
                                                  'copy',
                                                  module_args_tmp,
                                                  inject=inject)
                res.diff = dict(after=resultant)
                return res
        else:
            # content already matches: run the 'file' module so ownership/
            # permission arguments in module_args still get applied to dest.
            # BUGFIX: the original passed src=xfered here, but 'xfered' is
            # only bound in the changed branch above, raising NameError
            # whenever the file was already up to date.
            new_module_args = dict(
                dest=dest,
                original_basename=os.path.basename(src),
            )

            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True

            module_args_tmp = utils.merge_module_args(module_args,
                                                      new_module_args)

            return self.runner._execute_module(conn,
                                               tmp,
                                               'file',
                                               module_args_tmp,
                                               inject=inject)
Ejemplo n.º 49
0
    def __init__(self, play, ds, module_vars=None, additional_conditions=None):
        '''
        Constructor: loads a task (or handler) from its YAML datastructure.

        play  -- parent Play object, supplies defaults (sudo, transport,
                 any_errors_fatal, ...)
        ds    -- raw task/handler dict parsed from YAML; NOTE: it is
                 normalized IN PLACE by the key loop below
        module_vars           -- variables inherited from the play/role scope
        additional_conditions -- extra only_if conditions ANDed onto the task
        '''

        # meta directives are used to tell things like ansible/playbook to run
        # operations like handler execution.  Meta tasks are not executed
        # normally.
        if 'meta' in ds:
            self.meta = ds['meta']
            self.tags = []
            return
        else:
            self.meta = None

        # a play-local 'library' directory extends the set of known modules
        library = os.path.join(play.basedir, 'library')
        if os.path.exists(library):
            utils.plugins.module_finder.add_directory(library)

        # normalize shorthand keys into canonical form ('action', 'when',
        # items_lookup_*); ds is mutated as keys are rewritten/popped
        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in utils.plugins.module_finder:

                if 'action' in ds:
                    raise errors.AnsibleError(
                        "multiple actions specified in task %s" %
                        (ds.get('name', ds['action'])))
                if isinstance(ds[x], dict):
                    # "modulename: {dict}" form: dict becomes structured args
                    if 'args' in ds:
                        raise errors.AnsibleError(
                            "can't combine args: and a dict for %s: in task %s"
                            % (x, ds.get('name', "%s: %s" % (x, ds[x]))))
                    ds['args'] = ds[x]
                    ds[x] = ''
                elif ds[x] is None:
                    ds[x] = ''
                if not isinstance(ds[x], basestring):
                    raise errors.AnsibleError(
                        "action specified for task %s has invalid type %s" %
                        (ds.get('name', "%s: %s" % (x, ds[x])), type(ds[x])))
                ds['action'] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_"):
                plugin_name = x.replace("with_", "")
                if plugin_name in utils.plugins.lookup_loader:
                    ds['items_lookup_plugin'] = plugin_name
                    ds['items_lookup_terms'] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError(
                        "cannot find lookup plugin named %s for usage in with_%s"
                        % (plugin_name, plugin_name))

            elif x == 'when':
                # 'when' expressions go through the jinja2_compare machinery
                ds['when'] = "jinja2_compare %s" % (ds[x])
            elif x.startswith("when_"):
                # legacy when_<type> syntax; mutually exclusive with 'when'
                if 'when' in ds:
                    raise errors.AnsibleError(
                        "multiple when_* statements specified in task %s" %
                        (ds.get('name', ds['action'])))
                when_name = x.replace("when_", "")
                ds['when'] = "%s %s" % (when_name, ds[x])
                ds.pop(x)

            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter in an Ansible task or handler"
                    % x)

        self.module_vars = module_vars
        self.play = play

        # load various attributes
        self.name = ds.get('name', None)
        self.tags = ['all']
        self.register = ds.get('register', None)
        self.sudo = utils.boolean(ds.get('sudo', play.sudo))
        self.environment = ds.get('environment', {})

        # rather than simple key=value args on the options line, these represent structured data and the values
        # can be hashes and lists, not just scalars
        self.args = ds.get('args', {})

        if self.sudo:
            self.sudo_user = ds.get('sudo_user', play.sudo_user)
            self.sudo_pass = ds.get('sudo_pass', play.playbook.sudo_pass)
        else:
            self.sudo_user = None
            self.sudo_pass = None

        # Both are defined
        if ('action' in ds) and ('local_action' in ds):
            raise errors.AnsibleError(
                "the 'action' and 'local_action' attributes can not be used together"
            )
        # Both are NOT defined
        elif (not 'action' in ds) and (not 'local_action' in ds):
            raise errors.AnsibleError(
                "'action' or 'local_action' attribute missing in task \"%s\"" %
                ds.get('name', '<Unnamed>'))
        # Only one of them is defined
        elif 'local_action' in ds:
            # local_action forces delegation to the control machine
            self.action = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
        else:
            self.action = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport = ds.get('connection',
                                    ds.get('transport', play.transport))

        # action given as a dict: pull the module name out, rest become args
        if isinstance(self.action, dict):
            if 'module' not in self.action:
                raise errors.AnsibleError(
                    "'module' attribute missing from action in task \"%s\"" %
                    ds.get('name', '%s' % self.action))
            if self.args:
                raise errors.AnsibleError(
                    "'args' cannot be combined with dict 'action' in task \"%s\""
                    % ds.get('name', '%s' % self.action))
            self.args = self.action
            self.action = self.args.pop('module')

        # delegate_to can use variables
        if not (self.delegate_to is None):
            # delegate_to: localhost should use local transport
            if self.delegate_to in ['127.0.0.1', 'localhost']:
                self.transport = 'local'

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.only_if = ds.get('only_if', 'True')
        self.when = ds.get('when', None)

        self.async_seconds = int(ds.get('async', 0))  # not async by default
        self.async_poll_interval = int(ds.get('poll',
                                              10))  # default poll = 10 seconds
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)

        self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
        self.items_lookup_terms = ds.get('items_lookup_terms', None)

        self.ignore_errors = ds.get('ignore_errors', False)
        self.any_errors_fatal = ds.get('any_errors_fatal',
                                       play.any_errors_fatal)

        # action should be a string
        if not isinstance(self.action, basestring):
            raise errors.AnsibleError(
                "action is of type '%s' and not a string in task. name: %s" %
                (type(self.action).__name__, self.name))

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [self.notify]

        # split the action line into a module name + arguments
        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError(
                "invalid/missing action in task. name: %s" % self.name)
        self.module_name = tokens[0]
        self.module_args = ''
        if len(tokens) > 1:
            self.module_args = tokens[1]

        # tags inherited from an including play/role via module_vars
        import_tags = self.module_vars.get('tags', [])
        if type(import_tags) in [str, unicode]:
            # allow the user to list comma delimited tags
            import_tags = import_tags.split(",")

        # handle mutually incompatible options
        incompatibles = [
            x for x in [self.first_available_file, self.items_lookup_plugin]
            if x is not None
        ]
        if len(incompatibles) > 1:
            raise errors.AnsibleError(
                "with_(plugin), and first_available_file are mutually incompatible in a single task"
            )

        # make first_available_file accessable to Runner code
        if self.first_available_file:
            self.module_vars[
                'first_available_file'] = self.first_available_file

        if self.items_lookup_plugin is not None:
            self.module_vars['items_lookup_plugin'] = self.items_lookup_plugin
            self.module_vars['items_lookup_terms'] = self.items_lookup_terms

        # allow runner to see delegate_to option
        self.module_vars['delegate_to'] = self.delegate_to

        # make ignore_errors accessable to Runner code
        self.module_vars['ignore_errors'] = self.ignore_errors

        # tags allow certain parts of a playbook to be run without running the whole playbook
        apply_tags = ds.get('tags', None)
        if apply_tags is not None:
            if type(apply_tags) in [str, unicode]:
                self.tags.append(apply_tags)
            elif type(apply_tags) == list:
                self.tags.extend(apply_tags)
        self.tags.extend(import_tags)

        # 'when' and 'only_if' are mutually exclusive; 'when' compiles into
        # an only_if expression
        if self.when is not None:
            if self.only_if != 'True':
                raise errors.AnsibleError(
                    'when obsoletes only_if, only use one or the other')
            self.only_if = utils.compile_when_to_only_if(self.when)

        # extra conditions (e.g. from includes) are ANDed onto only_if
        if additional_conditions:
            self.only_if = [self.only_if]
            self.only_if.extend(additional_conditions)
Ejemplo n.º 50
0
    def __init__(self, playbook, ds, basedir, vault_password=None):
        '''
        Constructor: loads a play from its playbook datastructure.

        playbook       -- parent PlayBook object (supplies defaults and
                          command-line extra_vars)
        ds             -- raw play dict parsed from YAML
        basedir        -- directory the playbook lives in, used for
                          resolving roles and vars files
        vault_password -- optional password for decrypting vaulted files
        '''

        # reject unknown top-level keys early
        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter in an Ansible Playbook" % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars = ds.get('vars', {})
        self.vars_prompt = ds.get('vars_prompt', {})
        self.playbook = playbook
        self.vars = self._get_vars()
        self.basedir = basedir
        self.roles = ds.get('roles', None)
        self.tags = ds.get('tags', None)
        self.vault_password = vault_password

        # normalize tags to a list (a comma-separated string is allowed)
        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [str, unicode]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have
        # been updated by the included roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')

        self._update_vars_files_for_host(None)

        # apply any extra_vars specified on the command line now
        if type(self.playbook.extra_vars) == dict:
            self.vars = utils.combine_vars(self.vars, self.playbook.extra_vars)

        # template everything to be efficient, but do not pre-mature template
        # tasks/handlers as they may have inventory scope overrides
        _tasks = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])
        ds = template(basedir, ds, self.vars)
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            # multiple host patterns collapse into one pattern string
            hosts = ';'.join(hosts)
        self.serial = int(ds.get('serial', 0))
        self.hosts = hosts
        self.name = ds.get('name', self.hosts)
        self._tasks = ds.get('tasks', [])
        self._handlers = ds.get('handlers', [])
        self.remote_user = ds.get('remote_user',
                                  ds.get('user', self.playbook.remote_user))
        self.remote_port = ds.get('port', self.playbook.remote_port)
        self.sudo = ds.get('sudo', self.playbook.sudo)
        self.sudo_user = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport = ds.get('connection', self.playbook.transport)
        # (removed a redundant `self.remote_port = self.remote_port`
        # self-assignment that had no effect)
        self.any_errors_fatal = utils.boolean(
            ds.get('any_errors_fatal', 'false'))
        self.accelerate = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port = ds.get('accelerate_port', None)
        self.accelerate_ipv6 = ds.get('accelerate_ipv6', False)
        self.max_fail_pct = int(ds.get('max_fail_percentage', 100))
        self.su = ds.get('su', self.playbook.su)
        self.su_user = ds.get('su_user', self.playbook.su_user)

        # gather_facts is not a simple boolean, as None means  that a 'smart'
        # fact gathering mode will be used, so we need to be careful here as
        # calling utils.boolean(None) returns False
        self.gather_facts = ds.get('gather_facts', None)
        if self.gather_facts:
            self.gather_facts = utils.boolean(self.gather_facts)

        # Fail out if user specifies a sudo param with a su param in a given play
        if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su')
                                                        or ds.get('su_user')):
            raise errors.AnsibleError(
                'sudo params ("sudo", "sudo_user") and su params '
                '("su", "su_user") cannot be used together')

        # extra variables made visible while loading tasks/handlers
        load_vars = {}
        load_vars['role_names'] = ds.get('role_names', [])
        load_vars['playbook_dir'] = self.basedir
        if self.playbook.inventory.basedir() is not None:
            load_vars['inventory_dir'] = self.playbook.inventory.basedir()

        self._tasks = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers = self._load_tasks(self._ds.get('handlers', []),
                                          load_vars)

        # apply any missing tags to role tasks
        self._late_merge_role_tags()

        # a non-root sudo_user implies sudo
        if self.sudo_user != 'root':
            self.sudo = True

        # place holder for the discovered hosts to be used in this play
        self._play_hosts = None
Ejemplo n.º 51
0
    def run(self, conn, tmp, module_name, module_args,
        inject, complex_args=None, **kwargs):

        ''' generates params and passes them on to the rsync module

        Rewrites src/dest into user@host:path form depending on push/pull
        mode and any delegation, then executes the 'synchronize' module
        with the assembled options.  Also resets self.runner.sudo when
        done (the original value is stashed in self.original_sudo
        elsewhere in this class).
        '''

        self.inject = inject

        # load up options; k=v module_args override complex_args
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)

        # src/dest may contain jinja2 variables
        src = template.template(self.runner.basedir, src, inject)
        dest = template.template(self.runner.basedir, dest, inject)

        # honor a custom local rsync binary path if the inventory defines one
        try:
            options['local_rsync_path'] = inject['ansible_rsync_path']
        except KeyError:
            pass

        # from the perspective of the rsync call the delegate is the localhost
        src_host = '127.0.0.1'
        dest_host = inject.get('ansible_ssh_host', inject['inventory_hostname'])

        # allow ansible_ssh_host to be templated
        dest_host = template.template(self.runner.basedir, dest_host, inject, fail_on_undefined=True)
        dest_is_local = dest_host in ['127.0.0.1', 'localhost']

        # CHECK FOR NON-DEFAULT SSH PORT
        # inventory_hostname serves as a sentinel default so the check below
        # can tell whether ansible_ssh_port was actually set
        dest_port = options.get('dest_port')
        inv_port = inject.get('ansible_ssh_port', inject['inventory_hostname'])
        if inv_port != dest_port and inv_port != inject['inventory_hostname']:
            options['dest_port'] = inv_port

        # edge case: explicit delegate and dest_host are the same
        if dest_host == inject['delegate_to']:
            dest_host = '127.0.0.1'

        # SWITCH SRC AND DEST PER MODE
        if options.get('mode', 'push') == 'pull':
            (dest_host, src_host) = (src_host, dest_host)

        # CHECK DELEGATE HOST INFO
        use_delegate = False
        if conn.delegate != conn.host:
            if 'hostvars' in inject:
                if conn.delegate in inject['hostvars'] and self.original_transport != 'local':
                    # use a delegate host instead of localhost
                    use_delegate = True

        # COMPARE DELEGATE, HOST AND TRANSPORT
        process_args = False
        if not dest_host is src_host and self.original_transport != 'local':
            # interpret and inject remote host info into src or dest
            process_args = True

        # MUNGE SRC AND DEST PER REMOTE_HOST INFO
        if process_args or use_delegate:

            user = None
            if utils.boolean(options.get('set_remote_user', 'yes')):
                if use_delegate:
                    user = inject['hostvars'][conn.delegate].get('ansible_ssh_user')

                # fall back to the play's remote user when no delegate user found
                if not use_delegate or not user:
                    user = inject.get('ansible_ssh_user',
                                    self.runner.remote_user)

            if use_delegate:
                # FIXME
                # NOTE(review): both branches are currently identical; the
                # delegate branch presumably should consult the delegate's
                # hostvars for its key file -- confirm before changing
                private_key = inject.get('ansible_ssh_private_key_file', self.runner.private_key_file)
            else:
                private_key = inject.get('ansible_ssh_private_key_file', self.runner.private_key_file)

            private_key = template.template(self.runner.basedir, private_key, inject, fail_on_undefined=True)

            if not private_key is None:
                private_key = os.path.expanduser(private_key)
                options['private_key'] = private_key

            # use the mode to define src and dest's url
            if options.get('mode', 'push') == 'pull':
                # src is a remote path: <user>@<host>, dest is a local path
                src = self._process_remote(src_host, src, user)
                dest = self._process_origin(dest_host, dest, user)
            else:
                # src is a local path, dest is a remote path: <user>@<host>
                src = self._process_origin(src_host, src, user)
                dest = self._process_remote(dest_host, dest, user)

        options['src'] = src
        options['dest'] = dest
        # 'mode' is consumed here; the synchronize module does not take it
        if 'mode' in options:
            del options['mode']

        # Allow custom rsync path argument.
        rsync_path = options.get('rsync_path', None)

        # If no rsync_path is set, sudo was originally set, and dest is remote then add 'sudo rsync' argument.
        if not rsync_path and self.transport_overridden and self.original_sudo and not dest_is_local:
            rsync_path = 'sudo rsync'

        # make sure rsync path is quoted.
        if rsync_path:
            options['rsync_path'] = '"' + rsync_path + '"'

        module_args = ""
        if self.runner.noop_on_check(inject):
            module_args = "CHECKMODE=True"

        # run the module and store the result
        result = self.runner._execute_module(conn, tmp, 'synchronize', module_args, complex_args=options, inject=inject)

        # reset the sudo property
        self.runner.sudo = self.original_sudo

        return result
Ejemplo n.º 52
0
    def __init__(self, play, ds, module_vars=None, additional_conditions=None):
        ''' constructor loads from a task or handler datastructure '''

        # meta directives are used to tell things like ansible/playbook to run
        # operations like handler execution.  Meta tasks are not executed
        # normally.
        if 'meta' in ds:
            self.meta = ds['meta']
            self.tags = []
            return
        else:
            self.meta = None


        library = os.path.join(play.basedir, 'library')
        if os.path.exists(library):
            utils.plugins.module_finder.add_directory(library)

        for x in ds.keys():

            # code to allow for saying "modulename: args" versus "action: modulename args"
            if x in utils.plugins.module_finder:

                if 'action' in ds:
                    raise errors.AnsibleError("multiple actions specified in task %s" % (ds.get('name', ds['action'])))
                if isinstance(ds[x], dict):
                    if 'args' in ds:
                        raise errors.AnsibleError("can't combine args: and a dict for %s: in task %s" % (x, ds.get('name', "%s: %s" % (x, ds[x]))))
                    ds['args'] = ds[x]
                    ds[x] = ''
                elif ds[x] is None:
                    ds[x] = ''
                if not isinstance(ds[x], basestring):
                    raise errors.AnsibleError("action specified for task %s has invalid type %s" % (ds.get('name', "%s: %s" % (x, ds[x])), type(ds[x])))
                ds['action'] = x + " " + ds[x]
                ds.pop(x)

            # code to allow "with_glob" and to reference a lookup plugin named glob
            elif x.startswith("with_"):
                plugin_name = x.replace("with_","")
                if plugin_name in utils.plugins.lookup_loader:
                    ds['items_lookup_plugin'] = plugin_name
                    ds['items_lookup_terms'] = ds[x]
                    ds.pop(x)
                else:
                    raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))

            elif x in [ 'changed_when', 'when']:
                ds[x] = "jinja2_compare %s" % (ds[x])
            elif x.startswith("when_"):
                if 'when' in ds:
                    raise errors.AnsibleError("multiple when_* statements specified in task %s" % (ds.get('name', ds['action'])))
                when_name = x.replace("when_","")
                ds['when'] = "%s %s" % (when_name, ds[x])
                ds.pop(x)

            elif not x in Task.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter in an Ansible task or handler" % x)

        self.module_vars = module_vars
        self.play        = play

        # load various attributes
        self.name         = ds.get('name', None)
        self.tags         = [ 'all' ]
        self.register     = ds.get('register', None)
        self.sudo         = utils.boolean(ds.get('sudo', play.sudo))
        self.environment  = ds.get('environment', {})

        # rather than simple key=value args on the options line, these represent structured data and the values
        # can be hashes and lists, not just scalars
        self.args         = ds.get('args', {})

        if self.sudo:
            self.sudo_user    = ds.get('sudo_user', play.sudo_user)
            self.sudo_pass    = ds.get('sudo_pass', play.playbook.sudo_pass)
        else:
            self.sudo_user    = None
            self.sudo_pass    = None
        
        # Both are defined
        if ('action' in ds) and ('local_action' in ds):
            raise errors.AnsibleError("the 'action' and 'local_action' attributes can not be used together")
        # Both are NOT defined
        elif (not 'action' in ds) and (not 'local_action' in ds):
            raise errors.AnsibleError("'action' or 'local_action' attribute missing in task \"%s\"" % ds.get('name', '<Unnamed>'))
        # Only one of them is defined
        elif 'local_action' in ds:
            self.action      = ds.get('local_action', '')
            self.delegate_to = '127.0.0.1'
        else:
            self.action      = ds.get('action', '')
            self.delegate_to = ds.get('delegate_to', None)
            self.transport   = ds.get('connection', ds.get('transport', play.transport))

        if isinstance(self.action, dict):
            if 'module' not in self.action:
                raise errors.AnsibleError("'module' attribute missing from action in task \"%s\"" % ds.get('name', '%s' % self.action))
            if self.args:
                raise errors.AnsibleError("'args' cannot be combined with dict 'action' in task \"%s\"" % ds.get('name', '%s' % self.action))
            self.args = self.action
            self.action = self.args.pop('module')

        # delegate_to can use variables
        if not (self.delegate_to is None):
            # delegate_to: localhost should use local transport
            if self.delegate_to in ['127.0.0.1', 'localhost']:
                self.transport   = 'local'

        # notified by is used by Playbook code to flag which hosts
        # need to run a notifier
        self.notified_by = []

        # if no name is specified, use the action line as the name
        if self.name is None:
            self.name = self.action

        # load various attributes
        self.only_if = ds.get('only_if', 'True')
        self.when    = ds.get('when', None)
        self.changed_when = ds.get('changed_when', None)

        if self.changed_when is not None:
            self.changed_when = utils.compile_when_to_only_if(self.changed_when)

        self.async_seconds = int(ds.get('async', 0))  # not async by default
        self.async_poll_interval = int(ds.get('poll', 10))  # default poll = 10 seconds
        self.notify = ds.get('notify', [])
        self.first_available_file = ds.get('first_available_file', None)

        self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
        self.items_lookup_terms  = ds.get('items_lookup_terms', None)
     

        self.ignore_errors = ds.get('ignore_errors', False)
        self.any_errors_fatal = ds.get('any_errors_fatal', play.any_errors_fatal)

        self.always_run = ds.get('always_run', False)

        # action should be a string
        if not isinstance(self.action, basestring):
            raise errors.AnsibleError("action is of type '%s' and not a string in task. name: %s" % (type(self.action).__name__, self.name))

        # notify can be a string or a list, store as a list
        if isinstance(self.notify, basestring):
            self.notify = [ self.notify ]

        # split the action line into a module name + arguments
        tokens = self.action.split(None, 1)
        if len(tokens) < 1:
            raise errors.AnsibleError("invalid/missing action in task. name: %s" % self.name)
        self.module_name = tokens[0]
        self.module_args = ''
        if len(tokens) > 1:
            self.module_args = tokens[1]

        import_tags = self.module_vars.get('tags',[])
        if type(import_tags) in [str,unicode]:
            # allow the user to list comma delimited tags
            import_tags = import_tags.split(",")

        # handle mutually incompatible options
        incompatibles = [ x for x in [ self.first_available_file, self.items_lookup_plugin ] if x is not None ]
        if len(incompatibles) > 1:
            raise errors.AnsibleError("with_(plugin), and first_available_file are mutually incompatible in a single task")

        # make first_available_file accessable to Runner code
        if self.first_available_file:
            self.module_vars['first_available_file'] = self.first_available_file

        if self.items_lookup_plugin is not None:
            self.module_vars['items_lookup_plugin'] = self.items_lookup_plugin
            self.module_vars['items_lookup_terms'] = self.items_lookup_terms

        # allow runner to see delegate_to option
        self.module_vars['delegate_to'] = self.delegate_to

        # make some task attributes accessible to Runner code
        self.module_vars['ignore_errors'] = self.ignore_errors
        self.module_vars['register'] = self.register
        self.module_vars['changed_when'] = self.changed_when
        self.module_vars['always_run'] = self.always_run

        # tags allow certain parts of a playbook to be run without running the whole playbook
        apply_tags = ds.get('tags', None)
        if apply_tags is not None:
            if type(apply_tags) in [ str, unicode ]:
                self.tags.append(apply_tags)
            elif type(apply_tags) == list:
                self.tags.extend(apply_tags)
        self.tags.extend(import_tags)

        if self.when is not None:
            if self.only_if != 'True':
                raise errors.AnsibleError('when obsoletes only_if, only use one or the other')
            self.only_if = utils.compile_when_to_only_if(self.when)

        if additional_conditions:
            self.only_if = [ self.only_if ] 
            self.only_if.extend(additional_conditions)
Ejemplo n.º 53
0
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''

        # fold complex args and key=value module args into one options dict
        options = dict(complex_args) if complex_args else {}
        options.update(utils.parse_kv(module_args))

        cert = options.get('crt', None)
        do_copy = utils.boolean(options.get('copy', 'yes'))
        creates = options.get('creates', None)

        # crt= is mandatory for this module
        if cert is None:
            return ReturnData(conn=conn, result=dict(failed=True, msg="crt is required"))

        # honor creates=: skip the task entirely when the target file already exists
        if creates:
            stat_result = self.runner._execute_module(
                conn,
                tmp,
                'stat',
                "",
                complex_args=dict(path=creates, get_md5=False, get_checksum=False),
                inject=inject,
                persist_files=True
            )
            stat_info = stat_result.result.get('stat', None)
            if stat_info and stat_info.get('exists', False):
                skip_result = dict(
                    skipped=True,
                    changed=False,
                    msg=("skipped, since %s exists" % creates)
                )
                return ReturnData(conn=conn, comm_ok=True, result=skip_result)

        # expand ~ and resolve any variables in the certificate path
        cert = utils.template.template(self.runner.basedir, os.path.expanduser(cert), inject)

        if do_copy:
            # push the certificate up to the remote temp directory
            local_path = utils.path_dwim(self.runner.basedir, cert)
            remote_path = tmp + os.path.basename(cert)
            conn.put_file(local_path, remote_path)

            # a non-root become user needs read access to the uploaded file
            if self.runner.become and self.runner.become_user != 'root':
                if not self.runner.noop_on_check(inject):
                    self.runner._remote_chmod(conn, 'a+r', remote_path, tmp)

            extra_args = dict(crt=remote_path)
            if self.runner.noop_on_check(inject):
                extra_args['CHECKMODE'] = True

            module_args = utils.merge_module_args(module_args, extra_args)
        else:
            if self.runner.noop_on_check(inject):
                module_args += " CHECKMODE=True"

        # hand off to the keystore module on the remote host
        return self.runner._execute_module(conn, tmp, 'keystore', module_args, complex_args=complex_args, inject=inject)
Ejemplo n.º 54
0
    def __init__(self, playbook, ds, basedir):
        ''' constructor loads from a play datastructure '''

        # reject any key that is not a recognized play-level keyword
        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter in an Ansible Playbook" % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars             = ds.get('vars', {})
        self.vars_prompt      = ds.get('vars_prompt', {})
        self.playbook         = playbook
        # _get_vars() merges the raw play vars with command-line overrides
        self.vars             = self._get_vars()
        self.basedir          = basedir
        self.roles            = ds.get('roles', None)
        self.tags             = ds.get('tags', None)

        # normalize tags to a list (a comma separated string is accepted)
        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [ str, unicode ]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have
        # been updated by the included roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        self._update_vars_files_for_host(None)

        # template everything to be efficient, but do not pre-mature template
        # tasks/handlers as they may have inventory scope overrides
        _tasks    = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])
        ds = template(basedir, ds, self.vars)
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            # multiple host patterns are joined with the ';' pattern separator
            hosts = ';'.join(hosts)
        self.serial           = int(ds.get('serial', 0))
        self.hosts            = hosts
        self.name             = ds.get('name', self.hosts)
        self._tasks           = ds.get('tasks', [])
        self._handlers        = ds.get('handlers', [])
        self.remote_user      = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
        self.remote_port      = ds.get('port', self.playbook.remote_port)
        self.sudo             = ds.get('sudo', self.playbook.sudo)
        self.sudo_user        = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport        = ds.get('connection', self.playbook.transport)
        self.gather_facts     = ds.get('gather_facts', None)
        # (removed a redundant no-op: self.remote_port = self.remote_port)
        self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
        self.accelerate       = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port  = ds.get('accelerate_port', None)
        self.accelerate_ipv6  = ds.get('accelerate_ipv6', False)
        self.max_fail_pct     = int(ds.get('max_fail_percentage', 100))

        # extra variables made visible while loading tasks and handlers
        load_vars = {}
        load_vars['playbook_dir'] = self.basedir
        if self.playbook.inventory.basedir() is not None:
            load_vars['inventory_dir'] = self.playbook.inventory.basedir()

        self._tasks      = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers   = self._load_tasks(self._ds.get('handlers', []), load_vars)

        # a non-root sudo_user only makes sense with sudo enabled
        if self.sudo_user != 'root':
            self.sudo = True
Ejemplo n.º 55
0
    def run(self, conn, tmp, module_name, module_args,
        inject, complex_args=None, **kwargs):

        ''' generates params and passes them on to the rsync module '''

        self.inject = inject

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)
        use_ssh_args = options.pop('use_ssh_args', None)

        # src/dest/use_ssh_args may contain variables
        src = template.template(self.runner.basedir, src, inject)
        dest = template.template(self.runner.basedir, dest, inject)
        use_ssh_args = template.template(self.runner.basedir, use_ssh_args, inject)

        try:
            options['local_rsync_path'] = inject['ansible_rsync_path']
        except KeyError:
            pass

        # from the perspective of the rsync call the delegate is the localhost
        src_host = '127.0.0.1'
        dest_host = inject.get('ansible_ssh_host', inject['inventory_hostname'])

        # allow ansible_ssh_host to be templated
        dest_host = template.template(self.runner.basedir, dest_host, inject, fail_on_undefined=True)
        dest_is_local = dest_host in ['127.0.0.1', 'localhost']

        # CHECK FOR NON-DEFAULT SSH PORT
        # (inventory_hostname doubles as an "unset" marker for the port here)
        dest_port = options.get('dest_port')
        inv_port = inject.get('ansible_ssh_port', inject['inventory_hostname'])
        if inv_port != dest_port and inv_port != inject['inventory_hostname']:
            options['dest_port'] = inv_port

        # edge case: explicit delegate and dest_host are the same
        if dest_host == inject['delegate_to']:
            dest_host = '127.0.0.1'

        # SWITCH SRC AND DEST PER MODE
        if options.get('mode', 'push') == 'pull':
            (dest_host, src_host) = (src_host, dest_host)

        # CHECK DELEGATE HOST INFO
        use_delegate = False
        if conn.delegate != conn.host:
            if 'hostvars' in inject:
                if conn.delegate in inject['hostvars'] and self.original_transport != 'local':
                    # use a delegate host instead of localhost
                    use_delegate = True

        # COMPARE DELEGATE, HOST AND TRANSPORT
        process_args = False
        # NOTE: compare by value, not identity -- two equal host strings may
        # be distinct objects, so 'is' gave false positives here
        if dest_host != src_host and self.original_transport != 'local':
            # interpret and inject remote host info into src or dest
            process_args = True

        # MUNGE SRC AND DEST PER REMOTE_HOST INFO
        if process_args or use_delegate:

            user = None
            if utils.boolean(options.get('set_remote_user', 'yes')):
                if use_delegate:
                    user = inject['hostvars'][conn.delegate].get('ansible_ssh_user')

                if not use_delegate or not user:
                    user = inject.get('ansible_ssh_user',
                                    self.runner.remote_user)

            # FIXME: when delegating, the delegate host's own private key
            # should be looked up from its hostvars; both branches previously
            # resolved the exact same value, so the branch is collapsed here
            private_key = inject.get('ansible_ssh_private_key_file', self.runner.private_key_file)

            private_key = template.template(self.runner.basedir, private_key, inject, fail_on_undefined=True)

            if private_key is not None:
                private_key = os.path.expanduser(private_key)
                options['private_key'] = private_key

            # use the mode to define src and dest's url
            if options.get('mode', 'push') == 'pull':
                # src is a remote path: <user>@<host>, dest is a local path
                src = self._process_remote(src_host, src, user)
                dest = self._process_origin(dest_host, dest, user)
            else:
                # src is a local path, dest is a remote path: <user>@<host>
                src = self._process_origin(src_host, src, user)
                dest = self._process_remote(dest_host, dest, user)

        options['src'] = src
        options['dest'] = dest
        if 'mode' in options:
            del options['mode']
        if use_ssh_args:
            options['ssh_args'] = constants.ANSIBLE_SSH_ARGS

        # Allow custom rsync path argument.
        rsync_path = options.get('rsync_path', None)

        # If no rsync_path is set, sudo was originally set, and dest is remote then add 'sudo rsync' argument.
        if not rsync_path and self.transport_overridden and self.original_become and not dest_is_local and self.runner.become_method == 'sudo':
            rsync_path = 'sudo rsync'

        # make sure rsync path is quoted.
        if rsync_path:
            options['rsync_path'] = '"' + rsync_path + '"'

        module_args = ""
        if self.runner.noop_on_check(inject):
            module_args = "CHECKMODE=True"

        # run the module and store the result
        result = self.runner._execute_module(conn, tmp, 'synchronize', module_args, complex_args=options, inject=inject)

        # reset the sudo property
        self.runner.become = self.original_become

        return result
Ejemplo n.º 56
0
    def __init__(self, playbook, ds, basedir):
        ''' constructor loads from a play datastructure '''

        # reject any key that is not a recognized play-level keyword
        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter in an Ansible Playbook" % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars             = ds.get('vars', {})
        self.vars_prompt      = ds.get('vars_prompt', {})
        self.playbook         = playbook
        # _get_vars() merges the raw play vars with command-line overrides
        self.vars             = self._get_vars()
        self.basedir          = basedir
        self.roles            = ds.get('roles', None)
        self.tags             = ds.get('tags', None)

        # normalize tags to a list (a comma separated string is accepted)
        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [ str, unicode ]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have
        # been updated by the included roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        self._update_vars_files_for_host(None)

        # template everything to be efficient, but do not pre-mature template
        # tasks/handlers as they may have inventory scope overrides
        _tasks    = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])
        ds = template(basedir, ds, self.vars)
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            # multiple host patterns are joined with the ';' pattern separator
            hosts = ';'.join(hosts)
        self.serial           = int(ds.get('serial', 0))
        self.hosts            = hosts
        self.name             = ds.get('name', self.hosts)
        self._tasks           = ds.get('tasks', [])
        self._handlers        = ds.get('handlers', [])
        self.remote_user      = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
        self.remote_port      = ds.get('port', self.playbook.remote_port)
        self.sudo             = ds.get('sudo', self.playbook.sudo)
        self.sudo_user        = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport        = ds.get('connection', self.playbook.transport)
        self.gather_facts     = ds.get('gather_facts', None)
        # (removed a redundant no-op: self.remote_port = self.remote_port)
        self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
        self.accelerate       = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port  = ds.get('accelerate_port', None)
        self.accelerate_ipv6  = ds.get('accelerate_ipv6', False)
        self.max_fail_pct     = int(ds.get('max_fail_percentage', 100))

        # extra variables made visible while loading tasks and handlers
        load_vars = {}
        load_vars['playbook_dir'] = self.basedir
        if self.playbook.inventory.basedir() is not None:
            load_vars['inventory_dir'] = self.playbook.inventory.basedir()

        self._tasks      = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers   = self._load_tasks(self._ds.get('handlers', []), load_vars)

        # a non-root sudo_user only makes sense with sudo enabled
        if self.sudo_user != 'root':
            self.sudo = True
Ejemplo n.º 57
0
    def _build_role_dependencies(self, roles, dep_stack, passed_vars=None, level=0):
        '''
        recursively resolve each role's dependencies (declared in its
        meta/main file) and append [role, path, vars, defaults] entries
        onto dep_stack, dependencies first
        '''
        # avoid a mutable default argument: a shared dict default would leak
        # state across calls
        if passed_vars is None:
            passed_vars = {}
        # this number is arbitrary, but it seems sane
        if level > 20:
            raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
        for role in roles:
            role_path,role_vars = self._get_role_path(role)
            role_vars = utils.combine_vars(passed_vars, role_vars)
            # vars/main.* supplies the role's own variables
            vars_path = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
            vars_data = {}
            if os.path.isfile(vars_path):
                vars_data = utils.parse_yaml_from_file(vars_path)
                if vars_data:
                    role_vars = utils.combine_vars(vars_data, role_vars)
            # defaults/main.* supplies the lowest-priority default variables
            defaults_path = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
            defaults_data = {}
            if os.path.isfile(defaults_path):
                defaults_data = utils.parse_yaml_from_file(defaults_path)
            # the meta directory contains the yaml that should
            # hold the list of dependencies (if any)
            meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
            if os.path.isfile(meta):
                data = utils.parse_yaml_from_file(meta)
                if data:
                    dependencies = data.get('dependencies',[])
                    for dep in dependencies:
                        allow_dupes = False
                        (dep_path,dep_vars) = self._get_role_path(dep)
                        meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                        if os.path.isfile(meta):
                            meta_data = utils.parse_yaml_from_file(meta)
                            if meta_data:
                                allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))

                        # skip dependencies already pulled in, unless the
                        # dependency explicitly allows duplicates
                        if not allow_dupes:
                            if dep in self.included_roles:
                                continue
                            else:
                                self.included_roles.append(dep)

                        dep_vars = utils.combine_vars(passed_vars, dep_vars)
                        dep_vars = utils.combine_vars(role_vars, dep_vars)
                        vars_path = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                        vars_data = {}
                        if os.path.isfile(vars_path):
                            vars_data = utils.parse_yaml_from_file(vars_path)
                            if vars_data:
                                dep_vars = utils.combine_vars(vars_data, dep_vars)
                        defaults_path = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                        dep_defaults_data = {}
                        if os.path.isfile(defaults_path):
                            dep_defaults_data = utils.parse_yaml_from_file(defaults_path)
                        if 'role' in dep_vars:
                            del dep_vars['role']
                        # recurse so a dependency's own dependencies are
                        # stacked before it
                        self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
                        dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])
            # only add the current role when we're at the top level,
            # otherwise we'll end up in a recursive loop
            if level == 0:
                self.included_roles.append(role)
                dep_stack.append([role,role_path,role_vars,defaults_data])
        return dep_stack