    def matchtask(self, file, task):
        if task["action"]["module"] in self._commands:
            executable = os.path.basename(task["action"]["module_arguments"][0])
            if executable in self._arguments and \
                    boolean(task['action'].get('warn', True)) and \
                    boolean(task.get('args', {}).get('warn', True)):
                message = "{0} used in place of argument {1} to file module"
                return message.format(executable, self._arguments[executable])
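
Every snippet on this page funnels loosely typed task arguments and variables through Ansible's boolean() helper. As a point of reference, here is a rough standalone sketch of that behaviour; it is an illustrative re-implementation, not the library function, and the exact set of accepted truthy spellings is an assumption.

# Illustrative stand-in for Ansible's boolean() helper; not the real implementation.
def boolean(value):
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('y', 'yes', 'on', '1', 'true', 't')

# boolean('no') -> False, boolean('Yes') -> True, boolean(True) -> True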
Example #2
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''

        if options.connection:
            self.connection = options.connection

        if hasattr(options, 'connection_args') and options.connection_args:
            self.connection_args = options.connection_args

        self.remote_user = options.remote_user
        self.private_key_file = options.private_key_file
        self.ssh_common_args = options.ssh_common_args
        self.sftp_extra_args = options.sftp_extra_args
        self.scp_extra_args = options.scp_extra_args
        self.ssh_extra_args = options.ssh_extra_args

        # privilege escalation
        self.become        = options.become
        self.become_method = options.become_method
        self.become_user   = options.become_user

        # general flags (should we move out?)
        if options.verbosity:
            self.verbosity  = options.verbosity
        if options.check:
            self.check_mode = boolean(options.check)
        if hasattr(options, 'force_handlers') and options.force_handlers:
            self.force_handlers = boolean(options.force_handlers)
        if hasattr(options, 'step') and options.step:
            self.step = boolean(options.step)
        if hasattr(options, 'start_at_task') and options.start_at_task:
            self.start_at_task = to_unicode(options.start_at_task)
        if hasattr(options, 'diff') and options.diff:
            self.diff = boolean(options.diff)
        if hasattr(options, 'timeout') and options.timeout:
            self.timeout = int(options.timeout)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, string_types):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, string_types):
                self.skip_tags.update(options.skip_tags.split(','))
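
The tag handling at the end of set_options (a comma-separated string or a list, collapsed into a set that defaults to 'all') is easy to factor out. A minimal sketch of just that logic, using hypothetical names:

# Hypothetical standalone version of the tag normalization shown above.
def normalize_tags(raw):
    if isinstance(raw, list):
        return set(raw)
    if isinstance(raw, str):
        return set(raw.split(','))
    return set()

only_tags = normalize_tags('setup,deploy') or {'all'}  # empty input falls back to {'all'}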
Example #3
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''

        if options.connection:
            self.connection = options.connection

        self.remote_user = options.remote_user
        self.private_key_file = options.private_key_file
        self.ssh_common_args = options.ssh_common_args
        self.sftp_extra_args = options.sftp_extra_args
        self.scp_extra_args = options.scp_extra_args
        self.ssh_extra_args = options.ssh_extra_args

        # privilege escalation
        self.become        = options.become
        self.become_method = options.become_method
        self.become_user   = options.become_user

        # general flags (should we move out?)
        if options.verbosity:
            self.verbosity  = options.verbosity
        if options.check:
            self.check_mode = boolean(options.check)
        if hasattr(options, 'force_handlers') and options.force_handlers:
            self.force_handlers = boolean(options.force_handlers)
        if hasattr(options, 'step') and options.step:
            self.step = boolean(options.step)
        if hasattr(options, 'start_at_task') and options.start_at_task:
            self.start_at_task = to_unicode(options.start_at_task)
        if hasattr(options, 'diff') and options.diff:
            self.diff = boolean(options.diff)
        if hasattr(options, 'timeout') and options.timeout:
            self.timeout = int(options.timeout)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, string_types):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, string_types):
                self.skip_tags.update(options.skip_tags.split(','))
Example #4
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        self._display.vvv('ActionModule run')

        result = super(ActionModule, self).run(tmp, task_vars)

        _template = self._task.args.get('template', None)
        _host = self._task.args.get('host', None)
        _login = self._task.args.get('login', None)
        _enable = boolean(self._task.args.get('enable', 'yes'))
        _bash = boolean(self._task.args.get('bash', 'no'))
        _su = boolean(self._task.args.get('su', 'no'))
        _root = boolean(self._task.args.get('root', 'no'))
        _reboot = boolean(self._task.args.get('reboot', 'no'))
        _timeout = self._task.args.get('timeout', None)

        if (type(_login) == unicode):
            _login = ast.literal_eval(_login)

        login = {'user': [], 'enable': _login['enable']}
        for passwd in reversed(_login['passwd']):
            login['user'].append((_login['user'], passwd))

        if _timeout is None:
            _timeout = 30

        if _template is not None:
            if self._task._role is not None:
                _template = self._loader.path_dwim_relative(
                    self._task._role._role_path, 'templates', _template)
            else:
                _template = self._loader.path_dwim_relative(
                    self._loader.get_basedir(), 'templates', _template)

            f = open(_template, 'r')
            template_data = to_unicode(f.read())
            f.close()

            _template = self._templar.template(template_data)

        self._display.vvv(self._connection.transport)
        result['stdout'] = self._connection.exec_command(template=_template,
                                                         host=_host,
                                                         login=login,
                                                         enable=_enable,
                                                         bash=_bash,
                                                         su=_su,
                                                         root=_root,
                                                         reboot=_reboot,
                                                         timeout=_timeout)

        return result
Example #5
    def get_next_task_for_host(self, host, peek=False):

        display.debug("getting the next task for host %s" % host.name)
        s = self.get_host_state(host)

        task = None
        if s.run_state == self.ITERATING_COMPLETE:
            display.debug("host %s is done iterating, returning" % host.name)
            return (None, None)
        elif s.run_state == self.ITERATING_SETUP:
            s.run_state = self.ITERATING_TASKS
            s.pending_setup = True

            # Gather facts if the default is 'smart' and we have not yet
            # done it for this host; or if 'explicit' and the play sets
            # gather_facts to True; or if 'implicit' and the play does
            # NOT explicitly set gather_facts to False.

            gathering = C.DEFAULT_GATHERING
            implied = self._play.gather_facts is None or boolean(
                self._play.gather_facts)

            if (gathering == 'implicit' and implied) or \
               (gathering == 'explicit' and boolean(self._play.gather_facts)) or \
               (gathering == 'smart' and implied and not host._gathered_facts):
                if not peek:
                    # mark the host as having gathered facts
                    host.set_gathered_facts(True)

                task = Task()
                task.action = 'setup'
                task.args = {}
                task.set_loader(self._play._loader)
            else:
                s.pending_setup = False

        if not task:
            (s, task) = self._get_next_task_from_state(s, peek=peek)

        if task and task._role:
            # if we had a current role, mark that role as completed
            if s.cur_role and task._role != s.cur_role and host.name in s.cur_role._had_task_run and not peek:
                s.cur_role._completed[host.name] = True
            s.cur_role = task._role

        if not peek:
            self._host_states[host.name] = s

        display.debug("done getting next task for host %s" % host.name)
        display.debug(" ^ task is: %s" % task)
        display.debug(" ^ state is: %s" % s)
        return (s, task)
Example #6
    def get_next_task_for_host(self, host, peek=False):

        display.debug("getting the next task for host %s" % host.name)
        s = self.get_host_state(host)

        task = None
        if s.run_state == self.ITERATING_COMPLETE:
            display.debug("host %s is done iterating, returning" % host.name)
            return (None, None)
        elif s.run_state == self.ITERATING_SETUP:
            s.run_state = self.ITERATING_TASKS
            s.pending_setup = True

            # Gather facts if the default is 'smart' and we have not yet
            # done it for this host; or if 'explicit' and the play sets
            # gather_facts to True; or if 'implicit' and the play does
            # NOT explicitly set gather_facts to False.

            gathering = C.DEFAULT_GATHERING
            implied = self._play.gather_facts is None or boolean(self._play.gather_facts)

            if (gathering == 'implicit' and implied) or \
               (gathering == 'explicit' and boolean(self._play.gather_facts)) or \
               (gathering == 'smart' and implied and not host._gathered_facts):
                if not peek:
                    # mark the host as having gathered facts
                    host.set_gathered_facts(True)

                task = Task()
                task.action = 'setup'
                task.args   = {}
                task.set_loader(self._play._loader)
            else:
                s.pending_setup = False

        if not task:
            (s, task) = self._get_next_task_from_state(s, peek=peek)

        if task and task._role:
            # if we had a current role, mark that role as completed
            if s.cur_role and task._role != s.cur_role and host.name in s.cur_role._had_task_run and not peek:
                s.cur_role._completed[host.name] = True
            s.cur_role = task._role

        if not peek:
            self._host_states[host.name] = s

        display.debug("done getting next task for host %s" % host.name)
        display.debug(" ^ task is: %s" % task)
        display.debug(" ^ state is: %s" % s)
        return (s, task)
Example #7
    def run(self, tmp=None, task_vars=dict()):
        ''' handler for file transfer operations '''

        source = self._task.args.get('src', None)
        content = self._task.args.get('content', None)
        dest = self._task.args.get('dest', None)
        raw = boolean(self._task.args.get('raw', 'no'))
        force = boolean(self._task.args.get('force', 'yes'))

        # content with newlines is going to be escaped to safely load in yaml
        # now we need to unescape it so that the newlines are evaluated properly
        # when writing the file to disk
        if content:
            if isinstance(content, unicode):
                try:
                    content = content.decode('unicode-escape')
                except UnicodeDecodeError:
                    pass

        # FIXME: first available file needs to be reworked somehow...
        #if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
        #    result=dict(failed=True, msg="src (or content) and dest are required")
        #    return ReturnData(conn=conn, result=result)
        #elif (source is not None or 'first_available_file' in inject) and content is not None:
        #    result=dict(failed=True, msg="src and content are mutually exclusive")
        #    return ReturnData(conn=conn, result=result)

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if type(content) is dict:
                    content_tempfile = self._create_content_tempfile(
                        json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception, err:
                return dict(failed=True,
                            msg="could not write content temp file: %s" % err)
Example #8
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        higher precedence than those set on the play or host.
        '''

        # FIXME: set other values from options here?

        self.verbosity = options.verbosity
        if options.connection:
            self.connection = options.connection

        if options.check:
            self.check_mode = boolean(options.check)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, basestring):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, basestring):
                self.skip_tags.update(options.skip_tags.split(','))
Example #9
    def set_host_overrides(self, host):
        self._host = host

        host_vars = combine_vars(host.get_group_vars(), host.get_vars())
        self._nspawn_cmd = host_vars.get('nspawn_command', 'systemd-nspawn')
        self._nspawn_args = shlex.split(host_vars.get('nspawn_extra_args', ''))
        self._nspawn_sudo = boolean(host_vars.get('nspawn_sudo', 'false'))
Example #11
    def matchtask(self, file, task):
        if task["action"]["module"] in self._commands and task["action"]["module_arguments"]:
            executable = os.path.basename(task["action"]["module_arguments"][0])
            if executable in self._modules and \
                    boolean(task['action'].get('warn', True)):
                message = "{0} used in place of {1} module"
                return message.format(executable, self._modules[executable])
Example #12
    def get_next_task_for_host(self, host, peek=False):

        s = self.get_host_state(host)

        task = None
        if s.run_state == self.ITERATING_COMPLETE:
            return None
        elif s.run_state == self.ITERATING_SETUP:
            s.run_state = self.ITERATING_TASKS
            s.pending_setup = True
            if self._play.gather_facts == 'smart' and not host._gathered_facts or boolean(self._play.gather_facts):
                if not peek:
                    # mark the host as having gathered facts
                    host.set_gathered_facts(True)

                task = Task()
                task.action = 'setup'
                task.args   = {}
                task.set_loader(self._play._loader)
            else:
                s.pending_setup = False

        if not task:
            (s, task) = self._get_next_task_from_state(s, peek=peek)

        if task and task._role:
            # if we had a current role, mark that role as completed
            if s.cur_role and task._role != s.cur_role and s.cur_role._had_task_run and not peek:
                s.cur_role._completed = True
            s.cur_role = task._role

        if not peek:
            self._host_states[host.name] = s

        return (s, task)
Example #14
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        higher precedence than those set on the play or host.
        '''

        # FIXME: set other values from options here?

        self.verbosity = options.verbosity
        if options.connection:
            self.connection = options.connection

        if options.check:
            self.check_mode = boolean(options.check)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, basestring):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, basestring):
                self.skip_tags.update(options.skip_tags.split(','))
Example #15
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if 'msg' in self._task.args:
            if 'fail' in self._task.args and boolean(self._task.args['fail']):
                result['failed'] = True
                result['msg'] = self._task.args['msg']
            else:
                result['msg'] = self._task.args['msg']
        # FIXME: move the LOOKUP_REGEX somewhere else
        elif 'var' in self._task.args:  # and not utils.LOOKUP_REGEX.search(self._task.args['var']):
            results = self._templar.template(self._task.args['var'],
                                             convert_bare=True)
            if results == self._task.args['var']:
                results = "VARIABLE IS NOT DEFINED!"
            result[self._task.args['var']] = results
        else:
            result['msg'] = 'here we are'

        # force flag to make debug output module always verbose
        result['_ansible_verbose_always'] = True

        return result
Example #16
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        facts = dict()
        if self._task.args:
            for (k, v) in iteritems(self._task.args):
                k = self._templar.template(k)

                if not isidentifier(k):
                    result['failed'] = True
                    result['msg'] = "The variable name '%s' is not valid. Variables must start with a letter or underscore character, and contain only letters, numbers and underscores." % k
                    return result

                if isinstance(v, basestring) and v.lower() in ('true', 'false',
                                                               'yes', 'no'):
                    v = boolean(v)
                facts[k] = v

        result['changed'] = False
        result['ansible_facts'] = facts
        return result
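
The loop above only coerces values that are spelled like booleans ('true', 'false', 'yes', 'no') and leaves everything else untouched. A quick standalone illustration of that selective conversion (the helper name is a placeholder):

# Placeholder illustration of the selective string-to-bool coercion above.
def normalize_fact(value):
    if isinstance(value, str) and value.lower() in ('true', 'false', 'yes', 'no'):
        return value.lower() in ('true', 'yes')
    return value

# normalize_fact('no') -> False, normalize_fact('8080') -> '8080'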
Example #17
    def run(self, tmp=None, task_vars=dict()):

        src = self._task.args.get("src", None)
        dest = self._task.args.get("dest", None)
        remote_src = boolean(self._task.args.get("remote_src", "no"))

        if src is None:
            return dict(failed=True, msg="src is required")
        elif remote_src:
            # everything is remote, so we just execute the module
            # without changing any of the module arguments
            return self._execute_module(task_vars=task_vars)

        if self._task._role is not None:
            src = self._loader.path_dwim_relative(self._task._role._role_path, "files", src)
        else:
            src = self._loader.path_dwim_relative(self._loader.get_basedir(), "files", src)

        # create the remote tmp dir if needed, and put the source file there
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path()

        tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
        self._connection.put_file(src, tmp_src)

        if self._play_context.become and self._play_context.become_user != "root":
            if not self._play_context.check_mode:
                self._remote_chmod("a+r", tmp_src, tmp)

        new_module_args = self._task.args.copy()
        new_module_args.update(dict(src=tmp_src))

        return self._execute_module("patch", module_args=new_module_args, task_vars=task_vars)
Example #18
    def run(self, terms, variables, **kwargs):

        anydict = False
        skip = False

        for term in terms:
            if isinstance(term, dict):
                anydict = True

        total_search = []
        if anydict:
            for term in terms:
                if isinstance(term, dict):
                    files = term.get('files', [])
                    paths = term.get('paths', [])
                    skip  = boolean(term.get('skip', False))

                    filelist = files
                    if isinstance(files, string_types):
                        files = files.replace(',', ' ')
                        files = files.replace(';', ' ')
                        filelist = files.split(' ')

                    pathlist = paths
                    if paths:
                        if isinstance(paths, string_types):
                            paths = paths.replace(',', ' ')
                            paths = paths.replace(':', ' ')
                            paths = paths.replace(';', ' ')
                            pathlist = paths.split(' ')

                    if not pathlist:
                        total_search = filelist
                    else:
                        for path in pathlist:
                            for fn in filelist:
                                f = os.path.join(path, fn)
                                total_search.append(f)
                else:
                    total_search.append(term)
        else:
            total_search = self._flatten(terms)

        for fn in total_search:
            try:
                fn = self._templar.template(fn)
            except (AnsibleUndefinedVariable, UndefinedError):
                continue

            # get subdir if set by task executor, default to files otherwise
            subdir = getattr(self, '_subdir', 'files')
            path = None
            path = self.find_file_in_search_path(variables, subdir, fn, ignore_missing=True)
            if path is not None:
                return [path]
        else:
            if skip:
                return []
            else:
                raise AnsibleLookupError("No file was found when using with_first_found. Use the 'skip: true' option to allow this task to be skipped if no files are found")
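
Most of the lookup above is spent expanding the files/paths terms into a flat list of candidate paths before any templating or searching happens. A small hypothetical helper that mirrors that expansion (same separator handling and cross-join as the snippet):

import os

# Hypothetical helper mirroring the files/paths expansion in the lookup above.
def build_search_list(files, paths=None):
    if isinstance(files, str):
        files = files.replace(',', ' ').replace(';', ' ').split()
    if isinstance(paths, str):
        paths = paths.replace(',', ' ').replace(':', ' ').replace(';', ' ').split()
    if not paths:
        return list(files)
    return [os.path.join(path, fn) for path in paths for fn in files]

# build_search_list('a.yml,b.yml', 'conf:defaults')
# -> ['conf/a.yml', 'conf/b.yml', 'defaults/a.yml', 'defaults/b.yml']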
Example #19
    def run(self, tmp=None, task_vars=dict()):
        ''' handler for file transfer operations '''

        source  = self._task.args.get('src', None)
        content = self._task.args.get('content', None)
        dest    = self._task.args.get('dest', None)
        raw     = boolean(self._task.args.get('raw', 'no'))
        force   = boolean(self._task.args.get('force', 'yes'))

        # content with newlines is going to be escaped to safely load in yaml
        # now we need to unescape it so that the newlines are evaluated properly
        # when writing the file to disk
        if content:
            if isinstance(content, unicode):
                try:
                    content = content.decode('unicode-escape')
                except UnicodeDecodeError:
                    pass

        # FIXME: first available file needs to be reworked somehow...
        #if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
        #    result=dict(failed=True, msg="src (or content) and dest are required")
        #    return ReturnData(conn=conn, result=result)
        #elif (source is not None or 'first_available_file' in inject) and content is not None:
        #    result=dict(failed=True, msg="src and content are mutually exclusive")
        #    return ReturnData(conn=conn, result=result)

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if type(content) is dict:
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception, err:
                return dict(failed=True, msg="could not write content temp file: %s" % err)
Example #20
    def post_validate(self, templar):
        '''
        we can't tell that everything is of the right type until we have
        all the variables.  Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''

        basedir = None
        if self._loader is not None:
            basedir = self._loader.get_basedir()

        for (name, attribute) in iteritems(self._get_base_attributes()):

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError(
                        "the field '%s' is required but was not set" % name)

            try:
                # Run the post-validator if present. These methods are responsible for
                # using the given templar to template the values, if required.
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, getattr(self, name), templar)
                else:
                    # if the attribute contains a variable, template it now
                    value = templar.template(getattr(self, name))

                # and make sure the attribute is of the type it should be
                if value is not None:
                    if attribute.isa == 'string':
                        value = unicode(value)
                    elif attribute.isa == 'int':
                        value = int(value)
                    elif attribute.isa == 'bool':
                        value = boolean(value)
                    elif attribute.isa == 'list':
                        if not isinstance(value, list):
                            value = [value]
                    elif attribute.isa == 'dict' and not isinstance(
                            value, dict):
                        raise TypeError()

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)

            except (TypeError, ValueError) as e:
                raise AnsibleParserError(
                    "the field '%s' has an invalid value (%s), and could not be converted to an %s. Error was: %s"
                    % (name, value, attribute.isa, e),
                    obj=self.get_ds())
            except UndefinedError as e:
                if templar._fail_on_undefined_errors and name != 'name':
                    raise AnsibleParserError(
                        "the field '%s' has an invalid value, which appears to include a variable that is undefined. The error was: %s"
                        % (name, e),
                        obj=self.get_ds())
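
The try block above coerces each attribute according to its declared isa type before assigning it back. A compact standalone sketch of that mapping, with an assumed _to_bool helper standing in for boolean():

# Standalone sketch of the isa-driven coercion in post_validate above.
def _to_bool(value):  # assumption: stands in for Ansible's boolean()
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('y', 'yes', 'on', '1', 'true', 't')

def coerce(value, isa):
    if value is None:
        return None
    if isa == 'string':
        return str(value)
    if isa == 'int':
        return int(value)
    if isa == 'bool':
        return _to_bool(value)
    if isa == 'list':
        return value if isinstance(value, list) else [value]
    if isa == 'dict' and not isinstance(value, dict):
        raise TypeError("expected a dict for this field")
    return value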
Example #21
    def get_next_task_for_host(self, host, peek=False):

        s = self.get_host_state(host)

        task = None
        if s.run_state == self.ITERATING_COMPLETE:
            return None
        elif s.run_state == self.ITERATING_SETUP:
            s.run_state = self.ITERATING_TASKS
            s.pending_setup = True

            # Gather facts if the default is 'smart' and we have not yet
            # done it for this host; or if 'explicit' and the play sets
            # gather_facts to True; or if 'implicit' and the play does
            # NOT explicitly set gather_facts to False.

            gathering = C.DEFAULT_GATHERING
            if ((gathering == 'smart' and not host._gathered_facts) or
                (gathering == 'explicit' and boolean(self._play.gather_facts))
                    or (gathering == 'implicit' and
                        (self._play.gather_facts is None
                         or boolean(self._play.gather_facts)))):
                if not peek:
                    # mark the host as having gathered facts
                    host.set_gathered_facts(True)

                task = Task()
                task.action = 'setup'
                task.args = {}
                task.set_loader(self._play._loader)
            else:
                s.pending_setup = False

        if not task:
            (s, task) = self._get_next_task_from_state(s, peek=peek)

        if task and task._role:
            # if we had a current role, mark that role as completed
            if s.cur_role and task._role != s.cur_role and s.cur_role._had_task_run and not peek:
                s.cur_role._completed = True
            s.cur_role = task._role

        if not peek:
            self._host_states[host.name] = s

        return (s, task)
Example #22
    def _file_transport_command(self, in_path, out_path, sftp_action):
        # scp and sftp require square brackets for IPv6 addresses, but
        # accept them for hostnames and IPv4 addresses too.
        host = '[%s]' % self.host

        # since this can be a non-bool now, we need to handle it correctly
        scp_if_ssh = C.DEFAULT_SCP_IF_SSH
        if not isinstance(scp_if_ssh, bool):
            scp_if_ssh = scp_if_ssh.lower()
            if scp_if_ssh in BOOLEANS:
                scp_if_ssh = boolean(scp_if_ssh)
            elif scp_if_ssh != 'smart':
                raise AnsibleOptionsError(
                    'scp_if_ssh needs to be one of [smart|True|False]')

        # create a list of commands to use based on config options
        methods = ['sftp']
        if scp_if_ssh == 'smart':
            methods.append('scp')
        elif scp_if_ssh:
            methods = ['scp']

        success = False
        res = None
        for method in methods:
            if method == 'sftp':
                cmd = self._build_command('sftp', to_bytes(host))
                in_data = u"{0} {1} {2}\n".format(sftp_action,
                                                  pipes.quote(in_path),
                                                  pipes.quote(out_path))
            elif method == 'scp':
                cmd = self._build_command(
                    'scp', in_path, u'{0}:{1}'.format(host,
                                                      pipes.quote(out_path)))
                in_data = None

            in_data = to_bytes(in_data, nonstring='passthru')
            (returncode, stdout, stderr) = self._run(cmd,
                                                     in_data,
                                                     checkrc=False)
            # Check the return code and rollover to next method if failed
            if returncode == 0:
                success = True
                break
            else:
                # If not in smart mode, the data will be printed by the raise below
                if scp_if_ssh == 'smart':
                    display.warning(
                        msg='%s transfer mechanism failed on %s. Use ANSIBLE_DEBUG=1 to see detailed information'
                            % (method, host))
                    display.debug(msg='%s' % to_native(stdout))
                    display.debug(msg='%s' % to_native(stderr))
                res = (returncode, stdout, stderr)

        if not success:
            raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}"\
                    .format(to_native(out_path), to_native(res[1]), to_native(res[2])))
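
The scp_if_ssh setting above may be a real boolean or the string 'smart', and the method turns it into an ordered list of transfer mechanisms to try (sftp first with scp as the fallback in smart mode). A hypothetical standalone version of that decision; note the original raises AnsibleOptionsError for any other string, which this sketch skips:

# Hypothetical standalone version of the scp_if_ssh -> transfer methods decision above.
def transfer_methods(scp_if_ssh):
    if not isinstance(scp_if_ssh, bool):
        value = str(scp_if_ssh).strip().lower()
        if value == 'smart':
            return ['sftp', 'scp']  # try sftp first, roll over to scp on failure
        scp_if_ssh = value in ('y', 'yes', 'on', '1', 'true', 't')
    return ['scp'] if scp_if_ssh else ['sftp']

# transfer_methods('smart') -> ['sftp', 'scp']
# transfer_methods(True)    -> ['scp']
# transfer_methods(False)   -> ['sftp']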
Example #23
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''

        if task_vars is None:
            task_vars = dict()
        result = super(ActionModule, self).run(tmp, task_vars)

        crt = self._task.args.get('crt', None)
        copy = boolean(self._task.args.get('copy', 'yes'))
        creates = self._task.args.get('creates', None)

        # this module requires at least the crt= to be present
        if crt is None:
            result['failed'] = True
            result['msg'] = "crt is required"
            return result

        if not tmp:
            tmp = self._make_tmp_path()

        # skip if creates= is added to the module and the destination file already exists
        if creates:
            result = self._execute_module(module_name='stat',
                                          module_args=dict(path=creates),
                                          task_vars=task_vars)
            stat = result.get('stat', None)

            if stat and stat.get('exists', False):
                result['skipped'] = True
                result['msg'] = "skipped, since %s exists" % creates
                return result

        crt = os.path.expanduser(crt)

        # copy files
        if copy:
            source = self._loader.path_dwim_relative(
                self._loader.get_basedir(), 'files', crt)

            dest = tmp + os.path.basename(source)
            self._connection.put_file(source, dest)

            if self._play_context.become and self._play_context.become_user != 'root':
                if not self._play_context.check_mode:
                    self._remote_chmod('a+r', dest)

            new_module_args = self._task.args.copy()
            new_module_args.update(dict(crt=dest, ), )

        else:
            new_module_args = self._task.args.copy()

        # run keystore module
        result.update(
            self._execute_module(module_args=new_module_args,
                                 task_vars=task_vars))
        return result
Example #24
    def run(self, tmp=None, task_vars=dict()):
        facts = dict()
        if self._task.args:
            for (k, v) in self._task.args.iteritems():
                k = self._templar.template(k)
                if isinstance(v, basestring) and v.lower() in ("true", "false", "yes", "no"):
                    v = boolean(v)
                facts[k] = v
        return dict(changed=False, ansible_facts=facts)
Example #25
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''

        if task_vars is None:
            task_vars = dict()
        result = super(ActionModule, self).run(tmp, task_vars)

        crt = self._task.args.get('crt', None)
        copy = boolean(self._task.args.get('copy', 'yes'))
        creates = self._task.args.get('creates', None)

        # this module requires at least the crt= to be present
        if crt is None:
            result['failed'] = True
            result['msg'] = "crt is required"
            return result

        if not tmp:
            tmp = self._make_tmp_path()

        # skip if creates= is added to the module and the destination file already exists
        if creates:
            result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars)
            stat = result.get('stat', None)

            if stat and stat.get('exists', False):
                result['skipped'] = True
                result['msg'] = "skipped, since %s exists" % creates
                return result

        crt = os.path.expanduser(crt)

        # copy files
        if copy:
            source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', crt)

            dest = tmp + os.path.basename(source)
            self._connection.put_file(source, dest)

            if self._play_context.become and self._play_context.become_user != 'root':
                if not self._play_context.check_mode:
                    self._remote_chmod('a+r', dest)


            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    crt=dest,
                ),
            )

        else:
            new_module_args = self._task.args.copy()

        # run keystore module
        result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
        return result
Example #26
    def run(self, tmp=None, task_vars=dict()):
        facts = dict()
        if self._task.args:
            for (k, v) in self._task.args.iteritems():
                k = self._templar.template(k)
                if isinstance(v, basestring) and v.lower() in ('true', 'false', 'yes', 'no'):
                    v = boolean(v)
                facts[k] = v
        return dict(changed=False, ansible_facts=facts)
Example #27
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''

        if options.connection:
            self.connection = options.connection

        # privilege escalation
        self.become = options.become
        self.become_method = options.become_method
        self.become_user = options.become_user

        self.check_mode = boolean(options.check)

        # get ssh options FIXME: make these common to all connections
        for flag in [
                'ssh_common_args', 'docker_extra_args', 'sftp_extra_args',
                'scp_extra_args', 'ssh_extra_args'
        ]:
            setattr(self, flag, getattr(options, flag, ''))

        # general flags (should we move out?)
        for flag in [
                'connection', 'remote_user', 'private_key_file', 'verbosity',
                'force_handlers', 'step', 'start_at_task', 'diff'
        ]:
            attribute = getattr(options, flag, False)
            if attribute:
                setattr(self, flag, attribute)

        if hasattr(options, 'timeout') and options.timeout:
            self.timeout = int(options.timeout)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, string_types):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, string_types):
                self.skip_tags.update(options.skip_tags.split(','))
Example #28
    def run(self, tmp=None, task_vars=dict()):
        facts = dict()
        if self._task.args:
            for (k, v) in self._task.args.iteritems():
                k = self._templar.template(k)

                if not isidentifier(k):
                    return dict(failed=True, msg="The variable name '%s' is not valid. Variables must start with a letter or underscore character, and contain only letters, numbers and underscores." % k)

                if isinstance(v, basestring) and v.lower() in ('true', 'false', 'yes', 'no'):
                    v = boolean(v)
                facts[k] = v
        return dict(changed=False, ansible_facts=facts)
Example #29
    def run(self, tmp=None, task_vars=dict()):
        facts = dict()
        if self._task.args:
            for (k, v) in iteritems(self._task.args):
                k = self._templar.template(k)

                if not isidentifier(k):
                    return dict(failed=True, msg="The variable name '%s' is not valid. Variables must start with a letter or underscore character, and contain only letters, numbers and underscores." % k)

                if isinstance(v, basestring) and v.lower() in ('true', 'false', 'yes', 'no'):
                    v = boolean(v)
                facts[k] = v
        return dict(changed=False, ansible_facts=facts)
Example #30
    def post_validate(self, all_vars=dict(), fail_on_undefined=True):
        '''
        we can't tell that everything is of the right type until we have
        all the variables.  Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''

        basedir = None
        if self._loader is not None:
            basedir = self._loader.get_basedir()

        templar = Templar(loader=self._loader, variables=all_vars, fail_on_undefined=fail_on_undefined)

        for (name, attribute) in iteritems(self._get_base_attributes()):

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError("the field '%s' is required but was not set" % name)

            try:
                # if the attribute contains a variable, template it now
                value = templar.template(getattr(self, name))
                
                # run the post-validator if present
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, value, all_vars, fail_on_undefined)
                else:
                    # otherwise, just make sure the attribute is of the type it should be
                    if attribute.isa == 'string':
                        value = unicode(value)
                    elif attribute.isa == 'int':
                        value = int(value)
                    elif attribute.isa == 'bool':
                        value = boolean(value)
                    elif attribute.isa == 'list':
                        if not isinstance(value, list):
                            value = [ value ]
                    elif attribute.isa == 'dict' and not isinstance(value, dict):
                        raise TypeError()

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)

            except (TypeError, ValueError), e:
                raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s. Error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
            except UndefinedError, e:
                if fail_on_undefined:
                    raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined. The error was: %s" % (name,e), obj=self.get_ds())
Example #31
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        higher precedence than those set on the play or host.
        '''

        if options.connection:
            self.connection = options.connection

        self.remote_user = options.remote_user
        #if 'port' in options and options.port is not None:
        #    self.port        = options.port
        self.private_key_file = None

        # privilege escalation
        self.become        = options.become
        self.become_method = options.become_method
        self.become_user   = options.become_user
        self.become_pass   = ''

        # general flags (should we move out?)
        if options.verbosity:
            self.verbosity  = options.verbosity
        #if options.no_log:
        #    self.no_log     = boolean(options.no_log)
        if options.check:
            self.check_mode = boolean(options.check)



        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, basestring):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, basestring):
                self.skip_tags.update(options.skip_tags.split(','))
Example #32
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        src        = self._task.args.get('src', None)
        remote_src = boolean(self._task.args.get('remote_src', 'no'))
        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user

        if src is None:
            result['failed'] = True
            result['msg'] = "src is required"
            return result
        elif remote_src:
            # everything is remote, so we just execute the module
            # without changing any of the module arguments
            result.update(self._execute_module(task_vars=task_vars))
            return result

        try:
            src = self._find_needle('files', src)
        except AnsibleError as e:
            result['failed'] = True
            result['msg'] = to_native(e)
            return result

        # create the remote tmp dir if needed, and put the source file there
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
        self._transfer_file(src, tmp_src)

        self._fixup_perms2((tmp, tmp_src), remote_user)

        new_module_args = self._task.args.copy()
        new_module_args.update(
            dict(
                src=tmp_src,
            )
        )

        result.update(self._execute_module('patch', module_args=new_module_args, task_vars=task_vars))
        self._remove_tmp_path(tmp)
        return result
Example #33
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        src = self._task.args.get('src', None)
        remote_src = boolean(self._task.args.get('remote_src', 'no'))

        if src is None:
            result['failed'] = True
            result['msg'] = "src is required"
            return result
        elif remote_src:
            # everything is remote, so we just execute the module
            # without changing any of the module arguments
            result.update(self._execute_module(task_vars=task_vars))
            return result

        if self._task._role is not None:
            src = self._loader.path_dwim_relative(self._task._role._role_path,
                                                  'files', src)
        else:
            src = self._loader.path_dwim_relative(self._loader.get_basedir(),
                                                  'files', src)

        # create the remote tmp dir if needed, and put the source file there
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path()

        tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
        self._connection.put_file(src, tmp_src)

        if self._play_context.become and self._play_context.become_user != 'root':
            if not self._play_context.check_mode:
                self._remote_chmod('a+r', tmp_src)

        new_module_args = self._task.args.copy()
        new_module_args.update(dict(src=tmp_src, ))

        result.update(
            self._execute_module('patch',
                                 module_args=new_module_args,
                                 task_vars=task_vars))
        return result
Example #34
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        higher precedence than those set on the play or host.
        '''

        if options.connection:
            self.connection = options.connection

        self.remote_user = options.remote_user
        self.private_key_file = options.private_key_file

        # privilege escalation
        self.become        = options.become
        self.become_method = options.become_method
        self.become_user   = options.become_user
        self.become_pass   = ''

        # general flags (should we move out?)
        if options.verbosity:
            self.verbosity  = options.verbosity
        #if options.no_log:
        #    self.no_log     = boolean(options.no_log)
        if options.check:
            self.check_mode = boolean(options.check)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, basestring):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, basestring):
                self.skip_tags.update(options.skip_tags.split(','))
Example #35
    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''

        # privilege escalation
        self.become        = options.become
        self.become_method = options.become_method
        self.become_user   = options.become_user

        self.check_mode = boolean(options.check)

        # get ssh options FIXME: make these common to all connections
        for flag in ['ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args']:
            setattr(self, flag, getattr(options,flag, ''))

        # general flags (should we move out?)
        for flag in ['connection','remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
            attribute = getattr(options, flag, False)
            if attribute:
                setattr(self, flag, attribute)

        if hasattr(options, 'timeout') and options.timeout:
            self.timeout = int(options.timeout)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, string_types):
                self.only_tags.update(options.tags.split(','))

        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, string_types):
                self.skip_tags.update(options.skip_tags.split(','))
Example #36
    def run(self, tmp=None, task_vars=dict()):

        if 'msg' in self._task.args:
            if 'fail' in self._task.args and boolean(self._task.args['fail']):
                result = dict(failed=True, msg=self._task.args['msg'])
            else:
                result = dict(msg=self._task.args['msg'])
        # FIXME: move the LOOKUP_REGEX somewhere else
        elif 'var' in self._task.args: # and not utils.LOOKUP_REGEX.search(self._task.args['var']):
            templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=task_vars)
            results = templar.template(self._task.args['var'], convert_bare=True)
            result = dict()
            result[self._task.args['var']] = results
        else:
            result = dict(msg='here we are')

        # force flag to make debug output module always verbose
        result['verbose_always'] = True

        return result
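
This handler (and the near-identical ones that follow) coerces the optional fail argument through the boolean() helper. A rough stand-in with the same spirit is sketched below; the exact set of strings Ansible's own helper accepts may differ, so treat this as an approximation:

def to_bool(value, default=False):
    """Loose boolean coercion for module arguments (approximation of boolean())."""
    if isinstance(value, bool):
        return value
    if value is None:
        return default
    text = str(value).strip().lower()
    if text in ('yes', 'on', 'true', '1'):
        return True
    if text in ('no', 'off', 'false', '0'):
        return False
    return default

# to_bool('yes') -> True, to_bool('no') -> False, to_bool(1) -> True
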
Example #37
    def run(self, tmp=None, task_vars=dict()):

        if 'msg' in self._task.args:
            if 'fail' in self._task.args and boolean(self._task.args['fail']):
                result = dict(failed=True, msg=self._task.args['msg'])
            else:
                result = dict(msg=self._task.args['msg'])
        # FIXME: move the LOOKUP_REGEX somewhere else
        elif 'var' in self._task.args:  # and not utils.LOOKUP_REGEX.search(self._task.args['var']):
            results = self._templar.template(self._task.args['var'],
                                             convert_bare=True)
            result = dict()
            result[self._task.args['var']] = results
        else:
            result = dict(msg='here we are')

        # force flag to make debug output module always verbose
        result['verbose_always'] = True

        return result
Example #38
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        self._display.vvv('ActionModule run')

        result = super(ActionModule, self).run(tmp, task_vars)

        _template = self._task.args.get('template', None)
        _host = self._task.args.get('host', None)
        _install = boolean(self._task.args.get('install', 'no'))
        _url = self._task.args.get('url', None)
        _timeout = self._task.args.get('timeout', None)
        _nretry = int(self._task.args.get('retry', 1))

        if _timeout is None:
            _timeout = 300

        if _template is not None:
            if self._task._role is not None:
                _template = self._loader.path_dwim_relative(
                    self._task._role._role_path, 'templates', _template)
            else:
                _template = self._loader.path_dwim_relative(
                    self._loader.get_basedir(), 'templates', _template)

            with open(_template, 'r') as f:
                template_data = to_unicode(f.read())

            _template = self._templar.template(template_data)

        self._display.vvv(self._connection.transport)
        result['stdout'] = self._connection.exec_command(template=_template,
                                                         host=_host,
                                                         url=_url,
                                                         install=_install,
                                                         timeout=_timeout,
                                                         retry=_nretry)

        return result
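
Example #38 resolves the template path relative to the role (or the playbook basedir), reads it, and renders it through the templar before passing it to the connection plugin. A simplified, self-contained version of the read-and-render step, using string.Template as a stand-in for the real Jinja2-based Templar (render_template_file is an illustrative name):

import io
from string import Template

def render_template_file(path, variables):
    """Read a template from disk and substitute variables into it."""
    with io.open(path, 'r', encoding='utf-8') as f:
        template_data = f.read()
    # string.Template expands $name placeholders; Ansible itself uses Jinja2.
    return Template(template_data).safe_substitute(variables)

# render_template_file('motd.tmpl', {'host': 'web01'})
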
Example #39
    def run(self, tmp=None, task_vars=dict()):

        if 'msg' in self._task.args:
            if 'fail' in self._task.args and boolean(self._task.args['fail']):
                result = dict(failed=True, msg=self._task.args['msg'])
            else:
                result = dict(msg=self._task.args['msg'])
        # FIXME: move the LOOKUP_REGEX somewhere else
        elif 'var' in self._task.args: # and not utils.LOOKUP_REGEX.search(self._task.args['var']):
            results = self._templar.template(self._task.args['var'], convert_bare=True)
            if results == self._task.args['var']:
                results = "VARIABLE IS NOT DEFINED!"
            result = dict()
            result[self._task.args['var']] = results
        else:
            result = dict(msg='here we are')

        # force flag to make debug output module always verbose
        result['_ansible_verbose_always'] = True

        return result
Example #40
    def run(self, tmp=None, task_vars=dict()):

        src        = self._task.args.get('src', None)
        dest       = self._task.args.get('dest', None)
        remote_src = boolean(self._task.args.get('remote_src', 'no'))

        if src is None:
            return dict(failed=True, msg="src is required")
        elif remote_src:
            # everything is remote, so we just execute the module
            # without changing any of the module arguments
            return self._execute_module()

        if self._task._role is not None:
            src = self._loader.path_dwim_relative(self._task._role._role_path, 'files', src)
        else:
            src = self._loader.path_dwim(src)

        # create the remote tmp dir if needed, and put the source file there
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path()

        tmp_src = self._shell.join_path(tmp, os.path.basename(src))
        self._connection.put_file(src, tmp_src)

        if self._connection_info.become and self._connection_info.become_user != 'root':
            # FIXME: noop stuff here
            #if not self.runner.noop_on_check(inject):
            #    self._remote_chmod('a+r', tmp_src, tmp)
            self._remote_chmod('a+r', tmp_src, tmp)

        new_module_args = self._task.args.copy()
        new_module_args.update(
            dict(
                src=tmp_src,
            )
        )

        return self._execute_module('patch', module_args=new_module_args)
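
The patch action follows a pattern that recurs throughout these plugins: copy the task's arguments, override src with the uploaded temporary path, and re-invoke the real module with the merged dict. The merge itself is plain dictionary work, sketched here (merge_module_args is an illustrative name):

def merge_module_args(task_args, **overrides):
    """Copy task args and apply overrides without mutating the original dict."""
    new_args = dict(task_args)   # shallow copy, like self._task.args.copy()
    new_args.update(overrides)
    return new_args

# merge_module_args({'src': 'fix.patch', 'basedir': '/srv'}, src='/tmp/xyz/fix.patch')
# -> {'src': '/tmp/xyz/fix.patch', 'basedir': '/srv'}
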
Example #41
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        facts = dict()
        if self._task.args:
            for (k, v) in iteritems(self._task.args):
                k = self._templar.template(k)

                if not isidentifier(k):
                    result['failed'] = True
                    result['msg'] = "The variable name '%s' is not valid. Variables must start with a letter or underscore character, and contain only letters, numbers and underscores." % k
                    return result

                if isinstance(v, string_types) and v.lower() in ('true', 'false', 'yes', 'no'):
                    v = boolean(v)
                facts[k] = v

        result['changed'] = False
        result['ansible_facts'] = facts
        return result
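
The set_fact handler checks every templated key as a legal variable name and coerces string booleans before publishing the facts. A compact sketch of that validation, using Python's built-in str.isidentifier() as a stand-in for Ansible's isidentifier() helper (build_facts is an illustrative name):

def build_facts(args):
    """Validate fact names and coerce 'true'/'false'/'yes'/'no' strings to booleans."""
    facts = {}
    for key, value in args.items():
        if not str(key).isidentifier():
            raise ValueError("The variable name '%s' is not valid. Variables must "
                             "start with a letter or underscore character, and "
                             "contain only letters, numbers and underscores." % key)
        if isinstance(value, str) and value.lower() in ('true', 'false', 'yes', 'no'):
            value = value.lower() in ('true', 'yes')
        facts[key] = value
    return facts

# build_facts({'app_port': '8080', 'enabled': 'yes'}) -> {'app_port': '8080', 'enabled': True}
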
Example #42
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if 'msg' in self._task.args:
            if 'fail' in self._task.args and boolean(self._task.args['fail']):
                result['failed'] = True
                result['msg'] = self._task.args['msg']
            else:
                result['msg'] = self._task.args['msg']
        # FIXME: move the LOOKUP_REGEX somewhere else
        elif 'var' in self._task.args:  # and not utils.LOOKUP_REGEX.search(self._task.args['var']):
            results = self._templar.template(self._task.args['var'],
                                             convert_bare=True)
            if type(self._task.args['var']) in (list, dict):
                # If var is a list or dict, use the type as key to display
                result[to_unicode(type(self._task.args['var']))] = results
            else:
                # If var name is same as result, try to template it
                if results == self._task.args['var']:
                    try:
                        results = self._templar.template(
                            "{{" + results + "}}",
                            convert_bare=True,
                            fail_on_undefined=True)
                    except:
                        results = "VARIABLE IS NOT DEFINED!"
                result[self._task.args['var']] = results
        else:
            result['msg'] = 'here we are'

        # force flag to make debug output module always verbose
        result['_ansible_verbose_always'] = True

        return result
Example #43
    def run(self, tmp=None, task_vars=None):
        ''' handler for unarchive operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        dest    = self._task.args.get('dest', None)
        copy    = boolean(self._task.args.get('copy', True))
        creates = self._task.args.get('creates', None)

        if source is None or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        if creates:
            # do not run the command if the line contains creates=filename
            # and the filename already exists. This allows idempotence
            # of command executions.
            result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars)
            stat = result.get('stat', None)
            if stat and stat.get('exists', False):
                result['skipped'] = True
                result['msg'] = "skipped, since %s exists" % creates
                self._remove_tmp_path(tmp)
                return result

        dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
        source = os.path.expanduser(source)

        if copy:
            if self._task._role is not None:
                source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
            else:
                source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

        remote_checksum = self._remote_checksum(dest, all_vars=task_vars, follow=True)
        if remote_checksum == '4':
            result['failed'] = True
            result['msg'] = "python isn't present on the system.  Unable to compute checksum"
            self._remove_tmp_path(tmp)
            return result
        elif remote_checksum != '3':
            result['failed'] = True
            result['msg'] = "dest '%s' must be an existing dir" % dest
            self._remove_tmp_path(tmp)
            return result

        if copy:
            # transfer the file to a remote tmp location
            tmp_src = self._connection._shell.join_path(tmp, 'source')
            self._transfer_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side

        if copy:
            # fix file permissions when the copy is done as a different user
            self._fixup_perms2((tmp, tmp_src), remote_user)
            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    original_basename=os.path.basename(source),
                ),
            )

        else:
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    original_basename=os.path.basename(source),
                ),
            )

        # execute the unarchive module now, with the updated args
        result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
        self._remove_tmp_path(tmp)
        return result
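
The creates= handling is what makes repeated runs of this unarchive task idempotent: if the named path already exists on the target, the whole operation is skipped. The decision, reduced to a local-filesystem sketch (the real code stats the path remotely via the stat module; should_skip_for_creates is an illustrative name):

import os

def should_skip_for_creates(creates):
    """Return (skipped, message) when the 'creates' path already exists."""
    if creates and os.path.exists(creates):
        return True, "skipped, since %s exists" % creates
    return False, None

# should_skip_for_creates('/opt/app/RELEASE')
# -> (True, 'skipped, since /opt/app/RELEASE exists') once that file is present
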
Example #44
    def run(self, tmp=None, task_vars=None):
        ''' handler for template operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source = self._task.args.get('src', None)
        dest   = self._task.args.get('dest', None)
        force  = boolean(self._task.args.get('force', True))
        state  = self._task.args.get('state', None)

        if state is not None:
            result['failed'] = True
            result['msg'] = "'state' cannot be specified on a template"
        elif source is None or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
        else:
            try:
                source = self._find_needle('templates', source)
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_native(e)

        if 'failed' in result:
            return result

        # Expand any user home dir specification
        dest = self._remote_expand_user(dest)

        directory_prepended = False
        if dest.endswith(os.sep):
            directory_prepended = True
            base = os.path.basename(source)
            dest = os.path.join(dest, base)

        # template the source data locally & get ready to transfer
        b_source = to_bytes(source)
        try:
            with open(b_source, 'r') as f:
                template_data = to_text(f.read())

            try:
                template_uid = pwd.getpwuid(os.stat(b_source).st_uid).pw_name
            except:
                template_uid = os.stat(b_source).st_uid

            temp_vars = task_vars.copy()
            temp_vars['template_host']     = os.uname()[1]
            temp_vars['template_path']     = source
            temp_vars['template_mtime']    = datetime.datetime.fromtimestamp(os.path.getmtime(b_source))
            temp_vars['template_uid']      = template_uid
            temp_vars['template_fullpath'] = os.path.abspath(source)
            temp_vars['template_run_date'] = datetime.datetime.now()

            managed_default = C.DEFAULT_MANAGED_STR
            managed_str = managed_default.format(
                host = temp_vars['template_host'],
                uid  = temp_vars['template_uid'],
                file = to_bytes(temp_vars['template_path'])
            )
            temp_vars['ansible_managed'] = time.strftime(
                managed_str,
                time.localtime(os.path.getmtime(b_source))
            )

            # Create a new searchpath list to assign to the templar environment's file
            # loader, so that it knows about the other paths to find template files
            searchpath = [self._loader._basedir, os.path.dirname(source)]
            if self._task._role is not None:
                if C.DEFAULT_ROLES_PATH:
                    searchpath[:0] = C.DEFAULT_ROLES_PATH
                searchpath.insert(1, self._task._role._role_path)

            self._templar.environment.loader.searchpath = searchpath

            old_vars = self._templar._available_variables
            self._templar.set_available_variables(temp_vars)
            resultant = self._templar.template(template_data, preserve_trailing_newlines=True, escape_backslashes=False, convert_data=False)
            self._templar.set_available_variables(old_vars)
        except Exception as e:
            result['failed'] = True
            result['msg'] = type(e).__name__ + ": " + str(e)
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        local_checksum = checksum_s(resultant)
        remote_checksum = self.get_checksum(dest, task_vars, not directory_prepended, source=source, tmp=tmp)
        if isinstance(remote_checksum, dict):
            # Error from remote_checksum is a dict.  Valid return is a str
            result.update(remote_checksum)
            return result

        diff = {}
        new_module_args = self._task.args.copy()

        if (remote_checksum == '1') or (force and local_checksum != remote_checksum):

            result['changed'] = True
            # if showing diffs, we need to get the remote value
            if self._play_context.diff:
                diff = self._get_diff_data(dest, resultant, task_vars, source_file=False)

            if not self._play_context.check_mode:  # do actual work through copy
                xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant)

                # fix file permissions when the copy is done as a different user
                self._fixup_perms2((tmp, xfered), remote_user)

                # run the copy module
                new_module_args.update(
                   dict(
                       src=xfered,
                       dest=dest,
                       original_basename=os.path.basename(source),
                       follow=True,
                   ),
                )
                result.update(self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))

            if result.get('changed', False) and self._play_context.diff:
                result['diff'] = diff

        else:
            # when running the file module based on the template data, we do
            # not want the source filename (the name of the template) to be used,
            # since this would mess up links, so we clear the src param and tell
            # the module to follow links.  When doing that, we have to set
            # original_basename to the template just in case the dest is
            # a directory.
            new_module_args.update(
                dict(
                    src=None,
                    original_basename=os.path.basename(source),
                    follow=True,
                ),
            )
            result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))

        self._remove_tmp_path(tmp)

        return result
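
One detail worth noting in the template action is how the Jinja2 search path is assembled: the playbook basedir and the template's own directory come first, any configured roles path is prepended, and the current role's path is inserted at index 1, preserving the ordering used above. A standalone sketch of just that list construction (build_searchpath is an illustrative name):

import os

def build_searchpath(basedir, source, roles_path=None, role_path=None):
    """Mirror the search-path assembly used before rendering the template."""
    searchpath = [basedir, os.path.dirname(source)]
    if role_path is not None:
        if roles_path:
            searchpath[:0] = list(roles_path)   # prepend configured roles paths
        searchpath.insert(1, role_path)         # slot the role's own path in at index 1
    return searchpath

# build_searchpath('/srv/play', 'templates/nginx.conf.j2',
#                  roles_path=['/etc/ansible/roles'], role_path='/srv/play/roles/web')
# -> ['/etc/ansible/roles', '/srv/play/roles/web', '/srv/play', 'templates']
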
Example #45
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not (yet) supported for this module'
            return result

        source            = self._task.args.get('src', None)
        dest              = self._task.args.get('dest', None)
        flat              = boolean(self._task.args.get('flat'))
        fail_on_missing   = boolean(self._task.args.get('fail_on_missing'))
        validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5')))

        if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
            result['failed'] = True
            result['msg'] = "validate_checksum and validate_md5 cannot both be specified"
            return result

        if source is None or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
            return result

        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source)

        remote_checksum = None
        if not self._play_context.become:
            # calculate checksum for the remote file, don't bother if using become as slurp will be used
            remote_checksum = self._remote_checksum(source, all_vars=task_vars)

        # use slurp if permissions are lacking or privilege escalation is needed
        remote_data = None
        if remote_checksum in ('1', '2', None):
            slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
            if slurpres.get('failed'):
                if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
                    result['msg'] = "the remote file does not exist, not transferring, ignored"
                    result['file'] = source
                    result['changed'] = False
                else:
                    result.update(slurpres)
                return result
            else:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get('source')
                if remote_source and remote_source != source:
                    source = remote_source

        # calculate the destination name
        if os.path.sep not in self._connection._shell.join_path('a', ''):
            source = self._connection._shell._unquote(source)
            source_local = source.replace('\\', '/')
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if 'inventory_hostname' in task_vars:
                target_name = task_vars['inventory_hostname']
            else:
                target_name = self._play_context.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

        dest = dest.replace("//","/")

        if remote_checksum in ('0', '1', '2', '3', '4'):
            # these don't fail because you may want to transfer a log file that
            # possibly MAY exist but keep going to fetch other log files
            if remote_checksum == '0':
                result['msg'] = "unable to calculate the checksum of the remote file"
                result['file'] = source
                result['changed'] = False
            elif remote_checksum == '1':
                if fail_on_missing:
                    result['failed'] = True
                    result['msg'] = "the remote file does not exist"
                    result['file'] = source
                else:
                    result['msg'] = "the remote file does not exist, not transferring, ignored"
                    result['file'] = source
                    result['changed'] = False
            elif remote_checksum == '2':
                result['msg'] = "no read permission on remote file, not transferring, ignored"
                result['file'] = source
                result['changed'] = False
            elif remote_checksum == '3':
                result['msg'] = "remote file is a directory, fetch cannot work on directories"
                result['file'] = source
                result['changed'] = False
            elif remote_checksum == '4':
                result['msg'] = "python isn't present on the system.  Unable to compute checksum"
                result['file'] = source
                result['changed'] = False
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                try:
                    # remote_data holds raw bytes from the base64 decode, so write in binary mode
                    with open(to_bytes(dest, errors='strict'), 'wb') as f:
                        f.write(remote_data)
                except (IOError, OSError) as e:
                    raise AnsibleError("Failed to fetch the file: %s" % e)
            new_checksum = secure_hash(dest)
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                result.update(dict(failed=True, md5sum=new_md5,
                    msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
                    checksum=new_checksum, remote_checksum=remote_checksum))
            else:
                result.update(dict(changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum))
        else:
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None
            result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))

        return result
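
fetch derives the local destination in two ways: with flat=yes it honors a trailing slash and relative paths, otherwise it fans files out into dest/<hostname>/<remote path>. A controller-side-only sketch of that layout decision (build_fetch_dest is an illustrative name; the path_dwim handling for relative flat paths is omitted):

import os

def build_fetch_dest(dest, source_local, hostname, flat=False):
    """Compute where a fetched file lands on the controller."""
    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            # trailing slash: reuse the remote file's basename
            dest = os.path.join(dest, os.path.basename(source_local))
    else:
        # one subdirectory per host, then the remote path underneath it
        dest = "%s/%s/%s" % (dest, hostname, source_local)
    return dest.replace("//", "/")

# build_fetch_dest('backups/', '/var/log/syslog', 'web01')
# -> 'backups/web01/var/log/syslog'
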
Example #46
    def run(self, tmp=None, task_vars=None):
        ''' handler for unarchive operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        dest    = self._task.args.get('dest', None)
        remote_src = boolean(self._task.args.get('remote_src', False))
        creates = self._task.args.get('creates', None)

        # "copy" is deprecated in favor of "remote_src".
        if 'copy' in self._task.args:
            # They are mutually exclusive.
            if 'remote_src' in self._task.args:
                result['failed'] = True
                result['msg'] = "parameters are mutually exclusive: ('copy', 'remote_src')"
                return result
            # We will take the information from copy and store it in
            # the remote_src var to use later in this file.
            remote_src = not boolean(self._task.args.get('copy'))

        if source is None or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        if creates:
            # do not run the command if the line contains creates=filename
            # and the filename already exists. This allows idempotence
            # of command executions.
            result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars)
            stat = result.get('stat', None)
            if stat and stat.get('exists', False):
                result['skipped'] = True
                result['msg'] = "skipped, since %s exists" % creates
                self._remove_tmp_path(tmp)
                return result

        dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
        source = os.path.expanduser(source)

        if not remote_src:
            try:
                source = self._loader.get_real_file(self._find_needle('files', source))
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)
                self._remove_tmp_path(tmp)
                return result

        remote_checksum = self._remote_checksum(dest, all_vars=task_vars, follow=True)
        if remote_checksum == '4':
            result['failed'] = True
            result['msg'] = "python isn't present on the system.  Unable to compute checksum"
            self._remove_tmp_path(tmp)
            return result
        elif remote_checksum != '3':
            result['failed'] = True
            result['msg'] = "dest '%s' must be an existing dir" % dest
            self._remove_tmp_path(tmp)
            return result

        if not remote_src:
            # transfer the file to a remote tmp location
            tmp_src = self._connection._shell.join_path(tmp, 'source')
            self._transfer_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side

        if not remote_src:
            # fix file permissions when the copy is done as a different user
            self._fixup_perms((tmp, tmp_src), remote_user)
            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    original_basename=os.path.basename(source),
                ),
            )

        else:
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    original_basename=os.path.basename(source),
                ),
            )

        # execute the unarchive module now, with the updated args
        result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
        self._remove_tmp_path(tmp)
        return result
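
Example #46 also shows the deprecation shim for the old copy= parameter: it cannot be combined with remote_src=, and when present its inverse becomes remote_src. The rule in isolation (resolve_remote_src is an illustrative name; a toy truthiness check stands in for boolean()):

def _truthy(value):
    return str(value).strip().lower() in ('yes', 'true', '1', 'on')

def resolve_remote_src(args):
    """Map the deprecated 'copy' parameter onto 'remote_src'."""
    if 'copy' in args:
        if 'remote_src' in args:
            raise ValueError("parameters are mutually exclusive: ('copy', 'remote_src')")
        return not _truthy(args['copy'])   # copy=no means the source is already remote
    return _truthy(args.get('remote_src', False))

# resolve_remote_src({'copy': 'no'})        -> True
# resolve_remote_src({'remote_src': 'yes'}) -> True
# resolve_remote_src({})                    -> False
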
Example #47
    def run(self, tmp=None, task_vars=None):
        ''' generates params and passes them on to the rsync module '''
        # When modifying this function be aware of the tricky convolutions
        # your thoughts have to go through:
        #
        # In normal ansible, we connect from controller to inventory_hostname
        # (playbook's hosts: field) or controller to delegate_to host and run
        # a module on one of those hosts.
        #
        # So things that are directly related to the core of ansible are in
        # terms of that sort of connection that always originate on the
        # controller.
        #
        # In synchronize we use ansible to connect to either the controller or
        # to the delegate_to host and then run rsync which makes its own
        # connection from controller to inventory_hostname or delegate_to to
        # inventory_hostname.
        #
        # That means synchronize needs to have some knowledge of the
        # controller to inventory_host/delegate host that ansible typically
        # establishes and use those to construct a command line for rsync to
        # connect from the inventory_host to the controller/delegate.  The
        # challenge for coders is remembering which leg of the trip is
        # associated with the conditions that you're checking at any one time.
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        # self._connection accounts for delegate_to so
        # remote_transport is the transport ansible thought it would need
        # between the controller and the delegate_to host or the controller
        # and the remote_host if delegate_to isn't set.

        remote_transport = False
        if self._connection.transport != 'local':
            remote_transport = True

        try:
            delegate_to = self._task.delegate_to
        except (AttributeError, KeyError):
            delegate_to = None

        # ssh paramiko and local are fully supported transports.  Anything
        # else only works with delegate_to
        if delegate_to is None and self._connection.transport not in (
                'ssh', 'paramiko', 'local'):
            result['failed'] = True
            result['msg'] = "synchronize uses rsync to function. rsync needs to connect to the remote host via ssh or a direct filesystem copy. This remote host is being accessed via %s instead so it cannot work." % self._connection.transport
            return result

        use_ssh_args = self._task.args.pop('use_ssh_args', None)

        # Parameter name needed by the ansible module
        self._task.args['_local_rsync_path'] = task_vars.get(
            'ansible_rsync_path') or 'rsync'

        # rsync thinks that one end of the connection is localhost and the
        # other is the host we're running the task for  (Note: We use
        # ansible's delegate_to mechanism to determine which host rsync is
        # running on so localhost could be a non-controller machine if
        # delegate_to is used)
        src_host = '127.0.0.1'
        inventory_hostname = task_vars.get('inventory_hostname')
        dest_host_inventory_vars = task_vars['hostvars'].get(
            inventory_hostname)
        try:
            dest_host = dest_host_inventory_vars['ansible_host']
        except KeyError:
            dest_host = dest_host_inventory_vars.get('ansible_ssh_host',
                                                     inventory_hostname)

        localhost_ports = set()
        for host in C.LOCALHOST:
            localhost_vars = task_vars['hostvars'].get(host, {})
            for port_var in MAGIC_VARIABLE_MAPPING['port']:
                port = localhost_vars.get(port_var, None)
                if port:
                    break
            else:
                port = C.DEFAULT_REMOTE_PORT
            localhost_ports.add(port)

        # dest_is_local tells us if the host rsync runs on is the same as the
        # host rsync puts the files on.  This is about *rsync's connection*,
        # not about the ansible connection to run the module.
        dest_is_local = False
        if not delegate_to and remote_transport is False:
            dest_is_local = True
        elif delegate_to and delegate_to == dest_host:
            dest_is_local = True

        # CHECK FOR NON-DEFAULT SSH PORT
        inv_port = task_vars.get('ansible_ssh_port',
                                 None) or C.DEFAULT_REMOTE_PORT
        if self._task.args.get('dest_port', None) is None:
            if inv_port is not None:
                self._task.args['dest_port'] = inv_port

        # Set use_delegate if we are going to run rsync on a delegated host
        # instead of localhost
        use_delegate = False
        if dest_host == delegate_to:
            # edge case: explicit delegate and dest_host are the same
            # so we run rsync on the remote machine targeting its localhost
            # (itself)
            dest_host = '127.0.0.1'
            use_delegate = True
        elif delegate_to is not None and remote_transport:
            # If we're delegating to a remote host then we need to use the
            # delegate_to settings
            use_delegate = True

        # Delegate to localhost as the source of the rsync unless we've been
        # told (via delegate_to) that a different host is the source of the
        # rsync
        if not use_delegate and remote_transport:
            # Create a connection to localhost to run rsync on
            new_stdin = self._connection._new_stdin

            # Unlike port, there can be only one shell
            localhost_shell = None
            for host in C.LOCALHOST:
                localhost_vars = task_vars['hostvars'].get(host, {})
                for shell_var in MAGIC_VARIABLE_MAPPING['shell']:
                    localhost_shell = localhost_vars.get(shell_var, None)
                    if localhost_shell:
                        break
                if localhost_shell:
                    break
            else:
                localhost_shell = os.path.basename(C.DEFAULT_EXECUTABLE)
            self._play_context.shell = localhost_shell

            new_connection = connection_loader.get('local', self._play_context,
                                                   new_stdin)
            self._connection = new_connection
            self._override_module_replaced_vars(task_vars)

        # SWITCH SRC AND DEST HOST PER MODE
        if self._task.args.get('mode', 'push') == 'pull':
            (dest_host, src_host) = (src_host, dest_host)

        # MUNGE SRC AND DEST PER REMOTE_HOST INFO
        src = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        if src is None or dest is None:
            return dict(
                failed=True,
                msg="synchronize requires both src and dest parameters are set"
            )

        if not dest_is_local:
            # Private key handling
            private_key = self._play_context.private_key_file

            if private_key is not None:
                private_key = os.path.expanduser(private_key)
                self._task.args['private_key'] = private_key

            # Src and dest rsync "path" handling
            # Determine if we need a user@
            user = None
            if boolean(self._task.args.get('set_remote_user', 'yes')):
                if use_delegate:
                    user = task_vars.get('ansible_delegated_vars',
                                         dict()).get('ansible_ssh_user', None)
                    if not user:
                        user = C.DEFAULT_REMOTE_USER

                else:
                    user = task_vars.get(
                        'ansible_ssh_user') or self._play_context.remote_user

            # use the mode to define src and dest's url
            if self._task.args.get('mode', 'push') == 'pull':
                # src is a remote path: <user>@<host>, dest is a local path
                src = self._process_remote(src_host, src, user, inv_port
                                           in localhost_ports)
                dest = self._process_origin(dest_host, dest, user)
            else:
                # src is a local path, dest is a remote path: <user>@<host>
                src = self._process_origin(src_host, src, user)
                dest = self._process_remote(dest_host, dest, user, inv_port
                                            in localhost_ports)
        else:
            # Still need to munge paths (to account for roles) even if we aren't
            # copying files between hosts
            if not src.startswith('/'):
                src = self._get_absolute_path(path=src)
            if not dest.startswith('/'):
                dest = self._get_absolute_path(path=dest)

        self._task.args['src'] = src
        self._task.args['dest'] = dest

        # Allow custom rsync path argument
        rsync_path = self._task.args.get('rsync_path', None)

        if not dest_is_local:
            if self._play_context.become and not rsync_path:
                # If no rsync_path is set, become was originally set, and dest is
                # remote then add privilege escalation here.
                if self._play_context.become_method == 'sudo':
                    rsync_path = 'sudo rsync'
                # TODO: have to add in the rest of the become methods here

            # We cannot use privilege escalation on the machine running the
            # module.  Instead we run it on the machine rsync is connecting
            # to.
            self._play_context.become = False

        # make sure rsync path is quoted.
        if rsync_path:
            self._task.args['rsync_path'] = '"%s"' % rsync_path

        if use_ssh_args:
            ssh_args = [
                getattr(self._play_context, 'ssh_args', ''),
                getattr(self._play_context, 'ssh_common_args', ''),
                getattr(self._play_context, 'ssh_extra_args', ''),
            ]
            self._task.args['ssh_args'] = ' '.join([a for a in ssh_args if a])

        # run the module and store the result
        result.update(self._execute_module('synchronize', task_vars=task_vars))

        if 'SyntaxError' in result.get('exception', result.get('msg', '')):
            # Emit a warning about using python3 because synchronize is
            # somewhat unique in running on localhost
            result['exception'] = result['msg']
            result['msg'] = 'SyntaxError parsing module.  Perhaps invoking "python" on your local (or delegate_to) machine invokes python3.  You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this'
        return result
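
The heart of the synchronize path munging is simple: swap src_host and dest_host when mode=pull, then prefix whichever side is remote with user@host:. A reduced sketch of that decision (build_rsync_endpoints and the plain '<user>@<host>:<path>' formatting are illustrative; the real _process_remote/_process_origin helpers also handle ports and quoting):

def build_rsync_endpoints(src, dest, dest_host, user=None, mode='push', src_host='127.0.0.1'):
    """Return (src, dest) rsync endpoints for push or pull mode."""
    if mode == 'pull':
        src_host, dest_host = dest_host, src_host

    def remote(host, path):
        prefix = '%s@%s' % (user, host) if user else host
        return '%s:%s' % (prefix, path)

    if mode == 'pull':
        return remote(src_host, src), dest          # remote source -> local dest
    return src, remote(dest_host, dest)             # local source -> remote dest

# build_rsync_endpoints('site/', '/var/www', 'web01', user='deploy')
# -> ('site/', 'deploy@web01:/var/www')
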
Example #48
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        content = self._task.args.get('content', None)
        dest    = self._task.args.get('dest', None)
        raw     = boolean(self._task.args.get('raw', 'no'))
        force   = boolean(self._task.args.get('force', 'yes'))
        faf     = self._task.first_available_file
        remote_src = boolean(self._task.args.get('remote_src', False))
        follow  = boolean(self._task.args.get('follow', False))

        if (source is None and content is None and faf is None) or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result
        elif (source is not None or faf is not None) and content is not None:
            result['failed'] = True
            result['msg'] = "src and content are mutually exclusive"
            return result
        elif content is not None and dest is not None and dest.endswith("/"):
            result['failed'] = True
            result['msg'] = "dest must be a file if content is defined"
            return result

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if isinstance(content, dict) or isinstance(content, list):
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception as err:
                result['failed'] = True
                result['msg'] = "could not write content temp file: %s" % to_native(err)
                return result

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        elif faf:
            source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
        elif remote_src:
            result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
            return result
        else:  # find in expected paths
            try:
                source = self._find_needle('files', source)
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_text(e)
                return result

        # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
        source_files = []

        # If source is a directory populate our list else source is a file and translate it to a tuple.
        if os.path.isdir(to_bytes(source, errors='surrogate_or_strict')):
            # Get the amount of spaces to remove to get the relative path.
            if source_trailing_slash:
                sz = len(source)
            else:
                sz = len(source.rsplit('/', 1)[0]) + 1

            # Walk the directory and append the file tuples to source_files.
            for base_path, sub_folders, files in os.walk(to_bytes(source)):
                for file in files:
                    full_path = to_text(os.path.join(base_path, file), errors='surrogate_or_strict')
                    rel_path = full_path[sz:]
                    if rel_path.startswith('/'):
                        rel_path = rel_path[1:]
                    source_files.append((full_path, rel_path))

                # recurse into subdirs
                for sf in sub_folders:
                    source_files += self._get_recursive_files(os.path.join(source, to_text(sf)), sz=sz)

            # If it's recursive copy, destination is always a dir,
            # explicitly mark it so (note - copy module relies on this).
            if not self._connection._shell.path_has_trailing_slash(dest):
                dest = self._connection._shell.join_path(dest, '')
        else:
            source_files.append((source, os.path.basename(source)))

        changed = False
        module_return = dict(changed=False)

        # A register for if we executed a module.
        # Used to cut down on command calls when not recursive.
        module_executed = False

        # Tell _execute_module to delete the file if there is one file.
        delete_remote_tmp = (len(source_files) == 1)

        # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not delete_remote_tmp:
            if tmp is None or "-tmp-" not in tmp:
                tmp = self._make_tmp_path(remote_user)
                self._cleanup_remote_tmp = True

        # expand any user home dir specifier
        dest = self._remote_expand_user(dest)

        diffs = []
        for source_full, source_rel in source_files:

            source_full = self._loader.get_real_file(source_full)

            # Generate a hash of the local file.
            local_checksum = checksum(source_full)

            # If local_checksum is not defined we can't find the file so we should fail out.
            if local_checksum is None:
                result['failed'] = True
                result['msg'] = "could not find src=%s" % source_full
                self._remove_tmp_path(tmp)
                return result

            # This is kind of optimization - if user told us destination is
            # dir, do path manipulation right away, otherwise we still check
            # for dest being a dir via remote call below.
            if self._connection._shell.path_has_trailing_slash(dest):
                dest_file = self._connection._shell.join_path(dest, source_rel)
            else:
                dest_file = self._connection._shell.join_path(dest)

            # Attempt to get remote file info
            dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp)

            if dest_status['exists'] and dest_status['isdir']:
                # The dest is a directory.
                if content is not None:
                    # If source was defined as content remove the temporary file and fail out.
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    self._remove_tmp_path(tmp)
                    result['failed'] = True
                    result['msg'] = "can not use content with a dir as dest"
                    return result
                else:
                    # Append the relative source location to the destination and get remote stats again
                    dest_file = self._connection._shell.join_path(dest, source_rel)
                    dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp)

            if dest_status['exists'] and not force:
                # remote_file does not exist so continue to next iteration.
                continue

            if local_checksum != dest_status['checksum']:
                # The checksums don't match and we will change or error out.
                changed = True

                # Create a tmp path if missing only if this is not recursive.
                # If this is recursive we already have a tmp path.
                if delete_remote_tmp:
                    if tmp is None or "-tmp-" not in tmp:
                        tmp = self._make_tmp_path(remote_user)
                        self._cleanup_remote_tmp = True

                if self._play_context.diff and not raw:
                    diffs.append(self._get_diff_data(dest_file, source_full, task_vars))

                if self._play_context.check_mode:
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    changed = True
                    module_return = dict(changed=True)
                    continue

                # Define a remote directory that we will copy the file to.
                tmp_src = self._connection._shell.join_path(tmp, 'source')

                remote_path = None

                if not raw:
                    remote_path = self._transfer_file(source_full, tmp_src)
                else:
                    self._transfer_file(source_full, dest_file)

                # We have copied the file remotely and no longer require our content_tempfile
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                self._loader.cleanup_tmp_file(source_full)

                # fix file permissions when the copy is done as a different user
                if remote_path:
                    self._fixup_perms2((tmp, remote_path), remote_user)

                if raw:
                    # Continue to next iteration if raw is defined.
                    continue

                # Run the copy module

                # src and dest here come after original and override them
                # we pass dest only to make sure it includes trailing slash in case of recursive copy
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=tmp_src,
                        dest=dest,
                        original_basename=source_rel,
                    )
                )
                if 'content' in new_module_args:
                    del new_module_args['content']

                module_return = self._execute_module(module_name='copy',
                        module_args=new_module_args, task_vars=task_vars,
                        tmp=tmp, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            else:
                # no need to transfer the file, already correct hash, but still need to call
                # the file module in case we want to change attributes
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                self._loader.cleanup_tmp_file(source_full)

                if raw:
                    # Continue to next iteration if raw is defined.
                    self._remove_tmp_path(tmp)
                    continue

                # Build temporary module_args.
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=source_rel,
                        dest=dest,
                        original_basename=source_rel
                    )
                )

                # Execute the file module.
                module_return = self._execute_module(module_name='file',
                        module_args=new_module_args, task_vars=task_vars,
                        tmp=tmp, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            if not module_return.get('checksum'):
                module_return['checksum'] = local_checksum
            if module_return.get('failed'):
                result.update(module_return)
                if not delete_remote_tmp:
                    self._remove_tmp_path(tmp)
                return result
            if module_return.get('changed'):
                changed = True

            # the file module returns the file path as 'path', but
            # the copy module uses 'dest', so add it if it's not there
            if 'path' in module_return and 'dest' not in module_return:
                module_return['dest'] = module_return['path']

        # Delete tmp path if we were recursive or if we did not execute a module.
        if not delete_remote_tmp or (delete_remote_tmp and not module_executed):
            self._remove_tmp_path(tmp)

        if module_executed and len(source_files) == 1:
            result.update(module_return)
        else:
            result.update(dict(dest=dest, src=source, changed=changed))

        if diffs:
            result['diff'] = diffs

        return result
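
For recursive copies, the plugin walks the source tree and pairs each file's full path with a path relative to the source root; the sz prefix length is what makes 'dir/' and 'dir' behave differently (a trailing slash copies the directory's contents, no slash recreates the directory itself under dest). A standalone sketch of that walk (list_source_files is an illustrative name):

import os

def list_source_files(source):
    """Yield (full_path, relative_path) pairs the way the copy action builds them."""
    trailing_slash = source.endswith('/')
    # strip either the whole source path or everything up to its last component
    sz = len(source) if trailing_slash else len(source.rsplit('/', 1)[0]) + 1
    for base_path, _sub_dirs, files in os.walk(source):
        for name in files:
            full_path = os.path.join(base_path, name)
            yield full_path, full_path[sz:].lstrip('/')

# list_source_files('/srv/conf/') yields ('/srv/conf/app.ini', 'app.ini'), ...
# list_source_files('/srv/conf')  yields ('/srv/conf/app.ini', 'conf/app.ini'), ...
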
    def run(self, tmp=None, task_vars=dict()):
        ''' generates params and passes them on to the rsync module '''

        original_transport = task_vars.get('ansible_connection') or self._play_context.connection
        remote_transport = False
        if original_transport != 'local':
            remote_transport = True

        try:
            delegate_to = self._task.delegate_to
        except (AttributeError, KeyError):
            delegate_to = None

        use_ssh_args = self._task.args.pop('use_ssh_args', None)

        # Parameter name needed by the ansible module
        self._task.args['_local_rsync_path'] = task_vars.get('ansible_rsync_path') or 'rsync'

        # rsync thinks that one end of the connection is localhost and the
        # other is the host we're running the task for  (Note: We use
        # ansible's delegate_to mechanism to determine which host rsync is
        # running on so localhost could be a non-controller machine if
        # delegate_to is used)
        src_host  = '127.0.0.1'
        dest_host = task_vars.get('ansible_ssh_host') or task_vars.get('inventory_hostname')

        dest_is_local = dest_host in C.LOCALHOST

        # CHECK FOR NON-DEFAULT SSH PORT
        if self._task.args.get('dest_port', None) is None:
            inv_port = task_vars.get('ansible_ssh_port', None) or C.DEFAULT_REMOTE_PORT
            if inv_port is not None:
                self._task.args['dest_port'] = inv_port

        # Set use_delegate if we are going to run rsync on a delegated host
        # instead of localhost
        use_delegate = False
        if dest_host == delegate_to:
            # edge case: explicit delegate and dest_host are the same
            # so we run rsync on the remote machine targeting its localhost
            # (itself)
            dest_host = '127.0.0.1'
            use_delegate = True
        elif delegate_to is not None and remote_transport:
            # If we're delegating to a remote host then we need to use the
            # delegate_to settings
            use_delegate = True

        # Delegate to localhost as the source of the rsync unless we've been
        # told (via delegate_to) that a different host is the source of the
        # rsync
        transport_overridden = False
        if not use_delegate and remote_transport:
            # Create a connection to localhost to run rsync on
            new_stdin = self._connection._new_stdin
            new_connection = connection_loader.get('local', self._play_context, new_stdin)
            self._connection = new_connection
            transport_overridden = True
            self._override_module_replaced_vars(task_vars)

        # COMPARE DELEGATE, HOST AND TRANSPORT
        between_multiple_hosts = False
        if dest_host != src_host and remote_transport:
            # We're not copying two filesystem trees on the same host so we
            # need to correctly format the paths for rsync (like
            # user@host:path/to/tree
            between_multiple_hosts = True

        # SWITCH SRC AND DEST HOST PER MODE
        if self._task.args.get('mode', 'push') == 'pull':
            (dest_host, src_host) = (src_host, dest_host)

        # MUNGE SRC AND DEST PER REMOTE_HOST INFO
        src = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        if between_multiple_hosts or use_delegate:
            # Private key handling
            private_key = task_vars.get('ansible_ssh_private_key_file') or self._play_context.private_key_file

            if private_key is not None:
                private_key = os.path.expanduser(private_key)
                self._task.args['private_key'] = private_key

            # Src and dest rsync "path" handling
            # Determine if we need a user@
            user = None
            if boolean(self._task.args.get('set_remote_user', 'yes')):
                if use_delegate:
                    user = task_vars.get('ansible_delegated_vars', dict()).get('ansible_ssh_user', None)
                    if not user:
                        user = C.DEFAULT_REMOTE_USER

                else:
                    user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user

            # use the mode to define src and dest's url
            if self._task.args.get('mode', 'push') == 'pull':
                # src is a remote path: <user>@<host>, dest is a local path
                src = self._process_remote(src_host, src, user)
                dest = self._process_origin(dest_host, dest, user)
            else:
                # src is a local path, dest is a remote path: <user>@<host>
                src = self._process_origin(src_host, src, user)
                dest = self._process_remote(dest_host, dest, user)
        else:
            # Still need to munge paths (to account for roles) even if we aren't
            # copying files between hosts
            if not src.startswith('/'):
                src = self._get_absolute_path(path=src)
            if not dest.startswith('/'):
                dest = self._get_absolute_path(path=dest)

        self._task.args['src'] = src
        self._task.args['dest'] = dest

        # Allow custom rsync path argument
        rsync_path = self._task.args.get('rsync_path', None)

        # If no rsync_path is set, sudo was originally set, and dest is remote then add 'sudo rsync' argument
        if not rsync_path and transport_overridden and self._play_context.become and self._play_context.become_method == 'sudo' and not dest_is_local:
            rsync_path = 'sudo rsync'

        # make sure rsync path is quoted.
        if rsync_path:
            self._task.args['rsync_path'] = '"%s"' % rsync_path

        if use_ssh_args:
            self._task.args['ssh_args'] = C.ANSIBLE_SSH_ARGS

        # run the module and store the result
        result = self._execute_module('synchronize', task_vars=task_vars)

        if 'SyntaxError' in result.get('msg', ''):
            # Emit a warning about using python3 because synchronize is
            # somewhat unique in running on localhost
            result['traceback'] = result['msg']
            result['msg'] = 'SyntaxError parsing module.  Perhaps invoking "python" on your local (or delegate_to) machine invokes python3.  You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this'
        return result
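
The two helpers used above, _process_origin() and _process_remote(), are not shown in this example. As a rough sketch of what that src/dest munging amounts to, the standalone function below builds an rsync-style path spec from the pieces the action plugin computes; the function name, signature and quoting rules are assumptions for illustration, not the plugin's actual code.

# Minimal sketch (assumed names) of turning host/path/user into an rsync path spec.
def format_rsync_path(host, path, user=None, localhost_names=('127.0.0.1', 'localhost', '::1')):
    """Return 'path' for the local end and '[user@]host:path' for a remote end."""
    if host in localhost_names:
        # rsync treats a spec without a colon as a plain local filesystem path
        return path
    if user:
        return '%s@%s:%s' % (user, host, path)
    return '%s:%s' % (host, path)

# e.g. format_rsync_path('10.0.0.5', '/var/www', user='deploy') -> 'deploy@10.0.0.5:/var/www'
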
Example #50
    def post_validate(self, templar):
        '''
        We can't tell whether everything is of the right type until we have
        all the variables, so run the basic type conversions (from isa) as
        well as any _post_validate_<foo> functions.
        '''

        # save the omit value for later checking
        omit_value = templar._available_variables.get('omit')

        for (name, attribute) in iteritems(self._valid_attrs):

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError("the field '%s' is required but was not set" % name)
            elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'Handler', 'PlayContext'):
                # Intermediate objects like Play() won't have their fields validated by
                # default, as their values are often inherited by other objects and validated
                # later, so we don't want them to fail out early
                continue

            try:
                # Run the post-validator if present. These methods are responsible for
                # using the given templar to template the values, if required.
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, getattr(self, name), templar)
                elif attribute.isa == 'class':
                    value = getattr(self, name)
                else:
                    # if the attribute contains a variable, template it now
                    value = templar.template(getattr(self, name))

                # if this evaluated to the omit value, set the value back to
                # the default specified in the FieldAttribute and move on
                if omit_value is not None and value == omit_value:
                    setattr(self, name, attribute.default)
                    continue

                # and make sure the attribute is of the type it should be
                if value is not None:
                    if attribute.isa == 'string':
                        value = to_text(value)
                    elif attribute.isa == 'int':
                        value = int(value)
                    elif attribute.isa == 'float':
                        value = float(value)
                    elif attribute.isa == 'bool':
                        value = boolean(value)
                    elif attribute.isa == 'percent':
                        # special value, which may be an integer or float
                        # with an optional '%' at the end
                        if isinstance(value, string_types) and '%' in value:
                            value = value.replace('%', '')
                        value = float(value)
                    elif attribute.isa in ('list', 'barelist'):
                        if value is None:
                            value = []
                        elif not isinstance(value, list):
                            if isinstance(value, string_types) and attribute.isa == 'barelist':
                                display.deprecated(
                                    "Using comma separated values for a list has been deprecated. "
                                    "You should instead use the correct YAML syntax for lists. "
                                )
                                value = value.split(',')
                            else:
                                value = [ value ]
                        if attribute.listof is not None:
                            for item in value:
                                if not isinstance(item, attribute.listof):
                                    raise AnsibleParserError("the field '%s' should be a list of %s,"
                                            " but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
                                elif attribute.required and attribute.listof == string_types:
                                    if item is None or item.strip() == "":
                                        raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name,), obj=self.get_ds())
                    elif attribute.isa == 'set':
                        if value is None:
                            value = set()
                        elif not isinstance(value, (list, set)):
                            if isinstance(value, string_types):
                                value = value.split(',')
                            else:
                                # Making a list like this handles strings of
                                # text and bytes properly
                                value = [ value ]
                        if not isinstance(value, set):
                            value = set(value)
                    elif attribute.isa == 'dict':
                        if value is None:
                            value = dict()
                        elif not isinstance(value, dict):
                            raise TypeError("%s is not a dictionary" % value)
                    elif attribute.isa == 'class':
                        if not isinstance(value, attribute.class_type):
                            raise TypeError("%s is not a valid %s (got a %s instead)" % (name, attribute.class_type, type(value)))
                        value.post_validate(templar=templar)

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)

            except (TypeError, ValueError) as e:
                raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s."
                        " Error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                if templar._fail_on_undefined_errors and name != 'name':
                    raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined."
                            " The error was: %s" % (name,e), obj=self.get_ds())

        self._finalized = True
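
The heart of post_validate() above is the isa-driven coercion of each field value. The snippet below is a minimal standalone sketch of that dispatch, assuming a hypothetical coerce_field() helper and covering only a subset of the types handled above.

# Stripped-down sketch of isa-style coercion; names and reduced type set are illustrative only.
def coerce_field(value, isa):
    if value is None:
        return None
    if isa == 'string':
        return str(value)
    if isa == 'int':
        return int(value)
    if isa == 'bool':
        if isinstance(value, str):
            return value.strip().lower() in ('yes', 'true', '1', 'on')
        return bool(value)
    if isa == 'percent':
        # accepts 50, 50.0 or '50%'
        if isinstance(value, str) and '%' in value:
            value = value.replace('%', '')
        return float(value)
    if isa == 'list':
        return value if isinstance(value, list) else [value]
    raise TypeError("unknown isa %r" % isa)

# coerce_field('50%', 'percent') -> 50.0 ; coerce_field('yes', 'bool') -> True
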
Example #51
    def run(self, tmp=None, task_vars=None):
        """ handler for fetch operations """

        # avoid a shared mutable default argument
        if task_vars is None:
            task_vars = dict()

        if self._connection_info.check_mode:
            return dict(skipped=True, msg="check mode not (yet) supported for this module")

        source = self._task.args.get("src", None)
        dest = self._task.args.get("dest", None)
        flat = boolean(self._task.args.get("flat"))
        fail_on_missing = boolean(self._task.args.get("fail_on_missing"))
        validate_checksum = boolean(self._task.args.get("validate_checksum", self._task.args.get("validate_md5")))

        if "validate_md5" in self._task.args and "validate_checksum" in self._task.args:
            return dict(failed=True, msg="validate_checksum and validate_md5 cannot both be specified")

        if source is None or dest is None:
            return dict(failed=True, msg="src and dest are required")

        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source, tmp)

        # calculate checksum for the remote file
        remote_checksum = self._remote_checksum(tmp, source)

        # use slurp if sudo and permissions are lacking
        remote_data = None
        if remote_checksum in ("1", "2") or self._connection_info.become:
            slurpres = self._execute_module(
                module_name="slurp", module_args=dict(src=source), task_vars=task_vars, tmp=tmp
            )
            if slurpres.get("rc") == 0:
                if slurpres["encoding"] == "base64":
                    remote_data = base64.b64decode(slurpres["content"])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get("source")
                if remote_source and remote_source != source:
                    source = remote_source
            else:
                # FIXME: should raise an error here? the old code did nothing
                pass

        # calculate the destination name
        if os.path.sep not in self._connection._shell.join_path("a", ""):
            source_local = source.replace("\\", "/")
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if "inventory_hostname" in task_vars:
                target_name = task_vars["inventory_hostname"]
            else:
                target_name = self._connection_info.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

        dest = dest.replace("//", "/")

        if remote_checksum in ("0", "1", "2", "3", "4"):
            # these don't fail because you may want to transfer a log file that may or
            # may not exist, but keep going to fetch other log files
            if remote_checksum == "0":
                result = dict(msg="unable to calculate the checksum of the remote file", file=source, changed=False)
            elif remote_checksum == "1":
                if fail_on_missing:
                    result = dict(failed=True, msg="the remote file does not exist", file=source)
                else:
                    result = dict(
                        msg="the remote file does not exist, not transferring, ignored", file=source, changed=False
                    )
            elif remote_checksum == "2":
                result = dict(
                    msg="no read permission on remote file, not transferring, ignored", file=source, changed=False
                )
            elif remote_checksum == "3":
                result = dict(
                    msg="remote file is a directory, fetch cannot work on directories", file=source, changed=False
                )
            elif remote_checksum == "4":
                result = dict(
                    msg="python isn't present on the system.  Unable to compute checksum", file=source, changed=False
                )
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                # remote_data holds raw bytes decoded from slurp, so write in binary mode
                with open(dest, "wb") as f:
                    f.write(remote_data)
            new_checksum = secure_hash(dest)
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                return dict(
                    failed=True,
                    md5sum=new_md5,
                    msg="checksum mismatch",
                    file=source,
                    dest=dest,
                    remote_md5sum=None,
                    checksum=new_checksum,
                    remote_checksum=remote_checksum,
                )
            return dict(
                changed=True,
                md5sum=new_md5,
                dest=dest,
                remote_md5sum=None,
                checksum=new_checksum,
                remote_checksum=remote_checksum,
            )
        else:
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None

            return dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)
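
The destination layout above (flat vs. a per-host subdirectory tree) is the part of fetch that is easiest to misread. The following is a small standalone sketch of the same decision; the function name and the simplifications are assumptions for illustration only.

import os

# Sketch of fetch's dest layout rules (illustrative names, simplified handling).
def build_fetch_dest(dest, source, hostname, flat=False):
    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            # a trailing slash means: keep the source's filename
            dest = os.path.join(dest, os.path.basename(source))
        return dest.replace('//', '/')
    # default layout: <dest>/<hostname>/<full remote path>
    return ('%s/%s/%s' % (dest, hostname, source)).replace('//', '/')

# build_fetch_dest('backups/', '/etc/hosts', 'web01') -> 'backups/web01/etc/hosts'
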
Example #52
    def run(self, terms, variables, **kwargs):

        result = None
        anydict = False
        skip = False

        for term in terms:
            if isinstance(term, dict):
                anydict = True

        total_search = []
        if anydict:
            for term in terms:
                if isinstance(term, dict):
                    files = term.get('files', [])
                    paths = term.get('paths', [])
                    skip  = boolean(term.get('skip', False))

                    filelist = files
                    if isinstance(files, basestring):
                        files = files.replace(',', ' ')
                        files = files.replace(';', ' ')
                        filelist = files.split(' ')

                    pathlist = paths
                    if paths:
                        if isinstance(paths, basestring):
                            paths = paths.replace(',', ' ')
                            paths = paths.replace(':', ' ')
                            paths = paths.replace(';', ' ')
                            pathlist = paths.split(' ')

                    if not pathlist:
                        total_search = filelist
                    else:
                        for path in pathlist:
                            for fn in filelist:
                                f = os.path.join(path, fn)
                                total_search.append(f)
                else:
                    total_search.append(term)
        else:
            total_search = terms

        templar = Templar(loader=self._loader, variables=variables)
        roledir = variables.get('roledir')
        for fn in total_search:
            try:
                fn = templar.template(fn)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                continue

            if os.path.isabs(fn) and os.path.exists(fn):
                return [fn]
            else:
                if roledir is not None:
                    # check the templates and vars directories too, if they exist
                    for subdir in ('templates', 'vars'):
                        path = self._loader.path_dwim_relative(roledir, subdir, fn)
                        if os.path.exists(path):
                            return [path]

                # if none of the above were found, just check the
                # current filename against the basedir (this will already
                # have ../files from runner, if it's a role task)
                path = self._loader.path_dwim(fn)
                if os.path.exists(path):
                    return [path]
        else:
            if skip:
                return []
            else:
                return [None]
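
The term parsing above accepts either lists or delimiter-separated strings for 'files' and 'paths'. The sketch below isolates just that normalization step under an assumed function name; it mirrors the splitting rules of the lookup but is not the lookup itself.

import os

# Sketch of how 'files'/'paths' terms become a flat search list (assumed name).
def build_search_list(files, paths=None):
    if isinstance(files, str):
        filelist = files.replace(',', ' ').replace(';', ' ').split()
    else:
        filelist = list(files)
    if not paths:
        return filelist
    if isinstance(paths, str):
        pathlist = paths.replace(',', ' ').replace(':', ' ').replace(';', ' ').split()
    else:
        pathlist = list(paths)
    return [os.path.join(p, f) for p in pathlist for f in filelist]

# build_search_list('a.yml;b.yml', 'vars:defaults') ->
#   ['vars/a.yml', 'vars/b.yml', 'defaults/a.yml', 'defaults/b.yml']
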
Example #53
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = "skipped, this module does not support check_mode."
            return result

        src        = self._task.args.get('src', None)
        dest       = self._task.args.get('dest', None)
        delimiter  = self._task.args.get('delimiter', None)
        remote_src = self._task.args.get('remote_src', 'yes')
        regexp     = self._task.args.get('regexp', None)
        follow     = self._task.args.get('follow', False)
        ignore_hidden = self._task.args.get('ignore_hidden', False)

        if src is None or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
            return result

        if boolean(remote_src):
            result.update(self._execute_module(tmp=tmp, task_vars=task_vars))
            return result

        elif self._task._role is not None:
            src = self._loader.path_dwim_relative(self._task._role._role_path, 'files', src)
        else:
            src = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', src)

        _re = None
        if regexp is not None:
            _re = re.compile(regexp)

        if not os.path.isdir(src):
            result['failed'] = True
            result['msg'] = "Source (%s) is not a directory" % src
            return result

        # Does all work assembling the file
        path = self._assemble_from_fragments(src, delimiter, _re, ignore_hidden)

        path_checksum = checksum_s(path)
        dest = self._remote_expand_user(dest)
        dest_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=follow)

        diff = {}
        if path_checksum != dest_stat['checksum']:
            # the Python 2-only file() builtin is replaced with open()
            with open(path) as f:
                resultant = f.read()

            if self._play_context.diff:
                diff = self._get_diff_data(dest, path, task_vars)

            xfered = self._transfer_data('src', resultant)

            # fix file permissions when the copy is done as a different user
            if self._play_context.become and self._play_context.become_user != 'root':
                self._remote_chmod('a+r', xfered)

            # run the copy module

            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=xfered,
                    dest=dest,
                    original_basename=os.path.basename(src),
                )
            )

            res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
            if diff:
                res['diff'] = diff
            result.update(res)
            return result
        else:
            # checksums already match, so nothing is transferred; run the file
            # module only to enforce ownership/permission arguments on dest
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=None,
                    dest=dest,
                    original_basename=os.path.basename(src),
                )
            )

            result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp))
            return result
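
Most of the work above is done by _assemble_from_fragments(), which is not shown in this example. The sketch below illustrates what such a fragment-concatenation step could look like; the ordering, delimiter and hidden-file handling are assumptions rather than the plugin's real implementation.

import os
import tempfile

# Rough sketch of assembling sorted fragments into one temp file (assumed behaviour).
def assemble_fragments(src_dir, delimiter=None, compiled_regexp=None, ignore_hidden=False):
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as out:
        first = True
        for name in sorted(os.listdir(src_dir)):
            if ignore_hidden and name.startswith('.'):
                continue
            if compiled_regexp is not None and not compiled_regexp.search(name):
                continue
            fragment = os.path.join(src_dir, name)
            if not os.path.isfile(fragment):
                continue
            if delimiter is not None and not first:
                out.write(delimiter)
                if not delimiter.endswith('\n'):
                    out.write('\n')
            with open(fragment) as f:
                out.write(f.read())
            first = False
    return path  # caller is responsible for removing the temporary file
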
Example #54
    def run(self, terms, variables, **kwargs):

        result = None
        anydict = False
        skip = False

        for term in terms:
            if isinstance(term, dict):
                anydict = True

        total_search = []
        if anydict:
            for term in terms:
                if isinstance(term, dict):
                    files = term.get('files', [])
                    paths = term.get('paths', [])
                    skip  = boolean(term.get('skip', False))

                    filelist = files
                    if isinstance(files, basestring):
                        files = files.replace(',', ' ')
                        files = files.replace(';', ' ')
                        filelist = files.split(' ')

                    pathlist = paths
                    if paths:
                        if isinstance(paths, basestring):
                            paths = paths.replace(',', ' ')
                            paths = paths.replace(':', ' ')
                            paths = paths.replace(';', ' ')
                            pathlist = paths.split(' ')

                    if not pathlist:
                        total_search = filelist
                    else:
                        for path in pathlist:
                            for fn in filelist:
                                f = os.path.join(path, fn)
                                total_search.append(f)
                else:
                    total_search.append(term)
        else:
            total_search = self._flatten(terms)

        roledir = variables.get('roledir')
        for fn in total_search:
            try:
                fn = self._templar.template(fn)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                continue

            if os.path.isabs(fn) and os.path.exists(fn):
                return [fn]
            else:
                if roledir is not None:
                    # check the templates and vars directories too, if they exist
                    for subdir in ('templates', 'vars', 'files'):
                        path = self._loader.path_dwim_relative(roledir, subdir, fn)
                        if os.path.exists(path):
                            return [path]

                # if none of the above were found, just check the
                # current filename against the current dir
                path = self._loader.path_dwim(fn)
                if os.path.exists(path):
                    return [path]
        else:
            if skip:
                return []
            else:
                raise AnsibleLookupError("No file was found when using with_first_found. Use the 'skip: true' option to allow this task to be skipped if no files are found")
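
Both first_found variants above lean on Python's for/else clause: the else block runs only when the loop finishes every iteration without hitting break (here, without returning a match). A tiny standalone illustration of that control flow, using break where the lookups use an early return:

# for/else illustration: 'else' fires only when the loop was not broken out of.
def find_first(candidates, exists):
    for name in candidates:
        if exists(name):
            found = name
            break
    else:
        # reached only if no candidate matched
        found = None
    return found

# find_first(['a', 'b'], lambda n: n == 'b') -> 'b'
# find_first(['a'], lambda n: False)         -> None
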
Example #55
    def run(self, tmp=None, task_vars=None):
        ''' generates params and passes them on to the rsync module '''
        # When modifying this function be aware of the tricky convolutions
        # your thoughts have to go through:
        #
        # In normal ansible, we connect from controller to inventory_hostname
        # (playbook's hosts: field) or controller to delegate_to host and run
        # a module on one of those hosts.
        #
        # So things that are directly related to the core of ansible are in
        # terms of that sort of connection that always originate on the
        # controller.
        #
        # In synchronize we use ansible to connect to either the controller or
        # to the delegate_to host and then run rsync which makes its own
        # connection from controller to inventory_hostname or delegate_to to
        # inventory_hostname.
        #
        # That means synchronize needs some knowledge of the connections that
        # ansible typically establishes (controller to inventory_host, or
        # controller to delegate host) and has to use them to construct a
        # command line for rsync to connect from the inventory_host to the
        # controller/delegate.  The
        # challenge for coders is remembering which leg of the trip is
        # associated with the conditions that you're checking at any one time.
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        # self._connection accounts for delegate_to so
        # remote_transport is the transport ansible thought it would need
        # between the controller and the delegate_to host or the controller
        # and the remote_host if delegate_to isn't set.

        remote_transport = False
        if self._connection.transport != 'local':
            remote_transport = True

        try:
            delegate_to = self._task.delegate_to
        except (AttributeError, KeyError):
            delegate_to = None

        # ssh paramiko and local are fully supported transports.  Anything
        # else only works with delegate_to
        if delegate_to is None and self._connection.transport not in ('ssh', 'paramiko', 'local'):
            result['failed'] = True
            result['msg'] = "synchronize uses rsync to function. rsync needs to connect to the remote host via ssh or a direct filesystem copy. This remote host is being accessed via %s instead so it cannot work." % self._connection.transport
            return result

        use_ssh_args = self._task.args.pop('use_ssh_args', None)

        # Parameter name needed by the ansible module
        self._task.args['_local_rsync_path'] = task_vars.get('ansible_rsync_path') or 'rsync'

        # rsync thinks that one end of the connection is localhost and the
        # other is the host we're running the task for  (Note: We use
        # ansible's delegate_to mechanism to determine which host rsync is
        # running on so localhost could be a non-controller machine if
        # delegate_to is used)
        src_host = '127.0.0.1'
        inventory_hostname = task_vars.get('inventory_hostname')
        dest_host_inventory_vars = task_vars['hostvars'].get(inventory_hostname)
        try:
            dest_host = dest_host_inventory_vars['ansible_host']
        except KeyError:
            dest_host = dest_host_inventory_vars.get('ansible_ssh_host', inventory_hostname)

        localhost_ports = set()
        for host in C.LOCALHOST:
            localhost_vars = task_vars['hostvars'].get(host, {})
            for port_var in MAGIC_VARIABLE_MAPPING['port']:
                port = localhost_vars.get(port_var, None)
                if port:
                    break
            else:
                port = C.DEFAULT_REMOTE_PORT
            localhost_ports.add(port)

        # dest_is_local tells us if the host rsync runs on is the same as the
        # host rsync puts the files on.  This is about *rsync's connection*,
        # not about the ansible connection to run the module.
        dest_is_local = False
        if not delegate_to and remote_transport is False:
            dest_is_local = True
        elif delegate_to and delegate_to == dest_host:
            dest_is_local = True

        # CHECK FOR NON-DEFAULT SSH PORT
        inv_port = task_vars.get('ansible_ssh_port', None) or C.DEFAULT_REMOTE_PORT
        if self._task.args.get('dest_port', None) is None:
            if inv_port is not None:
                self._task.args['dest_port'] = inv_port

        # Set use_delegate if we are going to run rsync on a delegated host
        # instead of localhost
        use_delegate = False
        if dest_host == delegate_to:
            # edge case: explicit delegate and dest_host are the same
            # so we run rsync on the remote machine targeting its localhost
            # (itself)
            dest_host = '127.0.0.1'
            use_delegate = True
        elif delegate_to is not None and remote_transport:
            # If we're delegating to a remote host then we need to use the
            # delegate_to settings
            use_delegate = True

        # Delegate to localhost as the source of the rsync unless we've been
        # told (via delegate_to) that a different host is the source of the
        # rsync
        if not use_delegate and remote_transport:
            # Create a connection to localhost to run rsync on
            new_stdin = self._connection._new_stdin

            # Unlike port, there can be only one shell
            localhost_shell = None
            for host in C.LOCALHOST:
                localhost_vars = task_vars['hostvars'].get(host, {})
                for shell_var in MAGIC_VARIABLE_MAPPING['shell']:
                    localhost_shell = localhost_vars.get(shell_var, None)
                    if localhost_shell:
                        break
                if localhost_shell:
                    break
            else:
                localhost_shell = os.path.basename(C.DEFAULT_EXECUTABLE)
            self._play_context.shell = localhost_shell

            new_connection = connection_loader.get('local', self._play_context, new_stdin)
            self._connection = new_connection
            self._override_module_replaced_vars(task_vars)

        # SWITCH SRC AND DEST HOST PER MODE
        if self._task.args.get('mode', 'push') == 'pull':
            (dest_host, src_host) = (src_host, dest_host)

        # MUNGE SRC AND DEST PER REMOTE_HOST INFO
        src = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        if not dest_is_local:
            # Private key handling
            # (the same lookup applies whether or not we are delegating)
            private_key = task_vars.get('ansible_ssh_private_key_file') or self._play_context.private_key_file

            if private_key is not None:
                private_key = os.path.expanduser(private_key)
                self._task.args['private_key'] = private_key

            # Src and dest rsync "path" handling
            # Determine if we need a user@
            user = None
            if boolean(self._task.args.get('set_remote_user', 'yes')):
                if use_delegate:
                    user = task_vars.get('ansible_delegated_vars', dict()).get('ansible_ssh_user', None)
                    if not user:
                        user = C.DEFAULT_REMOTE_USER

                else:
                    user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user

            # use the mode to define src and dest's url
            if self._task.args.get('mode', 'push') == 'pull':
                # src is a remote path: <user>@<host>, dest is a local path
                src = self._process_remote(src_host, src, user, inv_port in localhost_ports)
                dest = self._process_origin(dest_host, dest, user)
            else:
                # src is a local path, dest is a remote path: <user>@<host>
                src = self._process_origin(src_host, src, user)
                dest = self._process_remote(dest_host, dest, user, inv_port in localhost_ports)
        else:
            # Still need to munge paths (to account for roles) even if we aren't
            # copying files between hosts
            if not src.startswith('/'):
                src = self._get_absolute_path(path=src)
            if not dest.startswith('/'):
                dest = self._get_absolute_path(path=dest)

        self._task.args['src'] = src
        self._task.args['dest'] = dest

        # Allow custom rsync path argument
        rsync_path = self._task.args.get('rsync_path', None)

        if not dest_is_local:
            if self._play_context.become and not rsync_path:
                # If no rsync_path is set, become was originally set, and dest is
                # remote then add privilege escalation here.
                if self._play_context.become_method == 'sudo':
                    rsync_path = 'sudo rsync'
                # TODO: have to add in the rest of the become methods here

            # We cannot use privilege escalation on the machine running the
            # module.  Instead we run it on the machine rsync is connecting
            # to.
            self._play_context.become = False

        # make sure rsync path is quoted.
        if rsync_path:
            self._task.args['rsync_path'] = '"%s"' % rsync_path

        if use_ssh_args:
            ssh_args = [
                getattr(self._play_context, 'ssh_args', ''),
                getattr(self._play_context, 'ssh_common_args', ''),
                getattr(self._play_context, 'ssh_extra_args', ''),
            ]
            self._task.args['ssh_args'] = ' '.join([a for a in ssh_args if a])

        # run the module and store the result
        result.update(self._execute_module('synchronize', task_vars=task_vars))

        if 'SyntaxError' in result.get('exception', result.get('msg', '')):
            # Emit a warning about using python3 because synchronize is
            # somewhat unique in running on localhost
            result['exception'] = result['msg']
            result['msg'] = 'SyntaxError parsing module.  Perhaps invoking "python" on your local (or delegate_to) machine invokes python3.  You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this'
        return result
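
The long opening comment of this last example is the part that usually trips people up: which host rsync runs on versus which host it copies to. The sketch below condenses the dest_is_local and use_delegate decisions made above into one standalone function; the name and argument list are assumptions, and it deliberately ignores the dest_host rewrite to 127.0.0.1.

# Condensed restatement (assumed names) of the host-role decisions above.
def classify_sync(dest_host, delegate_to, remote_transport):
    # dest_is_local: the host rsync runs on is also the host it writes to
    if not delegate_to and not remote_transport:
        dest_is_local = True
    elif delegate_to and delegate_to == dest_host:
        dest_is_local = True
    else:
        dest_is_local = False

    # use_delegate: rsync runs on the delegate host instead of the controller
    if dest_host == delegate_to:
        use_delegate = True
    elif delegate_to is not None and remote_transport:
        use_delegate = True
    else:
        use_delegate = False
    return dest_is_local, use_delegate

# classify_sync('web01', None, True)      -> (False, False)  rsync runs on the controller
# classify_sync('web01', 'web01', True)   -> (True, True)    rsync runs on web01, copies locally
# classify_sync('web01', 'bastion', True) -> (False, True)   rsync runs on bastion, pushes to web01
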