Example #1
def _find_module_utils(module_name, b_module_data, module_path, module_args,
                       task_vars, templar, module_compression, async_timeout,
                       become, become_method, become_user, become_password,
                       become_flags, environment):
    """
    Given the source of the module, convert it to a Jinja2 template to insert
    module code and return whether it's a new or old style module.
    """
    module_substyle = module_style = 'old'

    # module_style is something important to calling code (ActionBase).  It
    # determines how arguments are formatted (json vs k=v) and whether
    # a separate arguments file needs to be sent over the wire.
    # module_substyle is extra information that's useful internally.  It tells
    # us what we have to look for to substitute in the module files and whether
    # we're using module replacer or ansiballz to format the module itself.
    if _is_binary(b_module_data):
        module_substyle = module_style = 'binary'
    elif REPLACER in b_module_data:
        # Do REPLACER before the "from ansible.module_utils." check because we need
        # to make sure we substitute "from ansible.module_utils.basic" for REPLACER
        module_style = 'new'
        module_substyle = 'python'
        b_module_data = b_module_data.replace(
            REPLACER, b'from ansible.module_utils.basic import *')
    elif b'from ansible.module_utils.' in b_module_data:
        module_style = 'new'
        module_substyle = 'python'
    elif REPLACER_WINDOWS in b_module_data:
        module_style = 'new'
        module_substyle = 'powershell'
        b_module_data = b_module_data.replace(
            REPLACER_WINDOWS, b'#Requires -Module Ansible.ModuleUtils.Legacy')
    elif re.search(b'#Requires -Module', b_module_data, re.IGNORECASE) \
            or re.search(b'#Requires -Version', b_module_data, re.IGNORECASE)\
            or re.search(b'#AnsibleRequires -OSVersion', b_module_data, re.IGNORECASE) \
            or re.search(b'#AnsibleRequires -CSharpUtil', b_module_data, re.IGNORECASE):
        module_style = 'new'
        module_substyle = 'powershell'
    elif REPLACER_JSONARGS in b_module_data:
        module_style = 'new'
        module_substyle = 'jsonargs'
    elif b'WANT_JSON' in b_module_data:
        module_substyle = module_style = 'non_native_want_json'

    shebang = None
    # Old-style, non_native_want_json and binary modules should not be modified
    # beyond the shebang line (done by modify_module)
    if module_style in ('old', 'non_native_want_json', 'binary'):
        return b_module_data, module_style, shebang

    output = BytesIO()
    py_module_names = set()

    if module_substyle == 'python':
        params = dict(ANSIBLE_MODULE_ARGS=module_args, )
        try:
            python_repred_params = repr(json.dumps(params))
        except TypeError as e:
            raise AnsibleError(
                "Unable to pass options to module, they must be JSON serializable: %s"
                % to_native(e))

        try:
            compression_method = getattr(zipfile, module_compression)
        except AttributeError:
            display.warning(
                u'Bad module compression string specified: %s.  Using ZIP_STORED (no compression)'
                % module_compression)
            compression_method = zipfile.ZIP_STORED

        lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache')
        cached_module_filename = os.path.join(
            lookup_path, "%s-%s" % (module_name, module_compression))

        zipdata = None
        # Optimization -- don't lock if the module has already been cached
        if os.path.exists(cached_module_filename):
            display.debug('ANSIBALLZ: using cached module: %s' %
                          cached_module_filename)
            with open(cached_module_filename, 'rb') as module_data:
                zipdata = module_data.read()
        else:
            if module_name in action_write_locks.action_write_locks:
                display.debug('ANSIBALLZ: Using lock for %s' % module_name)
                lock = action_write_locks.action_write_locks[module_name]
            else:
                # If the action plugin directly invokes the module (instead of
                # going through a strategy) then we don't have a cross-process
                # Lock specifically for this module.  Use the "unexpected
                # module" lock instead
                display.debug('ANSIBALLZ: Using generic lock for %s' %
                              module_name)
                lock = action_write_locks.action_write_locks[None]

            display.debug('ANSIBALLZ: Acquiring lock')
            with lock:
                display.debug('ANSIBALLZ: Lock acquired: %s' % id(lock))
                # Check that no other process has created this while we were
                # waiting for the lock
                if not os.path.exists(cached_module_filename):
                    display.debug('ANSIBALLZ: Creating module')
                    # Create the module zip data
                    zipoutput = BytesIO()
                    zf = zipfile.ZipFile(zipoutput,
                                         mode='w',
                                         compression=compression_method)
                    # Note: If we need to import from release.py first,
                    # remember to catch all exceptions: https://github.com/ansible/ansible/issues/16523
                    zf.writestr(
                        'ansible/__init__.py',
                        b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n__version__="'
                        + to_bytes(__version__) + b'"\n__author__="' +
                        to_bytes(__author__) + b'"\n')
                    zf.writestr(
                        'ansible/module_utils/__init__.py',
                        b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n'
                    )

                    zf.writestr('__main__.py', b_module_data)

                    py_module_cache = {('__init__', ): (b'', '[builtin]')}
                    recursive_finder(module_name, b_module_data,
                                     py_module_names, py_module_cache, zf)
                    zf.close()
                    zipdata = base64.b64encode(zipoutput.getvalue())

                    # Write the assembled module to a temp file (write to temp
                    # so that no one looking for the file reads a partially
                    # written file)
                    if not os.path.exists(lookup_path):
                        # Note -- if we have a global function to setup, that would
                        # be a better place to run this
                        os.makedirs(lookup_path)
                    display.debug('ANSIBALLZ: Writing module')
                    with open(cached_module_filename + '-part', 'wb') as f:
                        f.write(zipdata)

                    # Rename the file into its final position in the cache so
                    # future users of this module can read it off the
                    # filesystem instead of constructing from scratch.
                    display.debug('ANSIBALLZ: Renaming module')
                    os.rename(cached_module_filename + '-part',
                              cached_module_filename)
                    display.debug('ANSIBALLZ: Done creating module')

            if zipdata is None:
                display.debug('ANSIBALLZ: Reading module after lock')
                # Another process wrote the file while we were waiting for
                # the write lock.  Go ahead and read the data from disk
                # instead of re-creating it.
                try:
                    with open(cached_module_filename, 'rb') as f:
                        zipdata = f.read()
                except IOError:
                    raise AnsibleError(
                        'A different worker process failed to create module file. '
                        'Look at traceback for that process for debugging information.'
                    )
        zipdata = to_text(zipdata, errors='surrogate_or_strict')

        shebang, interpreter = _get_shebang(u'/usr/bin/python', task_vars,
                                            templar)
        if shebang is None:
            shebang = u'#!/usr/bin/python'

        # Enclose the parts of the interpreter in quotes because we're
        # substituting it into the template as a Python string
        interpreter_parts = interpreter.split(u' ')
        interpreter = u"'{0}'".format(u"', '".join(interpreter_parts))

        # FUTURE: the module cache entry should be invalidated if we got this value from a host-dependent source
        rlimit_nofile = C.config.get_config_value(
            'PYTHON_MODULE_RLIMIT_NOFILE', variables=task_vars)

        if not isinstance(rlimit_nofile, int):
            rlimit_nofile = int(templar.template(rlimit_nofile))

        if rlimit_nofile:
            rlimit = ANSIBALLZ_RLIMIT_TEMPLATE % dict(
                rlimit_nofile=rlimit_nofile, )
        else:
            rlimit = ''

        coverage_config = os.environ.get('_ANSIBLE_COVERAGE_CONFIG')

        if coverage_config:
            coverage_output = os.environ['_ANSIBLE_COVERAGE_OUTPUT']

            if coverage_output:
                # Enable code coverage analysis of the module.
                # This feature is for internal testing and may change without notice.
                coverage = ANSIBALLZ_COVERAGE_TEMPLATE % dict(
                    coverage_config=coverage_config,
                    coverage_output=coverage_output,
                )
            else:
                # Verify coverage is available without importing it.
                # This will detect when a module would fail with coverage enabled with minimal overhead.
                coverage = ANSIBALLZ_COVERAGE_CHECK_TEMPLATE
        else:
            coverage = ''

        now = datetime.datetime.utcnow()
        output.write(
            to_bytes(ACTIVE_ANSIBALLZ_TEMPLATE % dict(
                zipdata=zipdata,
                ansible_module=module_name,
                params=python_repred_params,
                shebang=shebang,
                interpreter=interpreter,
                coding=ENCODING_STRING,
                year=now.year,
                month=now.month,
                day=now.day,
                hour=now.hour,
                minute=now.minute,
                second=now.second,
                coverage=coverage,
                rlimit=rlimit,
            )))
        b_module_data = output.getvalue()

    elif module_substyle == 'powershell':
        # PowerShell/WinRM don't actually make use of the shebang, so we can
        # safely set this here.  If we let the fallback code handle this
        # it can fail in the presence of the UTF8 BOM commonly added by
        # Windows text editors
        shebang = u'#!powershell'
        # create the common exec wrapper payload and set that as the module_data
        # bytes
        b_module_data = ps_manifest._create_powershell_wrapper(
            b_module_data, module_args, environment, async_timeout, become,
            become_method, become_user, become_password, become_flags,
            module_substyle)

    elif module_substyle == 'jsonargs':
        module_args_json = to_bytes(json.dumps(module_args))

        # these strings could be included in a third-party module but
        # officially they were included in the 'basic' snippet for new-style
        # python modules (which has been replaced with something else in
        # ansiballz). If we remove them from jsonargs-style module replacer
        # then we can remove them everywhere.
        python_repred_args = to_bytes(repr(module_args_json))
        b_module_data = b_module_data.replace(REPLACER_VERSION,
                                              to_bytes(repr(__version__)))
        b_module_data = b_module_data.replace(REPLACER_COMPLEX,
                                              python_repred_args)
        b_module_data = b_module_data.replace(
            REPLACER_SELINUX, to_bytes(','.join(C.DEFAULT_SELINUX_SPECIAL_FS)))

        # The main event -- substitute the JSON args string into the module
        b_module_data = b_module_data.replace(REPLACER_JSONARGS,
                                              module_args_json)

        facility = b'syslog.' + to_bytes(task_vars.get(
            'ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY),
                                         errors='surrogate_or_strict')
        b_module_data = b_module_data.replace(b'syslog.LOG_USER', facility)

    return (b_module_data, module_style, shebang)
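
The base64 zipdata embedded by ACTIVE_ANSIBALLZ_TEMPLATE above is what the generated
wrapper decodes and runs on the remote host. The following is a minimal, hypothetical
sketch of that runtime step (ZIPDATA and run_sketch are illustrative names, not part
of Ansible's API, and the real wrapper differs in detail):

import base64
import os
import subprocess
import sys
import tempfile
import zipfile

ZIPDATA = ''  # the base64-encoded zip produced above would be embedded here

def run_sketch(zipdata):
    """Decode the embedded zip, unpack it, and execute its __main__.py."""
    tmpdir = tempfile.mkdtemp(prefix='ansiballz_sketch_')
    zip_path = os.path.join(tmpdir, 'module.zip')
    with open(zip_path, 'wb') as f:
        f.write(base64.b64decode(zipdata))
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(tmpdir)
    # __main__.py is the module source written into the zip above; the extracted
    # ansible/ package sits next to it, so its module_utils imports resolve locally.
    return subprocess.call([sys.executable, os.path.join(tmpdir, '__main__.py')])
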
Example #2
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        try:
            creates = self._task.args.get('creates')
            if creates:
                # do not run the command if the line contains creates=filename
                # and the filename already exists. This allows idempotence
                # of command executions.
                if self._remote_file_exists(creates):
                    raise AnsibleActionSkip(
                        "%s exists, matching creates option" % creates)

            removes = self._task.args.get('removes')
            if removes:
                # do not run the command if the line contains removes=filename
                # and the filename does not exist. This allows idempotence
                # of command executions.
                if not self._remote_file_exists(removes):
                    raise AnsibleActionSkip(
                        "%s does not exist, matching removes option" % removes)

            # The chdir must be absolute, because a relative path would rely on
            # remote node behaviour & user config.
            chdir = self._task.args.get('chdir')
            if chdir:
                # Powershell is the only Windows-path aware shell
                if getattr(self._connection._shell, "_IS_WINDOWS", False) and \
                        not self.windows_absolute_path_detection.match(chdir):
                    raise AnsibleActionFail(
                        'chdir %s must be an absolute path for a Windows remote node'
                        % chdir)
                # Every other shell is unix-path-aware.
                if not getattr(self._connection._shell, "_IS_WINDOWS",
                               False) and not chdir.startswith('/'):
                    raise AnsibleActionFail(
                        'chdir %s must be an absolute path for a Unix-aware remote node'
                        % chdir)

            # Split out the script as the first item in raw_params using
            # shlex.split() in order to support paths and files with spaces in the name.
            # Any arguments passed to the script will be added back later.
            raw_params = to_native(self._task.args.get('_raw_params', ''),
                                   errors='surrogate_or_strict')
            parts = [
                to_text(s, errors='surrogate_or_strict')
                for s in shlex.split(raw_params.strip())
            ]
            source = parts[0]

            # Support executable paths and files with spaces in the name.
            executable = to_native(self._task.args.get('executable', ''),
                                   errors='surrogate_or_strict')

            try:
                source = self._loader.get_real_file(
                    self._find_needle('files', source),
                    decrypt=self._task.args.get('decrypt', True))
            except AnsibleError as e:
                raise AnsibleActionFail(to_native(e))

            # now we execute script, always assume changed.
            result['changed'] = True

            if not self._play_context.check_mode:
                # transfer the file to a remote tmp location
                tmp_src = self._connection._shell.join_path(
                    self._connection._shell.tmpdir, os.path.basename(source))

                # Convert raw_params to text for the purpose of replacing the script since
                # parts and tmp_src are both unicode strings and raw_params will be different
                # depending on Python version.
                #
                # Once everything is encoded consistently, replace the script path on the remote
                # system with the remainder of the raw_params. This preserves quoting in parameters
                # that would have been removed by shlex.split().
                target_command = to_text(raw_params).strip().replace(
                    parts[0], tmp_src)

                self._transfer_file(source, tmp_src)

                # set file permissions, more permissive when the copy is done as a different user
                self._fixup_perms2((self._connection._shell.tmpdir, tmp_src),
                                   execute=True)

                # add preparation steps to one ssh roundtrip executing the script
                env_dict = dict()
                env_string = self._compute_environment_string(env_dict)

                if executable:
                    script_cmd = ' '.join(
                        [env_string, executable, target_command])
                else:
                    script_cmd = ' '.join([env_string, target_command])

            if self._play_context.check_mode:
                raise _AnsibleActionDone()

            script_cmd = self._connection._shell.wrap_for_exec(script_cmd)

            exec_data = None
            # PowerShell runs the script in a special wrapper to enable things
            # like become and environment args
            if getattr(self._connection._shell, "_IS_WINDOWS", False):
                # FUTURE: use a more public method to get the exec payload
                pc = self._play_context
                exec_data = ps_manifest._create_powershell_wrapper(
                    to_bytes(script_cmd), {},
                    env_dict,
                    self._task.async_val,
                    pc.become,
                    pc.become_method,
                    pc.become_user,
                    pc.become_pass,
                    pc.become_flags,
                    substyle="script")
                # build the necessary exec wrapper command
                # FUTURE: this still doesn't let script work on Windows with non-pipelined connections or
                # full manual exec of KEEP_REMOTE_FILES
                script_cmd = self._connection._shell.build_module_command(
                    env_string='', shebang='#!powershell', cmd='')

            result.update(
                self._low_level_execute_command(cmd=script_cmd,
                                                in_data=exec_data,
                                                sudoable=True,
                                                chdir=chdir))

            if 'rc' in result and result['rc'] != 0:
                raise AnsibleActionFail('non-zero return code')

        except AnsibleAction as e:
            result.update(e.result)
        finally:
            self._remove_tmp_path(self._connection._shell.tmpdir)

        return result
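
The comment above about preserving quoting is the subtle part of this handler:
shlex.split() strips the quotes it parses, so re-joining parts would mangle any
argument that contains spaces, while replacing parts[0] inside raw_params keeps the
original quoting intact. A small stand-alone illustration (the paths and file names
below are made up):

import shlex

raw_params = 'deploy.sh --message "hello world"'
parts = shlex.split(raw_params)                      # ['deploy.sh', '--message', 'hello world']
tmp_src = '/home/user/.ansible/tmp/deploy.sh'

rejoined = ' '.join([tmp_src] + parts[1:])           # quotes stripped: ... --message hello world
preserved = raw_params.replace(parts[0], tmp_src)    # quotes kept:    ... --message "hello world"

print(rejoined)
print(preserved)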