def v2_playbook_on_play_start(self, play):
        self._task_count = 0

        if play:
            # figure out where the playbook FILE is
            path = os.path.abspath(self.playbook._file_name)

            # tell the logger what the playbook is
            logmech.playbook_id = path

            # if play count == 0
            # write out playbook info now
            if not self._play_count:
                pb_info = {}
                pb_info['playbook_start'] = time.time()
                pb_info['playbook'] = path
                pb_info['userid'] = getlogin()
                pb_info['extra_vars'] = play._variable_manager.extra_vars
                pb_info['inventory'] = play._variable_manager._inventory.src()
                pb_info['playbook_checksum'] = secure_hash(path)
                pb_info['check'] = self.play_context.check_mode
                pb_info['diff'] = self.play_context.diff
                logmech.play_log(json.dumps(pb_info, indent=4))

            self._play_count += 1
            # then write per-play info that doesn't duplicate the playbook info
            info = {}
            info['play'] = play.name
            info['hosts'] = play.hosts
            info['transport'] = self.play_context.connection
            info['number'] = self._play_count
            info['check'] = self.play_context.check_mode
            info['diff'] = self.play_context.diff
            logmech.play_info = info
            logmech.play_log(json.dumps(info, indent=4))
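The callback above fingerprints the playbook with secure_hash(). A minimal sketch of the hashing helpers it relies on, assuming a local Ansible install (the playbook path is hypothetical): secure_hash() returns a SHA-1 hex digest of a file, or None when the path does not exist, and secure_hash_s() hashes an in-memory string.

from ansible.utils.hashing import secure_hash, secure_hash_s

# SHA-1 hex digest of a file on disk; None if the file is missing
print(secure_hash('/path/to/site.yml'))

# the same digest computed from an in-memory string
print(secure_hash_s('- hosts: all\n'))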
Example #2
    def put_file(self, in_path, out_path):
        super(Connection, self).put_file(in_path, out_path)
        out_path = self._shell._unquote(out_path)
        display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
        if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
            raise AnsibleFileNotFound('file or module does not exist: "%s"' % to_native(in_path))

        script_template = u'''
            begin {{
                $path = '{0}'

                $DebugPreference = "Continue"
                $ErrorActionPreference = "Stop"
                Set-StrictMode -Version 2

                $fd = [System.IO.File]::Create($path)

                $sha1 = [System.Security.Cryptography.SHA1CryptoServiceProvider]::Create()

                $bytes = @() #initialize for empty file case
            }}
            process {{
               $bytes = [System.Convert]::FromBase64String($input)
               $sha1.TransformBlock($bytes, 0, $bytes.Length, $bytes, 0) | Out-Null
               $fd.Write($bytes, 0, $bytes.Length)
            }}
            end {{
                $sha1.TransformFinalBlock($bytes, 0, 0) | Out-Null

                $hash = [System.BitConverter]::ToString($sha1.Hash).Replace("-", "").ToLowerInvariant()

                $fd.Close()

                Write-Output "{{""sha1"":""$hash""}}"
            }}
        '''

        script = script_template.format(self._shell._escape(out_path))
        cmd_parts = self._shell._encode_script(script, as_list=True, strict_mode=False, preserve_rc=False)

        result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], stdin_iterator=self._put_file_stdin_iterator(in_path, out_path))
        # TODO: improve error handling
        if result.status_code != 0:
            raise AnsibleError(to_native(result.std_err))

        put_output = json.loads(result.std_out)
        remote_sha1 = put_output.get("sha1")

        if not remote_sha1:
            raise AnsibleError("Remote sha1 was not returned")

        local_sha1 = secure_hash(in_path)

        if remote_sha1 != local_sha1:
            raise AnsibleError("Remote sha1 hash {0} does not match local hash {1}".format(to_native(remote_sha1), to_native(local_sha1)))
Example #3
    def _walk(b_path, b_top_level_dir):
        is_root = b_path == b_top_level_dir

        for b_item in os.listdir(b_path):
            b_abs_path = os.path.join(b_path, b_item)
            b_rel_base_dir = b'' if b_path == b_top_level_dir else b_path[len(b_top_level_dir) + 1:]
            rel_path = to_text(os.path.join(b_rel_base_dir, b_item),
                               errors='surrogate_or_strict')

            if os.path.isdir(b_abs_path):
                if any(b_item == b_path for b_path, root_only in b_ignore_dirs
                       if not root_only or root_only == is_root):
                    display.vvv("Skipping '%s' for collection build" %
                                to_text(b_abs_path))
                    continue

                if os.path.islink(b_abs_path):
                    b_link_target = os.path.realpath(b_abs_path)

                    if not b_link_target.startswith(b_top_level_dir):
                        display.warning(
                            "Skipping '%s' as it is a symbolic link to a directory outside the collection"
                            % to_text(b_abs_path))
                        continue

                manifest_entry = entry_template.copy()
                manifest_entry['name'] = rel_path
                manifest_entry['ftype'] = 'dir'

                manifest['files'].append(manifest_entry)

                _walk(b_abs_path, b_top_level_dir)
            else:
                if b_item == b'galaxy.yml':
                    continue
                elif any(
                        fnmatch.fnmatch(b_item, b_pattern)
                        for b_pattern, root_only in b_ignore_files
                        if not root_only or root_only == is_root):
                    display.vvv("Skipping '%s' for collection build" %
                                to_text(b_abs_path))
                    continue

                manifest_entry = entry_template.copy()
                manifest_entry['name'] = rel_path
                manifest_entry['ftype'] = 'file'
                manifest_entry['chksum_type'] = 'sha256'
                manifest_entry['chksum_sha256'] = secure_hash(b_abs_path,
                                                              hash_func=sha256)

                manifest['files'].append(manifest_entry)
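Each file entry in the collection manifest carries a SHA-256 digest produced by the same helper, secure_hash(path, hash_func=sha256). A small sketch of the shape of one entry (the path is hypothetical):

from hashlib import sha256
from ansible.utils.hashing import secure_hash

entry = {
    'name': 'plugins/modules/my_module.py',   # path relative to the collection root
    'ftype': 'file',
    'chksum_type': 'sha256',
    'chksum_sha256': secure_hash('plugins/modules/my_module.py', hash_func=sha256),
}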
Example #4
def main():

    module = AnsibleModule(
        argument_spec=dict(
            dest=dict(default='README.md'),
            template=dict(required=True),
            name=dict(aliases=['template']),
            timestamp=dict(default='no', choices=['yes', 'no']),
            insert=dict(default=False)
        ),
        supports_check_mode=False
    )

    # Try to import the library
    try:
        from cfautodoc.autodoc import autodoc
    except ImportError:
        module.fail_json(msg='Install the cfautodoc tool using pip.')

    templ = module.params['name']
    dest = module.params['dest']
    timestamp = module.params['timestamp']
    insert = module.params['insert']

    # Massage timestamp for library
    if timestamp == 'yes':
        timestamp = True
    else:
        timestamp = False

    # Generate the Markdown
    towrite = autodoc(templ, dest, timestamp, insert)

    # Write the towrite list
    if secure_hash(dest) == secure_hash_s(''.join(towrite)):
        msg = 'Documentation already up to date for {0}'.format(templ)
        module.exit_json(changed=False, written=msg)
    else:
        try:
            with open(dest, 'w') as destination:
                for line in towrite:
                    destination.write(line)
            msg = '{0} documentation updated to {1}'.format(templ, dest)
            module.exit_json(changed=True, written=msg)
        except Exception as exc:
            module.fail_json(msg=str(exc))
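The changed/unchanged decision above compares the hash of the README already on disk with the hash of the freshly rendered text. A standalone sketch of that idempotence check, with a hypothetical file name and content:

from ansible.utils.hashing import secure_hash, secure_hash_s

rendered = '# My role\n\nGenerated documentation.\n'

if secure_hash('README.md') == secure_hash_s(rendered):
    print('already up to date')
else:
    with open('README.md', 'w') as fh:
        fh.write(rendered)
    print('README.md updated')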
Example #5
def main():

    module = AnsibleModule(argument_spec=dict(dest=dict(default='README.md'),
                                              template=dict(required=True),
                                              name=dict(aliases=['template']),
                                              timestamp=dict(
                                                  default='no',
                                                  choices=['yes', 'no']),
                                              insert=dict(default=False)),
                           supports_check_mode=False)

    # Try to import the library
    try:
        from cfautodoc.autodoc import autodoc
    except ImportError:
        module.fail_json(msg='Install the cfautodoc tool using pip.')

    templ = module.params['name']
    dest = module.params['dest']
    timestamp = module.params['timestamp']
    insert = module.params['insert']

    # Massage timestamp for library
    if timestamp == 'yes':
        timestamp = True
    else:
        timestamp = False

    # Generate the Markdown
    towrite = autodoc(templ, dest, timestamp, insert)

    # Write the towrite list
    if secure_hash(dest) == secure_hash_s(''.join(towrite)):
        msg = 'Documentation already up to date for {0}'.format(templ)
        module.exit_json(changed=False, written=msg)
    else:
        try:
            with open(dest, 'w') as destination:
                for line in towrite:
                    destination.write(line)
            msg = '{0} documentation updated to {1}'.format(templ, dest)
            module.exit_json(changed=True, written=msg)
        except Exception as exc:
            module.fail_json(msg=str(exc))
Example #6
    def v2_playbook_on_play_start(self, play):
        # This gets called once for each play, but we only issue a message for
        # the first one: one per "playbook".
        if self.playbook:
            # figure out where the playbook FILE is
            path = os.path.abspath(self.playbook._file_name)

            # Bail out early without publishing if we're in --check mode
            if self.play_context.check_mode:
                return

            if not self.playbook_path:
                try:
                    msg = Message(
                        topic="ansible.playbook.start",
                        body={
                            'playbook': path,
                            'userid': getlogin(),
                            'extra_vars': play._variable_manager.extra_vars,
                            'inventory': play._variable_manager._inventory._sources,
                            'playbook_checksum': secure_hash(path),
                            'check': self.play_context.check_mode
                        }
                    )
                    publish(msg)
                except PublishReturned as e:
                    LOGGER.warning(
                        "Fedora Messaging broker rejected message %s: %s", msg.id, e
                    )
                    print(
                        "Fedora Messaging broker rejected message %s: %s" % (msg.id, e)
                    )
                except ConnectionException as e:
                    LOGGER.warning("Error sending message %s: %s", msg.id, e)
                    print("Error sending message %s: %s", msg.id, e)
                    print(e)
                self.playbook_path = path
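A minimal sketch of the fedora-messaging publish pattern this callback uses, assuming the fedora_messaging package's standard api and exceptions modules and a configured broker (the topic and body here are illustrative):

import logging

from fedora_messaging.api import Message, publish
from fedora_messaging.exceptions import ConnectionException, PublishReturned

LOGGER = logging.getLogger(__name__)

msg = Message(topic="ansible.playbook.start", body={"playbook": "/path/to/site.yml"})
try:
    publish(msg)
except PublishReturned as e:
    LOGGER.warning("Fedora Messaging broker rejected message %s: %s", msg.id, e)
except ConnectionException as e:
    LOGGER.warning("Error sending message %s: %s", msg.id, e)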
Example #7
    def v2_playbook_on_play_start(self, play):
        self._task_count = 0

        if play:
            # figure out where the playbook FILE is
            path = os.path.abspath(self.playbook._file_name)

            # tell the logger what the playbook is
            logmech.playbook_id = path

            # if play count == 0
            # write out playbook info now
            if not self._play_count:
                pb_info = {}
                pb_info['playbook_start'] = time.time()
                pb_info['playbook'] = path
                pb_info['userid'] = getlogin()
                pb_info['extra_vars'] = play._variable_manager.extra_vars
                pb_info['inventory'] = play._variable_manager._inventory._sources
                pb_info['playbook_checksum'] = secure_hash(path)
                pb_info['check'] = self.play_context.check_mode
                pb_info['diff'] = self.play_context.diff
                logmech.play_log(json.dumps(pb_info, indent=4))

            self._play_count += 1
            # then write per-play info that doesn't duplicate the playbook info
            info = {}
            info['play'] = play.name
            info['hosts'] = play.hosts
            info['transport'] = self.play_context.connection
            info['number'] = self._play_count
            info['check'] = self.play_context.check_mode
            info['diff'] = self.play_context.diff
            logmech.play_info = info
            logmech.play_log(json.dumps(info, indent=4))
Example #8
    def v2_playbook_on_play_start(self, play):
        # This gets called once for each play, but we only issue a message for
        # the first one: one per "playbook".
        if self.playbook:
            # figure out where the playbook FILE is
            path = os.path.abspath(self.playbook._file_name)

            # Bail out early without publishing if we're in --check mode
            if self.play_context.check_mode:
                return

            if not self.playbook_path:
                fedmsg.publish(
                    modname="ansible", topic="playbook.start",
                    msg=dict(
                        playbook=path,
                        userid=getlogin(),
                        extra_vars=play._variable_manager.extra_vars,
                        inventory=play._variable_manager._inventory._sources,
                        playbook_checksum=secure_hash(path),
                        check=self.play_context.check_mode,
                    ),
                )
                self.playbook_path = path
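This older variant publishes directly over fedmsg instead of fedora-messaging; a minimal sketch, assuming the fedmsg package and a configured bus (values illustrative):

import fedmsg

fedmsg.publish(
    modname="ansible",
    topic="playbook.start",
    msg={"playbook": "/path/to/site.yml", "userid": "deployer"},
)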
Example #9
    def put_file(self, in_path, out_path):
        super(Connection, self).put_file(in_path, out_path)
        display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._psrp_host)

        out_path = self._shell._unquote(out_path)
        script = u'''begin {
    $ErrorActionPreference = "Stop"

    $path = '%s'
    $fd = [System.IO.File]::Create($path)
    $algo = [System.Security.Cryptography.SHA1CryptoServiceProvider]::Create()
    $bytes = @()
} process {
    $bytes = [System.Convert]::FromBase64String($input)
    $algo.TransformBlock($bytes, 0, $bytes.Length, $bytes, 0) > $null
    $fd.Write($bytes, 0, $bytes.Length)
} end {
    $fd.Close()
    $algo.TransformFinalBlock($bytes, 0, 0) > $null
    $hash = [System.BitConverter]::ToString($algo.Hash)
    $hash = $hash.Replace("-", "").ToLowerInvariant()

    Write-Output -InputObject "{`"sha1`":`"$hash`"}"
}''' % self._shell._escape(out_path)

        cmd_parts = self._shell._encode_script(script, as_list=True,
                                               strict_mode=False,
                                               preserve_rc=False)
        b_in_path = to_bytes(in_path, errors='surrogate_or_strict')
        if not os.path.exists(b_in_path):
            raise AnsibleFileNotFound('file or module does not exist: "%s"'
                                      % to_native(in_path))

        in_size = os.path.getsize(b_in_path)
        buffer_size = int(self.runspace.connection.max_payload_size / 4 * 3)

        # copying files is faster when using the raw WinRM shell and not PSRP
        # we will create a WinRS shell just for this process
        # TODO: speed this up as there is overhead creating a shell for this
        with WinRS(self.runspace.connection, codepage=65001) as shell:
            process = Process(shell, cmd_parts[0], cmd_parts[1:])
            process.begin_invoke()

            offset = 0
            with open(b_in_path, 'rb') as src_file:
                for data in iter((lambda: src_file.read(buffer_size)), b""):
                    offset += len(data)
                    display.vvvvv("PSRP PUT %s to %s (offset=%d, size=%d)" %
                                  (in_path, out_path, offset, len(data)),
                                  host=self._psrp_host)
                    b64_data = base64.b64encode(data) + b"\r\n"
                    process.send(b64_data, end=(src_file.tell() == in_size))

                # the file was empty, return empty buffer
                if offset == 0:
                    process.send(b"", end=True)

            process.end_invoke()
            process.signal(SignalCode.CTRL_C)

        if process.rc != 0:
            raise AnsibleError(to_native(process.stderr))

        put_output = json.loads(process.stdout)
        remote_sha1 = put_output.get("sha1")

        if not remote_sha1:
            raise AnsibleError("Remote sha1 was not returned, stdout: '%s', "
                               "stderr: '%s'" % (to_native(process.stdout),
                                                 to_native(process.stderr)))

        local_sha1 = secure_hash(in_path)
        if remote_sha1 != local_sha1:
            raise AnsibleError("Remote sha1 hash %s does not match local hash "
                               "%s" % (to_native(remote_sha1),
                                       to_native(local_sha1)))
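The PowerShell script hashes the upload incrementally with TransformBlock/TransformFinalBlock as the chunks arrive; a Python equivalent of that incremental SHA-1, for comparison (illustrative only):

import hashlib

def sha1_of_chunks(chunks):
    # accumulate the digest chunk by chunk, as the remote script does
    algo = hashlib.sha1()
    for chunk in chunks:
        algo.update(chunk)
    return algo.hexdigest()

print(sha1_of_chunks([b'hello ', b'world\n']))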
Example #10
    def run(self, tmp=None, task_vars=dict()):
        ''' handler for fetch operations '''

        # FIXME: is this even required anymore?
        #if self.runner.noop_on_check(inject):
        #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not (yet) supported for this module'))

        source = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        flat = boolean(self._task.args.get('flat'))
        fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
        validate_checksum = boolean(
            self._task.args.get('validate_checksum',
                                self._task.args.get('validate_md5')))

        if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
            return dict(
                failed=True,
                msg="validate_checksum and validate_md5 cannot both be specified")

        if source is None or dest is None:
            return dict(failed=True, msg="src and dest are required")

        source = self._shell.join_path(source)
        source = self._remote_expand_user(source, tmp)

        # calculate checksum for the remote file
        remote_checksum = self._remote_checksum(tmp, source)

        # use slurp if sudo and permissions are lacking
        remote_data = None
        if remote_checksum in ('1', '2') or self._connection_info.become:
            slurpres = self._execute_module(module_name='slurp',
                                            module_args=dict(src=source),
                                            tmp=tmp)
            if slurpres.get('rc') == 0:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get('source')
                if remote_source and remote_source != source:
                    source = remote_source
            else:
                # FIXME: should raise an error here? the old code did nothing
                pass

        # calculate the destination name
        if os.path.sep not in self._shell.join_path('a', ''):
            source_local = source.replace('\\', '/')
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest),
                                 self._connection_info.remote_addr,
                                 source_local)

        dest = dest.replace("//", "/")

        if remote_checksum in ('0', '1', '2', '3', '4'):
            # these don't fail because you may want to transfer a log file that possibly MAY exist
            # but keep going to fetch other log files
            if remote_checksum == '0':
                result = dict(
                    msg="unable to calculate the checksum of the remote file",
                    file=source,
                    changed=False)
            elif remote_checksum == '1':
                if fail_on_missing:
                    result = dict(failed=True,
                                  msg="the remote file does not exist",
                                  file=source)
                else:
                    result = dict(
                        msg="the remote file does not exist, not transferring, ignored",
                        file=source,
                        changed=False)
            elif remote_checksum == '2':
                result = dict(
                    msg="no read permission on remote file, not transferring, ignored",
                    file=source,
                    changed=False)
            elif remote_checksum == '3':
                result = dict(
                    msg="remote file is a directory, fetch cannot work on directories",
                    file=source,
                    changed=False)
            elif remote_checksum == '4':
                result = dict(
                    msg="python isn't present on the system.  Unable to compute checksum",
                    file=source,
                    changed=False)
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                f = open(dest, 'wb')
                f.write(remote_data)
                f.close()
            new_checksum = secure_hash(dest)
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                return dict(failed=True,
                            md5sum=new_md5,
                            msg="checksum mismatch",
                            file=source,
                            dest=dest,
                            remote_md5sum=None,
                            checksum=new_checksum,
                            remote_checksum=remote_checksum)
            return dict(changed=True,
                        md5sum=new_md5,
                        dest=dest,
                        remote_md5sum=None,
                        checksum=new_checksum,
                        remote_checksum=remote_checksum)
        else:
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None

            return dict(changed=False,
                        md5sum=local_md5,
                        file=source,
                        dest=dest,
                        checksum=local_checksum)
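When the plugin falls back to slurp, the remote content comes back base64-encoded and is hashed after decoding with checksum_s(). A small sketch of that step, with hypothetical content:

import base64

from ansible.utils.hashing import checksum_s

slurped = {'encoding': 'base64', 'content': 'aGVsbG8gd29ybGQK'}  # "hello world\n"
remote_data = base64.b64decode(slurped['content'])
print(checksum_s(remote_data))  # SHA-1 of the decoded payload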
Example #11
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        try:
            result = dict(msg="",
                          stderr="",
                          stdout="",
                          file="",
                          md5sum="",
                          dest="",
                          remote_md5sum="",
                          remote_checksum="",
                          checksum="",
                          delta="",
                          failed=False)
            savf_name = ''
            created = False
            is_savf = False
            savf = ''
            ifs_created = False
            backup = False
            is_lib = False
            force_save = False
            flat = False
            if self._play_context.check_mode:
                result['skipped'] = True
                result['msg'] = 'check mode not (yet) supported for this module'
                return result

            object_names = self._task.args.get('object_names', '*ALL')
            lib_name = self._task.args.get('lib_name', None)
            object_types = self._task.args.get('object_types', '*ALL')
            is_lib = boolean(self._task.args.get('is_lib', False), strict=True)
            savefile_name = self._task.args.get('savefile_name', None)
            force_save = boolean(self._task.args.get('force_save', False),
                                 strict=True)
            backup = boolean(self._task.args.get('backup', False), strict=True)
            format = self._task.args.get('format', '*SAVF')
            target_release = self._task.args.get('target_release', '*CURRENT')
            dest = self._task.args.get('dest', None)
            flat = boolean(self._task.args.get('flat', False), strict=True)
            fail_on_missing = boolean(self._task.args.get(
                'fail_on_missing', True),
                                      strict=True)
            validate_checksum = boolean(self._task.args.get(
                'validate_checksum', True),
                                        strict=True)

            # validate dest are strings FIXME: use basic.py and module specs
            if not isinstance(dest, string_types):
                result['msg'] = "Invalid type supplied for dest option, it must be a string. "

            if lib_name is None or dest is None:
                result['msg'] = "lib_name and dest are required. "

            object_names = object_names.upper()
            object_types = object_types.upper()
            format = format.upper()
            target_release = target_release.upper()

            if lib_name is not None:
                lib_name = lib_name.upper()

            if savefile_name is not None:
                savefile_name = savefile_name.upper()

            if lib_name == 'QSYS' and (is_lib is True or
                                       (object_names == '*ALL'
                                        and object_types == '*ALL')):
                result['msg'] = "QSYS can't be saved."

            if format != "*SAVF":
                result['msg'] = "format can only be *SAVF."

            if result.get('msg'):
                result['failed'] = True
                return result

            startd = datetime.datetime.now()

            if len(object_names.split()) == 1 and is_lib is not True:
                if object_types == '*ALL' or object_types == '*FILE':
                    if (object_names.split())[0][-1] == '*':
                        module_args = {
                            'object_name': object_names[0:-1] + '+',
                            'lib_name': lib_name,
                            'use_regex': True
                        }
                        module_output = self._execute_module(
                            module_name='ibmi_object_find',
                            module_args=module_args)
                        save_result = module_output
                        if len(save_result['object_list']) == 1 and save_result['object_list'][0]['OBJTYPE'] == '*FILE' and \
                           save_result['object_list'][0]['OBJATTRIBUTE'] == 'SAVF':
                            result['msg'] += "Object is a save file, fetch it directly."
                            savf_path = self._calculate_savf_path(
                                save_result['object_list'][0]['OBJNAME'],
                                lib_name)
                            savf_name = save_result['object_list'][0]['OBJNAME']
                            is_savf = True
                    else:
                        module_args = {
                            'object_name': object_names,
                            'lib_name': lib_name
                        }
                        module_output = self._execute_module(
                            module_name='ibmi_object_find',
                            module_args=module_args)
                        save_result = module_output
                        if len(save_result['object_list']) == 1 and save_result['object_list'][0]['OBJTYPE'] == '*FILE' and \
                           save_result['object_list'][0]['OBJATTRIBUTE'] == 'SAVF':
                            result['msg'] += "Object is a save file, fetch it directly."
                            savf_path = self._calculate_savf_path(
                                object_names, lib_name)
                            savf_name = object_names
                            is_savf = True
            if is_savf is False:
                savf_name, savf_path = self._calculate_savf_name(
                    object_names, lib_name, is_lib, savefile_name, task_vars,
                    result)
                if is_lib is True:
                    omitfile = 'OMITOBJ((%s/%s *FILE))' % (lib_name, savf_name)
                    module_args = {
                        'lib_name': lib_name,
                        'savefile_name': savf_name,
                        'savefile_lib': lib_name,
                        'target_release': target_release,
                        'force_save': force_save,
                        'joblog': True,
                        'parameters': omitfile
                    }
                    module_output = self._execute_module(
                        module_name='ibmi_lib_save', module_args=module_args)
                else:
                    omitfile = 'OMITOBJ((%s/%s *FILE))' % (lib_name, savf_name)
                    module_args = {
                        'object_names': object_names,
                        'object_lib': lib_name,
                        'object_types': object_types,
                        'savefile_name': savf_name,
                        'savefile_lib': lib_name,
                        'target_release': target_release,
                        'force_save': force_save,
                        'joblog': False,
                        'parameters': omitfile
                    }
                    module_output = self._execute_module(
                        module_name='ibmi_object_save',
                        module_args=module_args)

                save_result = module_output
                rc = save_result['rc']
                if rc != 0 or ('CPC3708' in save_result['stdout']):
                    result['msg'] = 'Create SAVF failed. See stderr or stdout for more information.'
                    result['failed'] = True
                    result['stderr'] = save_result['stderr_lines']
                    result['stdout'] = save_result['stdout_lines']
                    return result
                created = True

            display.debug("savf_name = %s, savf_path = %s, force_save=%s" %
                          (savf_name, savf_path, force_save))
            source = savf_path
            commandmk = 'mkdir %s' % ifs_dir
            command = 'cp %s %s' % (savf_path, ifs_dir)

            try:
                module_output = self._execute_module(
                    module_name='command',
                    module_args={'_raw_params': commandmk})
                save_result = module_output
                rc = save_result['rc']
                display.debug("save_result['stderr_lines'] = %s" %
                              (save_result['stderr_lines']))
                if rc != 0 and ('exists' not in save_result['stderr']):
                    result['msg'] = save_result['msg']
                    result['failed'] = True
                    result['stderr'] = save_result['stderr_lines']
                    return result
                module_output = self._execute_module(
                    module_name='command',
                    module_args={'_raw_params': command})
                save_result = module_output
                rc = save_result['rc']
                if rc != 0:
                    result['msg'] = save_result['msg']
                    result['failed'] = True
                    result['stderr'] = save_result['stderr_lines']
                    result['stdout'] = save_result['stdout_lines']
                    return result
                ifs_created = True
            except Exception as e:
                result['msg'] = to_text(e)
                result['failed'] = True
                return result

            source = '%s/%s' % (ifs_dir, os.path.basename(savf_path))

            if not isinstance(source, string_types):
                result['msg'] = "Invalid type supplied for source option, it must be a string"
                result['failed'] = True
                return result
            source = self._connection._shell.join_path(source)
            source = self._remote_expand_user(source)
            remote_checksum = None
            if not self._connection.become:
                # calculate checksum for the remote file, don't bother if using become as slurp will be used
                # Force remote_checksum to follow symlinks because fetch always follows symlinks
                remote_checksum = self._remote_checksum(source,
                                                        all_vars=task_vars,
                                                        follow=True)
            # use slurp if permissions are lacking or privilege escalation is needed
            remote_data = None
            if remote_checksum in ('1', '2', None):
                slurpres = self._execute_module(module_name='slurp',
                                                module_args=dict(src=source),
                                                task_vars=task_vars)
                if slurpres.get('failed'):
                    if not fail_on_missing and (
                            slurpres.get('msg').startswith('file not found')
                            or remote_checksum == '1'):
                        result['msg'] = "the remote file does not exist, not transferring, ignored"
                        result['file'] = source
                        result['changed'] = False
                    else:
                        result.update(slurpres)
                    return result
                else:
                    if slurpres['encoding'] == 'base64':
                        remote_data = base64.b64decode(slurpres['content'])
                    if remote_data is not None:
                        remote_checksum = checksum_s(remote_data)
                    # the source path may have been expanded on the
                    # target system, so we compare it here and use the
                    # expanded version if it's different
                    remote_source = slurpres.get('source')
                    if remote_source and remote_source != source:
                        source = remote_source

            # calculate the destination name
            if os.path.sep not in self._connection._shell.join_path('a', ''):
                source = self._connection._shell._unquote(source)
                qsys_source = self._connection._shell._unquote(savf_path)
                source_local = qsys_source.replace('\\', '/')
            else:
                source_local = savf_path

            dest = os.path.expanduser(dest)
            if flat:
                if not dest.startswith("/"):
                    # if dest does not start with "/", we'll assume a relative path
                    dest = self._loader.path_dwim(dest)
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            else:
                # files are saved in dest dir, with a subdir for each host, then the filename
                if 'inventory_hostname' in task_vars:
                    target_name = task_vars['inventory_hostname']
                else:
                    target_name = self._play_context.remote_addr
                dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name,
                                     source_local)

            dest = dest.replace("//", "/")

            if remote_checksum in ('0', '1', '2', '3', '4', '5'):
                result['changed'] = False
                result['file'] = source
                if remote_checksum == '0':
                    result['msg'] = "unable to calculate the checksum of the remote file"
                elif remote_checksum == '1':
                    result['msg'] = "the remote file does not exist"
                elif remote_checksum == '2':
                    result['msg'] = "no read permission on remote file"
                elif remote_checksum == '3':
                    result['msg'] = "remote file is a directory, fetch cannot work on directories"
                elif remote_checksum == '4':
                    result['msg'] = "python isn't present on the system.  Unable to compute checksum"
                elif remote_checksum == '5':
                    result['msg'] = "stdlib json was not found on the remote machine. Only the raw module can work without those installed"
                # Historically, these don't fail because you may want to transfer
                # a log file that possibly MAY exist but keep going to fetch other
                # log files. Today, this is better achieved by adding
                # ignore_errors or failed_when to the task.  Control the behaviour
                # via fail_on_missing
                if fail_on_missing:
                    result['failed'] = True
                    del result['changed']
                else:
                    result['msg'] += ", not transferring, ignored"
                return result

            # calculate checksum for the local file
            local_checksum = checksum(dest)
            if remote_checksum != local_checksum:
                # create the containing directories, if needed
                makedirs_safe(os.path.dirname(dest))

                # fetch the file and check for changes
                if remote_data is None:
                    self._connection.fetch_file(source, dest)
                else:
                    try:
                        f = open(to_bytes(dest, errors='surrogate_or_strict'),
                                 'wb')
                        f.write(remote_data)
                        f.close()
                    except (IOError, OSError) as e:
                        raise AnsibleError("Failed to fetch the file: %s" % e)
                new_checksum = secure_hash(dest)
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    new_md5 = md5(dest)
                except ValueError:
                    new_md5 = None

                if validate_checksum and new_checksum != remote_checksum:
                    result.update(
                        dict(failed=True,
                             md5sum=new_md5,
                             msg="checksum mismatch",
                             file=savf,
                             dest=dest,
                             remote_md5sum=None,
                             checksum=new_checksum,
                             remote_checksum=remote_checksum))
                else:
                    endd = datetime.datetime.now()
                    delta = endd - startd
                    if (created is True and backup is True) or is_savf is True:
                        savf = savf_path
                    result['msg'] += " File is renewed on local."
                    result.update({
                        'changed': True,
                        'md5sum': new_md5,
                        'dest': dest,
                        'remote_md5sum': None,
                        'checksum': new_checksum,
                        'remote_checksum': remote_checksum,
                        'delta': str(delta),
                        'file': savf
                    })
            else:
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    local_md5 = md5(dest)
                except ValueError:
                    local_md5 = None
                endd = datetime.datetime.now()
                delta = endd - startd
                if (created is True and backup is True) or is_savf is True:
                    savf = savf_path
                result.update(
                    dict(changed=False,
                         md5sum=local_md5,
                         file=savf,
                         delta=str(delta),
                         dest=dest,
                         checksum=local_checksum))

        except Exception as e:
            result['msg'] += "%s" % to_text(e)
            result['failed'] = True
            return result
        finally:
            if ((backup is False and is_savf is False)
                    or result['failed'] is True) and created is True:
                cmd = 'DLTOBJ OBJ(%s/%s) OBJTYPE(*FILE)' % (lib_name,
                                                            savf_name)
                module_output = self._execute_module(
                    module_name='ibmi_cl_command', module_args={'cmd': cmd})
                save_result = module_output
                rc = save_result['rc']
                if rc != 0 and ('CPF2105' not in save_result['stderr']):
                    result['msg'] += "Failed to delete SAVF on remote"
            if ifs_created is True:
                cmd = 'rm %s/%s' % (ifs_dir, os.path.basename(savf_path))
                try:
                    module_output = self._execute_module(
                        module_name='command',
                        module_args={'_raw_params': cmd})
                    save_result = module_output
                    rc = save_result['rc']
                    if rc != 0:
                        result['msg'] += "Failed to delete IFS on remote"
                except Exception as e:
                    result['msg'] += "exception happens when delete IFS file. error: %s" % to_text(e)

            self._remove_tmp_path(self._connection._shell.tmpdir)

        return result
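The recurring "backwards compatibility" blocks exist because ansible.utils.hashing.md5() raises ValueError on FIPS-enabled systems where MD5 is unavailable; a compact sketch of that pattern (local path hypothetical):

from ansible.utils.hashing import md5

try:
    md5sum = md5('/tmp/fetched/README.md')
except ValueError:
    # MD5 is unavailable (e.g. FIPS mode); fall back to None as the modules do
    md5sum = None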
Example #12
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        try:
            if self._play_context.check_mode:
                raise AnsibleActionSkip(
                    'check mode not (yet) supported for this module')

            source = self._task.args.get('src', None)
            original_dest = dest = self._task.args.get('dest', None)
            flat = boolean(self._task.args.get('flat'), strict=False)
            fail_on_missing = boolean(self._task.args.get(
                'fail_on_missing', True),
                                      strict=False)
            validate_checksum = boolean(self._task.args.get(
                'validate_checksum', True),
                                        strict=False)

            msg = ''
            # validate source and dest are strings FIXME: use basic.py and module specs
            if not isinstance(source, string_types):
                msg = "Invalid type supplied for source option, it must be a string"

            if not isinstance(dest, string_types):
                msg = "Invalid type supplied for dest option, it must be a string"

            if source is None or dest is None:
                msg = "src and dest are required"

            if msg:
                raise AnsibleActionFail(msg)

            source = self._connection._shell.join_path(source)
            source = self._remote_expand_user(source)

            remote_stat = {}
            remote_checksum = None
            if True:
                # Get checksum for the remote file even using become. Mitogen doesn't need slurp.
                # Follow symlinks because fetch always follows symlinks
                try:
                    remote_stat = self._execute_remote_stat(source,
                                                            all_vars=task_vars,
                                                            follow=True)
                except AnsibleError as ae:
                    result['changed'] = False
                    result['file'] = source
                    if fail_on_missing:
                        result['failed'] = True
                        result['msg'] = to_text(ae)
                    else:
                        result['msg'] = "%s, ignored" % to_text(
                            ae, errors='surrogate_or_replace')

                    return result

                remote_checksum = remote_stat.get('checksum')
                if remote_stat.get('exists'):
                    if remote_stat.get('isdir'):
                        result['failed'] = True
                        result['changed'] = False
                        result['msg'] = "remote file is a directory, fetch cannot work on directories"

                        # Historically, these don't fail because you may want to transfer
                        # a log file that possibly MAY exist but keep going to fetch other
                        # log files. Today, this is better achieved by adding
                        # ignore_errors or failed_when to the task.  Control the behaviour
                        # via fail_on_missing
                        if not fail_on_missing:
                            result['msg'] += ", not transferring, ignored"
                            del result['changed']
                            del result['failed']

                        return result

            # use slurp if permissions are lacking or privilege escalation is needed
            remote_data = None
            if remote_checksum in (None, '1', ''):
                slurpres = self._execute_module(
                    module_name='ansible.legacy.slurp',
                    module_args=dict(src=source),
                    task_vars=task_vars)
                if slurpres.get('failed'):
                    if not fail_on_missing:
                        result['file'] = source
                        result['changed'] = False
                    else:
                        result.update(slurpres)

                    if 'not found' in slurpres.get('msg', ''):
                        result['msg'] = "the remote file does not exist, not transferring, ignored"
                    elif slurpres.get('msg',
                                      '').startswith('source is a directory'):
                        result['msg'] = "remote file is a directory, fetch cannot work on directories"

                    return result
                else:
                    if slurpres['encoding'] == 'base64':
                        remote_data = base64.b64decode(slurpres['content'])
                    if remote_data is not None:
                        remote_checksum = checksum_s(remote_data)

            # calculate the destination name
            if os.path.sep not in self._connection._shell.join_path('a', ''):
                source = self._connection._shell._unquote(source)
                source_local = source.replace('\\', '/')
            else:
                source_local = source

            # ensure we only use file name, avoid relative paths
            if not is_subpath(dest, original_dest):
                # TODO: ? dest = os.path.expanduser(dest.replace(('../','')))
                raise AnsibleActionFail(
                    "Detected directory traversal, expected to be contained in '%s' but got '%s'"
                    % (original_dest, dest))

            if flat:
                if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
                    raise AnsibleActionFail(
                        "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                    )
                if dest.endswith(os.sep):
                    # if the path ends with "/", we'll use the source filename as the
                    # destination filename
                    base = os.path.basename(source_local)
                    dest = os.path.join(dest, base)
                if not dest.startswith("/"):
                    # if dest does not start with "/", we'll assume a relative path
                    dest = self._loader.path_dwim(dest)
            else:
                # files are saved in dest dir, with a subdir for each host, then the filename
                if 'inventory_hostname' in task_vars:
                    target_name = task_vars['inventory_hostname']
                else:
                    target_name = self._play_context.remote_addr
                dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name,
                                     source_local)

            dest = os.path.normpath(dest)

            # calculate checksum for the local file
            local_checksum = checksum(dest)

            if remote_checksum != local_checksum:
                # create the containing directories, if needed
                makedirs_safe(os.path.dirname(dest))

                # fetch the file and check for changes
                if remote_data is None:
                    self._connection.fetch_file(source, dest)
                else:
                    try:
                        f = open(to_bytes(dest, errors='surrogate_or_strict'),
                                 'wb')
                        f.write(remote_data)
                        f.close()
                    except (IOError, OSError) as e:
                        raise AnsibleActionFail(
                            "Failed to fetch the file: %s" % e)
                new_checksum = secure_hash(dest)
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    new_md5 = md5(dest)
                except ValueError:
                    new_md5 = None

                if validate_checksum and new_checksum != remote_checksum:
                    result.update(
                        dict(failed=True,
                             md5sum=new_md5,
                             msg="checksum mismatch",
                             file=source,
                             dest=dest,
                             remote_md5sum=None,
                             checksum=new_checksum,
                             remote_checksum=remote_checksum))
                else:
                    result.update({
                        'changed': True,
                        'md5sum': new_md5,
                        'dest': dest,
                        'remote_md5sum': None,
                        'checksum': new_checksum,
                        'remote_checksum': remote_checksum
                    })
            else:
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    local_md5 = md5(dest)
                except ValueError:
                    local_md5 = None
                result.update(
                    dict(changed=False,
                         md5sum=local_md5,
                         file=source,
                         dest=dest,
                         checksum=local_checksum))

        finally:
            self._remove_tmp_path(self._connection._shell.tmpdir)

        return result
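Unless flat is set, fetched files land under dest in a per-host subdirectory followed by the remote path. A rough standalone recreation of that destination layout (path_dwim and user expansion omitted):

import os

def fetched_dest(dest, source_local, hostname, flat=False):
    # simplified mirror of the destination calculation in the action plugin
    if flat:
        if dest.endswith(os.sep):
            dest = os.path.join(dest, os.path.basename(source_local))
        return os.path.normpath(dest)
    return os.path.normpath("%s/%s/%s" % (dest, hostname, source_local))

print(fetched_dest('/tmp/fetched', '/etc/hosts', 'web01'))
# /tmp/fetched/web01/etc/hosts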
Example #13
    def run(self, tmp=None, task_vars=dict()):
        """ handler for fetch operations """

        if self._connection_info.check_mode:
            return dict(skipped=True, msg="check mode not (yet) supported for this module")

        source = self._task.args.get("src", None)
        dest = self._task.args.get("dest", None)
        flat = boolean(self._task.args.get("flat"))
        fail_on_missing = boolean(self._task.args.get("fail_on_missing"))
        validate_checksum = boolean(self._task.args.get("validate_checksum", self._task.args.get("validate_md5")))

        if "validate_md5" in self._task.args and "validate_checksum" in self._task.args:
            return dict(failed=True, msg="validate_checksum and validate_md5 cannot both be specified")

        if source is None or dest is None:
            return dict(failed=True, msg="src and dest are required")

        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source, tmp)

        # calculate checksum for the remote file
        remote_checksum = self._remote_checksum(tmp, source)

        # use slurp if sudo and permissions are lacking
        remote_data = None
        if remote_checksum in ("1", "2") or self._connection_info.become:
            slurpres = self._execute_module(
                module_name="slurp", module_args=dict(src=source), task_vars=task_vars, tmp=tmp
            )
            if slurpres.get("rc") == 0:
                if slurpres["encoding"] == "base64":
                    remote_data = base64.b64decode(slurpres["content"])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get("source")
                if remote_source and remote_source != source:
                    source = remote_source
            else:
                # FIXME: should raise an error here? the old code did nothing
                pass

        # calculate the destination name
        if os.path.sep not in self._connection._shell.join_path("a", ""):
            source_local = source.replace("\\", "/")
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if "inventory_hostname" in task_vars:
                target_name = task_vars["inventory_hostname"]
            else:
                target_name = self._connection_info.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

        dest = dest.replace("//", "/")

        if remote_checksum in ("0", "1", "2", "3", "4"):
            # these don't fail because you may want to transfer a log file that possibly MAY exist
            # but keep going to fetch other log files
            if remote_checksum == "0":
                result = dict(msg="unable to calculate the checksum of the remote file", file=source, changed=False)
            elif remote_checksum == "1":
                if fail_on_missing:
                    result = dict(failed=True, msg="the remote file does not exist", file=source)
                else:
                    result = dict(
                        msg="the remote file does not exist, not transferring, ignored", file=source, changed=False
                    )
            elif remote_checksum == "2":
                result = dict(
                    msg="no read permission on remote file, not transferring, ignored", file=source, changed=False
                )
            elif remote_checksum == "3":
                result = dict(
                    msg="remote file is a directory, fetch cannot work on directories", file=source, changed=False
                )
            elif remote_checksum == "4":
                result = dict(
                    msg="python isn't present on the system.  Unable to compute checksum", file=source, changed=False
                )
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                # remote_data is bytes (base64-decoded), so write in binary mode
                f = open(dest, "wb")
                f.write(remote_data)
                f.close()
            new_checksum = secure_hash(dest)
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                return dict(
                    failed=True,
                    md5sum=new_md5,
                    msg="checksum mismatch",
                    file=source,
                    dest=dest,
                    remote_md5sum=None,
                    checksum=new_checksum,
                    remote_checksum=remote_checksum,
                )
            return dict(
                changed=True,
                md5sum=new_md5,
                dest=dest,
                remote_md5sum=None,
                checksum=new_checksum,
                remote_checksum=remote_checksum,
            )
        else:
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None

            return dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)
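
Before the next variant, a standalone sketch of the destination-path logic these fetch handlers share (flat vs. per-host layout). It is illustrative only: plain os.path.abspath stands in for the loader's path_dwim(), and this helper is not part of the plugin code above or below.

import os

def build_dest(dest, source_local, flat, inventory_hostname=None,
               remote_addr="127.0.0.1", path_dwim=os.path.abspath):
    """Return the local path a fetched file would be written to."""
    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            # trailing slash: keep the remote file's basename
            dest = os.path.join(dest, os.path.basename(source_local))
        if not dest.startswith("/"):
            # relative paths are resolved the way the plugin's path_dwim() would
            dest = path_dwim(dest)
    else:
        # default layout: <dest>/<hostname>/<full remote path>
        target_name = inventory_hostname or remote_addr
        dest = "%s/%s/%s" % (path_dwim(dest), target_name, source_local)
    return dest.replace("//", "/")

# e.g. build_dest("backups", "/var/log/syslog", flat=False, inventory_hostname="web01")
# -> "<cwd>/backups/web01/var/log/syslog"
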
Exemple #14
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        # FIXME: is this even required anymore?
        #if self.runner.noop_on_check(inject):
        #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not (yet) supported for this module'))

        source            = self._task.args.get('src', None)
        dest              = self._task.args.get('dest', None)
        flat              = boolean(self._task.args.get('flat'))
        fail_on_missing   = boolean(self._task.args.get('fail_on_missing'))
        validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5')))

        if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
            return dict(failed=True, msg="validate_checksum and validate_md5 cannot both be specified")

        if source is None or dest is None:
            return dict(failed=True, msg="src and dest are required")

        source = self._shell.join_path(source)
        source = self._remote_expand_user(source, tmp)

        # calculate checksum for the remote file
        remote_checksum = self._remote_checksum(tmp, source)

        # use slurp if permissions are lacking or sudo is needed
        remote_data = None
        if remote_checksum in ('1', '2') or self._connection_info.sudo:
            slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), tmp=tmp)
            if slurpres.get('rc') == 0:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get('source')
                if remote_source and remote_source != source:
                    source = remote_source
            else:
                # FIXME: should raise an error here? the old code did nothing
                pass

        # calculate the destination name
        if os.path.sep not in self._shell.join_path('a', ''):
            source_local = source.replace('\\', '/')
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith("/"):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), self._connection_info.remote_addr, source_local)

        dest = dest.replace("//","/")

        if remote_checksum in ('0', '1', '2', '3', '4'):
            # these don't fail because you may want to transfer a log file that possibly MAY exist
            # but keep going to fetch other log files
            if remote_checksum == '0':
                result = dict(msg="unable to calculate the checksum of the remote file", file=source, changed=False)
            elif remote_checksum == '1':
                if fail_on_missing:
                    result = dict(failed=True, msg="the remote file does not exist", file=source)
                else:
                    result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
            elif remote_checksum == '2':
                result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
            elif remote_checksum == '3':
                result = dict(msg="remote file is a directory, fetch cannot work on directories", file=source, changed=False)
            elif remote_checksum == '4':
                result = dict(msg="python isn't present on the system.  Unable to compute checksum", file=source, changed=False)
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                # remote_data is bytes (base64-decoded), so write in binary mode
                f = open(dest, 'wb')
                f.write(remote_data)
                f.close()
            new_checksum = secure_hash(dest)
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                return dict(failed=True, md5sum=new_md5, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)
            return dict(changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)
        else:
            # For backwards compatibility.  We'll return None on FIPS enabled
            # systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None

            return dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)
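
A minimal sketch of the slurp fall-back used above: when the checksum cannot be read directly (missing permissions or privilege escalation), the file content comes back base64-encoded and the hash is recomputed locally. hashlib.sha1 stands in here for checksum_s, which these snippets import from Ansible's hashing utilities.

import base64
import hashlib

def checksum_from_slurp(slurpres):
    """Return (raw bytes, sha1 hex digest) from a slurp-style result dict."""
    if slurpres.get('encoding') != 'base64':
        raise ValueError("unexpected encoding: %r" % slurpres.get('encoding'))
    data = base64.b64decode(slurpres['content'])
    return data, hashlib.sha1(data).hexdigest()

# e.g. checksum_from_slurp({'encoding': 'base64',
#                           'content': base64.b64encode(b'hello\n').decode()})
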
Exemple #15
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not (yet) supported for this module'
            return result

        source            = self._task.args.get('src', None)
        dest              = self._task.args.get('dest', None)
        flat              = boolean(self._task.args.get('flat'))
        fail_on_missing   = boolean(self._task.args.get('fail_on_missing'))
        validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5')))

        if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
            result['failed'] = True
            result['msg'] = "validate_checksum and validate_md5 cannot both be specified"
            return result

        if source is None or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
            return result

        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source)

        remote_checksum = None
        if not self._play_context.become:
            # calculate checksum for the remote file, don't bother if using become as slurp will be used
            remote_checksum = self._remote_checksum(source, all_vars=task_vars)

        # use slurp if permissions are lacking or privilege escalation is needed
        remote_data = None
        if remote_checksum in ('1', '2', None):
            slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
            if slurpres.get('failed'):
                if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
                    result['msg'] = "the remote file does not exist, not transferring, ignored"
                    result['file'] = source
                    result['changed'] = False
                else:
                    result.update(slurpres)
                return result
            else:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get('source')
                if remote_source and remote_source != source:
                    source = remote_source

        # calculate the destination name
        if os.path.sep not in self._connection._shell.join_path('a', ''):
            source = self._connection._shell._unquote(source)
            source_local = source.replace('\\', '/')
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if 'inventory_hostname' in task_vars:
                target_name = task_vars['inventory_hostname']
            else:
                target_name = self._play_context.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

        dest = dest.replace("//","/")

        if remote_checksum in ('0', '1', '2', '3', '4'):
            # these don't fail because you may want to transfer a log file that
            # possibly MAY exist but keep going to fetch other log files
            if remote_checksum == '0':
                result['msg'] = "unable to calculate the checksum of the remote file"
                result['file'] = source
                result['changed'] = False
            elif remote_checksum == '1':
                if fail_on_missing:
                    result['failed'] = True
                    result['msg'] = "the remote file does not exist"
                    result['file'] = source
                else:
                    result['msg'] = "the remote file does not exist, not transferring, ignored"
                    result['file'] = source
                    result['changed'] = False
            elif remote_checksum == '2':
                result['msg'] = "no read permission on remote file, not transferring, ignored"
                result['file'] = source
                result['changed'] = False
            elif remote_checksum == '3':
                result['msg'] = "remote file is a directory, fetch cannot work on directories"
                result['file'] = source
                result['changed'] = False
            elif remote_checksum == '4':
                result['msg'] = "python isn't present on the system.  Unable to compute checksum"
                result['file'] = source
                result['changed'] = False
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                try:
                    # remote_data is bytes (base64-decoded), so write in binary mode
                    f = open(to_bytes(dest, errors='strict'), 'wb')
                    f.write(remote_data)
                    f.close()
                except (IOError, OSError) as e:
                    raise AnsibleError("Failed to fetch the file: %s" % e)
            new_checksum = secure_hash(dest)
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                result.update(dict(failed=True, md5sum=new_md5,
                    msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
                    checksum=new_checksum, remote_checksum=remote_checksum))
            else:
                result.update(dict(changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum))
        else:
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None
            result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))

        return result
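
The next example references a REMOTE_CHECKSUM_ERRORS constant that is not shown here. A plausible definition, reconstructed from the messages used in the other variants (the '5' entry appears only in the later ones), would be:

REMOTE_CHECKSUM_ERRORS = {
    '0': "unable to calculate the checksum of the remote file",
    '1': "the remote file does not exist",
    '2': "no read permission on remote file",
    '3': "remote file is a directory, fetch cannot work on directories",
    '4': "python isn't present on the system.  Unable to compute checksum",
    '5': "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed",
}
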
Exemple #16
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        try:
            if self._play_context.check_mode:
                result['skipped'] = True
                result['msg'] = 'check mode not (yet) supported for this module'
                return result

            flat = boolean(self._task.args.get('flat'), strict=False)
            fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
            validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=False)

            # validate source and dest are strings FIXME: use basic.py and module specs
            source = self._task.args.get('src')
            if not isinstance(source, string_types):
                result['msg'] = "Invalid type supplied for source option, it must be a string"

            dest = self._task.args.get('dest')
            if not isinstance(dest, string_types):
                result['msg'] = "Invalid type supplied for dest option, it must be a string"

            if result.get('msg'):
                result['failed'] = True
                return result

            source = self._connection._shell.join_path(source)
            source = self._remote_expand_user(source)

            # calculate checksum for the remote file, don't bother if using
            # become as slurp will be used. Force remote_checksum to follow
            # symlinks because fetch always follows symlinks
            remote_checksum = self._remote_checksum(source,
                                                    all_vars=task_vars,
                                                    follow=True)

            # calculate the destination name
            if os.path.sep not in self._connection._shell.join_path('a', ''):
                source = self._connection._shell._unquote(source)
                source_local = source.replace('\\', '/')
            else:
                source_local = source

            dest = os.path.expanduser(dest)
            if flat:
                if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
                    result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                    result['file'] = dest
                    result['failed'] = True
                    return result
                if dest.endswith(os.sep):
                    # if the path ends with "/", we'll use the source filename as the
                    # destination filename
                    base = os.path.basename(source_local)
                    dest = os.path.join(dest, base)
                if not dest.startswith("/"):
                    # if dest does not start with "/", we'll assume a relative path
                    dest = self._loader.path_dwim(dest)
            else:
                # files are saved in dest dir, with a subdir for each host, then the filename
                if 'inventory_hostname' in task_vars:
                    target_name = task_vars['inventory_hostname']
                else:
                    target_name = self._play_context.remote_addr
                dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name,
                                     source_local)

            dest = dest.replace("//", "/")

            if remote_checksum in REMOTE_CHECKSUM_ERRORS:
                result['changed'] = False
                result['file'] = source
                result['msg'] = REMOTE_CHECKSUM_ERRORS[remote_checksum]
                # Historically, these don't fail because you may want to transfer
                # a log file that possibly MAY exist but keep going to fetch other
                # log files. Today, this is better achieved by adding
                # ignore_errors or failed_when to the task.  Control the behaviour
                # via fail_on_missing
                if fail_on_missing:
                    result['failed'] = True
                    del result['changed']
                else:
                    result['msg'] += ", not transferring, ignored"
                return result

            # calculate checksum for the local file
            local_checksum = checksum(dest)

            if remote_checksum != local_checksum:
                # create the containing directories, if needed
                makedirs_safe(os.path.dirname(dest))

                # fetch the file and check for changes
                self._connection.fetch_file(source, dest)
                new_checksum = secure_hash(dest)
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    new_md5 = md5(dest)
                except ValueError:
                    new_md5 = None

                if validate_checksum and new_checksum != remote_checksum:
                    result.update(
                        dict(failed=True,
                             md5sum=new_md5,
                             msg="checksum mismatch",
                             file=source,
                             dest=dest,
                             remote_md5sum=None,
                             checksum=new_checksum,
                             remote_checksum=remote_checksum))
                else:
                    result.update({
                        'changed': True,
                        'md5sum': new_md5,
                        'dest': dest,
                        'remote_md5sum': None,
                        'checksum': new_checksum,
                        'remote_checksum': remote_checksum
                    })
            else:
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    local_md5 = md5(dest)
                except ValueError:
                    local_md5 = None
                result.update(
                    dict(changed=False,
                         md5sum=local_md5,
                         file=source,
                         dest=dest,
                         checksum=local_checksum))

        finally:
            self._remove_tmp_path(self._connection._shell.tmpdir)

        return result
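
A condensed, standalone sketch of the transfer-and-verify step all of these handlers end with: fetch only when the local hash differs, then optionally fail on a mismatch. It assumes a connection object exposing fetch_file(), and hashlib.sha1 again stands in for secure_hash()/checksum().

import hashlib
import os

def _sha1_of(path):
    with open(path, 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest()

def fetch_and_verify(connection, source, dest, remote_checksum, validate_checksum=True):
    """Sketch only: mirrors the changed/failed reporting seen in the examples."""
    local_checksum = _sha1_of(dest) if os.path.exists(dest) else None
    if remote_checksum == local_checksum:
        return {'changed': False, 'checksum': local_checksum}

    os.makedirs(os.path.dirname(dest) or '.', exist_ok=True)
    connection.fetch_file(source, dest)

    new_checksum = _sha1_of(dest)
    if validate_checksum and new_checksum != remote_checksum:
        return {'failed': True, 'msg': "checksum mismatch",
                'checksum': new_checksum, 'remote_checksum': remote_checksum}
    return {'changed': True, 'checksum': new_checksum, 'remote_checksum': remote_checksum}
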
Exemple #17
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        try:
            if self._play_context.check_mode:
                result['skipped'] = True
                result['msg'] = 'check mode not (yet) supported for this module'
                return result

            source = self._task.args.get('src', None)
            dest = self._task.args.get('dest', None)
            flat = boolean(self._task.args.get('flat'), strict=False)
            fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
            validate_checksum = boolean(self._task.args.get('validate_checksum',
                                                            self._task.args.get('validate_md5', True)),
                                        strict=False)

            # validate source and dest are strings FIXME: use basic.py and module specs
            if not isinstance(source, string_types):
                result['msg'] = "Invalid type supplied for source option, it must be a string"

            if not isinstance(dest, string_types):
                result['msg'] = "Invalid type supplied for dest option, it must be a string"

            # validate_md5 is the deprecated way to specify validate_checksum
            if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
                result['msg'] = "validate_checksum and validate_md5 cannot both be specified"

            if 'validate_md5' in self._task.args:
                display.deprecated('Use validate_checksum instead of validate_md5', version='2.8')

            if source is None or dest is None:
                result['msg'] = "src and dest are required"

            if result.get('msg'):
                result['failed'] = True
                return result

            source = self._connection._shell.join_path(source)
            source = self._remote_expand_user(source)

            remote_checksum = None
            if not self._play_context.become:
                # calculate checksum for the remote file, don't bother if using become as slurp will be used
                # Force remote_checksum to follow symlinks because fetch always follows symlinks
                remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)

            # use slurp if permissions are lacking or privilege escalation is needed
            remote_data = None
            if remote_checksum in ('1', '2', None):
                slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars)
                if slurpres.get('failed'):
                    if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
                        result['msg'] = "the remote file does not exist, not transferring, ignored"
                        result['file'] = source
                        result['changed'] = False
                    else:
                        result.update(slurpres)
                    return result
                else:
                    if slurpres['encoding'] == 'base64':
                        remote_data = base64.b64decode(slurpres['content'])
                    if remote_data is not None:
                        remote_checksum = checksum_s(remote_data)
                    # the source path may have been expanded on the
                    # target system, so we compare it here and use the
                    # expanded version if it's different
                    remote_source = slurpres.get('source')
                    if remote_source and remote_source != source:
                        source = remote_source

            # calculate the destination name
            if os.path.sep not in self._connection._shell.join_path('a', ''):
                source = self._connection._shell._unquote(source)
                source_local = source.replace('\\', '/')
            else:
                source_local = source

            dest = os.path.expanduser(dest)
            if flat:
                if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
                    result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                    result['file'] = dest
                    result['failed'] = True
                    return result
                if dest.endswith(os.sep):
                    # if the path ends with "/", we'll use the source filename as the
                    # destination filename
                    base = os.path.basename(source_local)
                    dest = os.path.join(dest, base)
                if not dest.startswith("/"):
                    # if dest does not start with "/", we'll assume a relative path
                    dest = self._loader.path_dwim(dest)
            else:
                # files are saved in dest dir, with a subdir for each host, then the filename
                if 'inventory_hostname' in task_vars:
                    target_name = task_vars['inventory_hostname']
                else:
                    target_name = self._play_context.remote_addr
                dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

            dest = dest.replace("//", "/")

            if remote_checksum in ('0', '1', '2', '3', '4', '5'):
                result['changed'] = False
                result['file'] = source
                if remote_checksum == '0':
                    result['msg'] = "unable to calculate the checksum of the remote file"
                elif remote_checksum == '1':
                    result['msg'] = "the remote file does not exist"
                elif remote_checksum == '2':
                    result['msg'] = "no read permission on remote file"
                elif remote_checksum == '3':
                    result['msg'] = "remote file is a directory, fetch cannot work on directories"
                elif remote_checksum == '4':
                    result['msg'] = "python isn't present on the system.  Unable to compute checksum"
                elif remote_checksum == '5':
                    result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
                # Historically, these don't fail because you may want to transfer
                # a log file that possibly MAY exist but keep going to fetch other
                # log files. Today, this is better achieved by adding
                # ignore_errors or failed_when to the task.  Control the behaviour
                # via fail_on_missing
                if fail_on_missing:
                    result['failed'] = True
                    del result['changed']
                else:
                    result['msg'] += ", not transferring, ignored"
                return result

            # calculate checksum for the local file
            local_checksum = checksum(dest)

            if remote_checksum != local_checksum:
                # create the containing directories, if needed
                makedirs_safe(os.path.dirname(dest))

                # fetch the file and check for changes
                if remote_data is None:
                    self._connection.fetch_file(source, dest)
                else:
                    try:
                        f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
                        f.write(remote_data)
                        f.close()
                    except (IOError, OSError) as e:
                        raise AnsibleError("Failed to fetch the file: %s" % e)
                new_checksum = secure_hash(dest)
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    new_md5 = md5(dest)
                except ValueError:
                    new_md5 = None

                if validate_checksum and new_checksum != remote_checksum:
                    result.update(dict(failed=True, md5sum=new_md5,
                                       msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
                                       checksum=new_checksum, remote_checksum=remote_checksum))
                else:
                    result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
                                   'remote_md5sum': None, 'checksum': new_checksum,
                                   'remote_checksum': remote_checksum})
            else:
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    local_md5 = md5(dest)
                except ValueError:
                    local_md5 = None
                result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))

        finally:
            self._remove_tmp_path(self._connection._shell.tmpdir)

        return result
Exemple #18
    def put_file(self, in_path, out_path):
        super(Connection, self).put_file(in_path, out_path)
        out_path = self._shell._unquote(out_path)
        display.vvv('PUT "%s" TO "%s"' % (in_path, out_path),
                    host=self._winrm_host)
        if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
            raise AnsibleFileNotFound('file or module does not exist: "%s"' %
                                      in_path)

        script_template = u'''
            begin {{
                $path = '{0}'

                $DebugPreference = "Continue"
                $ErrorActionPreference = "Stop"
                Set-StrictMode -Version 2

                $fd = [System.IO.File]::Create($path)

                $sha1 = [System.Security.Cryptography.SHA1CryptoServiceProvider]::Create()

                $bytes = @() #initialize for empty file case
            }}
            process {{
               $bytes = [System.Convert]::FromBase64String($input)
               $sha1.TransformBlock($bytes, 0, $bytes.Length, $bytes, 0) | Out-Null
               $fd.Write($bytes, 0, $bytes.Length)
            }}
            end {{
                $sha1.TransformFinalBlock($bytes, 0, 0) | Out-Null

                $hash = [System.BitConverter]::ToString($sha1.Hash).Replace("-", "").ToLowerInvariant()

                $fd.Close()

                Write-Output "{{""sha1"":""$hash""}}"
            }}
        '''

        script = script_template.format(self._shell._escape(out_path))
        cmd_parts = self._shell._encode_script(script,
                                               as_list=True,
                                               strict_mode=False,
                                               preserve_rc=False)

        result = self._winrm_exec(cmd_parts[0],
                                  cmd_parts[1:],
                                  stdin_iterator=self._put_file_stdin_iterator(
                                      in_path, out_path))
        # TODO: improve error handling
        if result.status_code != 0:
            raise AnsibleError(to_native(result.std_err))

        put_output = json.loads(result.std_out)
        remote_sha1 = put_output.get("sha1")

        if not remote_sha1:
            raise AnsibleError("Remote sha1 was not returned")

        local_sha1 = secure_hash(in_path)

        if not remote_sha1 == local_sha1:
            raise AnsibleError(
                "Remote sha1 hash {0} does not match local hash {1}".format(
                    to_native(remote_sha1), to_native(local_sha1)))
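
The put_file() example above streams the local file to that PowerShell script through self._put_file_stdin_iterator(), which is not shown. A hypothetical version of that helper would read the file in chunks and yield base64-encoded lines together with an end-of-stream flag, roughly like this:

import base64
import os

def put_file_stdin_iterator(in_path, buffer_size=250 * 1024):
    """Yield (base64 line, is_last) pairs for a local file, chunk by chunk."""
    in_size = os.path.getsize(in_path)
    offset = 0
    with open(in_path, 'rb') as f:
        for chunk in iter(lambda: f.read(buffer_size), b''):
            offset += len(chunk)
            yield base64.b64encode(chunk) + b'\r\n', offset == in_size
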
Exemple #19
    def run(self, tmp=None, task_vars=None):
        ''' handler for fetch operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        try:
            if self._play_context.check_mode:
                result['skipped'] = True
                result['msg'] = 'check mode not (yet) supported for this module'
                return result

            source = self._task.args.get('src', None)
            dest = self._task.args.get('dest', None)
            flat = boolean(self._task.args.get('flat'), strict=False)
            fail_on_missing = boolean(self._task.args.get('fail_on_missing'), strict=False)
            validate_checksum = boolean(self._task.args.get('validate_checksum',
                                                            self._task.args.get('validate_md5', True)),
                                        strict=False)

            # validate source and dest are strings FIXME: use basic.py and module specs
            if not isinstance(source, string_types):
                result['msg'] = "Invalid type supplied for source option, it must be a string"

            if not isinstance(dest, string_types):
                result['msg'] = "Invalid type supplied for dest option, it must be a string"

            # validate_md5 is the deprecated way to specify validate_checksum
            if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
                result['msg'] = "validate_checksum and validate_md5 cannot both be specified"

            if 'validate_md5' in self._task.args:
                display.deprecated('Use validate_checksum instead of validate_md5', version='2.8')

            if source is None or dest is None:
                result['msg'] = "src and dest are required"

            if result.get('msg'):
                result['failed'] = True
                return result

            source = self._connection._shell.join_path(source)
            source = self._remote_expand_user(source)

            remote_checksum = None
            if not self._play_context.become:
                # calculate checksum for the remote file, don't bother if using become as slurp will be used
                # Force remote_checksum to follow symlinks because fetch always follows symlinks
                remote_checksum = self._remote_checksum(source,
                                                        all_vars=task_vars,
                                                        follow=True)

            # use slurp if permissions are lacking or privilege escalation is needed
            remote_data = None
            if remote_checksum in ('1', '2', None):
                slurpres = self._execute_module(module_name='slurp',
                                                module_args=dict(src=source),
                                                task_vars=task_vars)
                if slurpres.get('failed'):
                    if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
                        result['msg'] = "the remote file does not exist, not transferring, ignored"
                        result['file'] = source
                        result['changed'] = False
                    else:
                        result.update(slurpres)
                    return result
                else:
                    if slurpres['encoding'] == 'base64':
                        remote_data = base64.b64decode(slurpres['content'])
                    if remote_data is not None:
                        remote_checksum = checksum_s(remote_data)
                    # the source path may have been expanded on the
                    # target system, so we compare it here and use the
                    # expanded version if it's different
                    remote_source = slurpres.get('source')
                    if remote_source and remote_source != source:
                        source = remote_source

            # calculate the destination name
            if os.path.sep not in self._connection._shell.join_path('a', ''):
                source = self._connection._shell._unquote(source)
                source_local = source.replace('\\', '/')
            else:
                source_local = source

            dest = os.path.expanduser(dest)
            if flat:
                if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
                    result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                    result['file'] = dest
                    result['failed'] = True
                    return result
                if dest.endswith(os.sep):
                    # if the path ends with "/", we'll use the source filename as the
                    # destination filename
                    base = os.path.basename(source_local)
                    dest = os.path.join(dest, base)
                if not dest.startswith("/"):
                    # if dest does not start with "/", we'll assume a relative path
                    dest = self._loader.path_dwim(dest)
            else:
                # files are saved in dest dir, with a subdir for each host, then the filename
                if 'inventory_hostname' in task_vars:
                    target_name = task_vars['inventory_hostname']
                else:
                    target_name = self._play_context.remote_addr
                dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name,
                                     source_local)

            dest = dest.replace("//", "/")

            if remote_checksum in ('0', '1', '2', '3', '4', '5'):
                result['changed'] = False
                result['file'] = source
                if remote_checksum == '0':
                    result['msg'] = "unable to calculate the checksum of the remote file"
                elif remote_checksum == '1':
                    result['msg'] = "the remote file does not exist"
                elif remote_checksum == '2':
                    result['msg'] = "no read permission on remote file"
                elif remote_checksum == '3':
                    result['msg'] = "remote file is a directory, fetch cannot work on directories"
                elif remote_checksum == '4':
                    result['msg'] = "python isn't present on the system.  Unable to compute checksum"
                elif remote_checksum == '5':
                    result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
                # Historically, these don't fail because you may want to transfer
                # a log file that possibly MAY exist but keep going to fetch other
                # log files. Today, this is better achieved by adding
                # ignore_errors or failed_when to the task.  Control the behaviour
                # via fail_on_missing
                if fail_on_missing:
                    result['failed'] = True
                    del result['changed']
                else:
                    result['msg'] += ", not transferring, ignored"
                return result

            # calculate checksum for the local file
            local_checksum = checksum(dest)

            if remote_checksum != local_checksum:
                # create the containing directories, if needed
                makedirs_safe(os.path.dirname(dest))

                # fetch the file and check for changes
                if remote_data is None:
                    self._connection.fetch_file(source, dest)
                else:
                    try:
                        f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
                        f.write(remote_data)
                        f.close()
                    except (IOError, OSError) as e:
                        raise AnsibleError("Failed to fetch the file: %s" % e)
                new_checksum = secure_hash(dest)
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    new_md5 = md5(dest)
                except ValueError:
                    new_md5 = None

                if validate_checksum and new_checksum != remote_checksum:
                    result.update(
                        dict(failed=True,
                             md5sum=new_md5,
                             msg="checksum mismatch",
                             file=source,
                             dest=dest,
                             remote_md5sum=None,
                             checksum=new_checksum,
                             remote_checksum=remote_checksum))
                else:
                    result.update({
                        'changed': True,
                        'md5sum': new_md5,
                        'dest': dest,
                        'remote_md5sum': None,
                        'checksum': new_checksum,
                        'remote_checksum': remote_checksum
                    })
            else:
                # For backwards compatibility. We'll return None on FIPS enabled systems
                try:
                    local_md5 = md5(dest)
                except ValueError:
                    local_md5 = None
                result.update(
                    dict(changed=False,
                         md5sum=local_md5,
                         file=source,
                         dest=dest,
                         checksum=local_checksum))

        finally:
            self._remove_tmp_path(self._connection._shell.tmpdir)

        return result
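
Finally, a small sketch of the source normalisation that opens the "calculate the destination name" block in every variant: when the remote shell joins paths with a separator different from the local one (for example a Windows target managed from a POSIX controller), backslashes are flipped so the fetched file nests cleanly under the per-host directory. remote_join is a stand-in for self._connection._shell.join_path.

import os

def normalise_source(source, remote_join):
    """Return a forward-slash path when the remote shell is not POSIX-like."""
    if os.path.sep not in remote_join('a', ''):
        # the local separator is absent from the remote-joined path,
        # so treat the source as a Windows-style path
        return source.replace('\\', '/')
    return source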