def _handle_backup_option(self, result, task_vars):
    """Write the device configuration backup carried in ``result`` to disk.

    Pops the raw ``__backup__`` text out of ``result``, strips non-config
    lines, writes it under ``backup_options.dir_path`` (default:
    ``<cwd>/backup``) with ``backup_options.filename`` (default:
    ``<inventory_hostname>_config.<timestamp>``), and records
    ``backup_path``/``changed``/``date``/``time`` (plus ``filename`` and
    ``shortname`` when no explicit filename was given) back into ``result``.
    On a write failure it sets ``failed``/``msg`` in ``result`` and returns.
    """
    filename = None
    backup_path = None
    # Filters used to scrub volatile, non-configuration lines from the
    # backup; absent on platforms whose cliconf lacks the option.
    try:
        non_config_regexes = self._connection.cliconf.get_option(
            "non_config_lines", task_vars)
    except (AttributeError, KeyError):
        non_config_regexes = []
    # The module must have produced a __backup__ key; anything else is an error.
    try:
        content = self._sanitize_contents(
            contents=result.pop("__backup__"), filters=non_config_regexes)
    except KeyError:
        raise AnsibleError("Failed while reading configuration backup")
    backup_options = self._task.args.get("backup_options")
    if backup_options:
        filename = backup_options.get("filename")
        backup_path = backup_options.get("dir_path")
    tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
    if not backup_path:
        cwd = self._get_working_path()
        backup_path = os.path.join(cwd, "backup")
    if not filename:
        filename = "%s_config.%s" % (
            task_vars["inventory_hostname"],
            tstamp,
        )
    dest = os.path.join(backup_path, filename)
    if not os.path.exists(backup_path):
        os.makedirs(backup_path)
    changed = False
    # Do not overwrite the destination if the contents match.
    if not os.path.exists(dest) or checksum(dest) != checksum_s(content):
        try:
            with open(dest, "w") as output_file:
                output_file.write(content)
        except Exception as exc:
            result["failed"] = True
            result[
                "msg"] = "Could not write to destination file %s: %s" % (
                    dest,
                    to_text(exc),
                )
            return
        changed = True
    result["backup_path"] = dest
    result["changed"] = changed
    result["date"], result["time"] = tstamp.split("@")
    if not (backup_options and backup_options.get("filename")):
        result["filename"] = os.path.basename(result["backup_path"])
        # NOTE(review): despite the name, this is the full path minus the
        # extension, not just the basename.
        result["shortname"] = os.path.splitext(result["backup_path"])[0]
def run(self, tmp=None, task_vars=None):
    """Run a remote command and fetch its captured output to a local file.

    Required task args: ``cmd`` (command to run on the target) and ``dest``
    (local path for the captured output). Optional ``stdin`` is a remote
    path fed to the command. The target-side module writes the output to a
    temp file, which is fetched only when its checksum differs from the
    existing local copy.
    """
    if task_vars is None:
        task_vars = dict()
    # The remote command cannot be executed in check mode.
    if self._play_context.check_mode:
        return dict(skipped=True, msg='check mode not supported for this module')
    result = super(ActionModule, self).run(tmp, task_vars)
    cmd = self._task.args.get('cmd', None)
    stdin = self._task.args.get('stdin', None)
    dest = self._task.args.get('dest', None)
    if cmd is None or dest is None:
        return dict(failed=True, msg="cmd and dest are required")
    if stdin is not None:
        # Normalize and tilde-expand the remote stdin path.
        stdin = self._connection._shell.join_path(stdin)
        stdin = self._remote_expand_user(stdin)
    remote_user = task_vars.get(
        'ansible_ssh_user') or self._play_context.remote_user
    # Remote temp file the module fills with the command's output.
    stdout = self._connection._shell.join_path(
        self._make_tmp_path(remote_user), 'stdout')
    result.update(
        self._execute_module(module_args=dict(cmd=cmd,
                                              stdin=stdin,
                                              dest=stdout),
                             task_vars=task_vars))
    # calculate checksum for the local file
    local_checksum = checksum(dest)
    # calculate checksum for the remote file, don't bother if using become as slurp will be used
    remote_checksum = self._remote_checksum(stdout, all_vars=task_vars)
    if remote_checksum != local_checksum:
        # Only transfer when contents differ; re-checksum to verify the copy.
        makedirs_safe(os.path.dirname(dest))
        self._connection.fetch_file(stdout, dest)
        if checksum(dest) == remote_checksum:
            result.update(dict(changed=True))
        else:
            result.update(dict(failed=True))
    return result
def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module):
    """Fetching a data set over an existing local file must replace it."""
    hosts = ansible_zos_module
    src_ds = 'IMSTESTL.IMS01.DDCHKPT'
    local_file = "/tmp/" + src_ds
    # Pre-create a local file with dummy contents so the fetch has
    # something to overwrite.
    with open(local_file, 'w') as handle:
        handle.write(DUMMY_DATA)
    checksum_before = checksum(local_file, hash_func=sha256)
    try:
        fetch_results = hosts.all.zos_fetch(src=src_ds, dest='/tmp/', flat=True)
        for res in fetch_results.contacted.values():
            assert res.get('changed') is True
            assert res.get('module_stderr') is None
            # The local copy must have been replaced by the fetched content.
            assert checksum(local_file, hash_func=sha256) != checksum_before
    finally:
        if os.path.exists(local_file):
            os.remove(local_file)
def test_fetch_uss_file_present_on_local_machine(ansible_zos_module):
    """A second fetch of an unchanged USS file must report changed=False."""
    hosts = ansible_zos_module
    fetch_args = dict(src='/etc/profile', dest='/tmp/', flat=True)
    local_copy = '/tmp/profile'
    # First fetch primes the local copy; record its checksum.
    hosts.all.zos_fetch(**fetch_args)
    expected_checksum = checksum(local_copy, hash_func=sha256)
    try:
        second_run = hosts.all.zos_fetch(**fetch_args)
        for res in second_run.contacted.values():
            assert res.get('changed') is False
            assert res.get('checksum') == expected_checksum
            assert res.get('module_stderr') is None
    finally:
        os.remove(local_copy)
def test_fetch_uss_file_replace_on_local_machine(ansible_zos_module):
    """Fetching over a stale (empty) local copy must replace it."""
    # Truncate/create an empty local copy so it differs from the remote file.
    with open('/tmp/profile', 'w'):
        pass
    hosts = ansible_zos_module
    fetch_args = dict(src='/etc/profile', dest='/tmp/', flat=True)
    local_copy = '/tmp/profile'
    stale_checksum = checksum(local_copy, hash_func=sha256)
    try:
        run_results = hosts.all.zos_fetch(**fetch_args)
        for res in run_results.contacted.values():
            assert res.get('changed') is True
            assert res.get('checksum') != stale_checksum
            assert res.get('module_stderr') is None
            assert os.path.exists(local_copy)
    finally:
        os.remove(local_copy)
def run(self, tmp=None, task_vars=None):
    ''' handler for fetch operations

    Validates src/dest, computes the remote file's checksum (falling back
    to the slurp module when permissions are lacking or become is in use),
    derives the local destination path, and transfers the file only when
    the checksums differ. Returns md5sum/checksum fields for backwards
    compatibility.
    '''
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    try:
        if self._play_context.check_mode:
            result['skipped'] = True
            result[
                'msg'] = 'check mode not (yet) supported for this module'
            return result
        source = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        flat = boolean(self._task.args.get('flat'), strict=False)
        fail_on_missing = boolean(self._task.args.get('fail_on_missing'),
                                  strict=False)
        validate_checksum = boolean(self._task.args.get(
            'validate_checksum', self._task.args.get('validate_md5', True)),
            strict=False)
        # validate source and dest are strings FIXME: use basic.py and module specs
        if not isinstance(source, string_types):
            result[
                'msg'] = "Invalid type supplied for source option, it must be a string"
        if not isinstance(dest, string_types):
            result[
                'msg'] = "Invalid type supplied for dest option, it must be a string"
        # validate_md5 is the deprecated way to specify validate_checksum
        if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
            result[
                'msg'] = "validate_checksum and validate_md5 cannot both be specified"
        if 'validate_md5' in self._task.args:
            display.deprecated(
                'Use validate_checksum instead of validate_md5',
                version='2.8')
        if source is None or dest is None:
            result['msg'] = "src and dest are required"
        # Any validation message above marks the task failed.
        if result.get('msg'):
            result['failed'] = True
            return result
        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source)
        remote_checksum = None
        if not self._play_context.become:
            # calculate checksum for the remote file, don't bother if using become as slurp will be used
            # Force remote_checksum to follow symlinks because fetch always follows symlinks
            remote_checksum = self._remote_checksum(source,
                                                    all_vars=task_vars,
                                                    follow=True)
        # use slurp if permissions are lacking or privilege escalation is needed
        remote_data = None
        # '1' = missing file, '2' = unreadable, None = become in use.
        if remote_checksum in ('1', '2', None):
            slurpres = self._execute_module(module_name='slurp',
                                            module_args=dict(src=source),
                                            task_vars=task_vars)
            if slurpres.get('failed'):
                if not fail_on_missing and (
                        slurpres.get('msg').startswith('file not found')
                        or remote_checksum == '1'):
                    result[
                        'msg'] = "the remote file does not exist, not transferring, ignored"
                    result['file'] = source
                    result['changed'] = False
                else:
                    result.update(slurpres)
                return result
            else:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get('source')
                if remote_source and remote_source != source:
                    source = remote_source
        # calculate the destination name
        if os.path.sep not in self._connection._shell.join_path('a', ''):
            # Non-POSIX shell on the target (e.g. Windows): normalize
            # quoting and separators.
            source = self._connection._shell._unquote(source)
            source_local = source.replace('\\', '/')
        else:
            source_local = source
        dest = os.path.expanduser(dest)
        if flat:
            if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')
                             ) and not dest.endswith(os.sep):
                result[
                    'msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                result['file'] = dest
                result['failed'] = True
                return result
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if 'inventory_hostname' in task_vars:
                target_name = task_vars['inventory_hostname']
            else:
                target_name = self._play_context.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name,
                                 source_local)
        dest = dest.replace("//", "/")
        # Numeric checksum codes signal remote-side problems, not checksums.
        if remote_checksum in ('0', '1', '2', '3', '4', '5'):
            result['changed'] = False
            result['file'] = source
            if remote_checksum == '0':
                result[
                    'msg'] = "unable to calculate the checksum of the remote file"
            elif remote_checksum == '1':
                result['msg'] = "the remote file does not exist"
            elif remote_checksum == '2':
                result['msg'] = "no read permission on remote file"
            elif remote_checksum == '3':
                result[
                    'msg'] = "remote file is a directory, fetch cannot work on directories"
            elif remote_checksum == '4':
                result[
                    'msg'] = "python isn't present on the system. Unable to compute checksum"
            elif remote_checksum == '5':
                result[
                    'msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
            # Historically, these don't fail because you may want to transfer
            # a log file that possibly MAY exist but keep going to fetch other
            # log files. Today, this is better achieved by adding
            # ignore_errors or failed_when to the task. Control the behaviour
            # via fail_when_missing
            if fail_on_missing:
                result['failed'] = True
                del result['changed']
            else:
                result['msg'] += ", not transferring, ignored"
            return result
        # calculate checksum for the local file
        local_checksum = checksum(dest)
        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))
            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                # Content already slurped: write the decoded bytes locally.
                try:
                    f = open(to_bytes(dest, errors='surrogate_or_strict'),
                             'wb')
                    f.write(remote_data)
                    f.close()
                except (IOError, OSError) as e:
                    raise AnsibleError("Failed to fetch the file: %s" % e)
            new_checksum = secure_hash(dest)
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None
            if validate_checksum and new_checksum != remote_checksum:
                result.update(
                    dict(failed=True,
                         md5sum=new_md5,
                         msg="checksum mismatch",
                         file=source,
                         dest=dest,
                         remote_md5sum=None,
                         checksum=new_checksum,
                         remote_checksum=remote_checksum))
            else:
                result.update({
                    'changed': True,
                    'md5sum': new_md5,
                    'dest': dest,
                    'remote_md5sum': None,
                    'checksum': new_checksum,
                    'remote_checksum': remote_checksum
                })
        else:
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None
            result.update(
                dict(changed=False,
                     md5sum=local_md5,
                     file=source,
                     dest=dest,
                     checksum=local_checksum))
    finally:
        # Always clean up the remote temp directory.
        self._remove_tmp_path(self._connection._shell.tmpdir)
    return result
def run(self, tmp=None, task_vars=None):
    ''' handler for fetch operations

    Validates src/dest, checksums the remote file (falling back to the
    slurp module when sudo is in use or permissions are lacking), derives
    the local destination path, and transfers the file only when the
    checksums differ. Returns a result dict with changed/checksum fields
    (md5sum kept for backwards compatibility; None on FIPS systems).
    '''
    # FIX: avoid a mutable default argument; behaviour is unchanged since
    # the shared dict was never mutated.
    if task_vars is None:
        task_vars = dict()
    # FIXME: is this even required anymore?
    #if self.runner.noop_on_check(inject):
    #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not (yet) supported for this module'))
    source = self._task.args.get('src', None)
    dest = self._task.args.get('dest', None)
    flat = boolean(self._task.args.get('flat'))
    fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
    validate_checksum = boolean(self._task.args.get('validate_checksum',
                                                    self._task.args.get('validate_md5')))
    if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
        return dict(failed=True, msg="validate_checksum and validate_md5 cannot both be specified")
    if source is None or dest is None:
        return dict(failed=True, msg="src and dest are required")
    source = self._shell.join_path(source)
    source = self._remote_expand_user(source, tmp)

    # calculate checksum for the remote file
    remote_checksum = self._remote_checksum(tmp, source)

    # use slurp if sudo and permissions are lacking
    remote_data = None
    if remote_checksum in ('1', '2') or self._connection_info.sudo:
        slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), tmp=tmp)
        if slurpres.get('rc') == 0:
            if slurpres['encoding'] == 'base64':
                remote_data = base64.b64decode(slurpres['content'])
            if remote_data is not None:
                remote_checksum = checksum_s(remote_data)
            # the source path may have been expanded on the
            # target system, so we compare it here and use the
            # expanded version if it's different
            remote_source = slurpres.get('source')
            if remote_source and remote_source != source:
                source = remote_source
        else:
            # FIXME: should raise an error here? the old code did nothing
            pass

    # calculate the destination name
    if os.path.sep not in self._shell.join_path('a', ''):
        # non-POSIX target shell: normalize path separators
        source_local = source.replace('\\', '/')
    else:
        source_local = source
    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith("/"):
            # if the path ends with "/", we'll use the source filename as the
            # destination filename
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        if not dest.startswith("/"):
            # if dest does not start with "/", we'll assume a relative path
            dest = self._loader.path_dwim(dest)
    else:
        # files are saved in dest dir, with a subdir for each host, then the filename
        dest = "%s/%s/%s" % (self._loader.path_dwim(dest), self._connection_info.remote_addr, source_local)
    dest = dest.replace("//", "/")

    # Numeric checksum codes signal remote-side problems, not real checksums.
    if remote_checksum in ('0', '1', '2', '3', '4'):
        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_checksum == '0':
            result = dict(msg="unable to calculate the checksum of the remote file", file=source, changed=False)
        elif remote_checksum == '1':
            if fail_on_missing:
                result = dict(failed=True, msg="the remote file does not exist", file=source)
            else:
                result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
        elif remote_checksum == '2':
            result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
        elif remote_checksum == '3':
            result = dict(msg="remote file is a directory, fetch cannot work on directories", file=source, changed=False)
        elif remote_checksum == '4':
            result = dict(msg="python isn't present on the system. Unable to compute checksum", file=source, changed=False)
        return result

    # calculate checksum for the local file
    local_checksum = checksum(dest)

    if remote_checksum != local_checksum:
        # create the containing directories, if needed
        if not os.path.isdir(os.path.dirname(dest)):
            os.makedirs(os.path.dirname(dest))

        # fetch the file and check for changes
        if remote_data is None:
            self._connection.fetch_file(source, dest)
        else:
            # FIX: remote_data is bytes (base64-decoded), so the file must
            # be opened in binary mode; a context manager guarantees close.
            with open(dest, 'wb') as f:
                f.write(remote_data)
        new_checksum = secure_hash(dest)
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            new_md5 = md5(dest)
        except ValueError:
            new_md5 = None

        if validate_checksum and new_checksum != remote_checksum:
            return dict(failed=True, md5sum=new_md5, msg="checksum mismatch",
                        file=source, dest=dest, remote_md5sum=None,
                        checksum=new_checksum, remote_checksum=remote_checksum)
        return dict(changed=True, md5sum=new_md5, dest=dest,
                    remote_md5sum=None, checksum=new_checksum,
                    remote_checksum=remote_checksum)
    else:
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            local_md5 = md5(dest)
        except ValueError:
            local_md5 = None
        return dict(changed=False, md5sum=local_md5, file=source, dest=dest,
                    checksum=local_checksum)
def run(self, tmp=None, task_vars=None):
    ''' handler for fetch operations

    Validates src/dest, checksums the remote file (using the slurp module
    when permissions are lacking or become is in use), derives the local
    destination path, and transfers the file only when the checksums
    differ. Returns checksum/md5sum fields (md5sum is None on FIPS
    systems) for backwards compatibility.
    '''
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)

    if self._play_context.check_mode:
        result['skipped'] = True
        result['msg'] = 'check mode not (yet) supported for this module'
        return result

    source = self._task.args.get('src', None)
    dest = self._task.args.get('dest', None)
    flat = boolean(self._task.args.get('flat'))
    fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
    validate_checksum = boolean(self._task.args.get('validate_checksum',
                                                    self._task.args.get('validate_md5')))
    if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
        result['failed'] = True
        result['msg'] = "validate_checksum and validate_md5 cannot both be specified"
        return result

    if source is None or dest is None:
        result['failed'] = True
        result['msg'] = "src and dest are required"
        return result

    source = self._connection._shell.join_path(source)
    source = self._remote_expand_user(source)

    remote_checksum = None
    if not self._play_context.become:
        # calculate checksum for the remote file, don't bother if using
        # become as slurp will be used
        remote_checksum = self._remote_checksum(source, all_vars=task_vars)

    # use slurp if permissions are lacking or privilege escalation is needed
    remote_data = None
    # '1' = missing file, '2' = unreadable, None = become in use.
    if remote_checksum in ('1', '2', None):
        slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
        if slurpres.get('failed'):
            if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
                result['msg'] = "the remote file does not exist, not transferring, ignored"
                result['file'] = source
                result['changed'] = False
            else:
                result.update(slurpres)
            return result
        else:
            if slurpres['encoding'] == 'base64':
                remote_data = base64.b64decode(slurpres['content'])
            if remote_data is not None:
                remote_checksum = checksum_s(remote_data)
            # the source path may have been expanded on the
            # target system, so we compare it here and use the
            # expanded version if it's different
            remote_source = slurpres.get('source')
            if remote_source and remote_source != source:
                source = remote_source

    # calculate the destination name
    if os.path.sep not in self._connection._shell.join_path('a', ''):
        # non-POSIX target shell: normalize quoting and separators
        source = self._connection._shell._unquote(source)
        source_local = source.replace('\\', '/')
    else:
        source_local = source

    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            # if the path ends with "/", we'll use the source filename as the
            # destination filename
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        if not dest.startswith("/"):
            # if dest does not start with "/", we'll assume a relative path
            dest = self._loader.path_dwim(dest)
    else:
        # files are saved in dest dir, with a subdir for each host, then the filename
        if 'inventory_hostname' in task_vars:
            target_name = task_vars['inventory_hostname']
        else:
            target_name = self._play_context.remote_addr
        dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

    dest = dest.replace("//", "/")

    # Numeric checksum codes signal remote-side problems, not real checksums.
    if remote_checksum in ('0', '1', '2', '3', '4'):
        # these don't fail because you may want to transfer a log file that
        # possibly MAY exist but keep going to fetch other log files
        if remote_checksum == '0':
            result['msg'] = "unable to calculate the checksum of the remote file"
            result['file'] = source
            result['changed'] = False
        elif remote_checksum == '1':
            if fail_on_missing:
                result['failed'] = True
                result['msg'] = "the remote file does not exist"
                result['file'] = source
            else:
                result['msg'] = "the remote file does not exist, not transferring, ignored"
                result['file'] = source
                result['changed'] = False
        elif remote_checksum == '2':
            result['msg'] = "no read permission on remote file, not transferring, ignored"
            result['file'] = source
            result['changed'] = False
        elif remote_checksum == '3':
            result['msg'] = "remote file is a directory, fetch cannot work on directories"
            result['file'] = source
            result['changed'] = False
        elif remote_checksum == '4':
            result['msg'] = "python isn't present on the system. Unable to compute checksum"
            result['file'] = source
            result['changed'] = False
        return result

    # calculate checksum for the local file
    local_checksum = checksum(dest)

    if remote_checksum != local_checksum:
        # create the containing directories, if needed
        makedirs_safe(os.path.dirname(dest))

        # fetch the file and check for changes
        if remote_data is None:
            self._connection.fetch_file(source, dest)
        else:
            # FIX: remote_data is bytes (base64-decoded), so open in binary
            # mode ('wb', not 'w'); a context manager guarantees close.
            try:
                with open(to_bytes(dest, errors='strict'), 'wb') as f:
                    f.write(remote_data)
            except (IOError, OSError) as e:
                raise AnsibleError("Failed to fetch the file: %s" % e)
        new_checksum = secure_hash(dest)
        # For backwards compatibility. We'll return None on FIPS enabled systems
        try:
            new_md5 = md5(dest)
        except ValueError:
            new_md5 = None

        if validate_checksum and new_checksum != remote_checksum:
            result.update(dict(failed=True, md5sum=new_md5,
                               msg="checksum mismatch", file=source,
                               dest=dest, remote_md5sum=None,
                               checksum=new_checksum,
                               remote_checksum=remote_checksum))
        else:
            result.update(dict(changed=True, md5sum=new_md5, dest=dest,
                               remote_md5sum=None, checksum=new_checksum,
                               remote_checksum=remote_checksum))
    else:
        # For backwards compatibility. We'll return None on FIPS enabled systems
        try:
            local_md5 = md5(dest)
        except ValueError:
            local_md5 = None
        result.update(dict(changed=False, md5sum=local_md5, file=source,
                           dest=dest, checksum=local_checksum))

    return result
def _get_local_checksum(get_checksum, local_path):
    """Return the checksum of *local_path*, or None when checksumming is disabled."""
    return checksum(local_path) if get_checksum else None
def run(self, tmp=None, task_vars=None):
    ''' handler for fetch operations

    Stat-based variant: checksums the remote file via a remote stat call
    (works under become), falls back to slurp when no checksum could be
    obtained, guards against directory traversal in the computed
    destination, and transfers only when checksums differ.
    '''
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    try:
        if self._play_context.check_mode:
            raise AnsibleActionSkip(
                'check mode not (yet) supported for this module')
        source = self._task.args.get('src', None)
        # original_dest is kept for the directory-traversal check below.
        original_dest = dest = self._task.args.get('dest', None)
        flat = boolean(self._task.args.get('flat'), strict=False)
        fail_on_missing = boolean(self._task.args.get(
            'fail_on_missing', True), strict=False)
        validate_checksum = boolean(self._task.args.get(
            'validate_checksum', True), strict=False)
        msg = ''
        # validate source and dest are strings FIXME: use basic.py and module specs
        if not isinstance(source, string_types):
            msg = "Invalid type supplied for source option, it must be a string"
        if not isinstance(dest, string_types):
            msg = "Invalid type supplied for dest option, it must be a string"
        if source is None or dest is None:
            msg = "src and dest are required"
        if msg:
            raise AnsibleActionFail(msg)
        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source)
        remote_stat = {}
        remote_checksum = None
        # NOTE(review): 'if True' looks like a leftover from patching out a
        # 'become' condition — confirm against upstream before removing.
        if True:
            # Get checksum for the remote file even using become. Mitogen doesn't need slurp.
            # Follow symlinks because fetch always follows symlinks
            try:
                remote_stat = self._execute_remote_stat(source,
                                                        all_vars=task_vars,
                                                        follow=True)
            except AnsibleError as ae:
                result['changed'] = False
                result['file'] = source
                if fail_on_missing:
                    result['failed'] = True
                    result['msg'] = to_text(ae)
                else:
                    result['msg'] = "%s, ignored" % to_text(
                        ae, errors='surrogate_or_replace')
                return result
            remote_checksum = remote_stat.get('checksum')
            if remote_stat.get('exists'):
                if remote_stat.get('isdir'):
                    result['failed'] = True
                    result['changed'] = False
                    result[
                        'msg'] = "remote file is a directory, fetch cannot work on directories"
                    # Historically, these don't fail because you may want to transfer
                    # a log file that possibly MAY exist but keep going to fetch other
                    # log files. Today, this is better achieved by adding
                    # ignore_errors or failed_when to the task. Control the behaviour
                    # via fail_when_missing
                    if not fail_on_missing:
                        result['msg'] += ", not transferring, ignored"
                        del result['changed']
                        del result['failed']
                    return result
        # use slurp if permissions are lacking or privilege escalation is needed
        remote_data = None
        if remote_checksum in (None, '1', ''):
            slurpres = self._execute_module(
                module_name='ansible.legacy.slurp',
                module_args=dict(src=source),
                task_vars=task_vars)
            if slurpres.get('failed'):
                if not fail_on_missing:
                    result['file'] = source
                    result['changed'] = False
                else:
                    result.update(slurpres)
                    if 'not found' in slurpres.get('msg', ''):
                        result[
                            'msg'] = "the remote file does not exist, not transferring, ignored"
                    elif slurpres.get('msg', '').startswith('source is a directory'):
                        result[
                            'msg'] = "remote file is a directory, fetch cannot work on directories"
                return result
            else:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
        # calculate the destination name
        if os.path.sep not in self._connection._shell.join_path('a', ''):
            # Non-POSIX target shell: normalize quoting and separators.
            source = self._connection._shell._unquote(source)
            source_local = source.replace('\\', '/')
        else:
            source_local = source
        # ensure we only use file name, avoid relative paths
        if not is_subpath(dest, original_dest):
            # TODO: ? dest = os.path.expanduser(dest.replace(('../','')))
            raise AnsibleActionFail(
                "Detected directory traversal, expected to be contained in '%s' but got '%s'"
                % (original_dest, dest))
        if flat:
            if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')
                             ) and not dest.endswith(os.sep):
                raise AnsibleActionFail(
                    "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                )
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if 'inventory_hostname' in task_vars:
                target_name = task_vars['inventory_hostname']
            else:
                target_name = self._play_context.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name,
                                 source_local)
        dest = os.path.normpath(dest)
        # calculate checksum for the local file
        local_checksum = checksum(dest)
        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))
            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                # Content already slurped: write decoded bytes locally.
                try:
                    f = open(to_bytes(dest, errors='surrogate_or_strict'),
                             'wb')
                    f.write(remote_data)
                    f.close()
                except (IOError, OSError) as e:
                    raise AnsibleActionFail(
                        "Failed to fetch the file: %s" % e)
            new_checksum = secure_hash(dest)
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None
            if validate_checksum and new_checksum != remote_checksum:
                result.update(
                    dict(failed=True,
                         md5sum=new_md5,
                         msg="checksum mismatch",
                         file=source,
                         dest=dest,
                         remote_md5sum=None,
                         checksum=new_checksum,
                         remote_checksum=remote_checksum))
            else:
                result.update({
                    'changed': True,
                    'md5sum': new_md5,
                    'dest': dest,
                    'remote_md5sum': None,
                    'checksum': new_checksum,
                    'remote_checksum': remote_checksum
                })
        else:
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None
            result.update(
                dict(changed=False,
                     md5sum=local_md5,
                     file=source,
                     dest=dest,
                     checksum=local_checksum))
    finally:
        # Always clean up the remote temp directory.
        self._remove_tmp_path(self._connection._shell.tmpdir)
    return result
def run(self, tmp=None, task_vars=None):
    """ handler for fetch operations

    Validates src/dest, checksums the remote file (using the slurp module
    when become is in use or permissions are lacking), derives the local
    destination path, and transfers the file only when the checksums
    differ. Returns checksum/md5sum fields (md5sum is None on FIPS
    systems) for backwards compatibility.
    """
    # FIX: avoid a mutable default argument; behaviour is unchanged since
    # the shared dict was never mutated.
    if task_vars is None:
        task_vars = dict()
    if self._connection_info.check_mode:
        return dict(skipped=True, msg="check mode not (yet) supported for this module")

    source = self._task.args.get("src", None)
    dest = self._task.args.get("dest", None)
    flat = boolean(self._task.args.get("flat"))
    fail_on_missing = boolean(self._task.args.get("fail_on_missing"))
    validate_checksum = boolean(self._task.args.get("validate_checksum",
                                                    self._task.args.get("validate_md5")))
    if "validate_md5" in self._task.args and "validate_checksum" in self._task.args:
        return dict(failed=True, msg="validate_checksum and validate_md5 cannot both be specified")

    if source is None or dest is None:
        return dict(failed=True, msg="src and dest are required")

    source = self._connection._shell.join_path(source)
    source = self._remote_expand_user(source, tmp)

    # calculate checksum for the remote file
    remote_checksum = self._remote_checksum(tmp, source)

    # use slurp if sudo and permissions are lacking
    remote_data = None
    if remote_checksum in ("1", "2") or self._connection_info.become:
        slurpres = self._execute_module(
            module_name="slurp", module_args=dict(src=source), task_vars=task_vars, tmp=tmp
        )
        if slurpres.get("rc") == 0:
            if slurpres["encoding"] == "base64":
                remote_data = base64.b64decode(slurpres["content"])
            if remote_data is not None:
                remote_checksum = checksum_s(remote_data)
            # the source path may have been expanded on the
            # target system, so we compare it here and use the
            # expanded version if it's different
            remote_source = slurpres.get("source")
            if remote_source and remote_source != source:
                source = remote_source
        else:
            # FIXME: should raise an error here? the old code did nothing
            pass

    # calculate the destination name
    if os.path.sep not in self._connection._shell.join_path("a", ""):
        # non-POSIX target shell: normalize path separators
        source_local = source.replace("\\", "/")
    else:
        source_local = source

    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            # if the path ends with "/", we'll use the source filename as the
            # destination filename
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        if not dest.startswith("/"):
            # if dest does not start with "/", we'll assume a relative path
            dest = self._loader.path_dwim(dest)
    else:
        # files are saved in dest dir, with a subdir for each host, then the filename
        if "inventory_hostname" in task_vars:
            target_name = task_vars["inventory_hostname"]
        else:
            target_name = self._connection_info.remote_addr
        dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

    dest = dest.replace("//", "/")

    # Numeric checksum codes signal remote-side problems, not real checksums.
    if remote_checksum in ("0", "1", "2", "3", "4"):
        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_checksum == "0":
            result = dict(msg="unable to calculate the checksum of the remote file", file=source, changed=False)
        elif remote_checksum == "1":
            if fail_on_missing:
                result = dict(failed=True, msg="the remote file does not exist", file=source)
            else:
                result = dict(
                    msg="the remote file does not exist, not transferring, ignored", file=source, changed=False
                )
        elif remote_checksum == "2":
            result = dict(
                msg="no read permission on remote file, not transferring, ignored", file=source, changed=False
            )
        elif remote_checksum == "3":
            result = dict(
                msg="remote file is a directory, fetch cannot work on directories", file=source, changed=False
            )
        elif remote_checksum == "4":
            result = dict(
                msg="python isn't present on the system. Unable to compute checksum", file=source, changed=False
            )
        return result

    # calculate checksum for the local file
    local_checksum = checksum(dest)

    if remote_checksum != local_checksum:
        # create the containing directories, if needed
        makedirs_safe(os.path.dirname(dest))

        # fetch the file and check for changes
        if remote_data is None:
            self._connection.fetch_file(source, dest)
        else:
            # FIX: remote_data is bytes (base64-decoded), so the file must
            # be opened in binary mode; a context manager guarantees close.
            with open(dest, "wb") as f:
                f.write(remote_data)
        new_checksum = secure_hash(dest)
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            new_md5 = md5(dest)
        except ValueError:
            new_md5 = None

        if validate_checksum and new_checksum != remote_checksum:
            return dict(
                failed=True,
                md5sum=new_md5,
                msg="checksum mismatch",
                file=source,
                dest=dest,
                remote_md5sum=None,
                checksum=new_checksum,
                remote_checksum=remote_checksum,
            )
        return dict(
            changed=True,
            md5sum=new_md5,
            dest=dest,
            remote_md5sum=None,
            checksum=new_checksum,
            remote_checksum=remote_checksum,
        )
    else:
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            local_md5 = md5(dest)
        except ValueError:
            local_md5 = None
        return dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)
def run(self, tmp=None, task_vars=None):
    '''Handler for file transfer operations (legacy copy action).

    Validates the src/content/dest argument combinations, expands a
    directory source into (full_path, relative_path) tuples, then for
    each file either transfers it and runs the ``copy`` module (when the
    checksums differ) or runs the ``file`` module (when content already
    matches, so attributes can still be applied).  Returns the merged
    result dict from the last executed module, or an aggregate dict for
    recursive copies.
    '''
    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)

    source = self._task.args.get('src', None)
    content = self._task.args.get('content', None)
    dest = self._task.args.get('dest', None)
    raw = boolean(self._task.args.get('raw', 'no'))
    force = boolean(self._task.args.get('force', 'yes'))
    remote_src = boolean(self._task.args.get('remote_src', False))
    follow = boolean(self._task.args.get('follow', False))

    # Mutually-exclusive / required argument validation.
    if (source is None and content is None) or dest is None:
        result['failed'] = True
        result['msg'] = "src (or content) and dest are required"
        return result
    elif source is not None and content is not None:
        result['failed'] = True
        result['msg'] = "src and content are mutually exclusive"
        return result
    elif content is not None and dest is not None and dest.endswith("/"):
        result['failed'] = True
        result['msg'] = "dest must be a file if content is defined"
        return result

    # Check if the source ends with a "/" (rsync-like semantics: trailing
    # slash copies directory contents rather than the directory itself).
    source_trailing_slash = False
    if source:
        source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

    # Define content_tempfile in case we set it after finding content populated.
    content_tempfile = None

    # If content is defined make a temp file and write the content into it.
    if content is not None:
        try:
            # If content comes to us as a dict it should be decoded json.
            # We need to encode it back into a string to write it out.
            if isinstance(content, dict) or isinstance(content, list):
                content_tempfile = self._create_content_tempfile(json.dumps(content))
            else:
                content_tempfile = self._create_content_tempfile(content)
            source = content_tempfile
        except Exception as err:
            result['failed'] = True
            result['msg'] = "could not write content temp file: %s" % to_native(err)
            return result

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    elif remote_src:
        # remote_src: delegate everything to the copy module on the target.
        result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
        return result
    else:
        # find in expected paths
        try:
            source = self._find_needle('files', source)
        except AnsibleError as e:
            result['failed'] = True
            result['msg'] = to_text(e)
            return result

    # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
    source_files = []

    # If source is a directory populate our list else source is a file and translate it to a tuple.
    if os.path.isdir(to_bytes(source, errors='surrogate_or_strict')):
        # Get the amount of spaces to remove to get the relative path.
        if source_trailing_slash:
            sz = len(source)
        else:
            sz = len(source.rsplit('/', 1)[0]) + 1

        # Walk the directory and append the file tuples to source_files.
        for base_path, sub_folders, files in os.walk(to_bytes(source)):
            for file in files:
                full_path = to_text(os.path.join(base_path, file), errors='surrogate_or_strict')
                rel_path = full_path[sz:]
                if rel_path.startswith('/'):
                    rel_path = rel_path[1:]
                source_files.append((full_path, rel_path))
            # recurse into subdirs
            for sf in sub_folders:
                source_files += self._get_recursive_files(os.path.join(source, to_text(sf)), sz=sz)

        # If it's recursive copy, destination is always a dir,
        # explicitly mark it so (note - copy module relies on this).
        if not self._connection._shell.path_has_trailing_slash(dest):
            dest = self._connection._shell.join_path(dest, '')
    else:
        source_files.append((source, os.path.basename(source)))

    changed = False
    module_return = dict(changed=False)

    # A register for if we executed a module.
    # Used to cut down on command calls when not recursive.
    module_executed = False

    # Tell _execute_module to delete the file if there is one file.
    delete_remote_tmp = (len(source_files) == 1)

    # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
    remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
    if not delete_remote_tmp:
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

    # expand any user home dir specifier
    dest = self._remote_expand_user(dest)

    # Keep original value for mode parameter (it may be rewritten to the
    # local file's mode below when mode=preserve, and is restored after
    # the loop).
    mode_value = self._task.args.get('mode', None)

    diffs = []
    for source_full, source_rel in source_files:

        source_full = self._loader.get_real_file(source_full)

        # Generate a hash of the local file.
        local_checksum = checksum(source_full)

        # Get the local mode and set if user wanted it preserved
        # https://github.com/ansible/ansible-modules-core/issues/1124
        if self._task.args.get('mode', None) == 'preserve':
            lmode = '0%03o' % stat.S_IMODE(os.stat(source_full).st_mode)
            self._task.args['mode'] = lmode

        # If local_checksum is not defined we can't find the file so we should fail out.
        if local_checksum is None:
            result['failed'] = True
            result['msg'] = "could not find src=%s" % source_full
            self._remove_tmp_path(tmp)
            return result

        # This is kind of optimization - if user told us destination is
        # dir, do path manipulation right away, otherwise we still check
        # for dest being a dir via remote call below.
        if self._connection._shell.path_has_trailing_slash(dest):
            dest_file = self._connection._shell.join_path(dest, source_rel)
        else:
            dest_file = self._connection._shell.join_path(dest)

        # Attempt to get remote file info
        dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp)

        if dest_status['exists'] and dest_status['isdir']:
            # The dest is a directory.
            if content is not None:
                # If source was defined as content remove the temporary file and fail out.
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                self._remove_tmp_path(tmp)
                result['failed'] = True
                result['msg'] = "can not use content with a dir as dest"
                return result
            else:
                # Append the relative source location to the destination and get remote stats again
                dest_file = self._connection._shell.join_path(dest, source_rel)
                dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp)

        if dest_status['exists'] and not force:
            # remote file already exists and force is off, so skip this
            # file and continue to the next iteration.
            continue

        if local_checksum != dest_status['checksum']:
            # The checksums don't match and we will change or error out.
            changed = True

            # Create a tmp path if missing only if this is not recursive.
            # If this is recursive we already have a tmp path.
            if delete_remote_tmp:
                if tmp is None or "-tmp-" not in tmp:
                    tmp = self._make_tmp_path(remote_user)
                    self._cleanup_remote_tmp = True

            if self._play_context.diff and not raw:
                diffs.append(self._get_diff_data(dest_file, source_full, task_vars))

            if self._play_context.check_mode:
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                changed = True
                module_return = dict(changed=True)
                continue

            # Define a remote directory that we will copy the file to.
            tmp_src = self._connection._shell.join_path(tmp, 'source')

            remote_path = None

            if not raw:
                remote_path = self._transfer_file(source_full, tmp_src)
            else:
                # raw: push the bytes straight to the final destination.
                self._transfer_file(source_full, dest_file)

            # We have copied the file remotely and no longer require our content_tempfile
            self._remove_tempfile_if_content_defined(content, content_tempfile)
            self._loader.cleanup_tmp_file(source_full)

            # fix file permissions when the copy is done as a different user
            if remote_path:
                self._fixup_perms2((tmp, remote_path), remote_user)

            if raw:
                # Continue to next iteration if raw is defined.
                continue

            # Run the copy module
            # src and dest here come after original and override them
            # we pass dest only to make sure it includes trailing slash in case of recursive copy
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    dest=dest,
                    original_basename=source_rel,
                )
            )

            if 'content' in new_module_args:
                del new_module_args['content']

            module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
            module_executed = True

        else:
            # no need to transfer the file, already correct hash, but still need to call
            # the file module in case we want to change attributes
            self._remove_tempfile_if_content_defined(content, content_tempfile)
            self._loader.cleanup_tmp_file(source_full)

            if raw:
                # Continue to next iteration if raw is defined.
                self._remove_tmp_path(tmp)
                continue

            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=source_rel,
                    dest=dest,
                    original_basename=source_rel
                )
            )

            # Execute the file module.
            module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
            module_executed = True

        if not module_return.get('checksum'):
            module_return['checksum'] = local_checksum
        if module_return.get('failed'):
            result.update(module_return)
            if not delete_remote_tmp:
                self._remove_tmp_path(tmp)
            return result
        if module_return.get('changed'):
            changed = True

        # the file module returns the file path as 'path', but
        # the copy module uses 'dest', so add it if it's not there
        if 'path' in module_return and 'dest' not in module_return:
            module_return['dest'] = module_return['path']

    # reset the mode
    self._task.args['mode'] = mode_value

    # Delete tmp path if we were recursive or if we did not execute a module.
    if not delete_remote_tmp or (delete_remote_tmp and not module_executed):
        self._remove_tmp_path(tmp)

    if module_executed and len(source_files) == 1:
        result.update(module_return)
    else:
        result.update(dict(dest=dest, src=source, changed=changed))

    if diffs:
        result['diff'] = diffs

    return result
def _copy_file(self, source_full, source_rel, content, content_tempfile, dest, task_vars, follow):
    '''Copy one local file to the remote destination.

    Per-file worker: compares local and remote checksums and either
    transfers the file and runs the ``copy`` module, or (when content
    already matches) runs the ``file`` module so attributes can still
    be applied.  Returns the module result dict, or None when the file
    was skipped (exists and not force, or raw mode).  Remote tmp files
    go under ``self._connection._shell.tmpdir``.
    '''
    decrypt = boolean(self._task.args.get('decrypt', True), strict=False)
    force = boolean(self._task.args.get('force', 'yes'), strict=False)
    raw = boolean(self._task.args.get('raw', 'no'), strict=False)

    result = {}
    result['diff'] = []

    # If the local file does not exist, get_real_file() raises AnsibleFileNotFound
    try:
        source_full = self._loader.get_real_file(source_full, decrypt=decrypt)
    except AnsibleFileNotFound as e:
        result['failed'] = True
        result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
        return result

    # Get the local mode and set if user wanted it preserved
    # https://github.com/ansible/ansible-modules-core/issues/1124
    lmode = None
    if self._task.args.get('mode', None) == 'preserve':
        lmode = '0%03o' % stat.S_IMODE(os.stat(source_full).st_mode)

    # This is kind of optimization - if user told us destination is
    # dir, do path manipulation right away, otherwise we still check
    # for dest being a dir via remote call below.
    if self._connection._shell.path_has_trailing_slash(dest):
        dest_file = self._connection._shell.join_path(dest, source_rel)
    else:
        dest_file = dest

    # Attempt to get remote file info (checksum=force: only compute the
    # remote checksum when we may actually overwrite).
    dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, checksum=force)

    if dest_status['exists'] and dest_status['isdir']:
        # The dest is a directory.
        if content is not None:
            # If source was defined as content remove the temporary file and fail out.
            self._remove_tempfile_if_content_defined(content, content_tempfile)
            result['failed'] = True
            result['msg'] = "can not use content with a dir as dest"
            return result
        else:
            # Append the relative source location to the destination and get remote stats again
            dest_file = self._connection._shell.join_path(dest, source_rel)
            dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, checksum=force)

    if dest_status['exists'] and not force:
        # remote file exists and force is off: skip this file (the
        # caller treats None as "continue to next iteration").
        return None

    # Generate a hash of the local file.
    local_checksum = checksum(source_full)

    if local_checksum != dest_status['checksum']:
        # The checksums don't match and we will change or error out.

        if self._play_context.diff and not raw:
            result['diff'].append(self._get_diff_data(dest_file, source_full, task_vars))

        if self._play_context.check_mode:
            self._remove_tempfile_if_content_defined(content, content_tempfile)
            result['changed'] = True
            return result

        # Define a remote directory that we will copy the file to.
        tmp_src = self._connection._shell.join_path(self._connection._shell.tmpdir, 'source')

        remote_path = None

        if not raw:
            remote_path = self._transfer_file(source_full, tmp_src)
        else:
            # raw: push the bytes straight to the final destination.
            self._transfer_file(source_full, dest_file)

        # We have copied the file remotely and no longer require our content_tempfile
        self._remove_tempfile_if_content_defined(content, content_tempfile)
        self._loader.cleanup_tmp_file(source_full)

        # fix file permissions when the copy is done as a different user
        if remote_path:
            self._fixup_perms2((self._connection._shell.tmpdir, remote_path))

        if raw:
            # Continue to next iteration if raw is defined.
            return None

        # Run the copy module
        # src and dest here come after original and override them
        # we pass dest only to make sure it includes trailing slash in case of recursive copy
        new_module_args = _create_remote_copy_args(self._task.args)
        new_module_args.update(
            dict(
                src=tmp_src,
                dest=dest,
                original_basename=source_rel,
                follow=follow
            )
        )

        if not self._task.args.get('checksum'):
            new_module_args['checksum'] = local_checksum

        if lmode:
            new_module_args['mode'] = lmode

        module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars)

    else:
        # no need to transfer the file, already correct hash, but still need to call
        # the file module in case we want to change attributes
        self._remove_tempfile_if_content_defined(content, content_tempfile)
        self._loader.cleanup_tmp_file(source_full)

        if raw:
            return None

        # Fix for https://github.com/ansible/ansible-modules-core/issues/1568.
        # If checksums match, and follow = True, find out if 'dest' is a link. If so,
        # change it to point to the source of the link.
        if follow:
            dest_status_nofollow = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=False)
            if dest_status_nofollow['islnk'] and 'lnk_source' in dest_status_nofollow.keys():
                dest = dest_status_nofollow['lnk_source']

        # Build temporary module_args.
        new_module_args = _create_remote_file_args(self._task.args)
        new_module_args.update(
            dict(
                src=source_rel,
                dest=dest,
                original_basename=source_rel,
                recurse=False,
                state='file',
            )
        )

        if lmode:
            new_module_args['mode'] = lmode

        # Execute the file module.
        module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars)

    if not module_return.get('checksum'):
        module_return['checksum'] = local_checksum

    result.update(module_return)
    return result
module_executed = False # Tell _execute_module to delete the file if there is one file. delete_remote_tmp = (len(source_files) == 1) # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late. if not delete_remote_tmp: if tmp is None or "-tmp-" not in tmp: tmp = self._make_tmp_path() # expand any user home dir specifier dest = self._remote_expand_user(dest, tmp) for source_full, source_rel in source_files: # Generate a hash of the local file. local_checksum = checksum(source_full) # If local_checksum is not defined we can't find the file so we should fail out. if local_checksum is None: return dict(failed=True, msg="could not find src=%s" % source_full) # This is kind of optimization - if user told us destination is # dir, do path manipulation right away, otherwise we still check # for dest being a dir via remote call below. if self._shell.path_has_trailing_slash(dest): dest_file = self._shell.join_path(dest, source_rel) else: dest_file = self._shell.join_path(dest) # Attempt to get the remote checksum remote_checksum = self._remote_checksum(tmp, dest_file)
def run(self, tmp=None, task_vars=None):
    '''Copy a local save file (SAVF) into a library on an IBM i node.

    Validates ``src``/``lib_name``, optionally renames (``backup=True``)
    or deletes (``force=True``) an existing SAVF of the same name,
    creates the target SAVF via CRTSAVF, transfers the local file onto
    it, and verifies the transfer by checksum when possible.

    Returns an Ansible result dict (msg/src/dest/delta/failed, plus
    stdout/stderr lines from any failing CL command).  If the SAVF was
    created but a later step failed, the partially-copied SAVF is
    deleted again in the ``finally`` block.
    '''
    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    try:
        result = dict(msg="", stderr="", stdout="", src="", md5sum="", dest="", remote_md5sum="",
                      remote_checksum="", checksum="", delta="", failed=False)
        # Tracks whether CRTSAVF succeeded so the finally block knows to
        # clean up the newly-created SAVF when a later step fails.
        created = False
        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not (yet) supported for this module'
            return result

        src = self._task.args.get('src', None)
        lib_name = self._task.args.get('lib_name', None)
        force = boolean(self._task.args.get('force', False), strict=True)
        backup = boolean(self._task.args.get('backup', False), strict=True)

        # Argument validation. FIXME: use basic.py and module specs
        if lib_name is None:
            result['msg'] = "lib_name is required."
        elif src is None:
            result['msg'] = "src is required."
        elif not isinstance(src, string_types):
            result['msg'] = "Invalid type supplied for src option, it must be a string."

        if result.get('msg'):
            result['failed'] = True
            return result

        # Resolve (and, for vaulted files, decrypt) the local source path.
        try:
            src = self._loader.get_real_file(self._find_needle('files', src))
        except AnsibleError as e:
            raise AnsibleActionFail(to_native(e))

        lib_name = lib_name.upper()
        startd = datetime.datetime.now()
        # The SAVF object name is the local file name without extension.
        savefile_name = os.path.splitext(os.path.basename(src))[0]
        savefile_path = self._calculate_savf_path(savefile_name, lib_name)
        display.debug("savefile_name = %s, savf_path = %s" % (savefile_name, savefile_path))

        if self._execute_remote_stat(savefile_path, all_vars=task_vars, follow=False)['exists']:
            if force is True:
                if backup is True:
                    # Rename the existing save file out of the way instead
                    # of deleting it.
                    rename_savf_name, rename_savf_path, msg = self._calculate_savf_name(
                        savefile_name, lib_name, task_vars)
                    if msg:
                        result['msg'] += msg
                        result['failed'] = True
                        return result
                    cmd = "QSYS/REN OBJ('%s') NEWOBJ(%s.file)" % (savefile_path, rename_savf_name)
                    save_result = self._execute_module(
                        module_name='ibmi_cl_command', module_args={'cmd': cmd})
                    if save_result['rc'] != IBMi_COMMAND_RC_SUCCESS:
                        result['msg'] += "Failed to rename original SAVF on remote. "
                        result['stderr'] = save_result['stderr_lines']
                        result['stdout'] = save_result['stdout_lines']
                        result['failed'] = True
                        return result
                    # BUGFIX: report the new (renamed) path, not the old one.
                    display.debug("The original save file is successfully renamed to %s" % (rename_savf_path))
                else:
                    # No backup requested: delete the existing SAVF.
                    cmd = 'QSYS/DLTOBJ OBJ(%s/%s) OBJTYPE(*FILE)' % (lib_name, savefile_name)
                    save_result = self._execute_module(
                        module_name='ibmi_cl_command', module_args={'cmd': cmd})
                    if save_result['rc'] != IBMi_COMMAND_RC_SUCCESS:
                        result['msg'] += "Failed to delete original SAVF on remote. "
                        result['stderr'] = save_result['stderr_lines']
                        result['stdout'] = save_result['stdout_lines']
                        result['failed'] = True
                        return result
                    display.debug("The original save file is deleted.")
            else:
                result['msg'] += "File with the Same name already exists on remote. If still want to copy, set force True. "
                result['failed'] = True
                return result

        # Create the target save file.
        cmd = 'QSYS/CRTSAVF FILE(%s/%s)' % (lib_name, savefile_name)
        save_result = self._execute_module(module_name='ibmi_cl_command', module_args={'cmd': cmd})
        if save_result['rc'] != IBMi_COMMAND_RC_SUCCESS:
            result['msg'] += "Failed to create SAVF: %s on remote." % savefile_path
            result['stderr'] = save_result['stderr_lines']
            result['stdout'] = save_result['stdout_lines']
            result['failed'] = True
            return result
        # BUGFIX: mark the SAVF as created so the finally block can delete
        # it if a later step fails.  This flag was never set before, which
        # made the cleanup path dead code.
        created = True

        display.debug("Start to copy file. src = %s, savefile_path = %s " % (src, savefile_path))
        self._transfer_file(src, savefile_path)

        local_checksum = checksum(src)
        # BUGFIX: initialise remote_checksum so the comparison below cannot
        # raise NameError when privilege escalation (become) is in effect.
        remote_checksum = None
        if not self._connection.become:
            # Don't compute the remote checksum under become: it would run
            # as the unprivileged user and may lack permission.
            remote_checksum = self._remote_checksum(savefile_path, all_vars=task_vars, follow=True)

        if remote_checksum in ('1', '2', None):
            # Best effort: the checksum could not be computed; record the
            # reason but do not fail the copy.
            result['msg'] += "remote_checksum error. The permissions are lacking or privilege escalation is needed. "
        elif local_checksum != remote_checksum:
            result['msg'] += "local_checksum doesn't match remote_checksum. "
            result['failed'] = True
            return result

        endd = datetime.datetime.now()
        delta = endd - startd
        result.update(
            dict(msg="File is successfully copied.", src=src, delta=str(delta), dest=savefile_path))
    except Exception as e:
        result['msg'] += "%s" % to_text(e)
        result['failed'] = True
        return result
    finally:
        # Best-effort cleanup: remove the SAVF we created when the copy
        # ultimately failed (CPF2105 = object not found, i.e. already gone).
        if created is True and result['failed'] is True:
            cmd = 'QSYS/DLTOBJ OBJ(%s/%s) OBJTYPE(*FILE)' % (lib_name, savefile_name)
            save_result = self._execute_module(module_name='ibmi_cl_command', module_args={'cmd': cmd})
            if save_result['rc'] != 0 and ('CPF2105' not in save_result['stderr']):
                result['msg'] += "Failed to delete the new created save file %s on remote when copy fails. " % savefile_path
    return result
def _copy_file(self, source_full, source_rel, content, content_tempfile, dest, task_vars, tmp):
    '''Copy one local file to the remote destination (tmp-path variant).

    Per-file worker: compares local and remote checksums and either
    transfers the file into ``tmp`` and runs the ``copy`` module, or
    (when content already matches) runs the ``file`` module so
    attributes can still be applied.  Returns the module result dict,
    or None when the file was skipped (exists and not force, or raw
    mode).  ``tmp`` is the caller-managed remote temporary directory.
    '''
    decrypt = boolean(self._task.args.get('decrypt', True), strict=False)
    follow = boolean(self._task.args.get('follow', False), strict=False)
    force = boolean(self._task.args.get('force', 'yes'), strict=False)
    raw = boolean(self._task.args.get('raw', 'no'), strict=False)

    result = {}
    result['diff'] = []

    # If the local file does not exist, get_real_file() raises AnsibleFileNotFound
    try:
        source_full = self._loader.get_real_file(source_full, decrypt=decrypt)
    except AnsibleFileNotFound as e:
        result['failed'] = True
        result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
        return result

    # Get the local mode and set if user wanted it preserved
    # https://github.com/ansible/ansible-modules-core/issues/1124
    lmode = None
    if self._task.args.get('mode', None) == 'preserve':
        lmode = '0%03o' % stat.S_IMODE(os.stat(source_full).st_mode)

    # This is kind of optimization - if user told us destination is
    # dir, do path manipulation right away, otherwise we still check
    # for dest being a dir via remote call below.
    if self._connection._shell.path_has_trailing_slash(dest):
        dest_file = self._connection._shell.join_path(dest, source_rel)
    else:
        dest_file = dest

    # Attempt to get remote file info (checksum=force: only compute the
    # remote checksum when we may actually overwrite).
    dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp, checksum=force)

    if dest_status['exists'] and dest_status['isdir']:
        # The dest is a directory.
        if content is not None:
            # If source was defined as content remove the temporary file and fail out.
            self._remove_tempfile_if_content_defined(content, content_tempfile)
            result['failed'] = True
            result['msg'] = "can not use content with a dir as dest"
            return result
        else:
            # Append the relative source location to the destination and get remote stats again
            dest_file = self._connection._shell.join_path(dest, source_rel)
            dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp, checksum=force)

    if dest_status['exists'] and not force:
        # remote file exists and force is off: skip this file (the
        # caller treats None as "continue to next iteration").
        return None

    # Generate a hash of the local file.
    local_checksum = checksum(source_full)

    if local_checksum != dest_status['checksum']:
        # The checksums don't match and we will change or error out.

        if self._play_context.diff and not raw:
            result['diff'].append(self._get_diff_data(dest_file, source_full, task_vars))

        if self._play_context.check_mode:
            self._remove_tempfile_if_content_defined(content, content_tempfile)
            result['changed'] = True
            return result

        # Define a remote directory that we will copy the file to.
        tmp_src = self._connection._shell.join_path(tmp, 'source')

        remote_path = None

        if not raw:
            remote_path = self._transfer_file(source_full, tmp_src)
        else:
            # raw: push the bytes straight to the final destination.
            self._transfer_file(source_full, dest_file)

        # We have copied the file remotely and no longer require our content_tempfile
        self._remove_tempfile_if_content_defined(content, content_tempfile)
        self._loader.cleanup_tmp_file(source_full)

        # fix file permissions when the copy is done as a different user
        if remote_path:
            self._fixup_perms2((tmp, remote_path))

        if raw:
            # Continue to next iteration if raw is defined.
            return None

        # Run the copy module
        # src and dest here come after original and override them
        # we pass dest only to make sure it includes trailing slash in case of recursive copy
        new_module_args = self._create_remote_file_args(self._task.args)
        new_module_args.update(
            dict(
                src=tmp_src,
                dest=dest,
                original_basename=source_rel,
            )
        )

        if not self._task.args.get('checksum'):
            new_module_args['checksum'] = local_checksum

        if lmode:
            new_module_args['mode'] = lmode

        module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)

    else:
        # no need to transfer the file, already correct hash, but still need to call
        # the file module in case we want to change attributes
        self._remove_tempfile_if_content_defined(content, content_tempfile)
        self._loader.cleanup_tmp_file(source_full)

        if raw:
            return None

        # Fix for https://github.com/ansible/ansible-modules-core/issues/1568.
        # If checksums match, and follow = True, find out if 'dest' is a link. If so,
        # change it to point to the source of the link.
        if follow:
            dest_status_nofollow = self._execute_remote_stat(dest_file, all_vars=task_vars, tmp=tmp, follow=False)
            if dest_status_nofollow['islnk'] and 'lnk_source' in dest_status_nofollow.keys():
                dest = dest_status_nofollow['lnk_source']

        # Build temporary module_args.
        new_module_args = self._create_remote_file_args(self._task.args)
        new_module_args.update(
            dict(
                src=source_rel,
                dest=dest,
                original_basename=source_rel,
                state='file',
            )
        )

        if lmode:
            new_module_args['mode'] = lmode

        # Execute the file module.
        module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)

    if not module_return.get('checksum'):
        module_return['checksum'] = local_checksum

    result.update(module_return)
    return result
def run(self, tmp=None, task_vars=None):
    '''Handler for fetch operations.

    Pulls a file from the remote host to the controller: computes the
    remote checksum (falling back to the ``slurp`` module when become
    is in use or permissions are lacking), derives the local
    destination path (flat vs per-host subdirectory layout), and
    transfers the file only when local and remote checksums differ.
    Returns a result dict with checksum/md5sum/dest details.
    '''
    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect

    try:
        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not (yet) supported for this module'
            return result

        source = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        flat = boolean(self._task.args.get('flat'), strict=False)
        fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
        validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5', True)), strict=False)

        # validate source and dest are strings FIXME: use basic.py and module specs
        if not isinstance(source, string_types):
            result['msg'] = "Invalid type supplied for source option, it must be a string"

        if not isinstance(dest, string_types):
            result['msg'] = "Invalid type supplied for dest option, it must be a string"

        # validate_md5 is the deprecated way to specify validate_checksum
        if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
            result['msg'] = "validate_checksum and validate_md5 cannot both be specified"

        if 'validate_md5' in self._task.args:
            display.deprecated('Use validate_checksum instead of validate_md5', version='2.8')

        if source is None or dest is None:
            result['msg'] = "src and dest are required"

        if result.get('msg'):
            result['failed'] = True
            return result

        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source)

        remote_checksum = None
        if not self._play_context.become:
            # calculate checksum for the remote file, don't bother if using become as slurp will be used
            # Force remote_checksum to follow symlinks because fetch always follows symlinks
            remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)

        # use slurp if permissions are lacking or privilege escalation is needed
        # ('1' = file missing, '2' = no read permission, None = become in use)
        remote_data = None
        if remote_checksum in ('1', '2', None):
            slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars)
            if slurpres.get('failed'):
                if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
                    result['msg'] = "the remote file does not exist, not transferring, ignored"
                    result['file'] = source
                    result['changed'] = False
                else:
                    result.update(slurpres)
                return result
            else:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get('source')
                if remote_source and remote_source != source:
                    source = remote_source

        # calculate the destination name
        if os.path.sep not in self._connection._shell.join_path('a', ''):
            # remote shell does not use '/' separators (e.g. Windows
            # target): normalise the path for local use.
            source = self._connection._shell._unquote(source)
            source_local = source.replace('\\', '/')
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
                result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                result['file'] = dest
                result['failed'] = True
                return result
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if 'inventory_hostname' in task_vars:
                target_name = task_vars['inventory_hostname']
            else:
                target_name = self._play_context.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

        dest = dest.replace("//", "/")

        if remote_checksum in ('0', '1', '2', '3', '4', '5'):
            result['changed'] = False
            result['file'] = source
            if remote_checksum == '0':
                result['msg'] = "unable to calculate the checksum of the remote file"
            elif remote_checksum == '1':
                result['msg'] = "the remote file does not exist"
            elif remote_checksum == '2':
                result['msg'] = "no read permission on remote file"
            elif remote_checksum == '3':
                result['msg'] = "remote file is a directory, fetch cannot work on directories"
            elif remote_checksum == '4':
                result['msg'] = "python isn't present on the system. Unable to compute checksum"
            elif remote_checksum == '5':
                result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
            # Historically, these don't fail because you may want to transfer
            # a log file that possibly MAY exist but keep going to fetch other
            # log files. Today, this is better achieved by adding
            # ignore_errors or failed_when to the task. Control the behaviour
            # via fail_on_missing
            if fail_on_missing:
                result['failed'] = True
                del result['changed']
            else:
                result['msg'] += ", not transferring, ignored"
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                # slurp path: we already have the bytes locally, just write them
                try:
                    f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
                    f.write(remote_data)
                    f.close()
                except (IOError, OSError) as e:
                    raise AnsibleError("Failed to fetch the file: %s" % e)
            new_checksum = secure_hash(dest)
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                result.update(dict(failed=True, md5sum=new_md5, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
                                   checksum=new_checksum, remote_checksum=remote_checksum))
            else:
                result.update({'changed': True, 'md5sum': new_md5, 'dest': dest, 'remote_md5sum': None, 'checksum': new_checksum,
                               'remote_checksum': remote_checksum})
        else:
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None
            result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))

    finally:
        self._remove_tmp_path(self._connection._shell.tmpdir)

    return result
def run(self, tmp=None, task_vars=None):
    '''Handler for fetch operations.

    Checksums a remote file, computes a local destination path (flat or
    per-host layout), transfers the file only when the checksums differ,
    and optionally validates the transfer. Returns the standard Ansible
    result dict; the remote tmp dir is always cleaned up in ``finally``.
    '''
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    try:
        # Fetch cannot be simulated meaningfully without transferring data.
        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not (yet) supported for this module'
            return result

        flat = boolean(self._task.args.get('flat'), strict=False)
        fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
        validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=False)

        # validate source and dest are strings FIXME: use basic.py and module specs
        source = self._task.args.get('src')
        if not isinstance(source, string_types):
            result['msg'] = "Invalid type supplied for source option, it must be a string"

        dest = self._task.args.get('dest')
        if not isinstance(dest, string_types):
            result['msg'] = "Invalid type supplied for dest option, it must be a string"

        # Any validation message set above means a hard failure.
        if result.get('msg'):
            result['failed'] = True
            return result

        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source)

        # Calculate checksum for the remote file; force remote_checksum to
        # follow symlinks because fetch always follows symlinks.
        remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)

        # Calculate the destination name. The join_path probe detects a
        # non-POSIX (e.g. Windows) remote shell whose separator differs
        # from the local os.path.sep; then backslashes are normalised.
        if os.path.sep not in self._connection._shell.join_path('a', ''):
            source = self._connection._shell._unquote(source)
            source_local = source.replace('\\', '/')
        else:
            source_local = source

        dest = os.path.expanduser(dest)
        if flat:
            if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
                result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
                result['file'] = dest
                result['failed'] = True
                return result
            if dest.endswith(os.sep):
                # if the path ends with "/", we'll use the source filename as the
                # destination filename
                base = os.path.basename(source_local)
                dest = os.path.join(dest, base)
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if 'inventory_hostname' in task_vars:
                target_name = task_vars['inventory_hostname']
            else:
                target_name = self._play_context.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

        dest = dest.replace("//", "/")

        # remote_checksum returns a single-digit error code on failure;
        # REMOTE_CHECKSUM_ERRORS (module-level table, defined elsewhere)
        # maps each code to a human-readable message.
        if remote_checksum in REMOTE_CHECKSUM_ERRORS:
            result['changed'] = False
            result['file'] = source
            result['msg'] = REMOTE_CHECKSUM_ERRORS[remote_checksum]
            # Historically, these don't fail because you may want to transfer
            # a log file that possibly MAY exist but keep going to fetch other
            # log files. Today, this is better achieved by adding
            # ignore_errors or failed_when to the task. Control the behaviour
            # via fail_when_missing
            if fail_on_missing:
                result['failed'] = True
                del result['changed']
            else:
                result['msg'] += ", not transferring, ignored"
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))

            # fetch the file and check for changes
            self._connection.fetch_file(source, dest)
            new_checksum = secure_hash(dest)
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                result.update(dict(failed=True, md5sum=new_md5,
                                   msg="checksum mismatch", file=source,
                                   dest=dest, remote_md5sum=None,
                                   checksum=new_checksum,
                                   remote_checksum=remote_checksum))
            else:
                result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
                               'remote_md5sum': None, 'checksum': new_checksum,
                               'remote_checksum': remote_checksum})
        else:
            # File already matches; report unchanged with current checksums.
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None
            result.update(dict(changed=False, md5sum=local_md5, file=source,
                               dest=dest, checksum=local_checksum))
    finally:
        # Always remove the remote temporary directory, success or failure.
        self._remove_tmp_path(self._connection._shell.tmpdir)

    return result
def run(self, tmp=None, task_vars=None):
    '''Handler for file transfer operations.

    Copies one or more local files (or inline ``content``, or the first
    match of ``first_available_file``) to the remote host, transferring
    only files whose checksum differs and delegating final placement to
    the ``copy``/``file`` modules.

    Fixes applied relative to the previous revision:
    - ``task_vars`` defaulted to a shared mutable ``dict()`` (classic
      Python mutable-default pitfall); it now defaults to ``None`` and is
      initialised per call — backward-compatible for all callers.
    - removed the unused local ``module_result``.
    '''
    if task_vars is None:
        task_vars = dict()

    source = self._task.args.get('src', None)
    content = self._task.args.get('content', None)
    dest = self._task.args.get('dest', None)
    raw = boolean(self._task.args.get('raw', 'no'))
    force = boolean(self._task.args.get('force', 'yes'))
    faf = self._task.first_available_file
    remote_src = boolean(self._task.args.get('remote_src', False))

    # Argument validation: exactly one source mechanism plus a dest;
    # inline content cannot target a directory.
    if (source is None and content is None and faf is None) or dest is None:
        return dict(failed=True, msg="src (or content) and dest are required")
    elif (source is not None or faf is not None) and content is not None:
        return dict(failed=True, msg="src and content are mutually exclusive")
    elif content is not None and dest is not None and dest.endswith("/"):
        return dict(failed=True, msg="dest must be a file if content is defined")

    # Check if the source ends with a "/" (changes relative-path slicing below).
    source_trailing_slash = False
    if source:
        source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

    # Define content_tempfile in case we set it after finding content populated.
    content_tempfile = None

    # If content is defined make a temp file and write the content into it.
    if content is not None:
        try:
            # If content comes to us as a dict it should be decoded json.
            # We need to encode it back into a string to write it out.
            if isinstance(content, (dict, list)):
                content_tempfile = self._create_content_tempfile(json.dumps(content))
            else:
                content_tempfile = self._create_content_tempfile(content)
            source = content_tempfile
        except Exception as err:
            return dict(failed=True, msg="could not write content temp file: %s" % err)

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    elif faf:
        source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
        if source is None:
            return dict(failed=True, msg="could not find src in first_available_file list")

    elif remote_src:
        # Remote-to-remote copy: delegate everything to the copy module.
        new_module_args = self._task.args.copy()
        del new_module_args['remote_src']
        return self._execute_module(module_name='copy',
                                    module_args=new_module_args,
                                    task_vars=task_vars,
                                    delete_remote_tmp=False)
    else:
        # Resolve src relative to the role's files/ dir, else the play basedir.
        if self._task._role is not None:
            source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
        else:
            source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

    # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
    source_files = []

    # If source is a directory populate our list else source is a file and translate it to a tuple.
    if os.path.isdir(source):
        # Get the amount of characters to strip to get the relative path.
        if source_trailing_slash:
            sz = len(source)
        else:
            sz = len(source.rsplit('/', 1)[0]) + 1

        # Walk the directory and append the file tuples to source_files.
        for base_path, sub_folders, files in os.walk(source):
            for file in files:
                full_path = os.path.join(base_path, file)
                rel_path = full_path[sz:]
                source_files.append((full_path, rel_path))

        # If it's recursive copy, destination is always a dir,
        # explicitly mark it so (note - copy module relies on this).
        if not self._connection._shell.path_has_trailing_slash(dest):
            dest = self._connection._shell.join_path(dest, '')
    else:
        source_files.append((source, os.path.basename(source)))

    changed = False

    # A register for if we executed a module.
    # Used to cut down on command calls when not recursive.
    module_executed = False

    # Tell _execute_module to delete the file if there is one file.
    delete_remote_tmp = (len(source_files) == 1)

    # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
    if not delete_remote_tmp:
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path()

    # expand any user home dir specifier
    dest = self._remote_expand_user(dest, tmp)

    diffs = []
    for source_full, source_rel in source_files:
        # Generate a hash of the local file.
        local_checksum = checksum(source_full)

        # If local_checksum is not defined we can't find the file so we should fail out.
        if local_checksum is None:
            return dict(failed=True, msg="could not find src=%s" % source_full)

        # This is kind of optimization - if user told us destination is
        # dir, do path manipulation right away, otherwise we still check
        # for dest being a dir via remote call below.
        if self._connection._shell.path_has_trailing_slash(dest):
            dest_file = self._connection._shell.join_path(dest, source_rel)
        else:
            dest_file = self._connection._shell.join_path(dest)

        # Attempt to get the remote checksum
        remote_checksum = self._remote_checksum(tmp, dest_file, all_vars=task_vars)

        if remote_checksum == '3':
            # The remote_checksum was executed on a directory.
            if content is not None:
                # If source was defined as content remove the temporary file and fail out.
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                return dict(failed=True, msg="can not use content with a dir as dest")
            else:
                # Append the relative source location to the destination and retry remote_checksum
                dest_file = self._connection._shell.join_path(dest, source_rel)
                remote_checksum = self._remote_checksum(tmp, dest_file, all_vars=task_vars)

        if remote_checksum != '1' and not force:
            # Remote file exists ('1' means missing) and force is off, so
            # leave it alone and continue to the next source file.
            continue

        if local_checksum != remote_checksum:
            # The checksums don't match and we will change or error out.
            changed = True

            # Create a tmp path if missing only if this is not recursive.
            # If this is recursive we already have a tmp path.
            if delete_remote_tmp:
                if tmp is None or "-tmp-" not in tmp:
                    tmp = self._make_tmp_path()

            if self._play_context.diff and not raw:
                diffs.append(self._get_diff_data(tmp, dest_file, source_full, task_vars))

            if self._play_context.check_mode:
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                changed = True
                module_return = dict(changed=True)
                continue

            # Define a remote directory that we will copy the file to.
            tmp_src = self._connection._shell.join_path(tmp, 'source')

            if not raw:
                self._connection.put_file(source_full, tmp_src)
            else:
                self._connection.put_file(source_full, dest_file)

            # We have copied the file remotely and no longer require our content_tempfile
            self._remove_tempfile_if_content_defined(content, content_tempfile)

            # fix file permissions when the copy is done as a different user
            if self._play_context.become and self._play_context.become_user != 'root':
                self._remote_chmod('a+r', tmp_src, tmp)

            if raw:
                # Continue to next iteration if raw is defined.
                continue

            # Run the copy module
            # src and dest here come after original and override them
            # we pass dest only to make sure it includes trailing slash in case of recursive copy
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    dest=dest,
                    original_basename=source_rel,
                )
            )
            module_return = self._execute_module(module_name='copy',
                                                 module_args=new_module_args,
                                                 task_vars=task_vars,
                                                 delete_remote_tmp=delete_remote_tmp)
            module_executed = True
        else:
            # no need to transfer the file, already correct hash, but still need to call
            # the file module in case we want to change attributes
            self._remove_tempfile_if_content_defined(content, content_tempfile)

            if raw:
                # Continue to next iteration if raw is defined.
                self._remove_tmp_path(tmp)
                continue

            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(src=source_rel, dest=dest, original_basename=source_rel)
            )

            # Execute the file module.
            module_return = self._execute_module(module_name='file',
                                                 module_args=new_module_args,
                                                 task_vars=task_vars,
                                                 delete_remote_tmp=delete_remote_tmp)
            module_executed = True

        if not module_return.get('checksum'):
            module_return['checksum'] = local_checksum
        if module_return.get('failed'):
            return module_return
        if module_return.get('changed'):
            changed = True

        # the file module returns the file path as 'path', but
        # the copy module uses 'dest', so add it if it's not there
        if 'path' in module_return and 'dest' not in module_return:
            module_return['dest'] = module_return['path']

    # Delete tmp path if we were recursive or if we did not execute a module.
    if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) or \
            (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
        self._remove_tmp_path(tmp)

    # For a single file the module's own result is the task result;
    # otherwise synthesise an aggregate result.
    if module_executed and len(source_files) == 1:
        result = module_return
    else:
        result = dict(dest=dest, src=source, changed=changed)

    if diffs:
        result['diff'] = diffs

    return result
def run(self, tmp=None, task_vars=None):
    '''Handler for file transfer operations.

    Variant of the copy action using the newer helper signatures
    (``_remote_checksum(path, all_vars=...)``, ``_remote_chmod(mode, path)``,
    ``_get_diff_data(dest, src, task_vars)``).

    Fixes applied relative to the previous revision:
    - ``task_vars`` defaulted to a shared mutable ``dict()`` (classic
      Python mutable-default pitfall); it now defaults to ``None`` and is
      initialised per call — backward-compatible for all callers.
    - removed the unused local ``module_result``.
    '''
    if task_vars is None:
        task_vars = dict()

    source = self._task.args.get('src', None)
    content = self._task.args.get('content', None)
    dest = self._task.args.get('dest', None)
    raw = boolean(self._task.args.get('raw', 'no'))
    force = boolean(self._task.args.get('force', 'yes'))
    faf = self._task.first_available_file
    remote_src = boolean(self._task.args.get('remote_src', False))

    # Argument validation: exactly one source mechanism plus a dest;
    # inline content cannot target a directory.
    if (source is None and content is None and faf is None) or dest is None:
        return dict(failed=True, msg="src (or content) and dest are required")
    elif (source is not None or faf is not None) and content is not None:
        return dict(failed=True, msg="src and content are mutually exclusive")
    elif content is not None and dest is not None and dest.endswith("/"):
        return dict(failed=True, msg="dest must be a file if content is defined")

    # Check if the source ends with a "/" (changes relative-path slicing below).
    source_trailing_slash = False
    if source:
        source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

    # Define content_tempfile in case we set it after finding content populated.
    content_tempfile = None

    # If content is defined make a temp file and write the content into it.
    if content is not None:
        try:
            # If content comes to us as a dict it should be decoded json.
            # We need to encode it back into a string to write it out.
            if isinstance(content, (dict, list)):
                content_tempfile = self._create_content_tempfile(json.dumps(content))
            else:
                content_tempfile = self._create_content_tempfile(content)
            source = content_tempfile
        except Exception as err:
            return dict(failed=True, msg="could not write content temp file: %s" % err)

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    elif faf:
        source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
        if source is None:
            return dict(failed=True, msg="could not find src in first_available_file list")

    elif remote_src:
        # Remote-to-remote copy: delegate everything to the copy module.
        new_module_args = self._task.args.copy()
        del new_module_args['remote_src']
        return self._execute_module(module_name='copy',
                                    module_args=new_module_args,
                                    task_vars=task_vars,
                                    delete_remote_tmp=False)
    else:
        # Resolve src relative to the role's files/ dir, else the play basedir.
        if self._task._role is not None:
            source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
        else:
            source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

    # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
    source_files = []

    # If source is a directory populate our list else source is a file and translate it to a tuple.
    if os.path.isdir(source):
        # Get the amount of characters to strip to get the relative path.
        if source_trailing_slash:
            sz = len(source)
        else:
            sz = len(source.rsplit('/', 1)[0]) + 1

        # Walk the directory and append the file tuples to source_files.
        for base_path, sub_folders, files in os.walk(source):
            for file in files:
                full_path = os.path.join(base_path, file)
                rel_path = full_path[sz:]
                if rel_path.startswith('/'):
                    rel_path = rel_path[1:]
                source_files.append((full_path, rel_path))

        # If it's recursive copy, destination is always a dir,
        # explicitly mark it so (note - copy module relies on this).
        if not self._connection._shell.path_has_trailing_slash(dest):
            dest = self._connection._shell.join_path(dest, '')
    else:
        source_files.append((source, os.path.basename(source)))

    changed = False

    # A register for if we executed a module.
    # Used to cut down on command calls when not recursive.
    module_executed = False

    # Tell _execute_module to delete the file if there is one file.
    delete_remote_tmp = (len(source_files) == 1)

    # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
    if not delete_remote_tmp:
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path()

    # expand any user home dir specifier
    dest = self._remote_expand_user(dest)

    diffs = []
    for source_full, source_rel in source_files:
        # Generate a hash of the local file.
        local_checksum = checksum(source_full)

        # If local_checksum is not defined we can't find the file so we should fail out.
        if local_checksum is None:
            return dict(failed=True, msg="could not find src=%s" % source_full)

        # This is kind of optimization - if user told us destination is
        # dir, do path manipulation right away, otherwise we still check
        # for dest being a dir via remote call below.
        if self._connection._shell.path_has_trailing_slash(dest):
            dest_file = self._connection._shell.join_path(dest, source_rel)
        else:
            dest_file = self._connection._shell.join_path(dest)

        # Attempt to get the remote checksum
        remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

        if remote_checksum == '3':
            # The remote_checksum was executed on a directory.
            if content is not None:
                # If source was defined as content remove the temporary file and fail out.
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                return dict(failed=True, msg="can not use content with a dir as dest")
            else:
                # Append the relative source location to the destination and retry remote_checksum
                dest_file = self._connection._shell.join_path(dest, source_rel)
                remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

        if remote_checksum != '1' and not force:
            # Remote file exists ('1' means missing) and force is off, so
            # leave it alone and continue to the next source file.
            continue

        if local_checksum != remote_checksum:
            # The checksums don't match and we will change or error out.
            changed = True

            # Create a tmp path if missing only if this is not recursive.
            # If this is recursive we already have a tmp path.
            if delete_remote_tmp:
                if tmp is None or "-tmp-" not in tmp:
                    tmp = self._make_tmp_path()

            if self._play_context.diff and not raw:
                diffs.append(self._get_diff_data(dest_file, source_full, task_vars))

            if self._play_context.check_mode:
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                changed = True
                module_return = dict(changed=True)
                continue

            # Define a remote directory that we will copy the file to.
            tmp_src = self._connection._shell.join_path(tmp, 'source')

            if not raw:
                self._connection.put_file(source_full, tmp_src)
            else:
                self._connection.put_file(source_full, dest_file)

            # We have copied the file remotely and no longer require our content_tempfile
            self._remove_tempfile_if_content_defined(content, content_tempfile)

            # fix file permissions when the copy is done as a different user
            if self._play_context.become and self._play_context.become_user != 'root':
                self._remote_chmod('a+r', tmp_src)

            if raw:
                # Continue to next iteration if raw is defined.
                continue

            # Run the copy module
            # src and dest here come after original and override them
            # we pass dest only to make sure it includes trailing slash in case of recursive copy
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    dest=dest,
                    original_basename=source_rel,
                )
            )
            module_return = self._execute_module(module_name='copy',
                                                 module_args=new_module_args,
                                                 task_vars=task_vars,
                                                 delete_remote_tmp=delete_remote_tmp)
            module_executed = True
        else:
            # no need to transfer the file, already correct hash, but still need to call
            # the file module in case we want to change attributes
            self._remove_tempfile_if_content_defined(content, content_tempfile)

            if raw:
                # Continue to next iteration if raw is defined.
                self._remove_tmp_path(tmp)
                continue

            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=source_rel,
                    dest=dest,
                    original_basename=source_rel
                )
            )

            # Execute the file module.
            module_return = self._execute_module(module_name='file',
                                                 module_args=new_module_args,
                                                 task_vars=task_vars,
                                                 delete_remote_tmp=delete_remote_tmp)
            module_executed = True

        if not module_return.get('checksum'):
            module_return['checksum'] = local_checksum
        if module_return.get('failed'):
            return module_return
        if module_return.get('changed'):
            changed = True

        # the file module returns the file path as 'path', but
        # the copy module uses 'dest', so add it if it's not there
        if 'path' in module_return and 'dest' not in module_return:
            module_return['dest'] = module_return['path']

    # Delete tmp path if we were recursive or if we did not execute a module.
    if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) or \
            (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
        self._remove_tmp_path(tmp)

    # For a single file the module's own result is the task result;
    # otherwise synthesise an aggregate result.
    if module_executed and len(source_files) == 1:
        result = module_return
    else:
        result = dict(dest=dest, src=source, changed=changed)

    if diffs:
        result['diff'] = diffs

    return result
def run(self, tmp=None, task_vars=None):
    '''Handler for fetch operations (IBM i variant).

    Saves the requested library/objects into a SAVF on the IBM i host
    (unless the target is already a SAVF), copies the SAVF into the IFS,
    then fetches it to the controller with checksum validation. The
    ``finally`` block deletes the temporary SAVF and IFS copy unless the
    user asked to keep a backup.
    '''
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    try:
        # NOTE(review): this re-initialisation discards whatever the base
        # class put into `result` above — looks intentional (fixed result
        # schema) but worth confirming.
        result = dict(msg="", stderr="", stdout="", file="", md5sum="",
                      dest="", remote_md5sum="", remote_checksum="",
                      checksum="", delta="", failed=False)
        savf_name = ''
        created = False      # True once we created a SAVF that may need cleanup
        is_savf = False      # True when the requested object already is a SAVF
        savf = ''
        ifs_created = False  # True once the SAVF was copied into the IFS
        backup = False
        is_lib = False
        force_save = False
        flat = False

        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not (yet) supported for this module'
            return result

        object_names = self._task.args.get('object_names', '*ALL')
        lib_name = self._task.args.get('lib_name', None)
        object_types = self._task.args.get('object_types', '*ALL')
        is_lib = boolean(self._task.args.get('is_lib', False), strict=True)
        savefile_name = self._task.args.get('savefile_name', None)
        force_save = boolean(self._task.args.get('force_save', False), strict=True)
        backup = boolean(self._task.args.get('backup', False), strict=True)
        format = self._task.args.get('format', '*SAVF')
        target_release = self._task.args.get('target_release', '*CURRENT')
        dest = self._task.args.get('dest', None)
        flat = boolean(self._task.args.get('flat', False), strict=True)
        fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=True)
        validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=True)

        # validate dest are strings FIXME: use basic.py and module specs
        if not isinstance(dest, string_types):
            result['msg'] = "Invalid type supplied for dest option, it must be a string. "
        if lib_name is None or dest is None:
            result['msg'] = "lib_name and dest are required. "

        # IBM i object/library names are case-insensitive; normalise to upper.
        object_names = object_names.upper()
        object_types = object_types.upper()
        format = format.upper()
        target_release = target_release.upper()
        if lib_name is not None:
            lib_name = lib_name.upper()
        if savefile_name is not None:
            savefile_name = savefile_name.upper()
        if lib_name == 'QSYS' and (is_lib is True or (object_names == '*ALL' and object_types == '*ALL')):
            result['msg'] = "QSYS can't be saved."
        if format != "*SAVF":
            result['msg'] = "format can only be *SAVF."

        # Any validation message set above means a hard failure.
        if result.get('msg'):
            result['failed'] = True
            return result

        startd = datetime.datetime.now()

        # If a single object was requested, check whether it is itself a
        # save file; if so we can fetch it directly instead of saving it.
        if len(object_names.split()) == 1 and is_lib is not True:
            if object_types == '*ALL' or object_types == '*FILE':
                if (object_names.split())[0][-1] == '*':
                    # Trailing '*' is a wildcard: translate to the regex form
                    # that ibmi_object_find expects.
                    module_args = {'object_name': object_names[0:-1] + '+',
                                   'lib_name': lib_name,
                                   'use_regex': True}
                    module_output = self._execute_module(module_name='ibmi_object_find',
                                                         module_args=module_args)
                    save_result = module_output
                    if len(save_result['object_list']) == 1 and save_result['object_list'][0]['OBJTYPE'] == '*FILE' and \
                            save_result['object_list'][0]['OBJATTRIBUTE'] == 'SAVF':
                        result['msg'] += "Object is a save file, fetch it directly."
                        savf_path = self._calculate_savf_path(save_result['object_list'][0]['OBJNAME'], lib_name)
                        savf_name = save_result['object_list'][0]['OBJNAME']
                        is_savf = True
                else:
                    module_args = {'object_name': object_names,
                                   'lib_name': lib_name}
                    module_output = self._execute_module(module_name='ibmi_object_find',
                                                         module_args=module_args)
                    save_result = module_output
                    if len(save_result['object_list']) == 1 and save_result['object_list'][0]['OBJTYPE'] == '*FILE' and \
                            save_result['object_list'][0]['OBJATTRIBUTE'] == 'SAVF':
                        result['msg'] += "Object is a save file, fetch it directly."
                        savf_path = self._calculate_savf_path(object_names, lib_name)
                        savf_name = object_names
                        is_savf = True

        if is_savf is False:
            # Not already a SAVF: save the library or objects into a new
            # SAVF, omitting the SAVF itself to avoid saving it into itself.
            savf_name, savf_path = self._calculate_savf_name(object_names, lib_name, is_lib,
                                                             savefile_name, task_vars, result)
            if is_lib is True:
                omitfile = 'OMITOBJ((%s/%s *FILE))' % (lib_name, savf_name)
                module_args = {'lib_name': lib_name,
                               'savefile_name': savf_name,
                               'savefile_lib': lib_name,
                               'target_release': target_release,
                               'force_save': force_save,
                               'joblog': True,
                               'parameters': omitfile}
                module_output = self._execute_module(module_name='ibmi_lib_save',
                                                     module_args=module_args)
            else:
                omitfile = 'OMITOBJ((%s/%s *FILE))' % (lib_name, savf_name)
                module_args = {'object_names': object_names,
                               'object_lib': lib_name,
                               'object_types': object_types,
                               'savefile_name': savf_name,
                               'savefile_lib': lib_name,
                               'target_release': target_release,
                               'force_save': force_save,
                               'joblog': False,
                               'parameters': omitfile}
                module_output = self._execute_module(module_name='ibmi_object_save',
                                                     module_args=module_args)
            save_result = module_output
            rc = save_result['rc']
            # CPC3708 in stdout also indicates the save did not complete usefully.
            if rc != 0 or ('CPC3708' in save_result['stdout']):
                result['msg'] = 'Create SAVF failed. See stderr or stdout for more information.'
                result['failed'] = True
                result['stderr'] = save_result['stderr_lines']
                result['stdout'] = save_result['stdout_lines']
                return result
            created = True

        display.debug("savf_name = %s, savf_path = %s, force_save=%s" % (savf_name, savf_path, force_save))
        source = savf_path

        # Copy the SAVF from QSYS.LIB into the IFS so it can be fetched as a
        # stream file. NOTE(review): `ifs_dir` is not defined in this method —
        # presumably a module-level constant; confirm where it is set.
        commandmk = 'mkdir %s' % ifs_dir
        command = 'cp %s %s' % (savf_path, ifs_dir)
        try:
            module_output = self._execute_module(module_name='command',
                                                 module_args={'_raw_params': commandmk})
            save_result = module_output
            rc = save_result['rc']
            display.debug("save_result['stderr_lines'] = %s" % (save_result['stderr_lines']))
            # mkdir failing because the dir already exists is fine.
            if rc != 0 and ('exists' not in save_result['stderr']):
                result['msg'] = save_result['msg']
                result['failed'] = True
                result['stderr'] = save_result['stderr_lines']
                return result
            module_output = self._execute_module(module_name='command',
                                                 module_args={'_raw_params': command})
            save_result = module_output
            rc = save_result['rc']
            if rc != 0:
                result['msg'] = save_result['msg']
                result['failed'] = True
                result['stderr'] = save_result['stderr_lines']
                result['stdout'] = save_result['stdout_lines']
                return result
            ifs_created = True
        except Exception as e:
            result['msg'] = to_text(e)
            result['failed'] = True
            return result

        # From here on, fetch the IFS copy of the SAVF.
        source = '%s/%s' % (ifs_dir, os.path.basename(savf_path))
        if not isinstance(source, string_types):
            result['msg'] = "Invalid type supplied for source option, it must be a string"
            result['failed'] = True
            return result

        source = self._connection._shell.join_path(source)
        source = self._remote_expand_user(source)

        remote_checksum = None
        if not self._connection.become:
            # calculate checksum for the remote file, don't bother if using become as slurp will be used
            # Force remote_checksum to follow symlinks because fetch always follows symlinks
            remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)

        # use slurp if permissions are lacking or privilege escalation is needed
        remote_data = None
        if remote_checksum in ('1', '2', None):
            slurpres = self._execute_module(module_name='slurp',
                                            module_args=dict(src=source),
                                            task_vars=task_vars)
            if slurpres.get('failed'):
                if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
                    result['msg'] = "the remote file does not exist, not transferring, ignored"
                    result['file'] = source
                    result['changed'] = False
                else:
                    result.update(slurpres)
                return result
            else:
                if slurpres['encoding'] == 'base64':
                    remote_data = base64.b64decode(slurpres['content'])
                if remote_data is not None:
                    remote_checksum = checksum_s(remote_data)
                # the source path may have been expanded on the
                # target system, so we compare it here and use the
                # expanded version if it's different
                remote_source = slurpres.get('source')
                if remote_source and remote_source != source:
                    source = remote_source

        # calculate the destination name (local name is derived from the
        # original QSYS SAVF path, not the temporary IFS path)
        if os.path.sep not in self._connection._shell.join_path('a', ''):
            source = self._connection._shell._unquote(source)
            qsys_source = self._connection._shell._unquote(savf_path)
            source_local = qsys_source.replace('\\', '/')
        else:
            source_local = savf_path

        dest = os.path.expanduser(dest)
        if flat:
            if not dest.startswith("/"):
                # if dest does not start with "/", we'll assume a relative path
                dest = self._loader.path_dwim(dest)
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        else:
            # files are saved in dest dir, with a subdir for each host, then the filename
            if 'inventory_hostname' in task_vars:
                target_name = task_vars['inventory_hostname']
            else:
                target_name = self._play_context.remote_addr
            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
        dest = dest.replace("//", "/")

        # Single-digit remote_checksum values are error codes from the
        # checksum helper, not real checksums.
        if remote_checksum in ('0', '1', '2', '3', '4', '5'):
            result['changed'] = False
            result['file'] = source
            if remote_checksum == '0':
                result['msg'] = "unable to calculate the checksum of the remote file"
            elif remote_checksum == '1':
                result['msg'] = "the remote file does not exist"
            elif remote_checksum == '2':
                result['msg'] = "no read permission on remote file"
            elif remote_checksum == '3':
                result['msg'] = "remote file is a directory, fetch cannot work on directories"
            elif remote_checksum == '4':
                result['msg'] = "python isn't present on the system. Unable to compute checksum"
            elif remote_checksum == '5':
                result['msg'] = "stdlib json was not found on the remote machine. Only the raw module can work without those installed"
            # Historically, these don't fail because you may want to transfer
            # a log file that possibly MAY exist but keep going to fetch other
            # log files. Today, this is better achieved by adding
            # ignore_errors or failed_when to the task. Control the behaviour
            # via fail_when_missing
            if fail_on_missing:
                result['failed'] = True
                del result['changed']
            else:
                result['msg'] += ", not transferring, ignored"
            return result

        # calculate checksum for the local file
        local_checksum = checksum(dest)

        if remote_checksum != local_checksum:
            # create the containing directories, if needed
            makedirs_safe(os.path.dirname(dest))

            # fetch the file and check for changes
            if remote_data is None:
                self._connection.fetch_file(source, dest)
            else:
                # Data already in memory from slurp: write it locally.
                try:
                    f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
                    f.write(remote_data)
                    f.close()
                except (IOError, OSError) as e:
                    raise AnsibleError("Failed to fetch the file: %s" % e)
            new_checksum = secure_hash(dest)
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                new_md5 = md5(dest)
            except ValueError:
                new_md5 = None

            if validate_checksum and new_checksum != remote_checksum:
                result.update(dict(failed=True, md5sum=new_md5,
                                   msg="checksum mismatch", file=savf,
                                   dest=dest, remote_md5sum=None,
                                   checksum=new_checksum,
                                   remote_checksum=remote_checksum))
            else:
                endd = datetime.datetime.now()
                delta = endd - startd
                # Only report the SAVF path when it will actually remain on
                # the remote (kept as backup, or it pre-existed as a SAVF).
                if (created is True and backup is True) or is_savf is True:
                    savf = savf_path
                result['msg'] += " File is renewed on local."
                result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
                               'remote_md5sum': None, 'checksum': new_checksum,
                               'remote_checksum': remote_checksum,
                               'delta': str(delta), 'file': savf})
        else:
            # Local copy already matches; report unchanged.
            # For backwards compatibility. We'll return None on FIPS enabled systems
            try:
                local_md5 = md5(dest)
            except ValueError:
                local_md5 = None
            endd = datetime.datetime.now()
            delta = endd - startd
            if (created is True and backup is True) or is_savf is True:
                savf = savf_path
            result.update(dict(changed=False, md5sum=local_md5, file=savf,
                               delta=str(delta), dest=dest,
                               checksum=local_checksum))
    except Exception as e:
        result['msg'] += "%s" % to_text(e)
        result['failed'] = True
        return result
    finally:
        # Clean up: delete the SAVF we created unless the user keeps it as a
        # backup (or it already existed); CPF2105 means it was already gone.
        if ((backup is False and is_savf is False) or result['failed'] is True) and created is True:
            cmd = 'DLTOBJ OBJ(%s/%s) OBJTYPE(*FILE)' % (lib_name, savf_name)
            module_output = self._execute_module(module_name='ibmi_cl_command',
                                                 module_args={'cmd': cmd})
            save_result = module_output
            rc = save_result['rc']
            if rc != 0 and ('CPF2105' not in save_result['stderr']):
                result['msg'] += "Failed to delete SAVF on remote"
        # Always remove the temporary IFS copy if it was created.
        if ifs_created is True:
            cmd = 'rm %s/%s' % (ifs_dir, os.path.basename(savf_path))
            try:
                module_output = self._execute_module(module_name='command',
                                                     module_args={'_raw_params': cmd})
                save_result = module_output
                rc = save_result['rc']
                if rc != 0:
                    result['msg'] += "Failed to delete IFS on remote"
            except Exception as e:
                result['msg'] += "exception happens when delete IFS file. error: %s" % to_text(e)
        self._remove_tmp_path(self._connection._shell.tmpdir)
    return result
module_executed = False # Tell _execute_module to delete the file if there is one file. delete_remote_tmp = (len(source_files) == 1) # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late. if not delete_remote_tmp: if tmp is None or "-tmp-" not in tmp: tmp = self._make_tmp_path() # expand any user home dir specifier dest = self._remote_expand_user(dest, tmp) for source_full, source_rel in source_files: # Generate a hash of the local file. local_checksum = checksum(source_full) # If local_checksum is not defined we can't find the file so we should fail out. if local_checksum is None: return dict(failed=True, msg="could not find src=%s" % source_full) # This is kind of optimization - if user told us destination is # dir, do path manipulation right away, otherwise we still check # for dest being a dir via remote call below. if self._shell.path_has_trailing_slash(dest): dest_file = self._shell.join_path(dest, source_rel) else: dest_file = self._shell.join_path(dest) # Attempt to get the remote checksum
def run(self, tmp=None, task_vars=None):
    ''' handler for fetch operations

    Copies a file from the remote host to the controller.  Falls back to
    the slurp module when the remote checksum signals missing read
    permission ('1'/'2') or when become is active.  Returns an Ansible
    result dict; a missing/unreadable remote file is not fatal unless
    fail_on_missing is set.
    '''
    # task_vars is accepted for interface compatibility but never read in
    # this body.  Default changed from the mutable ``dict()`` to ``None``:
    # a mutable default is shared across calls (classic Python pitfall).

    # FIXME: is this even required anymore?
    #if self.runner.noop_on_check(inject):
    #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not (yet) supported for this module'))

    source = self._task.args.get('src', None)
    dest = self._task.args.get('dest', None)
    flat = boolean(self._task.args.get('flat'))
    fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
    # validate_md5 is the legacy spelling of validate_checksum
    validate_checksum = boolean(
        self._task.args.get('validate_checksum',
                            self._task.args.get('validate_md5')))

    # Supplying both spellings at once is ambiguous; refuse it.
    if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
        return dict(
            failed=True,
            msg="validate_checksum and validate_md5 cannot both be specified")

    if source is None or dest is None:
        return dict(failed=True, msg="src and dest are required")

    source = self._shell.join_path(source)
    source = self._remote_expand_user(source, tmp)

    # calculate checksum for the remote file
    remote_checksum = self._remote_checksum(tmp, source)

    # use slurp if sudo and permissions are lacking
    remote_data = None
    if remote_checksum in ('1', '2') or self._connection_info.become:
        slurpres = self._execute_module(module_name='slurp',
                                        module_args=dict(src=source),
                                        tmp=tmp)
        if slurpres.get('rc') == 0:
            if slurpres['encoding'] == 'base64':
                remote_data = base64.b64decode(slurpres['content'])
            if remote_data is not None:
                remote_checksum = checksum_s(remote_data)
            # the source path may have been expanded on the
            # target system, so we compare it here and use the
            # expanded version if it's different
            remote_source = slurpres.get('source')
            if remote_source and remote_source != source:
                source = remote_source
        else:
            # FIXME: should raise an error here? the old code did nothing
            pass

    # calculate the destination name
    if os.path.sep not in self._shell.join_path('a', ''):
        # remote shell uses a different separator than the controller
        source_local = source.replace('\\', '/')
    else:
        source_local = source

    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            # if the path ends with "/", we'll use the source filename as the
            # destination filename
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        if not dest.startswith("/"):
            # if dest does not start with "/", we'll assume a relative path
            dest = self._loader.path_dwim(dest)
    else:
        # files are saved in dest dir, with a subdir for each host, then the filename
        dest = "%s/%s/%s" % (self._loader.path_dwim(dest),
                             self._connection_info.remote_addr, source_local)

    dest = dest.replace("//", "/")

    if remote_checksum in ('0', '1', '2', '3', '4'):
        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_checksum == '0':
            result = dict(
                msg="unable to calculate the checksum of the remote file",
                file=source, changed=False)
        elif remote_checksum == '1':
            if fail_on_missing:
                result = dict(failed=True,
                              msg="the remote file does not exist",
                              file=source)
            else:
                result = dict(
                    msg="the remote file does not exist, not transferring, ignored",
                    file=source, changed=False)
        elif remote_checksum == '2':
            result = dict(
                msg="no read permission on remote file, not transferring, ignored",
                file=source, changed=False)
        elif remote_checksum == '3':
            result = dict(
                msg="remote file is a directory, fetch cannot work on directories",
                file=source, changed=False)
        elif remote_checksum == '4':
            result = dict(
                msg="python isn't present on the system. Unable to compute checksum",
                file=source, changed=False)
        return result

    # calculate checksum for the local file
    local_checksum = checksum(dest)

    if remote_checksum != local_checksum:
        # create the containing directories, if needed
        if not os.path.isdir(os.path.dirname(dest)):
            os.makedirs(os.path.dirname(dest))

        # fetch the file and check for changes
        if remote_data is None:
            self._connection.fetch_file(source, dest)
        else:
            # BUGFIX: remote_data is bytes (base64.b64decode output), so the
            # file must be opened in binary mode -- text mode raises
            # TypeError on Python 3.  A context manager also guarantees the
            # handle is closed even if write() raises (old code leaked it).
            with open(dest, 'wb') as f:
                f.write(remote_data)

        new_checksum = secure_hash(dest)
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            new_md5 = md5(dest)
        except ValueError:
            new_md5 = None

        if validate_checksum and new_checksum != remote_checksum:
            return dict(failed=True, md5sum=new_md5,
                        msg="checksum mismatch", file=source, dest=dest,
                        remote_md5sum=None, checksum=new_checksum,
                        remote_checksum=remote_checksum)
        return dict(changed=True, md5sum=new_md5, dest=dest,
                    remote_md5sum=None, checksum=new_checksum,
                    remote_checksum=remote_checksum)
    else:
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            local_md5 = md5(dest)
        except ValueError:
            local_md5 = None
        return dict(changed=False, md5sum=local_md5, file=source,
                    dest=dest, checksum=local_checksum)
def run(self, tmp=None, task_vars=None):
    ''' handler for file transfer operations

    Copy action entry point: validates src/content/dest, expands a source
    directory into (full_path, relative_path) tuples, and for each file
    either transfers it and runs the remote `copy` module, or (when the
    checksum already matches) runs the `file` module to enforce
    attributes.  Returns the aggregated Ansible result dict.
    '''
    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)

    if result.get('skipped'):
        return result

    # raw task arguments; `boolean` coerces the usual yes/no/true/false forms
    source = self._task.args.get('src', None)
    content = self._task.args.get('content', None)
    dest = self._task.args.get('dest', None)
    raw = boolean(self._task.args.get('raw', 'no'))
    force = boolean(self._task.args.get('force', 'yes'))
    remote_src = boolean(self._task.args.get('remote_src', False))
    follow = boolean(self._task.args.get('follow', False))
    decrypt = boolean(self._task.args.get('decrypt', True))

    # Parameter validation: assume failure, clear the flag only when the
    # combination of src/content/dest is coherent.
    result['failed'] = True
    if (source is None and content is None) or dest is None:
        result['msg'] = "src (or content) and dest are required"
    elif source is not None and content is not None:
        result['msg'] = "src and content are mutually exclusive"
    elif content is not None and dest is not None and dest.endswith("/"):
        result['msg'] = "dest must be a file if content is defined"
    else:
        del result['failed']

    if result.get('failed'):
        return result

    # Check if the source ends with a "/" (marks "copy directory contents")
    source_trailing_slash = False
    if source:
        source_trailing_slash = self._connection._shell.path_has_trailing_slash(
            source)

    # Define content_tempfile in case we set it after finding content populated.
    content_tempfile = None

    # If content is defined make a temp file and write the content into it.
    if content is not None:
        try:
            # If content comes to us as a dict it should be decoded json.
            # We need to encode it back into a string to write it out.
            if isinstance(content, dict) or isinstance(content, list):
                content_tempfile = self._create_content_tempfile(
                    json.dumps(content))
            else:
                content_tempfile = self._create_content_tempfile(content)
            source = content_tempfile
        except Exception as err:
            result['failed'] = True
            result['msg'] = "could not write content temp file: %s" % to_native(
                err)
            return result

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    elif remote_src:
        # remote_src: nothing to transfer, just run the module remotely
        result.update(self._execute_module(task_vars=task_vars))
        return result
    else:
        # find in expected paths
        try:
            source = self._find_needle('files', source)
        except AnsibleError as e:
            result['failed'] = True
            result['msg'] = to_text(e)
            return result

    # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
    source_files = []

    # If source is a directory populate our list else source is a file and translate it to a tuple.
    if os.path.isdir(to_bytes(source, errors='surrogate_or_strict')):
        # Get the amount of spaces to remove to get the relative path.
        if source_trailing_slash:
            sz = len(source)
        else:
            sz = len(source.rsplit('/', 1)[0]) + 1

        # Walk the directory and append the file tuples to source_files.
        for base_path, sub_folders, files in os.walk(to_bytes(source),
                                                     followlinks=True):
            for file in files:
                full_path = to_text(os.path.join(base_path, file),
                                    errors='surrogate_or_strict')
                rel_path = full_path[sz:]
                if rel_path.startswith('/'):
                    rel_path = rel_path[1:]
                source_files.append((full_path, rel_path))

        # If it's recursive copy, destination is always a dir,
        # explicitly mark it so (note - copy module relies on this).
        if not self._connection._shell.path_has_trailing_slash(dest):
            dest = self._connection._shell.join_path(dest, '')
    else:
        source_files.append((source, os.path.basename(source)))

    changed = False
    module_return = dict(changed=False)

    # A register for if we executed a module.
    # Used to cut down on command calls when not recursive.
    module_executed = False

    # Tell _execute_module to delete the file if there is one file.
    delete_remote_tmp = (len(source_files) == 1)

    # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
    if not delete_remote_tmp:
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path()

    # expand any user home dir specifier
    dest = self._remote_expand_user(dest)

    # Keep original value for mode parameter
    # (the loop below may overwrite self._task.args['mode'] with the
    # preserved local mode; it is restored after the loop)
    mode_value = self._task.args.get('mode', None)

    diffs = []
    for source_full, source_rel in source_files:

        # If the local file does not exist, get_real_file() raises AnsibleFileNotFound
        try:
            source_full = self._loader.get_real_file(source_full,
                                                     decrypt=decrypt)
        except AnsibleFileNotFound as e:
            result['failed'] = True
            result['msg'] = "could not find src=%s, %s" % (source_full, e)
            self._remove_tmp_path(tmp)
            return result

        # Get the local mode and set if user wanted it preserved
        # https://github.com/ansible/ansible-modules-core/issues/1124
        if self._task.args.get('mode', None) == 'preserve':
            lmode = '0%03o' % stat.S_IMODE(os.stat(source_full).st_mode)
            self._task.args['mode'] = lmode

        # This is kind of optimization - if user told us destination is
        # dir, do path manipulation right away, otherwise we still check
        # for dest being a dir via remote call below.
        if self._connection._shell.path_has_trailing_slash(dest):
            dest_file = self._connection._shell.join_path(dest, source_rel)
        else:
            dest_file = self._connection._shell.join_path(dest)

        # Attempt to get remote file info
        dest_status = self._execute_remote_stat(dest_file,
                                                all_vars=task_vars,
                                                follow=follow,
                                                tmp=tmp,
                                                checksum=force)

        if dest_status['exists'] and dest_status['isdir']:
            # The dest is a directory.
            if content is not None:
                # If source was defined as content remove the temporary file and fail out.
                self._remove_tempfile_if_content_defined(
                    content, content_tempfile)
                self._remove_tmp_path(tmp)
                result['failed'] = True
                result['msg'] = "can not use content with a dir as dest"
                return result
            else:
                # Append the relative source location to the destination and get remote stats again
                dest_file = self._connection._shell.join_path(
                    dest, source_rel)
                dest_status = self._execute_remote_stat(dest_file,
                                                        all_vars=task_vars,
                                                        follow=follow,
                                                        tmp=tmp,
                                                        checksum=force)

        if dest_status['exists'] and not force:
            # remote_file exists so continue to next iteration.
            continue

        # Generate a hash of the local file.
        local_checksum = checksum(source_full)

        if local_checksum != dest_status['checksum']:
            # The checksums don't match and we will change or error out.
            changed = True

            # Create a tmp path if missing only if this is not recursive.
            # If this is recursive we already have a tmp path.
            if delete_remote_tmp:
                if tmp is None or "-tmp-" not in tmp:
                    tmp = self._make_tmp_path()

            if self._play_context.diff and not raw:
                diffs.append(
                    self._get_diff_data(dest_file, source_full, task_vars))

            if self._play_context.check_mode:
                # check mode: report the would-be change, transfer nothing
                self._remove_tempfile_if_content_defined(
                    content, content_tempfile)
                changed = True
                module_return = dict(changed=True)
                continue

            # Define a remote directory that we will copy the file to.
            tmp_src = self._connection._shell.join_path(tmp, 'source')

            remote_path = None

            if not raw:
                remote_path = self._transfer_file(source_full, tmp_src)
            else:
                # raw: push straight to the final destination, no copy module
                self._transfer_file(source_full, dest_file)

            # We have copied the file remotely and no longer require our content_tempfile
            self._remove_tempfile_if_content_defined(
                content, content_tempfile)
            self._loader.cleanup_tmp_file(source_full)

            # fix file permissions when the copy is done as a different user
            if remote_path:
                self._fixup_perms2((tmp, remote_path))

            if raw:
                # Continue to next iteration if raw is defined.
                continue

            # Run the copy module

            # src and dest here come after original and override them
            # we pass dest only to make sure it includes trailing slash in case of recursive copy
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    dest=dest,
                    original_basename=source_rel,
                ))

            # remove action plugin only keys
            for key in ('content', 'decrypt'):
                if key in new_module_args:
                    del new_module_args[key]

            module_return = self._execute_module(
                module_name='copy',
                module_args=new_module_args,
                task_vars=task_vars,
                tmp=tmp,
                delete_remote_tmp=delete_remote_tmp)
            module_executed = True

        else:
            # no need to transfer the file, already correct hash, but still need to call
            # the file module in case we want to change attributes
            self._remove_tempfile_if_content_defined(
                content, content_tempfile)
            self._loader.cleanup_tmp_file(source_full)

            if raw:
                # Continue to next iteration if raw is defined.
                self._remove_tmp_path(tmp)
                continue

            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(src=source_rel, dest=dest, original_basename=source_rel))

            # Execute the file module.
            module_return = self._execute_module(
                module_name='file',
                module_args=new_module_args,
                task_vars=task_vars,
                tmp=tmp,
                delete_remote_tmp=delete_remote_tmp)
            module_executed = True

        if not module_return.get('checksum'):
            module_return['checksum'] = local_checksum
        if module_return.get('failed'):
            result.update(module_return)
            if not delete_remote_tmp:
                self._remove_tmp_path(tmp)
            return result
        if module_return.get('changed'):
            changed = True

        # the file module returns the file path as 'path', but
        # the copy module uses 'dest', so add it if it's not there
        if 'path' in module_return and 'dest' not in module_return:
            module_return['dest'] = module_return['path']

    # reset the mode
    self._task.args['mode'] = mode_value

    # Delete tmp path if we were recursive or if we did not execute a module.
    if not delete_remote_tmp or (delete_remote_tmp and not module_executed):
        self._remove_tmp_path(tmp)

    if module_executed and len(source_files) == 1:
        result.update(module_return)
    else:
        result.update(dict(dest=dest, src=source, changed=changed))

    if diffs:
        result['diff'] = diffs

    return result
def run(self, tmp=None, task_vars=None):
    '''Transfer a CL/SQL script to the IBM i target and run it via the
    ibmi_script_execute module; returns an Ansible result dict with
    stdout/stderr, rc, delta and job_log fields.
    '''
    display.debug("version: " + __ibmi_module_version__)
    if task_vars is None:
        task_vars = dict()
    # _tmp_args is used for ibmi_sync module
    _tmp_args = self._task.args.copy()
    result = super().run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    try:
        startd = datetime.datetime.now()
        # NOTE(review): the super().run() result is discarded and replaced
        # wholesale here -- presumably intentional, verify against callers.
        result = dict(stderr="",
                      stdout="",
                      stdout_lines='',
                      stderr_lines='',
                      delta="",
                      rc=255,
                      failed=False,
                      job_log=[])
        if self._play_context.check_mode:
            result['skipped'] = True
            result[
                'stderr'] = 'check mode not (yet) supported for this module'
            return result

        src = self._task.args.get('src', None)
        # NOTE: `type` shadows the builtin; kept as-is to preserve the code
        type = self._task.args.get('type', None)
        if src is None:
            result['stderr'] = "src is required."
        elif type is None:
            result['stderr'] = "type is required."
        elif type not in ['CL', 'SQL']:
            result['stderr'] = "type must be 'CL' or 'SQL'."
        # validate src are strings FIXME: use basic.py and module specs
        elif not isinstance(src, string_types):
            result[
                'stderr'] = "Invalid type supplied for src option, it must be a string."
        if result.get('stderr'):
            result['failed'] = True
            return result

        re_raise = False  # workaround to pass the raise-missing-from pylint issue
        inst = None
        try:
            # resolve src against the role/playbook 'files' search paths
            src = self._loader.get_real_file(
                self._find_needle('files', src))
        except AnsibleError as e:
            re_raise = True
            inst = e
        if re_raise:
            raise AnsibleActionFail(to_native(inst))

        # stage the script into the connection's remote tmp dir
        tmp_src = self._connection._shell.join_path(
            self._connection._shell.tmpdir, os.path.basename(src))
        display.debug(
            "ibm i debug: transfer script file {p_src} to {p_tmp_src}".
            format(p_src=src, p_tmp_src=tmp_src))
        self._transfer_file(src, tmp_src)

        # verify the transfer when not using become (checksum needs read access)
        local_checksum = checksum(src)
        if not self._connection.become:
            remote_checksum = self._remote_checksum(tmp_src,
                                                    all_vars=task_vars,
                                                    follow=True)
            if remote_checksum in ('1', '2', None):
                # best-effort note only; execution still proceeds
                result[
                    'stderr'] += "The permissions are lacking or privilege is needed. remote_checksum:{p_remote_checksum}".format(
                        p_remote_checksum=remote_checksum)
            elif local_checksum != remote_checksum:
                result[
                    'stderr'] += "local_checksum doesn't match remote_checksum."
                result['failed'] = True
                return result

        # run the staged copy on the target
        _tmp_args['src'] = tmp_src
        result.update(
            self._execute_module('ibmi_script_execute',
                                 module_args=_tmp_args))
        endd = datetime.datetime.now()
        delta = endd - startd
        if result['rc']:
            # non-zero rc (or the initial 255 if the module returned none)
            result['failed'] = True
            result.update(
                dict(stderr=(u"Failed to execute script file {p_src}.".
                             format(p_src=src)) + result['stderr'],
                     delta=str(delta)))
        else:
            result.update(
                dict(stdout=u"Successfully execute script file {p_src}.".
                     format(p_src=src),
                     delta=str(delta)))
    except Exception as e:
        result['stderr'] += "{p_to_text}".format(p_to_text=to_text(e))
        result['failed'] = True
        return result
    finally:
        # always clean up the remote tmp dir, even on early return
        self._remove_tmp_path(self._connection._shell.tmpdir)
    return result