def main():

    module = AssibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            delimiter=dict(type='str'),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            remote_src=dict(type='bool', default=True),
            regexp=dict(type='str'),
            ignore_hidden=dict(type='bool', default=False),
            validate=dict(type='str'),
        ),
        add_file_common_args=True,
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" % (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

    path = assemble_from_fragments(src, delimiter, compiled_regexp, ignore_hidden, module.tmpdir)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" % (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path, dest, unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)
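
# For reference, a minimal sketch of what a fragment assembler such as the
# assemble_from_fragments() call above could look like. This is an
# illustrative assumption, not the module's actual helper: it sorts the
# fragment names, applies the optional regexp and hidden-file filters, joins
# fragments with the delimiter, and returns a temp-file path that is ready
# for atomic_move(). The real helper also handles delimiter escapes and
# byte-level I/O, which this sketch omits.
def _assemble_from_fragments_sketch(src_dir, delimiter=None, compiled_regexp=None,
                                    ignore_hidden=False, tmpdir=None):
    import os
    import tempfile  # local imports so the sketch stands alone

    fd, temp_path = tempfile.mkstemp(dir=tmpdir)
    with os.fdopen(fd, 'w') as out:
        first = True
        for name in sorted(os.listdir(src_dir)):
            if ignore_hidden and name.startswith('.'):
                continue
            if compiled_regexp and not compiled_regexp.search(name):
                continue
            fragment = os.path.join(src_dir, name)
            if not os.path.isfile(fragment):
                continue
            if not first and delimiter:
                # keep the delimiter on its own line
                out.write(delimiter)
                if not delimiter.endswith('\n'):
                    out.write('\n')
            with open(fragment) as frag:
                out.write(frag.read())
            first = False
    return temp_path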
def main():
    argument_spec = url_argument_spec()

    # setup aliases
    argument_spec['url_username']['aliases'] = ['username']
    argument_spec['url_password']['aliases'] = ['password']

    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='dict'),
        tmp_dest=dict(type='path'),
    )

    module = AssibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    if module.params.get('thirsty'):
        module.deprecate('The alias "thirsty" has been deprecated and will be removed, use "force" instead',
                         version='2.13', collection_name='assible.builtin')

    if module.params.get('sha256sum'):
        module.deprecate('The parameter "sha256sum" has been deprecated and will be removed, use "checksum" instead',
                         version='2.14', collection_name='assible.builtin')

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    headers = module.params['headers']
    tmp_dest = module.params['tmp_dest']

    result = dict(
        changed=False,
        checksum_dest=None,
        checksum_src=None,
        dest=dest,
        elapsed=0,
        url=url,
    )

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.split(':', 1)
        except ValueError:
            module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>", **result)

        if is_url(checksum):
            checksum_url = checksum
            # download checksum file to checksum_tmpsrc
            checksum_tmpsrc, checksum_info = url_get(module, checksum_url, dest, use_proxy, last_mod_time, force,
                                                     timeout, headers, tmp_dest)
            with open(checksum_tmpsrc) as f:
                lines = [line.rstrip('\n') for line in f]
            os.remove(checksum_tmpsrc)
            checksum_map = []
            for line in lines:
                parts = line.split(None, 1)
                if len(parts) == 2:
                    checksum_map.append((parts[0], parts[1]))
            filename = url_filename(url)

            # Look through each line in the checksum file for a hash corresponding to
            # the filename in the url, returning the first hash that is found.
            for cksum in (s for (s, f) in checksum_map if f.strip('./') == filename):
                checksum = cksum
                break
            else:
                checksum = None

            if checksum is None:
                module.fail_json(msg="Unable to find a checksum for file '%s' in '%s'" % (filename, checksum_url))
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        checksum = re.sub(r'\W+', '', checksum).lower()
        # Ensure the checksum portion is a hexdigest
        try:
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg='The checksum format is invalid', **result)

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum != destination_checksum:
                checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and checksum and not checksum_mismatch:
            # allow file attribute changes
            file_args = module.load_file_common_arguments(module.params, path=dest)
            result['changed'] = module.set_fs_attributes_if_different(file_args, False)
            if result['changed']:
                module.exit_json(msg="file already exists but file attributes changed", **result)
            module.exit_json(msg="file already exists", **result)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    start = datetime.datetime.utcnow()
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest)
    result['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    result['src'] = tmpsrc

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)
        result['dest'] = dest

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        # tmpsrc was never written, so there is nothing to clean up
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], **result)

    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result)
    result['checksum_src'] = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest), **result)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest), **result)
        result['checksum_dest'] = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" % (os.path.dirname(dest)), **result)
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (os.path.dirname(dest)), **result)

    if module.check_mode:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        result['changed'] = ('checksum_dest' not in result or
                             result['checksum_src'] != result['checksum_dest'])
        module.exit_json(msg=info.get('msg', ''), **result)

    backup_file = None
    if result['checksum_src'] != result['checksum_dest']:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc(), **result)
        result['changed'] = True
    else:
        result['changed'] = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum), **result)

    # allow file attribute changes
    file_args = module.load_file_common_arguments(module.params, path=dest)
    result['changed'] = module.set_fs_attributes_if_different(file_args, result['changed'])

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        result['md5sum'] = module.md5(dest)
    except ValueError:
        result['md5sum'] = None

    if backup_file:
        result['backup_file'] = backup_file

    # Mission complete
    module.exit_json(msg=info.get('msg', ''), status_code=info.get('status', ''), **result)
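
# The checksum-parameter handling above, shown in isolation (and ignoring the
# checksum-URL branch): split on the first ':' to get the algorithm, scrub
# non-alphanumerics such as zero-width spaces, and confirm the remainder is a
# hexdigest. The function name and example values are local to this sketch.
def _parse_checksum_param(value):
    import re

    algorithm, _, digest = value.partition(':')
    if not digest:
        raise ValueError("checksum must be in format <algorithm>:<checksum>")
    digest = re.sub(r'\W+', '', digest).lower()
    int(digest, 16)  # raises ValueError if the digest is not hexadecimal
    return algorithm, digest

# e.g. _parse_checksum_param('sha256:DEAD beef01') -> ('sha256', 'deadbeef01')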
def main():
    module = AssibleModule(
        argument_spec=dict(
            paths=dict(type='list', required=True, aliases=['name', 'path'], elements='str'),
            patterns=dict(type='list', default=['*'], aliases=['pattern'], elements='str'),
            excludes=dict(type='list', aliases=['exclude'], elements='str'),
            contains=dict(type='str'),
            read_whole_file=dict(type='bool', default=False),
            file_type=dict(type='str', default="file", choices=['any', 'directory', 'file', 'link']),
            age=dict(type='str'),
            age_stamp=dict(type='str', default="mtime", choices=['atime', 'ctime', 'mtime']),
            size=dict(type='str'),
            recurse=dict(type='bool', default=False),
            hidden=dict(type='bool', default=False),
            follow=dict(type='bool', default=False),
            get_checksum=dict(type='bool', default=False),
            use_regex=dict(type='bool', default=False),
            depth=dict(type='int'),
        ),
        supports_check_mode=True,
    )

    params = module.params

    filelist = []

    if params['age'] is None:
        age = None
    else:
        # convert age to seconds:
        m = re.match(r"^(-?\d+)(s|m|h|d|w)?$", params['age'].lower())
        seconds_per_unit = {"s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
        if m:
            age = int(m.group(1)) * seconds_per_unit.get(m.group(2), 1)
        else:
            module.fail_json(age=params['age'], msg="failed to process age")

    if params['size'] is None:
        size = None
    else:
        # convert size to bytes:
        m = re.match(r"^(-?\d+)(b|k|m|g|t)?$", params['size'].lower())
        bytes_per_unit = {"b": 1, "k": 1024, "m": 1024 ** 2, "g": 1024 ** 3, "t": 1024 ** 4}
        if m:
            size = int(m.group(1)) * bytes_per_unit.get(m.group(2), 1)
        else:
            module.fail_json(size=params['size'], msg="failed to process size")

    now = time.time()
    msg = ''
    looked = 0
    for npath in params['paths']:
        npath = os.path.expanduser(os.path.expandvars(npath))
        if os.path.isdir(npath):
            for root, dirs, files in os.walk(npath, followlinks=params['follow']):
                looked = looked + len(files) + len(dirs)
                for fsobj in (files + dirs):
                    fsname = os.path.normpath(os.path.join(root, fsobj))
                    if params['depth']:
                        wpath = npath.rstrip(os.path.sep) + os.path.sep
                        depth = int(fsname.count(os.path.sep)) - int(wpath.count(os.path.sep)) + 1
                        if depth > params['depth']:
                            continue
                    if os.path.basename(fsname).startswith('.') and not params['hidden']:
                        continue

                    try:
                        st = os.lstat(fsname)
                    except Exception:
                        msg += "%s was skipped as it does not seem to be a valid file or it cannot be accessed\n" % fsname
                        continue

                    r = {'path': fsname}
                    if params['file_type'] == 'any':
                        if pfilter(fsobj, params['patterns'], params['excludes'], params['use_regex']) and \
                                agefilter(st, now, age, params['age_stamp']):
                            r.update(statinfo(st))
                            if stat.S_ISREG(st.st_mode) and params['get_checksum']:
                                r['checksum'] = module.sha1(fsname)
                            filelist.append(r)
                    elif stat.S_ISDIR(st.st_mode) and params['file_type'] == 'directory':
                        if pfilter(fsobj, params['patterns'], params['excludes'], params['use_regex']) and \
                                agefilter(st, now, age, params['age_stamp']):
                            r.update(statinfo(st))
                            filelist.append(r)
                    elif stat.S_ISREG(st.st_mode) and params['file_type'] == 'file':
                        if pfilter(fsobj, params['patterns'], params['excludes'], params['use_regex']) and \
                                agefilter(st, now, age, params['age_stamp']) and \
                                sizefilter(st, size) and contentfilter(fsname, params['contains'], params['read_whole_file']):
                            r.update(statinfo(st))
                            if params['get_checksum']:
                                r['checksum'] = module.sha1(fsname)
                            filelist.append(r)
                    elif stat.S_ISLNK(st.st_mode) and params['file_type'] == 'link':
                        if pfilter(fsobj, params['patterns'], params['excludes'], params['use_regex']) and \
                                agefilter(st, now, age, params['age_stamp']):
                            r.update(statinfo(st))
                            filelist.append(r)

                if not params['recurse']:
                    break
        else:
            msg += "%s was skipped as it does not seem to be a valid directory or it cannot be accessed\n" % npath

    matched = len(filelist)
    module.exit_json(files=filelist, changed=False, msg=msg, matched=matched, examined=looked)
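
# The age/size parsing above in isolation: a signed integer with an optional
# unit suffix, scaled through a lookup table (no suffix defaults to a factor
# of 1). The helper name below is an assumption for illustration only.
def _to_bytes(size_spec):
    import re

    m = re.match(r"^(-?\d+)(b|k|m|g|t)?$", size_spec.lower())
    if not m:
        raise ValueError("failed to process size: %s" % size_spec)
    bytes_per_unit = {"b": 1, "k": 1024, "m": 1024 ** 2, "g": 1024 ** 3, "t": 1024 ** 4}
    return int(m.group(1)) * bytes_per_unit.get(m.group(2), 1)

# _to_bytes('10m') -> 10485760; _to_bytes('-1g') -> -1073741824
# (the sign is preserved, which presumably lets sizefilter() distinguish
# "at least this big" from "smaller than this")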
def main():

    global module

    module = AssibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            _original_basename=dict(type='str'),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(type='str'),
            follow=dict(type='bool', default=False),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    if module.params.get('thirsty'):
        module.deprecate('The alias "thirsty" has been deprecated and will be removed, use "force" instead',
                         version='2.13', collection_name='assible.builtin')

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    local_follow = module.params['local_follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_dest = None

    if os.path.isfile(src):
        checksum_src = module.sha1(src)
    else:
        checksum_src = None

    # Backwards compat only.  This will be None in FIPS mode
    try:
        if os.path.isfile(src):
            md5sum_src = module.md5(src)
        else:
            md5sum_src = None
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg='Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum
        )

    # Special handling for recursive copy - create intermediate dirs
    if dest.endswith(os.sep):
        if _original_basename:
            dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
            except AssibleModuleError as e:
                e.result['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)
            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
        if os.access(b_dest, os.R_OK) and os.path.isfile(b_dest):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest), os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):

        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" % (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)

                b_mysrc = b_src
                if remote_src and os.path.isfile(b_src):
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))

                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(to_native(b_src)))
                        else:
                            raise

                # might be needed below
                if PY3 and hasattr(os, 'listxattr'):
                    try:
                        src_has_acls = 'system.posix_acl_access' in os.listxattr(src)
                    except Exception as e:
                        # assume unwanted ACLs by default
                        src_has_acls = True

                module.atomic_move(b_mysrc, dest, unsafe_writes=module.params['unsafe_writes'])

                if PY3 and hasattr(os, 'listxattr') and platform.system() == 'Linux' and not remote_src:
                    # atomic_move used above to copy src into dest might, in some cases,
                    # use shutil.copy2 which in turn uses shutil.copystat.
                    # Since Python 3.3, shutil.copystat copies file extended attributes:
                    # https://docs.python.org/3/library/shutil.html#shutil.copystat
                    # os.listxattr (along with others) was added to handle the operation.

                    # This means that on Python 3 we are copying the extended attributes which includes
                    # the ACLs on some systems - further limited to Linux as the documentation above claims
                    # that the extended attributes are copied only on Linux. Also, os.listxattr is only
                    # available on Linux.

                    # If not remote_src, then the file was copied from the controller. In that
                    # case, any filesystem ACLs are artifacts of the copy rather than preservation
                    # of existing attributes. Get rid of them:

                    if src_has_acls:
                        # FIXME If dest has any default ACLs, there are not applied to src now because
                        # they were overridden by copystat. Should/can we do anything about this?
                        # 'system.posix_acl_default' in os.listxattr(os.path.dirname(b_dest))

                        try:
                            clear_facls(dest)
                        except ValueError as e:
                            if 'setfacl' in to_native(e):
                                # No setfacl so we're okay.  The controller couldn't have set a facl
                                # without the setfacl command
                                pass
                            else:
                                raise
                        except RuntimeError as e:
                            # setfacl failed.
                            if 'Operation not supported' in to_native(e):
                                # The file system does not support ACLs.
                                pass
                            else:
                                raise

            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest), traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    # If neither have checksums, both src and dest are directories.
    if checksum_src is None and checksum_dest is None:
        if remote_src and os.path.isdir(module.params['src']):
            b_src = to_bytes(module.params['src'], errors='surrogate_or_strict')
            b_dest = to_bytes(module.params['dest'], errors='surrogate_or_strict')

            if src.endswith(os.path.sep) and os.path.isdir(module.params['dest']):
                diff_files_changed = copy_diff_files(b_src, b_dest, module)
                left_only_changed = copy_left_only(b_src, b_dest, module)
                common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                owner_group_changed = chown_recursive(b_dest, module)
                if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                    changed = True

            if src.endswith(os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src), errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename), errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""), errors='surrogate_or_strict')
                if not module.check_mode:
                    shutil.copytree(b_src, b_dest, symlinks=not local_follow)
                    chown_recursive(dest, module)
                changed = True

            if not src.endswith(os.path.sep) and os.path.isdir(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src), errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename), errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""), errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    shutil.copytree(b_src, b_dest, symlinks=not local_follow)
                    changed = True
                    chown_recursive(dest, module)
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True
                if os.path.exists(b_dest):
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True

            if not src.endswith(os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(module.params['src']), errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename), errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    os.makedirs(b_dest)
                    b_src = to_bytes(os.path.join(module.params['src'], ""), errors='surrogate_or_strict')
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True

    res_args = dict(
        dest=dest, src=src, md5sum=md5sum_src, checksum=checksum_src, changed=changed
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params, path=dest)
        res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])

    module.exit_json(**res_args)
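
# The validate contract shared by the assemble and copy mains above, shown on
# its own: the command string must contain '%s', which is substituted with the
# path to the candidate file, and a non-zero exit status aborts the run before
# atomic_move() replaces the destination. The helper name is hypothetical;
# module.run_command() and module.fail_json() are the real module-utils calls.
def _run_validate(module, validate_cmd, path):
    if "%s" not in validate_cmd:
        module.fail_json(msg="validate must contain %%s: %s" % validate_cmd)
    rc, out, err = module.run_command(validate_cmd % path)
    if rc != 0:
        module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)

# e.g. validate='visudo -cf %s' for a sudoers file, so a syntactically broken
# candidate never overwrites the live file.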