def makeInstaller():
    """Build the Windows installer with Inno Setup and copy it to the
    packages directory.

    Relies on module-level globals not visible here: quiterssFileRepoPath,
    prepareBinPath, innoSetupCompilerPath, strProductVer, packagesPath,
    and `call` (presumably subprocess.call). Python 2 syntax.
    """
    print '---- Making installer...'
    quiterssFileDataPath = quiterssFileRepoPath + '\\installer\\Data'
    # Start from a clean Data directory so stale files are not packaged.
    if (os.path.exists(quiterssFileDataPath)):
        print "Path ...\\installer\\Data exists. Remove it"
        shutil.rmtree(quiterssFileDataPath)
    print 'Copying files...'
    shutil.copytree(prepareBinPath, quiterssFileDataPath)
    shutil.copystat(prepareBinPath, quiterssFileDataPath)
    print 'Run Inno Setup compiler...'
    cmdLine = [innoSetupCompilerPath, '/cc', quiterssFileRepoPath + '\\installer\\quiterss.iss']
    print 'subprocess.call(' + str(cmdLine) + ')'
    call(cmdLine)
    print 'Copying installer...'
    shutil.copy2(quiterssFileRepoPath + '\\installer\\Setup\\QuiteRSS-' + strProductVer + '-Setup.exe', packagesPath)
    # Remove the intermediate build directories created above.
    print 'Cleanup installer files...'
    shutil.rmtree(quiterssFileRepoPath + '\\installer\\Data')
    shutil.rmtree(quiterssFileRepoPath + '\\installer\\Setup')
    print 'Done'
def copytree(src, dst): """Similar to shutil.copytree, but always copies symlinks and doesn't error out if the destination path already exists. """ # If the source tree is a symlink, duplicate the link and we're done. if os.path.islink(src): os.symlink(os.readlink(src), dst) return try: os.mkdir(dst) except OSError as e: if e.errno != errno.EEXIST: raise names = os.listdir(src) for name in names: srcname = os.path.join(src, name) dstname = os.path.join(dst, name) if os.path.islink(srcname): os.symlink(os.readlink(srcname), dstname) elif os.path.isdir(srcname): copytree(srcname, dstname) else: copy2(srcname, dstname) try: shutil.copystat(src, dst) except OSError as e: if e.errno != errno.EPERM: raise
def main():
    """Demo: back up textfile.txt, rename it, and archive the results.

    Python 2 syntax (print statements). Uses `path` (os.path), shutil,
    os and ZipFile imported elsewhere in the file.
    """
    # Make a copy or duplicate of an existing file
    if path.exists("textfile.txt"):
        # Get the path of the file in the current directory
        src = path.realpath("textfile.txt")
        # Separate the path part from the filename
        head, tail = path.split(src)
        print "path: " + head
        print "file: " + tail
        # Now, make a backup file
        dst = src + ".bak"
        # Then use the shell to make a copy of the file
        shutil.copy(src,dst)
        # If you want to copy over perms, modification times, and other data
        shutil.copystat(src,dst)
        # Rename the original file
        os.rename("textfile.txt", "newfile.txt")
        # Put things into a ZIP archive
        root_dir,tail = path.split(src)
        shutil.make_archive("archive", "zip", root_dir)
        # More control over ZIP files
        with ZipFile("testzip.zip", "w") as newzip:
            newzip.write("newfile.txt")
            newzip.write("textfile.txt.bak")
def copytree(src, dst, symlinks=False, ignore=None):
    """copytree that works even if the destination folder already exists.

    Same interface as shutil.copytree; files already present in `dst` are
    overwritten. `ignore` has the shutil.copytree callback signature.
    Based on:
    http://stackoverflow.com/questions/1868714/how-do-i-copy-an-entire-directory-of-files-into-an-existing-directory-using-pyth
    """
    if not os.path.exists(dst):
        os.makedirs(dst)
        shutil.copystat(src, dst)
    lst = os.listdir(src)
    if ignore:
        excl = ignore(src, lst)
        lst = [x for x in lst if x not in excl]
    for item in lst:
        s = os.path.join(src, item)
        d = os.path.join(dst, item)
        if symlinks and os.path.islink(s):
            # Replace any existing entry with a fresh copy of the link.
            if os.path.lexists(d):
                os.remove(d)
            os.symlink(os.readlink(s), d)
            try:
                st = os.lstat(s)
                mode = stat.S_IMODE(st.st_mode)
                os.lchmod(d, mode)
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrow it to the failures lchmod can raise.
            except (AttributeError, NotImplementedError, OSError):
                pass  # lchmod not available on this platform
        elif os.path.isdir(s):
            copytree(s, d, symlinks, ignore)
        else:
            shutil.copy2(s, d)
def overwriteCopy(src, dest, symlinks=False, ignore=None):
    """Recursively copy `src` into `dest`, overwriting existing files.

    Mirrors shutil.copytree but tolerates a pre-existing destination.
    When `symlinks` is true, symbolic links are re-created rather than
    followed. `ignore` has the shutil.copytree callback signature.
    """
    if not os.path.exists(dest):
        os.makedirs(dest)
        shutil.copystat(src, dest)
    entries = os.listdir(src)
    if ignore:
        skipped = ignore(src, entries)
        entries = [entry for entry in entries if entry not in skipped]
    for entry in entries:
        src_entry = os.path.join(src, entry)
        dest_entry = os.path.join(dest, entry)
        if symlinks and os.path.islink(src_entry):
            # Re-create the link itself, replacing whatever is there.
            if os.path.lexists(dest_entry):
                os.remove(dest_entry)
            os.symlink(os.readlink(src_entry), dest_entry)
            try:
                link_mode = stat.S_IMODE(os.lstat(src_entry).st_mode)
                os.lchmod(dest_entry, link_mode)
            except Exception:
                pass  # lchmod is unavailable on most platforms
        elif os.path.isdir(src_entry):
            overwriteCopy(src_entry, dest_entry, symlinks, ignore)
        else:
            shutil.copy2(src_entry, dest_entry)
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)
    # Map each walked directory to (archive-relative prefix, destination dir).
    paths = {
        filename: ('', extract_dir),
    }
    for base, dirs, files in os.walk(filename):
        src, dst = paths[base]
        for d in dirs:
            # Record mappings for subdirectories before os.walk visits them.
            paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
        for f in files:
            target = os.path.join(dst, f)
            # progress_filter may veto a file by returning a falsy target.
            target = progress_filter(src + f, target)
            if not target:
                # skip non-files
                continue
            ensure_directory(target)
            f = os.path.join(base, f)
            shutil.copyfile(f, target)
            shutil.copystat(f, target)
def backup(self, datasetType, dataId):
    """Rename any existing object with the given type and dataId.

    The CameraMapper implementation saves objects in a sequence of e.g.:

      foo.fits
      foo.fits~1
      foo.fits~2

    All of the backups will be placed in the output repo, however, and
    will not be removed if they are found elsewhere in the _parent chain.
    This means that the same file will be stored twice if the previous
    version was found in an input repo.
    """
    n = 0
    suffix = ""  # NOTE(review): unused local.
    newLocation = self.map(datasetType, dataId, write=True)
    newPath = newLocation.getLocations()[0]
    path = self._parentSearch(newPath)
    oldPaths = []
    # Walk the existing backup chain (~1, ~2, ...) to count how many
    # generations already exist anywhere in the _parent chain.
    while path is not None:
        n += 1
        oldPaths.append((n, path))
        path = self._parentSearch("%s~%d" % (newPath, n))
    # Copy newest-first so each generation shifts up by one suffix.
    for n, oldPath in reversed(oldPaths):
        newDir, newFile = os.path.split(newPath)
        if not os.path.exists(newDir):
            os.makedirs(newDir)
        shutil.copy(oldPath, "%s~%d" % (newPath, n))
        shutil.copystat(oldPath, "%s~%d" % (newPath, n))
def copytree(src, dst, symlinks=False, ignore=None):
    """Copies a tree and overwrites.

    Like shutil.copytree, but a pre-existing destination is reused and
    its files are overwritten. `ignore` follows the shutil.copytree
    callback convention.
    """
    if not os.path.exists(dst):
        os.makedirs(dst)
        shutil.copystat(src, dst)
    names = os.listdir(src)
    if ignore:
        excluded = ignore(src, names)
        names = [n for n in names if n not in excluded]
    for n in names:
        source_item = os.path.join(src, n)
        dest_item = os.path.join(dst, n)
        if symlinks and os.path.islink(source_item):
            # Duplicate the link itself, replacing any existing entry.
            if os.path.lexists(dest_item):
                os.remove(dest_item)
            os.symlink(os.readlink(source_item), dest_item)
            try:
                os.lchmod(dest_item, stat.S_IMODE(os.lstat(source_item).st_mode))
            except Exception:
                pass  # lchmod not available
        elif os.path.isdir(source_item):
            copytree(source_item, dest_item, symlinks, ignore)
        else:
            shutil.copy2(source_item, dest_item)
def assemble(self):
    """Collect all TOC entries into the COLLECT output directory self.name,
    rebuilding it from scratch and applying strip/UPX processing to
    binaries. Uses project helpers (_check_path_overlap, _rmtree,
    add_suffix_to_extensions, is_path_to_egg, checkCache) defined elsewhere.
    """
    # Wipe any previous output directory before rebuilding it.
    if _check_path_overlap(self.name) and os.path.isdir(self.name):
        _rmtree(self.name)
    logger.info("Building COLLECT %s", self.tocbasename)
    os.makedirs(self.name)
    toc = add_suffix_to_extensions(self.toc)
    for inm, fnm, typ in toc:
        # NOTE(review): `and` binds tighter than `or`, so this reads as
        # (not exists) or ((not isfile) and is_path_to_egg(...)) — confirm
        # that grouping is intended.
        if not os.path.exists(fnm) or not os.path.isfile(fnm) and is_path_to_egg(fnm):
            # file is contained within python egg, it is added with the egg
            continue
        # Refuse entries that would escape the dist directory.
        if os.pardir in os.path.normpath(inm) or os.path.isabs(inm):
            raise SystemExit('Security-Alert: try to store file outside '
                             'of dist-directory. Aborting. %r' % inm)
        tofnm = os.path.join(self.name, inm)
        todir = os.path.dirname(tofnm)
        if not os.path.exists(todir):
            os.makedirs(todir)
        if typ in ('EXTENSION', 'BINARY'):
            # Optionally strip / UPX-compress binaries via the build cache.
            fnm = checkCache(fnm, strip=self.strip_binaries,
                             upx=(self.upx_binaries and (is_win or is_cygwin)),
                             dist_nm=inm)
        if typ != 'DEPENDENCY':
            shutil.copy(fnm, tofnm)
            try:
                shutil.copystat(fnm, tofnm)
            except OSError:
                logger.warn("failed to copy flags of %s", fnm)
        if typ in ('EXTENSION', 'BINARY'):
            # Ensure shared libraries / extensions are executable.
            os.chmod(tofnm, 0o755)
def copyfile(self, src, dst):
    """Copy `src` to `dst`, applying self.replacements to the text and
    stripping a trailing ".in" from the destination name.
    """
    if dst.endswith(".in"):
        dst = dst[:-3]
    text = open(src, "rU").read()
    # perform replacements
    for var, string in self.replacements:
        text = text.replace(var, string)
    # If the file exists, keep the old file.  This is a
    # hopefully temporary hack to get around distutils
    # stripping the permissions on the server skeletin files.
    # We reuse the original default files, which have the
    # right permissions.
    old = os.path.exists(dst)
    if old:
        f = open(dst, "r+")
        f.truncate(0)
    else:
        f = open(dst, "w")
    f.write(text)
    f.close()
    if not old:
        shutil.copymode(src, dst)
        shutil.copystat(src, dst)
    else:
        # NOTE(review): copy2 copies the *data* as well, which appears to
        # clobber the replaced text just written above — confirm intent.
        shutil.copy2(src, dst)
def sed_inplace(filename, pattern, repl):
    """Perform the pure-Python equivalent of in-place `sed` substitution:
    e.g., `sed -i -e 's/'${pattern}'/'${repl}' "$(unknown)"`.

    Examples
    --------
    sed_inplace('/etc/apt/sources.list', r'^\# deb', 'deb')
    """
    # Compile once so the substitution loop below stays cheap.
    compiled = re.compile(pattern)
    # Text mode sidesteps the encoding constraints of the default "w+b"
    # mode; delete=False lets us move the temp file over the original.
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as out:
        with open(filename) as original:
            for row in original:
                out.write(compiled.sub(repl, row))
    # Preserve the original file's attributes (permissions, timestamps),
    # then replace it with the munged temporary copy.
    shutil.copystat(filename, out.name)
    shutil.move(out.name, filename)
def make_fake_special_file(f_type, f_path, src_dir, dst_dir):
    """Create a zero-byte placeholder for a special file.

    The placeholder is named ``<f_path>.<f_type>`` under `dst_dir` and
    carries the stat metadata (mode bits, atime, mtime) of the original
    file found at ``src_dir/f_path``.
    """
    tgt_path = os.path.join(dst_dir, f_path + '.' + f_type)
    tgt_dir = os.path.dirname(tgt_path)
    if not os.path.exists(tgt_dir):
        os.makedirs(tgt_dir)
    # Make a zero-byte file. Using `with` guarantees the handle is closed
    # deterministically (the original relied on GC to close the object
    # returned by a bare open(), leaking the handle until collection).
    with open(tgt_path, 'w'):
        pass
    shutil.copystat(os.path.join(src_dir, f_path), tgt_path)
def copyDigestedFile(src, dst, copystat=1):
    """ Copy data from `src` to `dst`, adding a fingerprint to `dst`.
    If `copystat` is true, the file status is copied, too (like
    shutil.copy2). `DigestFile` and `digested_file_types` are defined
    elsewhere in this module.
    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    dummy, ext = os.path.splitext(src)
    # Only selected extensions receive a fingerprint; everything else is a
    # plain copy.
    if ext not in digested_file_types:
        if copystat:
            return shutil.copy2(src, dst)
        else:
            return shutil.copyfile(src, dst)
    fsrc = None
    fdst = None
    try:
        fsrc = open(src, 'r')
        fdst = DigestFile(dst)
        shutil.copyfileobj(fsrc, fdst)
    finally:
        # Close whichever handles were successfully opened.
        if fdst:
            fdst.close()
        if fsrc:
            fsrc.close()
    if copystat:
        shutil.copystat(src, dst)
def __copydir(self, src, dst, passes=None, fails=None, dry_run=False):
    """
    Make the given dst directory and copy stats from src

    Append dst to ``fails`` on error
    """
    if self.progressfnc:
        self.progressfnc('Copying to {0}'.format(dst), self.__getProgPercent())
    try:
        if not dry_run:
            os.mkdir(dst)
    except Exception as e:
        LOG.error(e)
        if fails is not None:
            fails.append(dst)
    else:
        if os.path.exists(dst) and self.runstngs['forceOwnership']:
            # make writable
            fileAtt = os.stat(dst)[0]
            if (not fileAtt & stat.S_IWRITE):
                try:
                    os.chmod(dst, stat.S_IWRITE)
                except Exception as e:
                    LOG.error('could not make file writable {0}: {1}'.format(dst, e))
                    return False
        shutil.copystat(src, dst)
        if passes is not None:
            passes.append(dst)
        LOG.debug('Created Directory: {0}'.format(dst))
def _createType(self, meta_name, root, movie_info, group, file_type, i):
    """Create one metadata artefact (nfo, thumbnail, ...) for a movie,
    either by copying an existing file or writing generated content.
    The get<Type>Name / get<Type> accessors are resolved dynamically.
    """
    # Get file path
    camelcase_method = underscoreToCamel(file_type.capitalize())
    name = getattr(self, 'get' + camelcase_method + 'Name')(meta_name, root, i)
    # None config means "not explicitly disabled" — treat as enabled.
    if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None):
        # Get file content
        content = getattr(self, 'get' + camelcase_method)(movie_info = movie_info, data = group, i = i)
        if content:
            log.debug('Creating %s file: %s', (file_type, name))
            if os.path.isfile(content):
                content = sp(content)
                name = sp(name)
                if not os.path.exists(os.path.dirname(name)):
                    os.makedirs(os.path.dirname(name))
                shutil.copy2(content, name)
                # NOTE(review): copyfile right after copy2 is redundant
                # (copy2 already copies the data) — confirm before removing.
                shutil.copyfile(content, name)
                # Try and copy stats seperately
                try:
                    shutil.copystat(content, name)
                except:
                    pass
            else:
                self.createFile(name, content)
            group['renamed_files'].append(name)
            try:
                os.chmod(sp(name), Env.getPermission('file'))
            except:
                log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc()))
def copy_dir(src, dst, *, follow_sym=True):
    """Copy `src` into `dst` and return the destination path.

    NOTE(review): shutil.copyfile raises IsADirectoryError when given a
    directory, so the `isdir(src)` guard around it looks inverted or
    buggy — confirm the intended behavior. Also note the implicit None
    return is only avoided when the final `return dst` is reached.
    """
    # If dst is an existing directory, copy into it under src's basename.
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    if os.path.isdir(src):
        shutil.copyfile(src, dst, follow_symlinks=follow_sym)
        shutil.copystat(src, dst, follow_symlinks=follow_sym)
    return dst
def do_copyfile(self, from_file, to_file):
    """Install a single file (or symlink) at to_file.

    Returns True when the file was installed, False when an existing
    file was deliberately preserved. Uses module-level helpers
    (append_to_log, selinux_updates, symlink_warning) defined elsewhere.
    """
    outdir = os.path.split(to_file)[0]
    if not os.path.isfile(from_file) and not os.path.islink(from_file):
        raise RuntimeError('Tried to install something that isn\'t a file:'
                           '{!r}'.format(from_file))
    # copyfile fails if the target file already exists, so remove it to
    # allow overwriting a previous install. If the target is not a file, we
    # want to give a readable error.
    if os.path.exists(to_file):
        if not os.path.isfile(to_file):
            raise RuntimeError('Destination {!r} already exists and is not '
                               'a file'.format(to_file))
        if self.should_preserve_existing_file(from_file, to_file):
            append_to_log(self.lf, '# Preserving old file %s\n' % to_file)
            print('Preserving existing file %s.' % to_file)
            return False
        os.remove(to_file)
    print('Installing %s to %s' % (from_file, outdir))
    if os.path.islink(from_file):
        if not os.path.exists(from_file):
            # Dangling symlink. Replicate as is.
            shutil.copy(from_file, outdir, follow_symlinks=False)
        else:
            # Remove this entire branch when changing the behaviour to duplicate
            # symlinks rather than copying what they point to.
            print(symlink_warning)
            shutil.copyfile(from_file, to_file)
            shutil.copystat(from_file, to_file)
    else:
        shutil.copyfile(from_file, to_file)
        shutil.copystat(from_file, to_file)
    selinux_updates.append(to_file)
    append_to_log(self.lf, to_file)
    return True
def copytree(src, dst, symlinks=False, ignore=None):
    """
    This is a contributed re-implementation of 'copytree' that should work
    with the exact same behavior on multiple platforms.

    Unlike shutil.copytree, an existing destination directory is reused
    and its files overwritten. `ignore` follows the shutil.copytree
    callback convention.
    """
    if not os.path.exists(dst):
        os.makedirs(dst)
        shutil.copystat(src, dst)
    lst = os.listdir(src)
    if ignore:
        excl = ignore(src, lst)
        lst = [x for x in lst if x not in excl]
    for item in lst:
        s = os.path.join(src, item)
        d = os.path.join(dst, item)
        if symlinks and os.path.islink(s):
            if os.path.lexists(d):
                os.remove(d)
            os.symlink(os.readlink(s), d)
            try:
                st = os.lstat(s)
                mode = stat.S_IMODE(st.st_mode)
                os.lchmod(d, mode)
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrow to the failures lchmod can raise.
            except (AttributeError, NotImplementedError, OSError):
                pass  # lchmod not available
        elif os.path.isdir(s):
            copytree(s, d, symlinks, ignore)
        else:
            shutil.copy2(s, d)
def __place_template_folder(group, src, dst, gbp=False): template_files = pkg_resources.resource_listdir(group, src) # For each template, place for template_file in template_files: template_path = os.path.join(src, template_file) template_dst = os.path.join(dst, template_file) if pkg_resources.resource_isdir(group, template_path): debug("Recursing on folder '{0}'".format(template_path)) __place_template_folder(group, template_path, template_dst, gbp) else: try: debug("Placing template '{0}'".format(template_path)) template = pkg_resources.resource_string(group, template_path) template_abs_path = pkg_resources.resource_filename(group, template_path) except IOError as err: error("Failed to load template " "'{0}': {1}".format(template_file, str(err)), exit=True) if not os.path.exists(dst): os.makedirs(dst) if os.path.exists(template_dst): debug("Removing existing file '{0}'".format(template_dst)) os.remove(template_dst) with open(template_dst, 'w') as f: if not isinstance(template, str): template = template.decode('utf-8') f.write(template) shutil.copystat(template_abs_path, template_dst)
def _migrate_files(snap_files, snap_dirs, srcdir, dstdir, missing_ok=False,
                   follow_symlinks=False, fixup_func=lambda *args: None):
    """Copy/link the given files and directories from srcdir into dstdir,
    preserving directory stat metadata and applying fixup_func to each
    migrated file.
    """
    for directory in snap_dirs:
        src = os.path.join(srcdir, directory)
        dst = os.path.join(dstdir, directory)
        os.makedirs(dst, exist_ok=True)
        shutil.copystat(src, dst, follow_symlinks=follow_symlinks)
    for snap_file in snap_files:
        src = os.path.join(srcdir, snap_file)
        dst = os.path.join(dstdir, snap_file)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        # Keep the containing directory's metadata in sync as well.
        shutil.copystat(os.path.dirname(src), os.path.dirname(dst),
                        follow_symlinks=follow_symlinks)
        if missing_ok and not os.path.exists(src):
            continue
        # If the file is already here and it's a symlink, leave it alone.
        if os.path.islink(dst):
            continue
        # Otherwise, remove and re-link it.
        if os.path.exists(dst):
            os.remove(dst)
        # pkg-config files must be real copies (their contents get fixed up).
        if src.endswith('.pc'):
            shutil.copy2(src, dst, follow_symlinks=follow_symlinks)
        else:
            common.link_or_copy(src, dst, follow_symlinks=follow_symlinks)
        fixup_func(dst)
def main():
    """Demo: back up textfile.txt, rename it, and zip the results.

    Python 2 syntax (print statements). Uses `path` (os.path), shutil,
    os and ZipFile imported elsewhere in the file.
    """
    # make a duplicate of an existing file
    if path.exists("textfile.txt"):
        # get the path to the file in the current directory
        src = path.realpath("textfile.txt");
        # separate the path part from the filename
        head, tail = path.split(src)
        print "path: " + head
        print "file: " + tail
        # let's make a backup copy by appending "bak" to the name
        dst = src + ".bak"
        # now use the shell to make a copy of the file
        shutil.copy(src,dst)
        # copy over the permissions, modification times, and other info
        shutil.copystat(src, dst)
        # rename the original file
        os.rename("textfile.txt", "newfile.txt")
        # now put things into a ZIP archive
        # root_dir,tail = path.split(src)
        # shutil.make_archive("archive", "zip", root_dir)
        # more fine-grained control over ZIP files
        with ZipFile("testzip.zip","w") as newzip:
            newzip.write("newfile.txt")
            newzip.write("textfile.txt.bak")
def _sieve_path(self, mailbox):
    """Retrieve the sieve script path for a mailbox

    The default value if not configured is ~/.dovecot.sieve

    :returns: the path where the sieve path will be stored, the backup
              if the user already that filename and the active sieve
              script if required
    :rtype: tuple
    """
    # Read the sieve script path template from config
    sieve_script_path_template = self.conf['sieve_script_path']
    sieve_script_path_mkdir = self.conf['sieve_script_path_mkdir']
    log.debug("Sieve script path template is '%s'" % sieve_script_path_template)
    # Build the expansion variables for template
    (user, domain) = mailbox.split('@')
    # Substitute in template
    t = Template(sieve_script_path_template)
    sieve_script_path = t.substitute(domain=domain, user=user, fulluser=mailbox)
    log.debug("Expanded sieve script path for mailbox '%s' is '%s'" % (mailbox, sieve_script_path))
    # If sieve script path mkdir enabled create hierarchy if it does not exist
    (head, tail) = os.path.split(sieve_script_path)
    if (sieve_script_path_mkdir):
        r_mkdir(head)
    else:
        if not os.path.isdir(head):
            raise Exception("Sieve script directory '%s' does not exist" % head)
    sieve_user_backup = None
    if os.path.isfile(sieve_script_path):
        # Only back up scripts that aren't already our OOF script.
        if not self._isOofScript(sieve_script_path):
            if os.path.islink(sieve_script_path):
                target_sieve_script_path = os.path.realpath(sieve_script_path)
                log.info('Activate the OOF script and change the link for %s' % target_sieve_script_path)
                (sieve_user_backup, _) = os.path.splitext(os.path.basename(target_sieve_script_path))
            else:
                log.info('Backing up already created "%s" to "%s.sieve"' % (sieve_script_path, sieve_script_path))
                sieve_path_backup = sieve_script_path + '.sieve'
                shutil.copyfile(sieve_script_path, sieve_path_backup)
                shutil.copystat(sieve_script_path, sieve_path_backup)
                sieve_user_backup = os.path.basename(sieve_script_path)
    elif os.path.exists(sieve_script_path):
        raise Exception("Sieve script path '%s' exists and it is "
                        "not a regular file" % sieve_script_path)
    # Activate the script if necessary
    active_sieve_script_path = None
    if tail == 'sieve-script':
        # Dovecot only?
        active_sieve_script_path = sieve_script_path
        sieve_script_path = os.path.join(head, SIEVE_SCRIPT_NAME + '.sieve')
    return (sieve_script_path, sieve_user_backup, active_sieve_script_path)
def copystatRecursive(src, dst):
    """Recursively copy stat info (mode bits, atime, mtime, flags) from the
    tree at `src` onto the matching tree at `dst`.

    Children are matched by *name*. The previous implementation paired the
    i-th entry of os.listdir(src) with the i-th entry of os.listdir(dst);
    listdir order is arbitrary, so unrelated entries could be paired (and
    an IndexError raised when dst had fewer entries). Entries present only
    in one tree are now skipped. Also replaces the Python-2-only xrange.
    """
    if S_ISDIR(os.stat(src).st_mode):
        dstEntries = set(os.listdir(dst))
        for name in os.listdir(src):
            # Only recurse into entries that exist on both sides.
            if name in dstEntries:
                copystatRecursive(os.path.join(src, name), os.path.join(dst, name))
    shutil.copystat(src, dst)
def destaging_harness(backup, func):
    """Restore a staged file from its '.release' backup path: run `func`
    with a relocation callback that rewrites staged-prefix paths to the
    final install prefix, then swap in the '.stage' result.

    NOTE(review): references `self` — this appears to be nested inside a
    method whose enclosing scope is not visible here.
    """
    path = backup[0:-len('.release')]
    trace(path)
    def relocate_for_release(token):
        # Rewrite staged-prefix paths to the final install prefix.
        newtoken = token.replace(self.staged_prefix, self.prefix)
        if newtoken != token:
            trace('%s:\n\t%s\t->\t%s' % (os.path.basename(path), token, newtoken))
        return newtoken
    try:
        trace('Destaging %s' % path)
        func(path, relocate_for_release)
        if os.path.exists(path + '.stage'):
            # Replace the original with the staged result, keeping the
            # backup's stat metadata.
            os.remove(path)
            shutil.move(path + '.stage', path)
            shutil.copystat(backup, path)
        os.remove(backup)
    except Exception as e:
        warn ('Critical: Destaging failed for ''%s''' % path)
        raise
def copy2(src, dst):
    """ shutil.copy2 does not copy the file attributes on windows, so we
    hack into the shutil module to fix the problem
    """
    # `old` is the saved original shutil.copy2, bound elsewhere when this
    # monkeypatch is installed; copystat is then applied explicitly.
    old(src, dst)
    shutil.copystat(src, dst)
def make_thumbnails(files, thumbfunc):
    """
    For any file without a matching image file, invokes function to
    generate a thumbnail image.
    """
    dirs_changed = {}  # {path name: os.stat_result}
    imageexts = [x.replace("*", "") for x in TYPEGROUPS["image"]]
    for video in files:
        path, tail = os.path.split(video)
        base = os.path.splitext(tail)[0]
        # Skip videos that already have a sibling image of any known type.
        if any(os.path.isfile(os.path.join(path, base + x)) for x in imageexts):
            continue  # for video
        # Remember the directory's pre-change timestamps for restoration.
        pathstat = dirs_changed.get(path) or os.stat(path)
        image = os.path.join(path, base + ".jpg")
        tempimage = os.path.join(TEMP_DIR, uuid.uuid4().hex[:8] + ".jpg")
        if os.path.exists(tempimage):
            os.remove(tempimage)
        print("Creating thumb for video \"%s\"." % video)
        attempts = 3
        while attempts:
            try:
                # NOTE(review): the trailing comma makes this a discarded
                # tuple expression; both calls still run in order.
                thumbfunc(video, tempimage),
                shutil.move(tempimage, image)
                break  # while attempts
            except Exception:
                attempts -= 1
        if os.path.exists(image):
            shutil.copystat(video, image)  # Set video file timestamps to image
            dirs_changed[path] = pathstat
        else:
            print("Failed to produce \"%s\"." % image)
    for path, stat in dirs_changed.items():  # Restore directory timestamps
        os.utime(path, (stat.st_atime, stat.st_mtime))
def staging_harness(path, func, failure_count=failure_count):
    """Run `func` over `path` with tokens relocated into the staged
    profile, keeping a '.release' backup and restoring it on failure.

    NOTE(review): `failure_count = failure_count + 1` rebinds the local
    only — the increment is not visible to the caller; confirm intent.
    """
    def relocate_to_profile(token):
        # Rewrite either the package prefix or the staged prefix to the
        # staged profile, whichever the token contains.
        if token.find(package.staged_prefix) == -1 and token.find(package.staged_profile) == -1:
            newtoken = token.replace(package.package_prefix, package.staged_profile)
        else:
            newtoken = token.replace(package.staged_prefix, package.staged_profile)
        if newtoken != token:
            package.trace('%s:\n\t%s\t->\t%s' % (os.path.basename(path), token, newtoken))
        return newtoken
    if (path.endswith('.release')):
        error('Staging backup exists in dir we''re trying to stage: %s' % path)
    backup = path + '.release'
    shutil.copy2(path, backup)
    try:
        trace('Staging %s' % path)
        func(path, relocate_to_profile)
        if os.path.exists(path + '.stage'):
            # Swap the staged result in, preserving the backup's metadata.
            os.remove(path)
            shutil.move(path + '.stage', path)
            shutil.copystat(backup, path)
    except CommandException as e:
        # Restore the original file from the backup on failure.
        package.rm_if_exists(path)
        shutil.copy2(backup, path)
        package.rm(backup)
        warn('Staging failed for %s' % os.path.basename(path))
        error(str(e))
        failure_count = failure_count + 1
        if failure_count > 10:
            error('Possible staging issue, >10 staging failures')
def safe_copyfile(src, dest):
    """Safely copy `src` to `dest` using a temporary intermediate and then
    renaming to `dest`, so readers never observe a half-written file.

    The temp file is created in dest's directory so the final os.rename
    stays on one filesystem (and is therefore atomic on POSIX).
    """
    fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(dest))
    try:
        # `with` closes (and flushes) both handles deterministically; the
        # original left them to the garbage collector, risking unflushed
        # data at rename time and leaked descriptors.
        with open(src, 'rb') as fsrc, os.fdopen(fd, 'wb') as fdst:
            shutil.copyfileobj(fsrc, fdst)
        shutil.copystat(src, tmpname)
        os.rename(tmpname, dest)
    except Exception:
        # Don't leave the temporary file behind on failure.
        try:
            os.unlink(tmpname)
        except OSError:
            pass
        raise
def copytree(src, dst, symlinks=False):
    """Recursively copy the tree at `src` to `dst` (dst must not exist).

    Errors are accumulated and raised at the end as a single `Error` so
    one failed file does not abort the rest of the copy. The bare names
    (islink, readlink, symlink, copy2, copystat, Error) come from
    from-imports elsewhere in this file.
    """
    names = os.listdir(src)
    os.makedirs(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and islink(srcname):
                linkto = readlink(srcname)
                symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copy2(srcname, dstname)
        except OSError as why:
            errors.append((srcname, dstname, str(why)))
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
    try:
        copystat(src, dst)
    except OSError as why:
        # can't copy file access times on Windows. `winerror` only exists
        # on Windows OSError instances, so a plain attribute access raised
        # AttributeError everywhere else; getattr fixes that. Also append
        # the triple as one tuple (extend flattened it into 3 elements,
        # unlike every other entry in `errors`).
        if getattr(why, 'winerror', None) is None:
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)
def copyfile(source, dest, replace=None, replace_with=None):
    """
    Copy `source` to `dest`, preserving the modification time.

    If `replace` is given, instances of `replace` in the file contents
    are replaced with `replace_with`. Python 2 code (uses the `file`
    builtin). Both paths are resolved relative to the parent directory.
    """
    sfn = os.path.join("..", source)
    dfn = os.path.join("..", dest)
    # Skip the copy when the destination is already up to date.
    if os.path.exists(dfn):
        if os.path.getmtime(sfn) <= os.path.getmtime(dfn):
            return
    sf = file(sfn, "rb")
    data = sf.read()
    sf.close()
    if replace:
        data = data.replace(replace, replace_with)
    df = file(dfn, "wb")
    # Prepend a generated-file banner before the copied contents.
    df.write("# This file was automatically generated from " + source + "\n")
    df.write("# Modifications will be automatically overwritten.\n\n")
    df.write(data)
    df.close()
    import shutil
    shutil.copystat(sfn, dfn)
def apply(self):
    """Copy this entry's bytes to its target location, creating parent
    directories as needed and preserving the source's stat metadata.
    """
    target = self.target_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_bytes(self.path.read_bytes())
    copystat(self.path, target)
def copy_file(old, new):
    # type: (str, str) -> None
    """Copy file `old` to `new`, preserving metadata (mode, timestamps)."""
    print("copy %s to %s" % (old, new))
    # shutil.copy2 == copyfile + copystat in one documented call.
    shutil.copy2(old, new)
def copy2(src,dst):
    # Delegate to the saved original implementation (`old`, bound elsewhere
    # when this wrapper is installed), then copy stat metadata explicitly.
    old(src,dst)
    shutil.copystat(src,dst)
shutil.copyfileobj(f1, f2, 1024) # copy(src, dst, *, follow_symlinks=True)复制一个文件的路径,到另一个路径(可以为路径或者文件名),follow_symlinks在linux生效,具体搜索硬连接和软连接 shutil.copy('a.txt', '../b.txt') # chown(path, user=None, group=None) 改变给定path的所有者和组权限,似乎在linux生效 # shutil.chown() # copy2(src, dst, *, follow_symlinks=True) 复制数据和数据的状态信息 shutil.copy2('a.txt', '../b.txt') # copymode(src, dst, *, follow_symlinks=True) 仅拷贝权限。内容、组、用户均不变 shutil.copymode() # copystat(src, dst, *, follow_symlinks=True) 拷贝状态的信息,包括:mode bits, atime, mtime, flags shutil.copystat() # copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, # ignore_dangling_symlinks=False) 复制一个文件夹及其内容到另一个文件夹下,ignore过滤条件,可以多个 # 注意,必须要指定一个文件夹,即把a文件夹的内容,复制到父目录的b里面(b一定不能事先存在) shutil.copytree('a', '../b', ignore=shutil.ignore_patterns('*.bat', '*.py')) # 获取一个路径的磁盘占用,返回一个三元组(total,used,free) print(shutil.disk_usage('D:\\')) # 显示支持的打包格式/解压包的格式 print(shutil.get_archive_formats()) print(shutil.get_unpack_formats()) # 打包文件 (包名,格式,要打包的路径) shutil.make_archive('a_zip', 'zip', 'a')
def copytree2(src, dst, symlinks=False, ignore=None, copy_function=copyIfDif,
              ignore_dangling_symlinks=False):
    """shutil.copytree variant that tolerates an existing destination and
    only copies files that differ (per the project-local isDiff check).
    `copyIfDif` and `isDiff` are defined elsewhere in this file.
    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()
    if not os.path.isdir(dst):
        os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    # We can't just leave it to `copy_function` because legacy
                    # code with a custom `copy_function` may rely on copytree
                    # doing the right thing.
                    os.symlink(linkto, dstname)
                    shutil.copystat(srcname, dstname, follow_symlinks=not symlinks)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occurs. copy2 will raise an error
                    if os.path.isdir(srcname):
                        copytree2(srcname, dstname, symlinks, ignore, copy_function)
                    else:
                        copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree2(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Skip files whose contents have not changed.
                if not isDiff(src, dst, name):
                    continue
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except shutil.Error as err:
            errors.extend(err.args[0])
        except OSError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        shutil.copystat(src, dst)
    except OSError as why:
        # Copying file access times may fail on Windows
        if getattr(why, 'winerror', None) is None:
            errors.append((src, dst, str(why)))
    if errors:
        raise shutil.Error(errors)
    return dst
# -*- coding: UTF-8 -*- import os import shutil # 复制权限、最后访问时间、最后修改时间 src = '../data/shutil/srcmode.txt' dst = '../data/shutil/dstmode.txt' # 先设置两个文件权限不一样 os.system('chmod 777 ' + src) os.system('chmod 644 ' + dst) # 碰一下 src 设置修改时间 os.system('touch ' + src) print('\n复制前:') os.system('ls -l ' + src) os.system('ls -l ' + dst) # 复制权限 shutil.copystat(src, dst) print('\n复制后:') os.system('ls -l ' + src) os.system('ls -l ' + dst)
def apply(self):
    """Render this template and write it to the target path, creating
    parent directories as needed and copying the source file's stat
    metadata onto the result.
    """
    target = self.target_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(self.render())
    copystat(self.path, target)
def main():
    """Ansible `copy` module entry point (remote side).

    Validates the source, creates intermediate directories for recursive
    copies, optionally validates and backs up, then atomically moves the
    file into place and applies the common file attributes.
    """
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            # used to handle 'dest is a directory' via template, a slight hack
            _original_basename=dict(type='str'),
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )
    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']
    # Sanity-check the source before doing anything.
    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))
    if os.path.isdir(b_src):
        module.fail_json(msg="Remote copy does not support recursive copy of directory: %s" % (src))
    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
        mode = module.params['mode']
    checksum_src = module.sha1(src)
    checksum_dest = None
    # Backwards compat only. This will be None in FIPS mode
    try:
        md5sum_src = module.md5(src)
    except ValueError:
        md5sum_src = None
    changed = False
    if checksum and checksum_src != checksum:
        module.fail_json(
            msg='Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum)
    # Special handling for recursive copy - create intermediate dirs
    if _original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
            except AnsibleModuleError as e:
                e.result['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)
            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)
    # When dest is a directory, the final name comes from the source.
    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
        if os.access(b_dest, os.R_OK):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))
    if not os.access(os.path.dirname(b_dest), os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))
    backup_file = None
    # Only copy when the content differs (or dest is a symlink to replace).
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" % (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)
                b_mysrc = b_src
                if remote_src:
                    # Stage a private copy next to dest so atomic_move stays
                    # on one filesystem.
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))
                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(to_native(b_src)))
                        else:
                            raise
                module.atomic_move(b_mysrc, dest, unsafe_writes=module.params['unsafe_writes'])
            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest), traceback=traceback.format_exc())
        changed = True
    else:
        changed = False
    res_args = dict(dest=dest, src=src, md5sum=md5sum_src, checksum=checksum_src, changed=changed)
    if backup_file:
        res_args['backup_file'] = backup_file
    module.params['dest'] = dest
    if not module.check_mode:
        # Apply owner/group/mode/etc. from the common file arguments.
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])
    module.exit_json(**res_args)
def copy_files(files_to_copy, to_dir, move=False, lock=None,
               use_hardlinks=False):
    """
    Copy or move files to to_dir, keeping directory structure.

    Copy keeps the original file stats.
    Files should have attributes name and root:
    - root: root directory
    - name: relative path of file in root directory

    /root/file/file1 will be copied in to_dir/file/file1

    :param files_to_copy: list of files to copy
    :type files_to_copy: list
    :param to_dir: destination directory
    :type to_dir: str
    :param move: move instead of copy
    :type move: bool
    :param lock: thread lock object for multi-threads
    :type lock: Lock
    :param use_hardlinks: use hard links (if possible)
    :type use_hardlinks: bool
    """
    logger = logging.getLogger('biomaj')
    nb_files = len(files_to_copy)
    cur_files = 1
    for file_to_copy in files_to_copy:
        logger.debug(
            str(cur_files) + '/' + str(nb_files) + ' copy file ' +
            file_to_copy['name'])
        cur_files += 1
        from_file = file_to_copy['root'] + '/' + file_to_copy['name']
        to_file = to_dir + '/' + file_to_copy['name']
        # Create the destination sub-directory; serialize with `lock` when
        # several threads copy into the same tree concurrently.
        if lock is not None:
            lock.acquire()
            try:
                if not os.path.exists(os.path.dirname(to_file)):
                    os.makedirs(os.path.dirname(to_file))
            except Exception as e:
                # makedirs failures are logged, not raised; the copy below
                # will then fail loudly if the directory is truly missing.
                logger.error(e)
            finally:
                lock.release()
        else:
            if not os.path.exists(os.path.dirname(to_file)):
                try:
                    os.makedirs(os.path.dirname(to_file))
                except Exception as e:
                    logger.error(e)
        if move:
            shutil.move(from_file, to_file)
        else:
            # Wall-clock timing of the copy, recorded per file (in seconds,
            # second resolution via time.mktime).
            start_time = datetime.datetime.now()
            start_time = time.mktime(start_time.timetuple())
            if use_hardlinks:
                try:
                    os.link(from_file, to_file)
                    logger.debug("Using hardlinks to copy %s",
                                 file_to_copy['name'])
                except OSError as e:
                    # Hard links can fail for several reasons; in each known
                    # case fall back to a regular copy and stop trying links
                    # for the remaining files.
                    if e.errno in (errno.ENOSYS, errno.ENOTSUP):
                        msg = "Your system doesn't support hard links. Using regular copy."
                        logger.warn(msg)
                        # Copy this file (the stats are copied at the end
                        # of the function)
                        shutil.copyfile(from_file, to_file)
                        # Don't try links anymore
                        use_hardlinks = False
                    elif e.errno == errno.EPERM:
                        msg = "The FS at %s doesn't support hard links. Using regular copy."
                        logger.warn(msg, to_dir)
                        # Copy this file (the stats are copied at the end
                        # of the function)
                        shutil.copyfile(from_file, to_file)
                        # Don't try links anymore
                        use_hardlinks = False
                    elif e.errno == errno.EXDEV:
                        msg = "Cross device hard link is impossible (source: %s, dest: %s). Using regular copy."
                        logger.warn(msg, from_file, to_dir)
                        # Copy this file
                        shutil.copyfile(from_file, to_file)
                        # Don't try links anymore
                        use_hardlinks = False
                    else:
                        raise
            else:
                shutil.copyfile(from_file, to_file)
            end_time = datetime.datetime.now()
            end_time = time.mktime(end_time.timetuple())
            file_to_copy['download_time'] = end_time - start_time
            # Hard links share the inode, so stats only need copying when a
            # regular copy was made.
            if not use_hardlinks:
                shutil.copystat(from_file, to_file)
def safe_copy(orig, dest):
    """Copy ``orig`` to ``dest``, carrying over the stat info when possible.

    The content copy must succeed; stat copying is best-effort because
    ``shutil.copystat`` can raise ``OSError`` on some platforms (e.g.
    Android), in which case a warning is emitted and the plain copy kept.
    """
    shutil.copyfile(orig, dest)
    try:
        shutil.copystat(orig, dest)
    except OSError:
        # Stats could not be duplicated; keep the content copy as-is.
        warning("shutil.copystat has failed.")
def save_as(self, newfile, options=None):
    """Saves Jpeg with IPTC data to a given file name.

    Reads the source Jpeg via ``self._fobj``, rebuilds it with the current
    IPTC/IIM data spliced in, writes the result to a temp file and finally
    moves it to ``newfile``.  Returns True on success, None on failure.

    ``options`` may contain 'discardAdobeParts' (drop the Adobe segment)
    and 'overwrite' (replace an existing ``newfile`` instead of keeping a
    ``newfile~`` backup).
    """
    with smart_open(self._fobj, 'rb') as fh:
        if not file_is_jpeg(fh):
            logger.error('Source file %s is not a Jpeg.' % self._fobj)
            return None
        # Split the Jpeg into (before-IPTC, after-IPTC, adobe-segment).
        jpeg_parts = jpeg_collect_file_parts(fh)
    if jpeg_parts is None:
        raise Exception('jpeg_collect_file_parts failed: %s' % self.error)
    (start, end, adobe) = jpeg_parts
    LOGDBG.debug('start: %d, end: %d, adobe: %d', *map(len, jpeg_parts))
    hex_dump(start)
    LOGDBG.debug('adobe1: %r', adobe)
    if options is not None and 'discardAdobeParts' in options:
        adobe = None
    LOGDBG.debug('adobe2: %r', adobe)
    LOGDBG.info('writing...')
    # Assemble the output in a temp file first so `newfile` is never left
    # half-written.
    (tmpfd, tmpfn) = tempfile.mkstemp()
    if self._filename and os.path.exists(self._filename):
        # Preserve the original file's stats on the replacement.
        shutil.copystat(self._filename, tmpfn)
    tmpfh = os.fdopen(tmpfd, 'wb')
    if not tmpfh:
        logger.error("Can't open output file %r", tmpfn)
        return None
    LOGDBG.debug('start=%d end=%d', len(start), len(end))
    LOGDBG.debug('start len=%d dmp=%s', len(start), hex_dump(start))
    # FIXME `start` contains the old IPTC data, so the next we read, we'll
    # get the wrong data
    tmpfh.write(start)
    # character set
    ch = c_charset_r.get(self.out_charset, None)
    # writing the character set is not the best practice
    # - couldn't find the needed place (record) for it yet!
    if SURELY_WRITE_CHARSET_INFO and ch is not None:
        tmpfh.write(pack("!BBBHH", 0x1c, 1, 90, 4, ch))
    LOGDBG.debug('pos: %d', self._filepos(tmpfh))
    data = self.photoshopIIMBlock(adobe, self.packedIIMData())
    LOGDBG.debug('data len=%d dmp=%s', len(data), hex_dump(data))
    tmpfh.write(data)
    LOGDBG.debug('pos: %d', self._filepos(tmpfh))
    tmpfh.write(end)
    LOGDBG.debug('pos: %d', self._filepos(tmpfh))
    tmpfh.flush()
    if hasattr(tmpfh, 'getvalue'):  # StringIO
        # In-memory buffer: dump its bytes straight into `newfile` and
        # discard the on-disk temp file.
        fh2 = open(newfile, 'wb')
        fh2.truncate()
        fh2.seek(0, 0)
        fh2.write(tmpfh.getvalue())
        fh2.flush()
        fh2.close()
        tmpfh.close()
        os.unlink(tmpfn)
    else:
        tmpfh.close()
        # Either overwrite the existing target or keep it as `newfile~`.
        if os.path.exists(
                newfile) and options is not None and 'overwrite' in options:
            os.unlink(newfile)
        elif os.path.exists(newfile):
            shutil.move(newfile, "{file}~".format(file=newfile))
        shutil.move(tmpfn, newfile)
    return True
def copy_files_with_regexp(from_dir, to_dir, regexps, move=False, lock=None,
                           use_hardlinks=False):
    """
    Copy or move files from from_dir to to_dir matching regexps.
    Copy keeps the original file stats.

    :param from_dir: origin directory
    :type from_dir: str
    :param to_dir: destination directory
    :type to_dir: str
    :param regexps: list of regular expressions that files in from_dir
                    should match to be copied
    :type regexps: list
    :param move: move instead of copy
    :type move: bool
    :param lock: thread lock object for multi-threads
    :type lock: Lock
    :param use_hardlinks: use hard links (if possible)
    :type use_hardlinks: bool
    :return: list of copied files with their size
    """
    logger = logging.getLogger('biomaj')
    files_to_copy = []
    files_list = []
    # Phase 1: walk the source tree and collect every relative path that
    # matches one of the regexps ("**/*" is a match-everything shortcut).
    for root, _, files in os.walk(from_dir, topdown=True):
        for name in files:
            for reg in regexps:
                file_relative_path = os.path.join(root, name).replace(
                    from_dir, '')
                if file_relative_path.startswith('/'):
                    file_relative_path = file_relative_path.replace(
                        '/', '', 1)
                # sometimes files appear twice.... check not already managed
                if file_relative_path in files_list:
                    continue
                if reg == "**/*":
                    files_to_copy.append({'name': file_relative_path})
                    files_list.append(file_relative_path)
                    continue
                if re.match(reg, file_relative_path):
                    files_list.append(file_relative_path)
                    files_to_copy.append({'name': file_relative_path})
                    continue
    # Phase 2: copy/move each collected file, preserving the relative
    # directory layout under to_dir.
    for file_to_copy in files_to_copy:
        from_file = from_dir + '/' + file_to_copy['name']
        to_file = to_dir + '/' + file_to_copy['name']
        if lock is not None:
            # Serialize directory creation across copy threads.
            lock.acquire()
            try:
                if not os.path.exists(os.path.dirname(to_file)):
                    os.makedirs(os.path.dirname(to_file))
            except Exception as e:
                logger.error(e)
            finally:
                lock.release()
        else:
            if not os.path.exists(os.path.dirname(to_file)):
                os.makedirs(os.path.dirname(to_file))
        if not os.path.exists(from_file):
            # Source may have vanished between the walk and the copy.
            logger.warn('File does not exists: %s' % (from_file))
            continue
        if move:
            shutil.move(from_file, to_file)
        else:
            if use_hardlinks:
                try:
                    os.link(from_file, to_file)
                    logger.debug("Using hardlinks to copy %s",
                                 file_to_copy['name'])
                except OSError as e:
                    # Fall back to a regular copy for the known hard-link
                    # failure modes, and stop trying links afterwards.
                    if e.errno in (errno.ENOSYS, errno.ENOTSUP):
                        msg = "Your system doesn't support hard links. Using regular copy."
                        logger.warn(msg)
                        # Copy this file (the stats are copied at the end
                        # of the function)
                        shutil.copyfile(from_file, to_file)
                        # Don't try links anymore
                        use_hardlinks = False
                    elif e.errno == errno.EPERM:
                        msg = "The FS at %s doesn't support hard links. Using regular copy."
                        logger.warn(msg, to_dir)
                        # Copy this file (we copy the stats here because
                        # it's not done at the end of the function)
                        shutil.copyfile(from_file, to_file)
                        shutil.copystat(from_file, to_file)
                        # Don't try links anymore
                        use_hardlinks = False
                    elif e.errno == errno.EXDEV:
                        msg = "Cross device hard link is impossible (source: %s, dest: %s). Using regular copy."
                        logger.warn(msg, from_file, to_dir)
                        # Copy this file (we copy the stats here because
                        # it's not done at the end of the function)
                        shutil.copyfile(from_file, to_file)
                        shutil.copystat(from_file, to_file)
                        # Don't try links anymore
                        use_hardlinks = False
                    else:
                        raise
            else:
                shutil.copyfile(from_file, to_file)
                shutil.copystat(from_file, to_file)
        # Record bookkeeping metadata about the copied file.
        file_to_copy['size'] = os.path.getsize(to_file)
        f_stat = datetime.datetime.fromtimestamp(os.path.getmtime(to_file))
        file_to_copy['year'] = str(f_stat.year)
        file_to_copy['month'] = str(f_stat.month)
        file_to_copy['day'] = str(f_stat.day)
        (file_format, encoding) = Utils.detect_format(to_file)
        file_to_copy['format'] = file_format
    return files_to_copy
def _CopyFile(
    self,
    source,
    target,
    copyDependentFiles,
    includeMode=False,
    machOReference: Optional[MachOReference] = None,
):
    """Copy `source` to `target` (stats, and optionally mode, included) and,
    when requested, recursively copy the binaries it depends on.

    Deduplicates via `self.files_copied`; on macOS also maintains the
    DarwinFile/Mach-O reference bookkeeping so install names can be fixed up
    later, and on Linux rewrites the copied binary's rpath when its
    dependencies land in sub-directories.
    """
    normalizedSource = os.path.normcase(os.path.normpath(source))
    normalizedTarget = os.path.normcase(os.path.normpath(target))
    if normalizedTarget in self.files_copied:
        if sys.platform == "darwin" and (machOReference is not None):
            # If file was already copied, and we are following a reference
            # from a DarwinFile, then we need to tell the reference where
            # the file was copied to (so the reference can later be updated).
            copiedDarwinFile = self.darwinTracker.getDarwinFile(
                sourcePath=normalizedSource, targetPath=normalizedTarget)
            machOReference.setTargetFile(darwinFile=copiedDarwinFile)
        return
    if normalizedSource == normalizedTarget:
        return
    targetDir = os.path.dirname(target)
    self._CreateDirectory(targetDir)
    if not self.silent:
        print(f"copying {source} -> {target}")
    shutil.copyfile(source, target)
    shutil.copystat(source, target)
    if includeMode:
        shutil.copymode(source, target)
    self.files_copied.add(normalizedTarget)
    newDarwinFile = None
    if sys.platform == "darwin":
        # The file was not previously copied, so need to create a
        # DarwinFile file object to represent the file being copied.
        referencingFile = None
        if machOReference is not None:
            referencingFile = machOReference.sourceFile
        newDarwinFile = DarwinFile(originalFilePath=source,
                                   referencingFile=referencingFile)
        newDarwinFile.setBuildPath(normalizedTarget)
        if machOReference is not None:
            machOReference.setTargetFile(darwinFile=newDarwinFile)
        self.darwinTracker.recordCopiedFile(targetPath=normalizedTarget,
                                            darwinFile=newDarwinFile)
    if (copyDependentFiles
            and source not in self.finder.exclude_dependent_files):
        # Always copy dependent files on root directory
        # to allow to set relative reference
        if sys.platform == "darwin":
            targetDir = self.targetDir
            for dependent_file in self._GetDependentFiles(
                    source, darwinFile=newDarwinFile):
                target = os.path.join(targetDir,
                                      os.path.basename(dependent_file))
                self._CopyFile(
                    dependent_file,
                    target,
                    copyDependentFiles=True,
                    machOReference=newDarwinFile.getMachOReferenceForPath(
                        path=dependent_file),
                )
        elif sys.platform == "win32":
            for dependent_file in self._GetDependentFiles(source):
                target = os.path.join(targetDir,
                                      os.path.basename(dependent_file))
                self._CopyFile(dependent_file, target, copyDependentFiles)
        else:
            # Linux/ELF: keep each dependency's path relative to the source
            # and collect the sub-directories so $ORIGIN-based rpath entries
            # can be written onto the copied binary.
            source_dir = os.path.dirname(source)
            library_dir = os.path.join(self.targetDir, "lib")
            fix_rpath = set()
            for dependent_file in self._GetDependentFiles(source):
                dep_base = os.path.basename(dependent_file)
                dep_abs = os.path.abspath(dependent_file)
                dep_rel = os.path.relpath(dep_abs, source_dir)
                if targetDir == library_dir:
                    # Strip leading "../" so dependencies never escape lib/.
                    while dep_rel.startswith(os.pardir + os.sep):
                        dep_rel = dep_rel[len(os.pardir + os.sep):]
                dep_libs = dep_rel[:-(len(dep_base) + 1)]
                if dep_libs:
                    fix_rpath.add(dep_libs)
                dependent_target = os.path.join(targetDir, dep_rel)
                self._CopyFile(
                    dependent_file,
                    dependent_target,
                    copyDependentFiles,
                )
            if fix_rpath:
                has_rpath = self.patchelf.get_rpath(target)
                rpath = ":".join([f"$ORIGIN/{r}" for r in fix_rpath])
                if has_rpath != rpath:
                    self.patchelf.set_rpath(target, rpath)
def file_move_safe(old_file_name, new_file_name, chunk_size=1024 * 64,
                   allow_overwrite=False):
    """
    Move a file from one location to another in the safest way possible.

    First, try ``os.rename``, which is simple but will break across
    filesystems. If that fails, stream manually from one file to another in
    pure Python.

    If the destination file exists and ``allow_overwrite`` is ``False``, raise
    ``IOError``.
    """
    # There's no reason to move if we don't have to.
    if _samefile(old_file_name, new_file_name):
        return

    # BUGFIX: this check used to live inside the try below. In Python 3,
    # IOError is an alias of OSError, so the raise was silently swallowed by
    # the ``except OSError`` handler and the caller never saw this message
    # (only a later FileExistsError from os.open). Check before the try so
    # the intended exception actually propagates.
    if not allow_overwrite and os.access(new_file_name, os.F_OK):
        raise IOError(
            "Destination file %s exists and allow_overwrite is False"
            % new_file_name)

    try:
        os.rename(old_file_name, new_file_name)
        return
    except OSError:
        # OSError happens with os.rename() if moving to another filesystem or
        # when moving opened files on certain operating systems.
        pass

    # first open the old file, so that it won't go away
    with open(old_file_name, 'rb') as old_file:
        # now open the new file, not forgetting allow_overwrite
        # (O_EXCL makes os.open fail if the destination already exists)
        fd = os.open(new_file_name,
                     (os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
                      (os.O_EXCL if not allow_overwrite else 0)))
        try:
            locks.lock(fd, locks.LOCK_EX)
            current_chunk = None
            while current_chunk != b'':
                current_chunk = old_file.read(chunk_size)
                os.write(fd, current_chunk)
        finally:
            locks.unlock(fd)
            os.close(fd)

    try:
        copystat(old_file_name, new_file_name)
    except PermissionError as e:
        # Certain filesystems (e.g. CIFS) fail to copy the file's metadata if
        # the type of the destination filesystem isn't the same as the source
        # filesystem; ignore that.
        if e.errno != errno.EPERM:
            raise

    try:
        os.remove(old_file_name)
    except PermissionError as e:
        # Certain operating systems (Cygwin and Windows)
        # fail when deleting opened files, ignore it. (For the
        # systems where this happens, temporary files will be auto-deleted
        # on close anyway.)
        if getattr(e, 'winerror', 0) != 32:
            raise
def saveAs(self, newfile, options=None): """Saves Jpeg with IPTC data to a given file name.""" # Open file and snarf data from it. fh = self._getfh() assert fh fh.seek(0, 0) if not self.fileIsJpeg(fh): # LOG.error("Source file is not a Jpeg; I can only save Jpegs. Sorry.") return None ret = self.jpegCollectFileParts(fh, options) self._closefh(fh) if ret is None: # LOG.error("collectfileparts failed") raise Exception('collectfileparts failed') (start, end, adobe) = ret # # LOGDBG.debug('start: %d, end: %d, adobe:%d', *list(map(len, ret))) self.hexDump(start), len(end) # # LOGDBG.debug('adobe1: %r', adobe) if options is not None and 'discardAdobeParts' in options: adobe = None # # LOGDBG.debug('adobe2: %r', adobe) # # LOGDBG.info('writing...') (tmpfd, tmpfn) = tempfile.mkstemp() if self._filename and os.path.exists(self._filename): shutil.copystat(self._filename, tmpfn) #os.close(tmpfd) tmpfh = os.fdopen(tmpfd, 'wb') #tmpfh = open(tmpfn, 'wb') if not tmpfh: # LOG.error("Can't open output file %r", tmpfn) return None # # LOGDBG.debug('start=%d end=%d', len(start), len(end)) tmpfh.write(start) # character set ch = self.c_charset_r.get(self.out_charset, None) # writing the character set is not the best practice # - couldn't find the needed place (record) for it yet! 
if SURELY_WRITE_CHARSET_INFO and ch is not None: tmpfh.write(pack("!BBBHH", 0x1c, 1, 90, 4, ch)) # # LOGDBG.debug('pos: %d', self._filepos(tmpfh)) data = self.photoshopIIMBlock(adobe, self.packedIIMData()) # # LOGDBG.debug('data len=%d dmp=%r', len(data), self.hexDump(data)) tmpfh.write(data) # # LOGDBG.debug('pos: %d', self._filepos(tmpfh)) tmpfh.write(end) # # LOGDBG.debug('pos: %d', self._filepos(tmpfh)) tmpfh.flush() #print tmpfh, tmpfn, newfile #copy the successfully written file back to the given file if hasattr(tmpfh, 'getvalue'): # StringIO fh2 = open(newfile, 'wb') fh2.truncate() fh2.seek(0, 0) fh2.write(tmpfh.getvalue()) fh2.flush() fh2.close() tmpfh.close() os.unlink(tmpfn) else: tmpfh.close() if os.path.exists(newfile): shutil.move(newfile, newfile + '~') shutil.move(tmpfn, newfile) return True
def copytree(
    src,
    dst,
    symlinks=False,
    ignore=None,
    copy_function=copy2,
    ignore_dangling_symlinks=False,
):
    """Recursively duplicate the directory tree rooted at *src* into *dst*.

    Behaves like ``shutil.copytree`` except that *dst* is allowed to exist
    already.  Per-entry failures do not abort the walk; they are collected
    and raised together as one ``Error`` at the end.

    :param symlinks: when true, recreate symlinks (with their stats) instead
        of copying the files they point at; when false, follow links.
    :param ignore: optional callable ``ignore(src, names) -> ignored_names``
        invoked once per visited directory.
    :param copy_function: per-file copy callable, ``copy2`` by default; any
        function with the same ``(src, dst)`` signature works.
    :param ignore_dangling_symlinks: with ``symlinks=False``, silently skip
        links whose target is missing instead of recording an error.
    :returns: *dst*.
    :raises Error: list of ``(src, dst, reason)`` tuples for all failures.
    """
    entries = os.listdir(src)
    ignored = ignore(src, entries) if ignore is not None else set()
    os.makedirs(dst, exist_ok=True)
    problems = []

    for entry in entries:
        if entry in ignored:
            continue
        src_entry = os.path.join(src, entry)
        dst_entry = os.path.join(dst, entry)
        try:
            if os.path.islink(src_entry):
                link_target = os.readlink(src_entry)
                if symlinks:
                    # Recreate the link itself rather than delegating to
                    # `copy_function`: legacy callers with a custom copy
                    # function may rely on copytree doing the right thing.
                    os.symlink(link_target, dst_entry)
                    copystat(src_entry, dst_entry,
                             follow_symlinks=not symlinks)
                elif not os.path.exists(
                        link_target) and ignore_dangling_symlinks:
                    # Dangling link and the caller asked us to skip those.
                    continue
                elif os.path.isdir(src_entry):
                    # Follow the link into the directory it points at.
                    copytree(src_entry, dst_entry, symlinks, ignore,
                             copy_function)
                else:
                    # Follow the link; copy_function raises if it dangles.
                    copy_function(src_entry, dst_entry)
            elif os.path.isdir(src_entry):
                copytree(src_entry, dst_entry, symlinks, ignore,
                         copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types.
                copy_function(src_entry, dst_entry)
        except Error as nested:
            # A recursive copytree failure carries its own error list —
            # merge it and keep going with the remaining entries.
            problems.extend(nested.args[0])
        except OSError as why:
            problems.append((src_entry, dst_entry, str(why)))

    try:
        copystat(src, dst)
    except OSError as why:
        # Copying directory access times may fail on Windows.
        if getattr(why, "winerror", None) is None:
            problems.append((src, dst, str(why)))

    if problems:
        raise Error(problems)
    return dst
def handle(self, *args, **options):
    """Interactive/non-interactive KA Lite setup: validates the Python
    version and permissions, (re)creates the database, creates the admin
    user, deploys static files and copies the start/stop scripts.
    """
    # Non-interactive runs must have username/hostname resolved up front.
    if not options["interactive"]:
        options["username"] = options["username"] or getattr(
            settings, "INSTALL_ADMIN_USERNAME",
            None) or get_clean_default_username()
        options["hostname"] = options["hostname"] or get_host_name()

    sys.stdout.write(
        " \n")  # blank allows ansible scripts to dump errors cleanly.
    sys.stdout.write(" _ __ ___ _ _ _ \n")
    sys.stdout.write(" | | / / / _ \ | | (_) | \n")
    sys.stdout.write(" | |/ / / /_\ \ | | _| |_ ___ \n")
    sys.stdout.write(" | \ | _ | | | | | __/ _ \ \n")
    sys.stdout.write(" | |\ \| | | | | |___| | || __/ \n")
    sys.stdout.write(" \_| \_/\_| |_/ \_____/_|\__\___| \n")
    sys.stdout.write(" \n")
    sys.stdout.write("http://kalite.learningequality.org\n")
    sys.stdout.write(" \n")
    # PJK 11/03/2015
    # sys.stdout.write(" version %s\n" % VERSION)
    sys.stdout.write(" \n")
    # PJK 11/03/2015
    # Only Python 3.4 is accepted by this port.
    if sys.version_info[:2] == (3, 4):
        pass
    else:
        raise CommandError("You must have Python version 3.4.x installed. "
                           "Your version is: {}\n".format(
                               platform.python_version()))
    # PJK 11/03/2015
    # if sys.version_info >= (2,8) or sys.version_info < (2,6):
    #     raise CommandError("You must have Python version 2.6.x or 2.7.x installed. Your version is: %s\n" % sys.version_info)
    if options["interactive"]:
        sys.stdout.write(
            "--------------------------------------------------------------------------------\n"
        )
        sys.stdout.write("\n")
        sys.stdout.write(
            "This script will configure the database and prepare it for use.\n"
        )
        sys.stdout.write("\n")
        sys.stdout.write(
            "--------------------------------------------------------------------------------\n"
        )
        sys.stdout.write("\n")
        # PJK 11/03/2015
        input("Press [enter] to continue...")
        sys.stdout.write("\n")

    # Tried not to be os-specific, but ... hey. :-/
    # NOTE(review): uid 502 looks like a macOS default user id, yet the
    # message below talks about root (uid 0) — confirm intent.
    if not is_windows() and hasattr(os, "getuid") and os.getuid() == 502:
        sys.stdout.write(
            "-------------------------------------------------------------------\n"
        )
        sys.stdout.write(
            "WARNING: You are installing KA-Lite as root user!\n")
        sys.stdout.write(
            "\tInstalling as root may cause some permission problems while running\n"
        )
        sys.stdout.write("\tas a normal user in the future.\n")
        sys.stdout.write(
            "-------------------------------------------------------------------\n"
        )
        if options["interactive"]:
            if not raw_input_yn(
                    "Do you wish to continue and install it as root?"):
                raise CommandError("Aborting script.\n")
            sys.stdout.write("\n")

    # Check to see if the current user is the owner of the install directory
    if not os.access(BASE_DIR, os.W_OK):
        raise CommandError(
            "You do not have permission to write to this directory!")

    install_clean = not academy.is_installed()
    database_kind = settings.DATABASES["default"]["ENGINE"]
    # Only meaningful for SQLite backends; None otherwise.
    database_file = ("sqlite" in database_kind
                     and settings.DATABASES["default"]["NAME"]) or None

    if database_file and os.path.exists(database_file):
        # We found an existing database file. By default,
        # we will upgrade it; users really need to work hard
        # to delete the file (but it's possible, which is nice).
        sys.stdout.write(
            "-------------------------------------------------------------------\n"
        )
        sys.stdout.write("WARNING: Database file already exists! \n")
        sys.stdout.write(
            "-------------------------------------------------------------------\n"
        )
        # PJK 11/03/2015
        # or raw_input_yn("Keep database file and upgrade to KA Lite version %s? " % VERSION)
        if not options["interactive"] \
                or not raw_input_yn("Remove database file '%s' now? " % database_file) \
                or not raw_input_yn("WARNING: all data will be lost! Are you sure? "):
            install_clean = False
            sys.stdout.write("Upgrading database to KA Lite version %s\n" %
                             VERSION)
        else:
            install_clean = True
            sys.stdout.write("OK. We will run a clean install; \n")
            sys.stdout.write(
                "the database file will be moved to a deletable location.\n"
            )  # After all, don't delete--just move.

    if not install_clean and not database_file and not academy.is_installed(
    ):
        # Make sure that, for non-sqlite installs, the database exists.
        raise Exception(
            "For databases not using SQLite, you must set up your database before running setup."
        )

    # Do all input at once, at the beginning
    if install_clean and options["interactive"]:
        if not options["username"] or not options["password"]:
            sys.stdout.write("\n")
            sys.stdout.write(
                "Please choose a username and password for the admin account on this device.\n"
            )
            sys.stdout.write(
                "\tYou must remember this login information, as you will need\n"
            )
            sys.stdout.write(
                "\tto enter it to administer this installation of KA Lite.\n"
            )
            sys.stdout.write("\n")
        (username, password) = get_username_password(options["username"],
                                                     options["password"])
        email = options["email"]
        (hostname, description) = get_hostname_and_description(
            options["hostname"], options["description"])
    else:
        username = options["username"] or getattr(
            settings, "INSTALL_ADMIN_USERNAME", None)
        password = options["password"] or getattr(
            settings, "INSTALL_ADMIN_PASSWORD", None)
        email = options["email"]  # default is non-empty
        hostname = options["hostname"]
        description = options["description"]

    if username and not validate_username(username):
        raise CommandError(
            "Username must contain only letters, digits, and underscores, and start with a letter.\n"
        )

    ########################
    # Now do stuff
    ########################

    # Move database file (if exists)
    if install_clean and database_file and os.path.exists(database_file):
        # This is an overwrite install; destroy the old db
        dest_file = tempfile.mkstemp()[1]
        sys.stdout.write(
            "(Re)moving database file to temp location, starting clean install. Recovery location: %s\n"
            % dest_file)
        shutil.move(database_file, dest_file)

    # Should clean_pyc for (clean) reinstall purposes
    call_command("clean_pyc",
                 interactive=False,
                 verbosity=options.get("verbosity"),
                 path=os.path.join(settings.PROJECT_PATH, ".."))

    # Migrate the database
    call_command("syncdb",
                 interactive=False,
                 verbosity=options.get("verbosity"))
    call_command("migrate", merge=True, verbosity=options.get("verbosity"))

    # PJK 11/03/2015 I don't know if we need the following, but comment out for now
    # Individually generate any prerequisite models/state that is missing
    # if not Settings.get("private_key"):
    #     call_command("generatekeys", verbosity=options.get("verbosity"))
    # if not Device.objects.count():
    #     call_command("initdevice", hostname, description, verbosity=options.get("verbosity"))
    # if not Facility.objects.count():
    #     Facility.initialize_default_facility()

    # Install data
    # if install_clean:
    #     # Create device, load on any zone data
    #     call_command("generatekeys", verbosity=options.get("verbosity"))
    #     call_command("initdevice", hostname, description, verbosity=options.get("verbosity"))
    #     Facility.initialize_default_facility()
    #else:  # Device exists; load data if required.
    #     # Hackish, as this duplicates code from initdevice.
    #     #if os.path.exists(InitCommand.data_json_file):
    #     #    # This is a pathway to install zone-based data on a software upgrade.
    #     #    sys.stdout.write("Loading zone data from '%s'\n" % InitCommand.data_json_file)
    #     #    load_data_for_offline_install(in_file=InitCommand.data_json_file)
    #     # confirm_or_generate_zone()

    # Create the admin user
    if password:  # blank password (non-interactive) means don't create a superuser
        admin = get_object_or_None(User, username=username)
        if not admin:
            call_command("createsuperuser",
                         username=username,
                         email=email,
                         interactive=False,
                         verbosity=options.get("verbosity"))
            admin = User.objects.get(username=username)
        admin.set_password(password)
        admin.save()

    # Now deploy the static files
    call_command("collectstatic", interactive=False)

    if not settings.CENTRAL_SERVER:
        # Move scripts
        for script_name in ["start", "stop", "run_command"]:
            script_file = script_name + system_script_extension()
            dest_dir = os.path.join(settings.PROJECT_PATH, "..")
            src_dir = os.path.join(dest_dir, "scripts")
            shutil.copyfile(os.path.join(src_dir, script_file),
                            os.path.join(dest_dir, script_file))
            try:
                shutil.copystat(os.path.join(src_dir, script_file),
                                os.path.join(dest_dir, script_file))
            except OSError:
                # even if we have write permission, we might not have
                # permission to change file mode
                sys.stdout.write(
                    "WARNING: Unable to set file permissions on %s! \n" %
                    script_file)

        start_script_path = os.path.realpath(
            os.path.join(settings.PROJECT_PATH, "..",
                         "start%s" % system_script_extension()))

        # PJK 11/03/2015. I don't think we will need this...
        # Run videoscan, on the distributed server.
        # sys.stdout.write("Scanning for video files in the content directory (%s)\n" % settings.CONTENT_ROOT)
        # call_command("videoscan")

        # done; notify the user.
        sys.stdout.write("\n")
        if install_clean:
            sys.stdout.write(
                "CONGRATULATIONS! You've finished setting up the KA Lite server software.\n"
            )
            sys.stdout.write("\tPlease run '%s' to start the server,\n" %
                             start_script_path)
            sys.stdout.write(
                "\tthen load one of the following addresses in your browser to complete the configuration:\n"
            )
            for ip in get_ip_addresses():
                sys.stdout.write("\t\thttp://%s:%d/\n" %
                                 (ip, settings.USER_FACING_PORT()))
        else:
            sys.stdout.write(
                "CONGRATULATIONS! You've finished updating the KA Lite server software.\n"
            )
            sys.stdout.write("\tPlease run '%s' to start the server.\n" %
                             start_script_path)
        sys.stdout.write("\n")
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
    """ clone existing prefix1 into new prefix2

    Filters out conda/conda-env (and their dependents), resolves missing
    package URLs against the channel index, copies untracked files with
    prefix-path rewriting, and finally installs the package list into
    prefix2.  Returns ``(actions, untracked_files)``.
    """
    untracked_files = untracked(prefix1)

    # Discard conda, conda-env and any package that depends on them
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    # Fixed-point loop: keep adding dependents of filtered packages until
    # nothing new is found.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            if name == "conda-env":
                filter["conda-env"] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if filter:
        if not quiet:
            fh = sys.stderr if context.json else sys.stdout
            print(
                'The following packages cannot be cloned out of the root environment:',
                file=fh)
            for pkg in itervalues(filter):
                print(' - ' + pkg.dist_name, file=fh)
        drecs = {
            dist: info
            for dist, info in iteritems(drecs)
            if info['name'] not in filter
        }

    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        index_args = index_args or {}
        index = get_index(**index_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = dist.dist_name
            fn = dist.to_filename()
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                # Substitute the local record with the newest index record
                # carrying a URL.
                del drecs[dist]
                dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[Dist(dist_str)] = r.index[dist_str]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, context.subdir,
                                                        notfound)
        raise CondaRuntimeError(msg)

    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist
        if fkey not in index:
            index[fkey] = IndexRecord.from_objects(info, not_fetched=True)
            # Index changed, so any previously built Resolve is stale.
            r = None
        urls[dist] = info['url']

    if r is None:
        r = Resolve(index)
    # Install packages in dependency order.
    dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            # A non-directory is in the way; remove it so makedirs works.
            rm_rf(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # Unreadable untracked file: skip it.
            continue

        try:
            # Text files get their embedded prefix paths rewritten to the
            # new environment location.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls,
                       prefix2,
                       verbose=not quiet,
                       index=index,
                       force_extract=False,
                       index_args=index_args)
    return actions, untracked_files
def prepare(self):
    """Prepare the managed file for (re)writing: detect whether the content
    differs, back up the existing file, and stage the new content in a temp
    file in the same directory (optionally made visible immediately via an
    atomic move).
    """
    if self._originalFileWasMissing:
        self.logger.debug("file '%s' missing" % self._name)
    else:
        self.logger.debug("file '%s' exists" % self._name)
        with open(self._name, 'r' + ('b' if self._binary else '')) as f:
            if f.read() == self._content:
                self.logger.debug(
                    "file '%s' already has content" % self._name
                )
                # Nothing to do later: content already matches.
                self._originalDiffer = False
    if self._originalDiffer:
        mydir = os.path.dirname(self._name)
        if self._originalFileWasMissing:
            if not os.path.exists(mydir):
                # Remember what we created so it can be rolled back.
                self._createdDirectory = self._createDirRecursive(mydir)
        else:
            # check we can open file for write
            with open(self._name, 'a'):
                pass
            currentStat = os.stat(self._name)
            if not self._enforcePermissions:
                # Preserve the existing mode/owner/group on the new file.
                self._mode = currentStat.st_mode
                self._owner = currentStat.st_uid
                self._group = currentStat.st_gid
            #
            # backup the file
            #
            self._backup = "%s.%s" % (
                self._name,
                datetime.datetime.now().strftime('%Y%m%d%H%M%S')
            )
            self.logger.debug(
                "backup '%s'->'%s'" % (
                    self._name,
                    self._backup
                )
            )
            shutil.copyfile(self._name, self._backup)
            shutil.copystat(self._name, self._backup)
            os.chown(
                self._backup,
                currentStat.st_uid,
                currentStat.st_gid
            )
        fd = -1
        try:
            # Stage the new content in the same directory so a later rename
            # stays on the same filesystem (and can be atomic).
            fd, self._tmpname = tempfile.mkstemp(
                suffix=".tmp",
                prefix="%s." % os.path.basename(self._name),
                dir=mydir,
            )
            os.chown(
                self._tmpname,
                self._owner,
                self._group
            )
            # python does not support atomic umask
            # so leave file as-is
            if self._mode is not None:
                os.chmod(
                    self._tmpname,
                    self._mode
                )
            os.write(fd, self._content)
            os.fsync(fd)
            if self._visibleButUnsafe:
                type(self)._atomicMove(
                    source=self._tmpname,
                    destination=self._name,
                    binary=self._binary,
                )
            self._prepared = True
        finally:
            # Always release the descriptor, even on failure.
            if fd != -1:
                try:
                    os.close(fd)
                except OSError:
                    pass
                fd = -1
def copystat(self, target):
    """Apply this path's permissions, timestamps and flags to `target`.

    Ownership (uid/gid) is deliberately not transferred.
    """
    destination = self._to_backend(target)
    shutil.copystat(self.path, destination)
def copyToDir(src, dst, updateonly=True, symlinks=True, ignore=None,
              forceupdate=None, dryrun=False):
    """Recursively copy the directory tree at `src` into `dst`.

    Args:
        src: source directory (must exist).
        dst: destination directory (created if missing).
        updateonly: when True, copy a file only if the destination is
            missing or older (by mtime) than the source.
        symlinks: when True, recreate symlinks instead of following them.
        ignore: iterable of '/'-separated glob patterns (relative to `src`)
            of files to skip.
        forceupdate: iterable of '/'-separated glob patterns of files to
            copy even when `updateonly` would skip them.
        dryrun: when True, perform no filesystem changes; only report.

    Returns:
        List of destination paths that were (or, with dryrun, would be)
        copied.
    """

    def copySymLink(srclink, destlink):
        # Replace any existing entry at the destination, then duplicate
        # the link itself (not its referent).
        if os.path.lexists(destlink):
            os.remove(destlink)
        os.symlink(os.readlink(srclink), destlink)
        # BUG FIX: os.lchmod does not exist on most platforms (e.g. Linux),
        # which raises AttributeError, not OSError — guard with hasattr so
        # the best-effort mode copy never crashes.
        if hasattr(os, 'lchmod'):
            try:
                st = os.lstat(srclink)
                os.lchmod(destlink, stat.S_IMODE(st.st_mode))
            except OSError:
                pass

    def _expandPatterns(patterns):
        # Turn '/'-separated relative patterns into absolute glob patterns.
        if patterns is None:
            return []
        return [os.path.join(src, *p.split('/')) for p in patterns]

    fc = []
    if not os.path.exists(dst) and not dryrun:
        os.makedirs(dst)
        shutil.copystat(src, dst)
    ignorepatterns = _expandPatterns(ignore)
    forceupdatepatterns = _expandPatterns(forceupdate)
    srclen = len(src)
    for root, dirs, files in os.walk(src):
        fullsrcfiles = [os.path.join(root, x) for x in files]
        relroot = root[srclen + 1:]
        dstroot = os.path.join(dst, relroot)
        fulldstfiles = [os.path.join(dstroot, x) for x in files]
        excludefiles = list(itertools.chain.from_iterable(
            fnmatch.filter(fullsrcfiles, pattern)
            for pattern in ignorepatterns))
        forceupdatefiles = list(itertools.chain.from_iterable(
            fnmatch.filter(fullsrcfiles, pattern)
            for pattern in forceupdatepatterns))
        for directory in dirs:
            # BUG FIX: join against `root`, not `src` — the old code built
            # wrong source paths for nested directories, so symlinked
            # subdirectories were silently treated as plain directories.
            fullsrcdir = os.path.join(root, directory)
            fulldstdir = os.path.join(dstroot, directory)
            if os.path.islink(fullsrcdir):
                if symlinks and dryrun is False:
                    copySymLink(fullsrcdir, fulldstdir)
            else:
                if not os.path.exists(fulldstdir) and dryrun is False:
                    os.makedirs(fulldstdir)
                    # BUG FIX: copy the stat of the directory actually
                    # created, not of the tree roots (was copystat(src, dst)).
                    shutil.copystat(fullsrcdir, fulldstdir)
        for s, d in zip(fullsrcfiles, fulldstfiles):
            if s in excludefiles:
                continue
            if updateonly:
                # Copy only when dest is missing/stale, or force-listed.
                go = False
                if os.path.isfile(d):
                    if os.stat(s).st_mtime > os.stat(d).st_mtime:
                        go = True
                else:
                    go = True
                if s in forceupdatefiles:
                    go = True
            else:
                go = True
            if go:
                fc.append(d)
                if not dryrun:
                    if os.path.islink(s) and symlinks is True:
                        copySymLink(s, d)
                    else:
                        shutil.copy2(s, d)
    return fc
if symlinks and os.path.islink(srcname): linkto = os.readlink(srcname) os.symlink(linkto, dstname) elif os.path.isdir(srcname): copytree(srcname, dstname, symlinks, ignore) else: # Will raise a SpecialFileError for unsupported file types copy2(srcname, dstname) # catch the Error from the recursive copytree so that we can # continue with other files except Error as e: errors.extend(e.args[0]) except EnvironmentError, why: errors.append((srcname, dstname, str(why))) try: copystat(src, dst) except OSError, why: if WindowsError is not None and isinstance(why, WindowsError): # Copying file access times may fail on Windows pass else: errors.append((src, dst, str(why))) if errors: raise Error(errors) copytree = copy_tree # alias for drop-in replacement of shutil if __name__ == '__main__': #with atomic_save('/tmp/final.txt') as f: # f.write('rofl')
def main():
    """Entry point of the copy module: copy a file/tree to `dest` on the
    managed host.

    Validates the source, computes checksums to decide whether a copy is
    needed, optionally backs up / validates the destination, performs the
    copy atomically, and exits via module.exit_json / fail_json.
    """
    global module

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            _original_basename=dict(
                type='str'
            ),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(type='str'),
            follow=dict(type='bool', default=False),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    if module.params.get('thirsty'):
        module.deprecate(
            'The alias "thirsty" has been deprecated and will be removed, use "force" instead',
            version='2.13',
            collection_name='ansible.builtin')

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    local_follow = module.params['local_follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_dest = None

    # Directories have no checksum: checksum_src stays None for a dir src.
    if os.path.isfile(src):
        checksum_src = module.sha1(src)
    else:
        checksum_src = None

    # Backwards compat only.  This will be None in FIPS mode
    try:
        if os.path.isfile(src):
            md5sum_src = module.md5(src)
        else:
            md5sum_src = None
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg=
            'Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum)

    # Special handling for recursive copy - create intermediate dirs
    if dest.endswith(os.sep):
        if _original_basename:
            dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir,
                 new_directory_list) = split_pre_existing_dir(dirname)
            except AnsibleModuleError as e:
                e.result['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)
            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            # Apply ownership/mode to each directory we just created.
            adjust_recursive_directory_permissions(pre_existing_dir,
                                                   new_directory_list, module,
                                                   directory_args, changed)

    # Copying into an existing directory: target the file inside it.
    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists",
                             src=src,
                             dest=dest,
                             changed=False)
        if os.access(b_dest, os.R_OK) and os.path.isfile(b_dest):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(
                        msg="Destination directory %s is not accessible" %
                        (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" %
                             (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest),
                     os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" %
                         (os.path.dirname(dest)))

    backup_file = None
    # Content differs (or dest is a symlink to be converted): do the copy.
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" %
                                         (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate",
                                         exit_status=rc,
                                         stdout=out,
                                         stderr=err)
                b_mysrc = b_src
                if remote_src and os.path.isfile(b_src):
                    # Stage the copy next to the destination so the final
                    # atomic_move is a same-filesystem rename.
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))

                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(
                                to_native(b_src)))
                        else:
                            raise

                # might be needed below
                if PY3 and hasattr(os, 'listxattr'):
                    try:
                        src_has_acls = 'system.posix_acl_access' in os.listxattr(
                            src)
                    except Exception as e:
                        # assume unwanted ACLs by default
                        src_has_acls = True

                module.atomic_move(
                    b_mysrc,
                    dest,
                    unsafe_writes=module.params['unsafe_writes'])

                if PY3 and hasattr(os, 'listxattr') and platform.system(
                ) == 'Linux' and not remote_src:
                    # atomic_move used above to copy src into dest might, in some cases,
                    # use shutil.copy2 which in turn uses shutil.copystat.
                    # Since Python 3.3, shutil.copystat copies file extended attributes:
                    # https://docs.python.org/3/library/shutil.html#shutil.copystat
                    # os.listxattr (along with others) was added to handle the operation.

                    # This means that on Python 3 we are copying the extended attributes which includes
                    # the ACLs on some systems - further limited to Linux as the documentation above claims
                    # that the extended attributes are copied only on Linux. Also, os.listxattr is only
                    # available on Linux.

                    # If not remote_src, then the file was copied from the controller. In that
                    # case, any filesystem ACLs are artifacts of the copy rather than preservation
                    # of existing attributes. Get rid of them:
                    if src_has_acls:
                        # FIXME If dest has any default ACLs, there are not applied to src now because
                        # they were overridden by copystat. Should/can we do anything about this?
                        # 'system.posix_acl_default' in os.listxattr(os.path.dirname(b_dest))
                        try:
                            clear_facls(dest)
                        except ValueError as e:
                            if 'setfacl' in to_native(e):
                                # No setfacl so we're okay.  The controller couldn't have set a facl
                                # without the setfacl command
                                pass
                            else:
                                raise
                        except RuntimeError as e:
                            # setfacl failed.
                            if 'Operation not supported' in to_native(e):
                                # The file system does not support ACLs.
                                pass
                            else:
                                raise

            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest),
                                 traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    # If neither have checksums, both src and dest are directories.
    if checksum_src is None and checksum_dest is None:
        if remote_src and os.path.isdir(module.params['src']):
            b_src = to_bytes(module.params['src'],
                             errors='surrogate_or_strict')
            b_dest = to_bytes(module.params['dest'],
                              errors='surrogate_or_strict')

            # src ends with separator => copy the *contents* of src.
            if src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                diff_files_changed = copy_diff_files(b_src, b_dest, module)
                left_only_changed = copy_left_only(b_src, b_dest, module)
                common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                owner_group_changed = chown_recursive(b_dest, module)
                if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                    changed = True

            if src.endswith(
                    os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode:
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                    chown_recursive(dest, module)
                changed = True

            # src without trailing separator => copy src itself into dest.
            if not src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                    changed = True
                    chown_recursive(dest, module)
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True
                if os.path.exists(b_dest):
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True

            if not src.endswith(os.path.sep) and not os.path.exists(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(module.params['src']),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    os.makedirs(b_dest)
                    b_src = to_bytes(os.path.join(module.params['src'], ""),
                                     errors='surrogate_or_strict')
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True

    res_args = dict(dest=dest,
                    src=src,
                    md5sum=md5sum_src,
                    checksum=checksum_src,
                    changed=changed)
    if backup_file:
        res_args['backup_file'] = backup_file

    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params, path=dest)
        res_args['changed'] = module.set_fs_attributes_if_different(
            file_args, res_args['changed'])

    module.exit_json(**res_args)
def copyfile(src, dest):
    """Duplicate *src* at *dest*, then try to mirror its metadata.

    Failures while copying the content propagate to the caller; copying
    permissions/timestamps is best-effort and any error there is ignored.
    """
    shutil.copyfile(src, dest)
    try:
        shutil.copystat(src, dest)
    except Exception:
        # Metadata copy is deliberately best-effort.
        pass
fitsDB.save() fitsDB.compress() sys.exit() for f in fitsDB.objectList: try: copyStatus = f['originalavailable'] except: copyStatus = False; if not copyStatus: source = f['originalFilename'] destination = os.path.join(destinationFolder, f['filename']) print(source + " --> writing to: " + destination) shutil.copy(source, destination) shutil.copystat(source, destination) f['originalavailable'] = True fitsDB.save() fitsDB.compress() sys.exit() for index in range(len(fitsDB.objectList)):
import shutil

# Demo script exercising the shutil API against system files.
# NOTE(review): operates on /etc/passwd and /etc/shadow — requires root and
# mutates /tmp and /var/tmp; run only in a throwaway environment.

# Copy via file objects (equivalent of `cat /etc/passwd > /tmp/mima.txt`).
with open('/etc/passwd', 'rb') as sfobj:
    with open('/tmp/mima.txt', 'wb') as dfobj:
        shutil.copyfileobj(sfobj, dfobj)  # copy file objects

shutil.copyfile('/etc/passwd', '/tmp/mima2.txt')
shutil.copy('/etc/shadow', '/tmp/')  # cp /etc/shadow /tmp/
shutil.copy2('/etc/shadow', '/tmp/')  # cp -p /etc/shadow /tmp/
shutil.move('/tmp/mima.txt', '/var/tmp/')  # mv /tmp/mima.txt /var/tmp/
shutil.copytree('/etc/security', '/tmp/anquan')  # cp -r /etc/security /tmp/anquan
shutil.rmtree('/tmp/anquan')  # rm -rf /tmp/anquan
# Set the permission bits of mima2.txt to match /etc/shadow
shutil.copymode('/etc/shadow', '/tmp/mima2.txt')
# Set the metadata of mima2.txt to match /etc/shadow
# (inspect the metadata with: stat /etc/shadow)
shutil.copystat('/etc/shadow', '/tmp/mima2.txt')
# NOTE(review): user='******' looks like a redacted placeholder — supply a
# real user name before running.
shutil.chown('/tmp/mima2.txt', user='******', group='zhangsan')
def makebackup(ui, repo, dir, files): try: os.mkdir(dir) except OSError, err: if err.errno != errno.EEXIST: raise backups = {} for f in files: if os.path.isfile(repo.wjoin(f)): fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.', dir=dir) os.close(fd) ui.debug('backup %r as %r\n' % (f, tmpname)) util.copyfile(repo.wjoin(f), tmpname) shutil.copystat(repo.wjoin(f), tmpname) backups[f] = tmpname return backups def getshelfpath(repo, name): if name: shelfpath = "shelves/" + name else: # Check if a shelf from an older version exists if os.path.isfile(repo.join('shelve')): shelfpath = 'shelve' else: shelfpath = "shelves/default"
def run(self, context):
    """Copy each resolved jar into this target's output dir, optionally
    merge extra manifest entries, and sign it with the configured keystore.

    Raises BuildException, wrapping the failing jar's name, on any error.
    """
    self.keystore = context.expandPropertyValues(self.keystore)
    options = self.options

    mkdir(self.path)
    for src, dest in self.jars.resolveWithDestinations(context):
        if '..' in dest:
            # to avoid people abusing this to copy files outside the dest directory!
            raise Exception(
                'This target does not permit destination paths to contain ".." relative path expressions'
            )
        try:
            # Byte-for-byte copy of the jar, then preserve its stat metadata.
            with open(src, 'rb') as s:
                with openForWrite(os.path.join(self.path, dest), 'wb') as d:
                    d.write(s.read())

            shutil.copystat(src, os.path.join(self.path, dest))

            # When we re-jar with the user specified manifest entries, jar will complain
            # about duplicate attributes IF the original MANIFEST.MF already has those entries.
            # This is happening for latest version of SL where Application-Name, Permission etc
            # were already there.
            #
            # The block of code below will first extract the original MANIFEST.MF from the source
            # jar file, read all manifest entry to a list. When constructing the new manifest entries,
            # make sure the old MANIFEST.MF doesn't have that entry before putting the new manifest entry
            # to the list. This will avoid the duplicate attribute error.
            #
            if self.manifestDefaults:

                lines = []

                # read each line of MANIFEST.MF of the original jar and put them in lines
                with zipfile.ZipFile(src, 'r') as zf:
                    lst = zf.infolist()
                    for zi in lst:
                        fn = zi.filename
                        if fn.lower().endswith('manifest.mf'):
                            try:
                                manifest_txt = zf.read(zi.filename)
                            except Exception, e:
                                raise BuildException(
                                    'Failed reading the manifest file %s with exception:%s'
                                    % (fn, e))

                            # if we have all manifest text, parse and save each line
                            if manifest_txt:
                                # CR LF | LF | CR can be there as line feed and hence the code below
                                lines = manifest_txt.replace(
                                    '\r\n', '\n').replace('\r',
                                                          '\n').split('\n')

                            # done
                            break

                original_entries = collections.OrderedDict(
                )  # to ensure we don't overwrite/duplicate these

                # populate the manifest_entries with original values from original manifest
                for l in lines:
                    if ':' in l and not l.startswith(
                            ' '
                    ):  # ignore continuation lines etc because keys are all we care about
                        key, value = l.split(':', 1)
                        original_entries[key] = value.strip()

                # build up a list of the new manifest entries (will be merged into any existing manifest by jar)
                manifest_entries = collections.OrderedDict()
                for i in self.manifestDefaults:
                    # if entry isn't there yet, add to the list
                    if i not in original_entries:
                        manifest_entries[i] = context.expandPropertyValues(
                            self.manifestDefaults[i])

                # create the manifest file
                # we want to add the manifest entries explicitly specified here but
                # NOT the 'default' manifest entries we usually add, since these
                # are likely to have been set already, and we do not want duplicates
                mkdir(self.workDir)
                manifest = os.path.join(self.workDir,
                                        "MANIFEST.MF")  # manifest file

                # Work on a copy of options so the shared dict isn't mutated.
                options = dict(options)
                options['jar.manifest.defaults'] = {}
                create_manifest(manifest, manifest_entries, options)

                # update the EXISTING jar file with the new manifest entries, which will be merged into
                # existing manifest by the jar tool
                jar(os.path.join(self.path, dest),
                    manifest,
                    None,
                    options,
                    update=True)

            signjar(os.path.join(self.path, dest),
                    self.keystore,
                    options,
                    alias=self.alias,
                    storepass=self.storepass,
                    outputHandler=ProcessOutputHandler.create(
                        'signjars', treatStdErrAsErrors=False,
                        options=options))
        except BuildException, e:
            # Re-raise with the jar's basename so the failing artifact is obvious.
            raise BuildException('Error processing %s: %s' %
                                 (os.path.basename(dest), e))
def run_optimizer(file_path, cmd, as_filter=False, input_data=None):
    """Run an external image-optimizer command on `file_path` in place.

    The tool's output goes to a temp file beside the original; on success the
    original is atomically replaced (its stat metadata preserved).  In filter
    mode (`as_filter`) the image is streamed through the tool's stdin/stdout
    via two copy threads; otherwise the placeholders True/False in `cmd` are
    substituted with the input/output file names (cmd is mutated in place).
    Returns the tool's output text on failure, an error string for an empty
    result, or None on success.  `input_data`, if given, is assumed to be an
    open binary file-like object used instead of `file_path` — it is closed
    by the copy thread.
    """
    file_path = os.path.abspath(file_path)
    cwd = os.path.dirname(file_path)
    ext = os.path.splitext(file_path)[1]
    # Guard against missing/absurd extensions; the temp file keeps the same
    # suffix so tools that sniff the extension behave.
    if not ext or len(ext) > 10 or not ext.startswith('.'):
        ext = '.jpg'
    fd, outfile = tempfile.mkstemp(dir=cwd, suffix=ext)
    try:
        if as_filter:
            outf = os.fdopen(fd, 'wb')
        else:
            os.close(fd)
        iname, oname = os.path.basename(file_path), os.path.basename(outfile)

        def repl(q, r):
            # Replace the placeholder value q in cmd with r.
            cmd[cmd.index(q)] = r

        if not as_filter:
            # Convention: True marks the input slot, False the output slot.
            repl(True, iname), repl(False, oname)
        stdin = subprocess.PIPE if as_filter else None
        stderr = subprocess.PIPE if as_filter else subprocess.STDOUT
        # NOTE(review): DETACHED_PROCESS presumably avoids console windows on
        # Windows; `iswindows` is a project-level flag.
        creationflags = subprocess.DETACHED_PROCESS if iswindows else 0
        p = subprocess.Popen(cmd,
                             cwd=cwd,
                             stdout=subprocess.PIPE,
                             stderr=stderr,
                             stdin=stdin,
                             creationflags=creationflags)
        stderr = p.stderr if as_filter else p.stdout
        if as_filter:
            src = input_data or open(file_path, 'rb')

            def copy(src, dest):
                # Pump src into dest, always closing both ends.
                try:
                    shutil.copyfileobj(src, dest)
                finally:
                    src.close(), dest.close()

            # Feed the image to the tool and collect its stdout concurrently
            # to avoid pipe deadlock.
            inw = Thread(name='CopyInput', target=copy, args=(src, p.stdin))
            inw.daemon = True
            inw.start()
            outw = Thread(name='CopyOutput',
                          target=copy,
                          args=(p.stdout, outf))
            outw.daemon = True
            outw.start()
        raw = force_unicode(stderr.read())
        if p.wait() != 0:
            # Non-zero exit: return the tool's output as the error message.
            return raw
        else:
            if as_filter:
                outw.join(60.0), inw.join(60.0)
            try:
                sz = os.path.getsize(outfile)
            except OSError:
                sz = 0
            if sz < 1:
                return '%s returned a zero size image' % cmd[0]
            # Keep the original's stat metadata, then swap in the optimized
            # file atomically.
            shutil.copystat(file_path, outfile)
            atomic_rename(outfile, file_path)
    finally:
        # Best-effort cleanup; only a missing file is tolerated.
        try:
            os.remove(outfile)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
        try:
            os.remove(outfile + '.bak')  # optipng creates these files
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
def shelve(ui, repo, *pats, **opts): '''interactively select changes to set aside If a list of files is omitted, all changes reported by :hg:` status` will be candidates for shelving. You will be prompted for whether to shelve changes to each modified file, and for files with multiple changes, for each change to use. The shelve command works with the Color extension to display diffs in color. On each prompt, the following responses are possible:: y - shelve this change n - skip this change s - skip remaining changes to this file f - shelve remaining changes to this file d - done, skip remaining changes and files a - shelve all changes to all remaining files q - quit, shelving no changes ? - display help ''' if not ui.interactive(): raise util.Abort(_('shelve can only be run interactively')) # List all the active shelves by name and return ' if opts['list']: listshelves(ui, repo) return forced = opts['force'] or opts['append'] # Shelf name and path shelfname = opts.get('name') shelfpath = getshelfpath(repo, shelfname) if os.path.exists(repo.join(shelfpath)) and not forced: raise util.Abort(_('shelve data already exists')) def shelvefunc(ui, repo, message, match, opts): parents = repo.dirstate.parents() changes = repo.status(match=match)[:3] modified, added, removed = changes diffopts = patch.diffopts(ui, opts={'git': True, 'nodates': True}) chunks = patch.diff(repo, changes=changes, opts=diffopts) fp = cStringIO.StringIO(''.join(chunks)) try: ac = parsepatch(fp) except patch.PatchError, err: raise util.Abort(_('error parsing patch: %s') % err) del fp # 1. 
filter patch, so we have intending-to apply subset of it chunks = filterpatch(ui, ac, not opts['all']) rc = refilterpatch(ac, chunks) # set of files to be processed contenders = set() for h in chunks: try: contenders.update(set(h.files())) except AttributeError: pass # exclude sources of copies that are otherwise untouched changed = modified + added + removed newfiles = set(f for f in changed if f in contenders) if not newfiles: ui.status(_('no changes to shelve\n')) return 0 # 2. backup changed files, so we can restore them in case of error backupdir = repo.join('shelve-backups') try: backups = makebackup(ui, repo, backupdir, newfiles) # patch to shelve sp = cStringIO.StringIO() for c in chunks: c.write(sp) # patch to apply to shelved files fp = cStringIO.StringIO() for c in rc: # skip files not selected for shelving if c.filename() in newfiles: c.write(fp) dopatch = fp.tell() fp.seek(0) try: # 3a. apply filtered patch to clean repo (clean) opts['no_backup'] = True cmdutil.revert(ui, repo, repo['.'], parents, *[repo.wjoin(f) for f in newfiles], **opts) for f in added: if f in newfiles: util.unlinkpath(repo.wjoin(f)) # 3b. (apply) if dopatch: try: ui.debug('applying patch\n') ui.debug(fp.getvalue()) patch.internalpatch(ui, repo, fp, 1, eolmode=None) except patch.PatchError, err: raise util.Abort(str(err)) del fp # 4. We prepared working directory according to filtered # patch. Now is the time to save the shelved changes! 
ui.debug("saving patch to shelve\n") if opts['append']: sp.write(repo.opener(shelfpath).read()) sp.seek(0) f = repo.opener(shelfpath, "w") f.write(sp.getvalue()) del f, sp except: ui.warn("shelving failed: %s\n" % sys.exc_info()[1]) try: # re-schedule remove matchremoved = scmutil.matchfiles(repo, removed) cmdutil.forget(ui, repo, matchremoved, "", True) for f in removed: if f in newfiles and os.path.isfile(repo.wjoin(f)): os.unlink(repo.wjoin(f)) # copy back backups for realname, tmpname in backups.iteritems(): ui.debug('restoring %r to %r\n' % (tmpname, realname)) util.copyfile(tmpname, repo.wjoin(realname)) # Our calls to copystat() here and above are a # hack to trick any editors that have f open that # we haven't modified them. # # Also note that this racy as an editor could # notice the file's mtime before we've finished # writing it. shutil.copystat(tmpname, repo.wjoin(realname)) # re-schedule add matchadded = scmutil.matchfiles(repo, added) cmdutil.add(ui, repo, matchadded, False, False, "", True) ui.debug('removing shelve file\n') if os.path.isfile(repo.join(shelfpath)): os.unlink(repo.join(shelfpath)) except OSError, err: ui.warn("restoring backup failed: %s\n" % err)