def do_link(src, trg):
    """Attempt to hardlink src's file to trg's location.

    :param src: fs object exposing a ``.location`` path to link from
    :param trg: fs object exposing a ``.location`` path to link to
    :return: True if the hardlink was created; False if the two paths live
        on different filesystems (EXDEV) and the caller must fall back to
        copying the file instead.
    :raise EnvironmentError: for any failure other than EXDEV/EEXIST.
    """
    try:
        os.link(src.location, trg.location)
        return True
    except EnvironmentError as e:
        if e.errno == errno.EXDEV:
            # hardlink is impossible, force copyfile
            return False
        elif e.errno != errno.EEXIST:
            raise
    # target already exists: link to a temp name beside it, then rename
    # over the target so the replacement is atomic.
    path = trg.location + '#new'
    unlink_if_exists(path)
    try:
        os.link(src.location, path)
    except EnvironmentError as e:
        if e.errno != errno.EXDEV:
            # someone is screwing with us, or unlink_if_exists is broken.
            raise
        # hardlink is impossible, force copyfile
        return False
    try:
        os.rename(path, trg.location)
    except EnvironmentError as e:
        # bind the exception (`as e`) and test e.errno — the original
        # referenced a stale binding via a misspelled attribute (e.eerrno).
        unlink_if_exists(path)
        if e.errno != errno.EXDEV:
            # weird error, broken FS codes, perms, or someone is screwing with us.
            raise
        # this is only possible on overlay fs's; while annoying, you can have two
        # different filesystems in use in the same directory in those cases.
        return False
    return True
def do_link(src, trg):
    """Attempt to hardlink src's file to trg's location.

    :param src: fs object exposing a ``.location`` path to link from
    :param trg: fs object exposing a ``.location`` path to link to
    :return: True if the hardlink was created; False if the two paths live
        on different filesystems (EXDEV) and the caller must fall back to
        copying the file instead.
    :raise EnvironmentError: for any failure other than EXDEV/EEXIST.
    """
    try:
        os.link(src.location, trg.location)
        return True
    except EnvironmentError as e:
        if e.errno == errno.EXDEV:
            # hardlink is impossible, force copyfile
            return False
        elif e.errno != errno.EEXIST:
            raise
    # target already exists: link to a temp name beside it, then rename
    # over the target so the replacement is atomic.
    path = trg.location + '#new'
    unlink_if_exists(path)
    try:
        os.link(src.location, path)
    except EnvironmentError as e:
        if e.errno != errno.EXDEV:
            # someone is screwing with us, or unlink_if_exists is broken.
            raise
        # hardlink is impossible, force copyfile
        return False
    try:
        os.rename(path, trg.location)
    except EnvironmentError as e:
        # bind the exception (`as e`) and test e.errno — the original
        # referenced a stale binding via a misspelled attribute (e.eerrno).
        unlink_if_exists(path)
        if e.errno != errno.EXDEV:
            # weird error, broken FS codes, perms, or someone is screwing with us.
            raise
        # this is only possible on overlay fs's; while annoying, you can have two
        # different filesystems in use in the same directory in those cases.
        return False
    return True
def add_data(self):
    """Build the binpkg tarball for ``self.new_pkg`` and tag its Xpak on.

    Writes to a hidden temp path beside the final location and records
    ``self.tmp_path``/``self.final_path`` for the later commit step; the
    temp file is removed (best-effort) if any stage fails.

    :return: True on success
    :raise repo_interfaces.Failure: if the target directory can't be created
    """
    if self.observer is None:
        end = start = lambda x: None
    else:
        start = self.observer.phase_start
        end = self.observer.phase_end
    pkg = self.new_pkg
    final_path = discern_loc(self.repo.base, pkg, self.repo.extension)
    tmp_path = pjoin(
        os.path.dirname(final_path),
        ".tmp.%i.%s" % (os.getpid(), os.path.basename(final_path)))
    self.tmp_path, self.final_path = tmp_path, final_path
    if not ensure_dirs(os.path.dirname(tmp_path), mode=0o755):
        raise repo_interfaces.Failure(
            "failed creating directory %r" % os.path.dirname(tmp_path))
    try:
        start("generating tarball: %s" % tmp_path)
        tar.write_set(pkg.contents, tmp_path, compressor='bzip2',
                      parallelize=True)
        end("tarball created", True)
        start("writing Xpak")
        # ok... got a tarball. now add xpak.
        xpak.Xpak.write_xpak(tmp_path, generate_attr_dict(pkg))
        end("wrote Xpak", True)
        # ok... we tagged the xpak on.
        os.chmod(tmp_path, 0o644)
    except Exception as original_exc:
        # Best-effort cleanup.  Use a distinct name for the cleanup error so
        # it can't shadow the real failure, and re-raise the original
        # explicitly: a bare `raise` after the inner handler would re-raise
        # the cleanup error under Python 2.
        try:
            unlink_if_exists(tmp_path)
        except EnvironmentError as cleanup_exc:
            logger.warning("failed removing %r: %r" % (tmp_path, cleanup_exc))
        raise original_exc
    return True
def _clean_old_caches(path):
    """Remove stale plugin-cache files found under ``path``.

    Cleanup is best-effort: a failed unlink is logged, never raised.
    """
    stale_names = ('plugincache2',)
    for stale in stale_names:
        cache_path = pjoin(path, stale)
        try:
            unlink_if_exists(cache_path)
        except EnvironmentError as err:
            logger.error(
                "attempting to clean old plugin cache %r failed with %s",
                cache_path, err)
def write(tempspace, finalpath, pkg, cset=None, platform='', maintainer='',
          compressor='gz'):
    """Write ``pkg`` out as a Debian binary (.deb-style) ar archive.

    :param tempspace: scratch directory for intermediate archive members
    :param finalpath: destination path for the resulting archive
    :param pkg: package object supplying metadata and (by default) contents
    :param cset: optional contents set; defaults to ``pkg.contents``
    :param platform: value for the Architecture control field
    :param maintainer: optional Maintainer control field
    :param compressor: NOTE(review): currently ignored — 'gz' is hard-coded
        below and the member names ('data.tar.gz') assume it; confirm intent
        before wiring it through.
    :raise Exception: if ``ar`` exits non-zero; the partial archive is removed.
    """
    if cset is None:
        cset = pkg.contents

    # The data.tar.gz member: the package's file contents.
    data_path = pjoin(tempspace, 'data.tar.gz')
    tar.write_set(cset, data_path, compressor='gz', absolute_paths=False)

    # Control data file
    control = {}
    control['Package'] = pkg.package
    #control['Section'] = pkg.category
    control['Version'] = pkg.fullver
    control['Architecture'] = platform
    if maintainer:
        control['Maintainer'] = maintainer
    control['Description'] = pkg.description
    # "%s" % ... always yields a str, so a plain truth test suffices
    # (the original's `is not None` branch was unreachable).
    pkgdeps = "%s" % (pkg.rdepends,)
    if pkgdeps:
        control.update(parsedeps(pkgdeps))

    control_ds = text_data_source("".join("%s: %s\n" % (k, v)
                                          for (k, v) in control.items()))
    control_path = pjoin(tempspace, 'control.tar.gz')
    tar.write_set(
        contents.contentsSet([
            fs.fsFile('control',
                      {'size': len(control_ds.text_fileobj().getvalue())},
                      data=control_ds, uid=0, gid=0, mode=0o644,
                      mtime=time.time())
            ]),
        control_path, compressor='gz')

    # The debian-binary member: format version marker.
    dbinary_path = pjoin(tempspace, 'debian-binary')
    with open(dbinary_path, 'w') as f:
        f.write("2.0\n")

    # Assemble the outer ar archive; member order matters to dpkg.
    ret = spawn(['ar', '-r', finalpath, dbinary_path, data_path, control_path])
    if ret != 0:
        unlink_if_exists(finalpath)
        raise Exception("failed creating archive: return code %s" % (ret,))
def write(tempspace, finalpath, pkg, cset=None, platform='', maintainer='',
          compressor='gz'):
    """Write ``pkg`` out as a Debian binary (.deb-style) ar archive.

    :param tempspace: scratch directory for intermediate archive members
    :param finalpath: destination path for the resulting archive
    :param pkg: package object supplying metadata and (by default) contents
    :param cset: optional contents set; defaults to ``pkg.contents``
    :param platform: value for the Architecture control field
    :param maintainer: optional Maintainer control field
    :param compressor: NOTE(review): currently ignored — 'gz' is hard-coded
        below and the member names ('data.tar.gz') assume it; confirm intent
        before wiring it through.
    :raise Exception: if ``ar`` exits non-zero; the partial archive is removed.
    """
    if cset is None:
        cset = pkg.contents

    # The data.tar.gz member: the package's file contents.
    data_path = pjoin(tempspace, 'data.tar.gz')
    tar.write_set(cset, data_path, compressor='gz', absolute_paths=False)

    # Control data file
    control = {}
    control['Package'] = pkg.package
    #control['Section'] = pkg.category
    control['Version'] = pkg.fullver
    control['Architecture'] = platform
    if maintainer:
        control['Maintainer'] = maintainer
    control['Description'] = pkg.description
    # "%s" % ... always yields a str, so a plain truth test suffices
    # (the original's `is not None` branch was unreachable).
    pkgdeps = "%s" % (pkg.rdepends,)
    if pkgdeps:
        control.update(parsedeps(pkgdeps))

    control_ds = text_data_source("".join("%s: %s\n" % (k, v)
                                          for (k, v) in control.items()))
    control_path = pjoin(tempspace, 'control.tar.gz')
    tar.write_set(
        contents.contentsSet([
            fs.fsFile('control',
                      {'size': len(control_ds.text_fileobj().getvalue())},
                      data=control_ds, uid=0, gid=0, mode=0o644,
                      mtime=time.time())
            ]),
        control_path, compressor='gz')

    # The debian-binary member: format version marker.
    dbinary_path = pjoin(tempspace, 'debian-binary')
    with open(dbinary_path, 'w') as f:
        f.write("2.0\n")

    # Assemble the outer ar archive; member order matters to dpkg.
    ret = spawn(['ar', '-r', finalpath, dbinary_path, data_path, control_path])
    if ret != 0:
        unlink_if_exists(finalpath)
        raise Exception("failed creating archive: return code %s" % (ret,))
def unmerge_contents(cset, offset=None, callback=None):
    """
    unmerge a :obj:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :obj:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being unmerged
    :return: True, or an exception is thrown on failure
        (OSError, although see default_copyfile for specifics).
    :raise EnvironmentError: see :func:`default_copyfile` and
        :func:`default_mkdir`
    """
    if callback is None:
        callback = lambda obj: None
    if offset is None:
        rewrite = iter
    else:
        rewrite = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))
    # non-directory entries go first
    for entry in rewrite(cset.iterdirs(invert=True)):
        callback(entry)
        unlink_if_exists(entry.location)
    # directories last, deepest first; an in-place sort is a fair sight
    # faster than using sorted/reversed
    dirs = list(rewrite(cset.iterdirs()))
    dirs.sort(reverse=True)
    ignorable = (errno.ENOTEMPTY, errno.ENOENT, errno.ENOTDIR,
                 errno.EBUSY, errno.EEXIST)
    for entry in dirs:
        try:
            os.rmdir(entry.location)
        except OSError as e:
            if e.errno not in ignorable:
                raise
        else:
            callback(entry)
    return True
def add_data(self):
    """Build the binpkg tarball for ``self.new_pkg`` and tag its Xpak on.

    Writes to a hidden temp path beside the final location and records
    ``self.tmp_path``/``self.final_path`` for the later commit step; the
    temp file is removed (best-effort) if any stage fails.

    :return: True on success
    :raise repo_interfaces.Failure: if the target directory can't be created
    """
    if self.observer is None:
        end = start = lambda x: None
    else:
        start = self.observer.phase_start
        end = self.observer.phase_end
    pkg = self.new_pkg
    final_path = discern_loc(self.repo.base, pkg, self.repo.extension)
    tmp_path = pjoin(
        os.path.dirname(final_path),
        ".tmp.%i.%s" % (os.getpid(), os.path.basename(final_path)))
    self.tmp_path, self.final_path = tmp_path, final_path
    if not ensure_dirs(os.path.dirname(tmp_path), mode=0o755):
        raise repo_interfaces.Failure(
            "failed creating directory %r" % os.path.dirname(tmp_path))
    try:
        start("generating tarball: %s" % tmp_path)
        tar.write_set(pkg.contents, tmp_path, compressor='bzip2',
                      parallelize=True)
        end("tarball created", True)
        start("writing Xpak")
        # ok... got a tarball. now add xpak.
        xpak.Xpak.write_xpak(tmp_path, generate_attr_dict(pkg))
        end("wrote Xpak", True)
        # ok... we tagged the xpak on.
        os.chmod(tmp_path, 0o644)
    except Exception as original_exc:
        # Best-effort cleanup.  Use a distinct name for the cleanup error so
        # it can't shadow the real failure, and re-raise the original
        # explicitly: a bare `raise` after the inner handler would re-raise
        # the cleanup error under Python 2.
        try:
            unlink_if_exists(tmp_path)
        except EnvironmentError as cleanup_exc:
            logger.warning("failed removing %r: %r" % (tmp_path, cleanup_exc))
        raise original_exc
    return True
ensure_perms(obj.change_attributes(location=fp)) if existant: os.rename(existant_fp, obj.location) return True def do_link(src, trg): try: os.link(src.location, trg.location) return True except EnvironmentError, e: if e.errno != errno.EEXIST: raise path = trg.location + '#new' unlink_if_exists(path) try: os.link(src.location, path) except EnvironmentError, e: if e.errno != errno.EXDEV: # someone is screwing with us, or unlink_if_exists is broken. raise # hardlink is impossible, force copyfile return False try: os.rename(path, trg.location) except EnvironmentError: unlink_if_exists(path) if e.eerrno != errno.EXDEV: # weird error, broken FS codes, perms, or someone is screwing with us. raise