def _write_cache_file(path, data, uid=-1, gid=-1):
    """Atomically write a new plugin cache file.

    :param path: destination path of the cache file
    :param data: mapping of (module, mtime) -> plugin sequence
    :param uid: owner uid for the new file (-1 leaves it unchanged)
    :param gid: owner gid for the new file (-1 leaves it unchanged)

    Write failures are logged, not raised: a missing cache only costs
    performance.
    """
    out = None
    try:
        try:
            out = AtomicWriteFile(
                path, binary=False, perms=0o664, uid=uid, gid=gid)
            out.write(CACHE_HEADER + "\n")
            for (module, mtime), plugs in sorted(
                    data.items(), key=operator.itemgetter(0)):
                entry = ':'.join(
                    f'{p.key},{p.priority},{p.target}'
                    for p in sort_plugs(plugs))
                out.write(f'{module}:{mtime}:{entry}\n')
            out.close()
        except EnvironmentError as e:
            # error, not exception: a traceback here is noise and too
            # alarming — the only consequence is a performance hit.
            logger.error(
                'Cannot write cache for %s: %s. '
                'Try running pplugincache.', path, e)
    finally:
        # presumably a no-op once close() succeeded — matches the
        # close-then-discard cleanup pattern used throughout this file.
        if out is not None:
            out.discard()
def _write_mtime_cache(mtimes, data, location):
    """Atomically rewrite the vdb virtuals mtime cache.

    :param mtimes: mapping of category -> mtime
    :param data: mapping of pkg -> {fullver: iterable of virtual atoms}
    :param location: path of the cache file to write

    Permission failures are logged and swallowed (the cache is only an
    optimization); any other IOError propagates.

    Fixes: Python-2-only octal literals (``0113``/``0775``/``0664``) and
    ``dict.iteritems()`` were invalid under Python 3; ``logger.debug`` was
    handed a 1-tuple, formatting as ``('path',)`` instead of the path.
    """
    # mask only o+wx so the resulting file stays group-writable
    old_umask = os.umask(0o113)
    try:
        f = None
        logger.debug("attempting to update mtime cache at %r", location)
        try:
            if not ensure_dirs(os.path.dirname(location),
                               gid=portage_gid, mode=0o775):
                # can't create the directory; nothing more we can do.
                return
            f = AtomicWriteFile(location, gid=portage_gid, perms=0o664)
            # invert data into category -> flat [pkg, fullver, virtual, ...]
            rev_data = {}
            for pkg, ver_dict in data.items():
                for fullver, virtuals in ver_dict.items():
                    for virtual in virtuals:
                        rev_data.setdefault(virtual.category, []).extend(
                            (pkg, fullver, str(virtual)))
            for cat, mtime in mtimes.items():
                if cat in rev_data:
                    f.write("%s\t%i\t%s\n" % (
                        cat, mtime, '\t'.join(rev_data[cat])))
                else:
                    f.write("%s\t%i\n" % (cat, mtime))
            f.close()
            os.chown(location, -1, portage_gid)
        except IOError as e:
            if f is not None:
                f.discard()
            # only lacking-permission errors are tolerable; re-raise the rest
            if e.errno != errno.EACCES:
                raise
            logger.warning("unable to update vdb virtuals cache due to "
                           "lacking permissions")
    finally:
        os.umask(old_umask)
def _write_mtime_cache(mtimes, data, location):
    """Atomically rewrite the vdb virtuals mtime cache.

    :param mtimes: mapping of category -> mtime
    :param data: mapping of pkg -> {fullver: iterable of virtual atoms}
    :param location: path of the cache file to write

    Permission failures are logged and swallowed (the cache is only an
    optimization); any other IOError propagates.

    Fixes: Python-2-only octal literals (``0113``/``0775``/``0664``) and
    ``dict.iteritems()`` were invalid under Python 3; ``logger.debug`` was
    handed a 1-tuple, formatting as ``('path',)`` instead of the path.
    NOTE(review): this definition duplicates the one above — confirm one
    of the two can be removed.
    """
    # mask only o+wx so the resulting file stays group-writable
    old_umask = os.umask(0o113)
    try:
        f = None
        logger.debug("attempting to update mtime cache at %r", location)
        try:
            if not ensure_dirs(os.path.dirname(location),
                               gid=portage_gid, mode=0o775):
                # can't create the directory; nothing more we can do.
                return
            f = AtomicWriteFile(location, gid=portage_gid, perms=0o664)
            # invert data into category -> flat [pkg, fullver, virtual, ...]
            rev_data = {}
            for pkg, ver_dict in data.items():
                for fullver, virtuals in ver_dict.items():
                    for virtual in virtuals:
                        rev_data.setdefault(virtual.category, []).extend(
                            (pkg, fullver, str(virtual)))
            for cat, mtime in mtimes.items():
                if cat in rev_data:
                    f.write("%s\t%i\t%s\n" % (
                        cat, mtime, '\t'.join(rev_data[cat])))
                else:
                    f.write("%s\t%i\n" % (cat, mtime))
            f.close()
            os.chown(location, -1, portage_gid)
        except IOError as e:
            if f is not None:
                f.discard()
            # only lacking-permission errors are tolerable; re-raise the rest
            if e.errno != errno.EACCES:
                raise
            logger.warning("unable to update vdb virtuals cache due to "
                           "lacking permissions")
    finally:
        os.umask(old_umask)
def update_pkg_desc_index(repo, observer):
    """Update a repo's package description cache (metadata/pkg_desc_index).

    :param repo: repository object providing ``packages``, ``versions``,
        item access, and ``location``
    :param observer: reporter used for error output
    :return: 0 on success, 1 if any package's metadata was bad,
        ``os.EX_IOERR`` if the index file could not be written

    Fixes: drops the unused ``as e`` binding on the ``MetadataException``
    handler and hoists the loop-invariant ``versions`` join out of the
    per-version retry loop.
    """
    ret = 0
    pkg_desc_index = pjoin(repo.location, "metadata", "pkg_desc_index")
    f = None
    try:
        f = AtomicWriteFile(pkg_desc_index)
        for cat, pkgs in sorted(repo.packages.items()):
            for pkg in sorted(pkgs):
                cpvs = sorted(
                    CPV(cat, pkg, v) for v in repo.versions[(cat, pkg)])
                # invariant across the retry loop below
                versions = ' '.join(x.fullver for x in cpvs)
                # use the most recent pkg description, skipping bad pkgs
                for cpv in reversed(cpvs):
                    try:
                        desc = repo[(cat, pkg, cpv.fullver)].description
                        f.write(f"{cat}/{pkg} {versions}: {desc}\n")
                        break
                    except MetadataException:
                        # should be caught and outputted already by cache
                        # regen; just record the failure in the exit status
                        ret = 1
        f.close()
    except IOError as e:
        observer.error(
            f"Unable to update pkg_desc_index file {pkg_desc_index!r}: {e.strerror}"
        )
        ret = os.EX_IOERR
    finally:
        # presumably a no-op after a successful close(); otherwise drops
        # the partially written temp file
        if f is not None:
            f.discard()
    return ret
def update_use_local_desc(repo, observer):
    """Update a repo's local USE flag description cache (profiles/use.local.desc).

    :param repo: repository object providing ``packages``,
        ``_get_metadata_xml``, and ``location``
    :param observer: reporter used for error output
    :return: 0 on success, 1 if any metadata.xml failed to parse,
        ``os.EX_IOERR`` if the file could not be written
    """
    ret = 0
    use_local_desc = pjoin(repo.location, "profiles", "use.local.desc")
    f = None

    def _xml_error_to_ret(exc):
        # installed in place of logger.error while patched below; the loop
        # variables are read late-bound, naming the offending package.
        observer.error(f'{category}/{pn}: failed parsing metadata.xml: {str(exc)}')
        nonlocal ret
        ret = 1

    try:
        f = AtomicWriteFile(use_local_desc)
        f.write(
            textwrap.dedent('''\
                # This file is deprecated as per GLEP 56 in favor of metadata.xml.
                # Please add your descriptions to your package's metadata.xml ONLY.
                # * generated automatically using pmaint *\n\n'''))
        with patch('pkgcore.log.logger.error', _xml_error_to_ret):
            for category, pkgs in sorted(repo.packages.items()):
                for pn in sorted(pkgs):
                    xml = repo._get_metadata_xml(category, pn)
                    for flag, desc in sorted(xml.local_use.items()):
                        f.write(f'{category}/{pn}:{flag} - {desc}\n')
        f.close()
    except IOError as e:
        observer.error(
            f"Unable to update use.local.desc file {use_local_desc!r}: {e.strerror}"
        )
        ret = os.EX_IOERR
    finally:
        # presumably a no-op after a successful close()
        if f is not None:
            f.discard()
    return ret
def flush(self):
    """Atomically rewrite the backing file from the current atom set.

    Writes one atom per line, sorted. On any failure — including
    KeyboardInterrupt — the partially written temp file is discarded
    before the exception propagates.
    """
    f = None
    try:
        f = AtomicWriteFile(self.path, gid=self.gid, perms=self.mode)
        f.write("\n".join(str(x) for x in sorted(self._atoms)))
        f.close()
    except BaseException:
        # was a bare `except:` — made explicit; semantics unchanged:
        # catch everything so the temp file never leaks, always re-raise.
        if f is not None:
            f.discard()
        raise
def _write_data(self):
    """Atomically persist ``self.data`` to ``self._location``.

    A PermissionError is logged and swallowed — the cache is best-effort;
    any other exception propagates after the temp file is discarded.
    """
    sink = None
    try:
        try:
            sink = AtomicWriteFile(self._location)
            self._serialize_to_handle(list(self.data.items()), sink)
            sink.close()
        except PermissionError as e:
            logger.error(
                f'failed writing binpkg cache to {self._location!r}: {e}')
    finally:
        # presumably a no-op after a successful close() — matches the
        # close-then-discard pattern used throughout this file.
        if sink is not None:
            sink.discard()