Example #1
    def _update_metadata(self, pkg, ebp=None):
        parsed_eapi = pkg.eapi
        if not parsed_eapi.is_supported:
            return {'EAPI': str(parsed_eapi)}

        with processor.reuse_or_request(ebp) as my_proc:
            try:
                mydata = my_proc.get_keys(pkg, self._ecache)
            except processor.ProcessorError as e:
                raise metadata_errors.MetadataException(
                    pkg, 'data', 'failed sourcing ebuild', e)

        inherited = mydata.pop("INHERITED", None)
        # Rewrite defined_phases as needed, since we now know the EAPI.
        eapi = get_eapi(mydata.get('EAPI', '0'))
        if parsed_eapi != eapi:
            raise metadata_errors.MetadataException(
                pkg, 'eapi',
                f"parsed EAPI '{parsed_eapi}' doesn't match sourced EAPI '{eapi}'"
            )
        wipes = set(mydata)
        wipes.difference_update(eapi.metadata_keys)
        if mydata["DEFINED_PHASES"] != '-':
            phases = mydata["DEFINED_PHASES"].split()
            d = eapi.phases_rev
            phases = set(d.get(x) for x in phases)
            # Discard is required in case we got a phase
            # that isn't actually in this EAPI.
            phases.discard(None)
            mydata["DEFINED_PHASES"] = ' '.join(sorted(phases))

        if inherited:
            mydata["_eclasses_"] = self._ecache.get_eclass_data(
                inherited.split())
        mydata['_chf_'] = chksum.LazilyHashedPath(pkg.path)

        for x in wipes:
            del mydata[x]

        if self._cache is not None:
            for cache in self._cache:
                if not cache.readonly:
                    try:
                        cache[pkg.cpvstr] = mydata
                    except cache_errors.CacheError as e:
                        logger.warning("caught cache error: %s", e)
                        del e
                        continue
                    break

        return mydata
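
The method above first drops every key the sourced EAPI does not define, then writes the finished dict to the first writable cache, skipping caches that raise errors. Note that wipes is snapshotted before _eclasses_ and _chf_ are added, so those internal keys survive the wipe. A minimal standalone sketch of the filter-and-store pattern follows; DictCache, filter_and_store, and the example keys are illustrative stand-ins, not pkgcore API.

import logging

logger = logging.getLogger(__name__)

class DictCache(dict):
    """Trivial in-memory cache used only to exercise the sketch."""
    readonly = False

def filter_and_store(mydata, eapi_keys, caches, cpvstr):
    # Drop every key the sourced EAPI does not define.
    for key in set(mydata) - set(eapi_keys):
        del mydata[key]
    # Store into the first writable cache; skip caches that raise.
    for cache in caches:
        if cache.readonly:
            continue
        try:
            cache[cpvstr] = mydata
        except Exception as e:  # stand-in for cache_errors.CacheError
            logger.warning("caught cache error: %s", e)
            continue
        break
    return mydata

cache = DictCache()
filter_and_store({"DESCRIPTION": "demo", "BOGUS": "x"},
                 {"DESCRIPTION", "SLOT"}, [cache], "cat/pkg-1.0")
# cache now maps "cat/pkg-1.0" to {"DESCRIPTION": "demo"}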
Example #2
    def _get_metadata(self, pkg, ebp=None, force_regen=False):
        caches = self._cache
        if force_regen:
            caches = ()
        ebuild_hash = chksum.LazilyHashedPath(pkg.path)
        for cache in caches:
            if cache is not None:
                try:
                    data = cache[pkg.cpvstr]
                    if cache.validate_entry(data, ebuild_hash, self._ecache):
                        return data
                    if not cache.readonly:
                        del cache[pkg.cpvstr]
                except KeyError:
                    continue
                except cache_errors.CacheError as e:
                    logger.warning("caught cache error: %s", e)
                    del e
                    continue

        # no cache entries, regen
        return self._update_metadata(pkg, ebp=ebp)
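
_get_metadata probes each cache in order: a hit is returned only if it validates against the ebuild's current checksums, a stale entry is evicted from writable caches, and an error in one cache does not stop the probe of the next. A stripped-down sketch of that probe-then-regenerate loop, with validate, regenerate, and the cache objects as illustrative stand-ins rather than pkgcore types:

import logging

logger = logging.getLogger(__name__)

def cached_metadata(key, caches, validate, regenerate):
    """Return validated metadata from the first cache holding it, else regenerate."""
    for cache in caches:
        try:
            data = cache[key]
            if validate(data):
                return data        # hit: entry still matches the ebuild on disk
            if not cache.readonly:
                del cache[key]     # stale entry: evict it so it is not retried
        except KeyError:
            continue               # this cache has no entry; try the next one
        except Exception as e:     # stand-in for cache_errors.CacheError
            logger.warning("caught cache error: %s", e)
            continue
    return regenerate()            # every cache missed or held a stale entry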
Example #3
    def _chf_(self):
        return chksum.LazilyHashedPath(self._parent._get_path(self._pkg))
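
chksum.LazilyHashedPath defers checksumming the ebuild until a cache validation actually asks for a digest. A toy stand-in built on hashlib and functools.cached_property, meant only to illustrate the lazy-evaluation idea, not pkgcore's chksum module:

import hashlib
from functools import cached_property

class LazyHashedPath:
    """Toy stand-in: hash the file only when a digest is first requested."""

    def __init__(self, path):
        self.path = path

    @cached_property
    def sha256(self):
        with open(self.path, "rb") as f:
            return hashlib.sha256(f.read()).hexdigest()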
Example #4
class package_factory(metadata.factory):

    child_class = package

    # For the plugin system.
    priority = 5

    def __init__(self, parent, cachedb, eclass_cache, mirrors, default_mirrors,
                 *args, **kwargs):
        super().__init__(parent, *args, **kwargs)
        self._cache = cachedb
        self._ecache = eclass_cache

        if mirrors:
            mirrors = {k: fetch.mirror(v, k) for k, v in mirrors.items()}

        self.mirrors = mirrors
        if default_mirrors:
            self.default_mirrors = fetch.default_mirror(
                default_mirrors, "conf. default mirror")
        else:
            self.default_mirrors = None

    def get_ebuild_src(self, pkg):
        return self._parent_repo._get_ebuild_src(pkg)

    def _get_ebuild_path(self, pkg):
        return self._parent_repo._get_ebuild_path(pkg)

    def _get_ebuild_mtime(self, pkg):
        return os.stat(self._get_ebuild_path(pkg)).st_mtime

    def _get_metadata(self, pkg, ebp=None, force_regen=False):
        caches = self._cache
        if force_regen:
            caches = ()
        ebuild_hash = chksum.LazilyHashedPath(pkg.path)
        for cache in caches:
            if cache is not None:
                try:
                    data = cache[pkg.cpvstr]
                    if cache.validate_entry(data, ebuild_hash, self._ecache):
                        return data
                    if not cache.readonly:
                        del cache[pkg.cpvstr]
                except KeyError:
                    continue
                except cache_errors.CacheError as e:
                    logger.warning("caught cache error: %s", e)
                    del e
                    continue

        # no cache entries, regen
        return self._update_metadata(pkg, ebp=ebp)

    def _update_metadata(self, pkg, ebp=None):
        parsed_eapi = pkg.eapi
        if not parsed_eapi.is_supported:
            return {'EAPI': str(parsed_eapi)}

        with processor.reuse_or_request(ebp) as my_proc:
            try:
                mydata = my_proc.get_keys(pkg, self._ecache)
            except processor.ProcessorError as e:
                raise metadata_errors.MetadataException(
                    pkg, 'data', 'failed sourcing ebuild', e)

        # Rewrite defined_phases as needed, since we now know the EAPI.
        eapi = get_eapi(mydata.get('EAPI', '0'))
        if parsed_eapi != eapi:
            raise metadata_errors.MetadataException(
                pkg, 'eapi',
                f"parsed EAPI '{parsed_eapi}' doesn't match sourced EAPI '{eapi}'"
            )
        wipes = set(mydata)
        wipes.difference_update(eapi.metadata_keys)
        if mydata["DEFINED_PHASES"] != '-':
            phases = mydata["DEFINED_PHASES"].split()
            d = eapi.phases_rev
            phases = set(d.get(x) for x in phases)
            # Discard is required in case we got a phase
            # that isn't actually in this EAPI.
            phases.discard(None)
            mydata["DEFINED_PHASES"] = ' '.join(sorted(phases))

        if inherited := mydata.pop("INHERITED", None):
            mydata["_eclasses_"] = self._ecache.get_eclass_data(
                inherited.split())
        mydata['_chf_'] = chksum.LazilyHashedPath(pkg.path)

        for x in wipes:
            del mydata[x]

        if self._cache is not None:
            for cache in self._cache:
                if not cache.readonly:
                    try:
                        cache[pkg.cpvstr] = mydata
                    except cache_errors.CacheError as e:
                        logger.warning("caught cache error: %s", e)
                        del e
                        continue
                    break

        return mydata
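
In __init__ above, the raw mirrors mapping (mirror name to a list of URIs) is rewrapped so every entry becomes a mirror object, and default_mirrors gets its own wrapper. A plain-Python sketch of that rewrapping, with a hypothetical Mirror dataclass standing in for fetch.mirror:

from dataclasses import dataclass

@dataclass
class Mirror:
    """Illustrative stand-in for fetch.mirror: a named group of mirror URIs."""
    uris: list
    name: str

def build_mirrors(raw):
    # raw maps mirror name -> list of base URIs, mirroring the dict comprehension above
    return {name: Mirror(uris, name) for name, uris in raw.items()}

mirrors = build_mirrors({"example": ["https://mirror.example.org/distfiles"]})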