Example #1
    def __init__(self, pmask_locations, abs_user_config, user_config=True):
        self._punmaskdict = ExtendedAtomDict(list)
        self._pmaskdict = ExtendedAtomDict(list)

        pkgmasklines = []
        pkgunmasklines = []
        for x in pmask_locations:
            pkgmasklines.append(
                grabfile_package(os.path.join(x, "package.mask"), recursive=1))
            pkgunmasklines.append(
                grabfile_package(os.path.join(x, "package.unmask"),
                                 recursive=1))

        if user_config:
            pkgmasklines.append(
                grabfile_package(os.path.join(abs_user_config, "package.mask"),
                                 recursive=1,
                                 allow_wildcard=True))
            pkgunmasklines.append(
                grabfile_package(os.path.join(abs_user_config,
                                              "package.unmask"),
                                 recursive=1,
                                 allow_wildcard=True))

        pkgmasklines = stack_lists(pkgmasklines, incremental=1)
        pkgunmasklines = stack_lists(pkgunmasklines, incremental=1)

        for x in pkgmasklines:
            self._pmaskdict.setdefault(x.cp, []).append(x)

        for x in pkgunmasklines:
            self._punmaskdict.setdefault(x.cp, []).append(x)
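A note on the recurring pattern above: atoms read with grabfile_package are grouped into an ExtendedAtomDict keyed by each atom's category/package (cp), so later lookups by cp also pick up wildcard entries. A minimal sketch of that grouping step, assuming portage is importable; the atoms are hypothetical:

from portage.dep import Atom, ExtendedAtomDict

# Group mask atoms by their category/package key, mirroring the final loop
# of __init__ above. Atom.cp is the "category/package" part of the atom.
pmaskdict = ExtendedAtomDict(list)
for line in (">=dev-libs/openssl-3.0", "sys-apps/portage::gentoo"):
    atom = Atom(line, allow_repo=True)
    pmaskdict.setdefault(atom.cp, []).append(atom)

print(pmaskdict.get("dev-libs/openssl"))  # the list containing the first atom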
Example #2
	def _parse_user_files_to_extatomdict(self, file_name, location, user_config):
		ret = ExtendedAtomDict(dict)
		if user_config:
			pusedict = grabdict_package(
				os.path.join(location, file_name),
				recursive=1, newlines=1, allow_wildcard=True,
				allow_repo=True, verify_eapi=False,
				allow_build_id=True, allow_use=False)
			for k, v in pusedict.items():
				l = []
				use_expand_prefix = ''
				for flag in v:
					if flag == "\n":
						use_expand_prefix = ""
						continue
					if flag[-1] == ":":
						use_expand_prefix = flag[:-1].lower() + "_"
						continue
					if flag[0] == "-":
						nv = "-" + use_expand_prefix + flag[1:]
					else:
						nv = use_expand_prefix + flag
					l.append(nv)
				ret.setdefault(k.cp, {})[k] = tuple(l)

		return ret
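The inner loop above folds USE_EXPAND prefixes into plain flag names: a "PREFIX:" token starts a prefix, a bare "\n" token (kept by newlines=1) resets it, and a leading "-" stays in front of the expanded name. A standalone sketch of that transformation, using hypothetical tokens:

def expand_use_tokens(tokens):
    expanded, prefix = [], ""
    for flag in tokens:
        if flag == "\n":                 # newline token resets the prefix
            prefix = ""
            continue
        if flag[-1] == ":":              # "PYTHON_TARGETS:" -> "python_targets_"
            prefix = flag[:-1].lower() + "_"
            continue
        if flag[0] == "-":               # keep the "-" ahead of the prefix
            expanded.append("-" + prefix + flag[1:])
        else:
            expanded.append(prefix + flag)
    return tuple(expanded)

print(expand_use_tokens(["PYTHON_TARGETS:", "python3_11", "\n", "-doc"]))
# ('python_targets_python3_11', '-doc')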
Example #3
	def __init__(self, pmask_locations, abs_user_config, user_config=True):
		self._punmaskdict = ExtendedAtomDict(list)
		self._pmaskdict = ExtendedAtomDict(list)

		pkgmasklines = []
		pkgunmasklines = []
		for x in pmask_locations:
			pkgmasklines.append(grabfile_package(
				os.path.join(x, "package.mask"), recursive=1))
			pkgunmasklines.append(grabfile_package(
				os.path.join(x, "package.unmask"), recursive=1))

		if user_config:
			pkgmasklines.append(grabfile_package(
				os.path.join(abs_user_config, "package.mask"), recursive=1, allow_wildcard=True))
			pkgunmasklines.append(grabfile_package(
				os.path.join(abs_user_config, "package.unmask"), recursive=1, allow_wildcard=True))

		pkgmasklines = stack_lists(pkgmasklines, incremental=1)
		pkgunmasklines = stack_lists(pkgunmasklines, incremental=1)

		for x in pkgmasklines:
			self._pmaskdict.setdefault(x.cp, []).append(x)

		for x in pkgunmasklines:
			self._punmaskdict.setdefault(x.cp, []).append(x)
Example #4
    def _parse_user_files_to_extatomdict(self, file_name, location,
                                         user_config):
        ret = ExtendedAtomDict(dict)
        if user_config:
            pusedict = grabdict_package(os.path.join(location, file_name),
                                        recursive=1,
                                        newlines=1,
                                        allow_wildcard=True,
                                        allow_repo=True,
                                        verify_eapi=False,
                                        allow_build_id=True,
                                        allow_use=False)
            for k, v in pusedict.items():
                l = []
                use_expand_prefix = ''
                for flag in v:
                    if flag == "\n":
                        use_expand_prefix = ""
                        continue
                    if flag[-1] == ":":
                        use_expand_prefix = flag[:-1].lower() + "_"
                        continue
                    if flag[0] == "-":
                        nv = "-" + use_expand_prefix + flag[1:]
                    else:
                        nv = use_expand_prefix + flag
                    l.append(nv)
                ret.setdefault(k.cp, {})[k] = tuple(l)

        return ret
Example #5
	def __init__(self, profiles, abs_user_config, user_config=True,
				global_accept_keywords=""):
		self._pkeywords_list = []
		rawpkeywords = [grabdict_package(
			os.path.join(x.location, "package.keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True, eapi=x.eapi, eapi_default=None,
			allow_build_id=x.allow_build_id)
			for x in profiles]
		for pkeyworddict in rawpkeywords:
			if not pkeyworddict:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in pkeyworddict.items():
				cpdict.setdefault(k.cp, {})[k] = v
			self._pkeywords_list.append(cpdict)
		self._pkeywords_list = tuple(self._pkeywords_list)

		self._p_accept_keywords = []
		raw_p_accept_keywords = [grabdict_package(
			os.path.join(x.location, "package.accept_keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True, eapi=x.eapi, eapi_default=None)
			for x in profiles]
		for d in raw_p_accept_keywords:
			if not d:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in d.items():
				cpdict.setdefault(k.cp, {})[k] = tuple(v)
			self._p_accept_keywords.append(cpdict)
		self._p_accept_keywords = tuple(self._p_accept_keywords)

		self.pkeywordsdict = ExtendedAtomDict(dict)

		if user_config:
			pkgdict = grabdict_package(
				os.path.join(abs_user_config, "package.keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False)

			for k, v in grabdict_package(
				os.path.join(abs_user_config, "package.accept_keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False).items():
				pkgdict.setdefault(k, []).extend(v)

			accept_keywords_defaults = global_accept_keywords.split()
			accept_keywords_defaults = tuple('~' + keyword for keyword in \
				accept_keywords_defaults if keyword[:1] not in "~-")
			for k, v in pkgdict.items():
				# default to ~arch if no specific keyword is given
				if not v:
					v = accept_keywords_defaults
				else:
					v = tuple(v)
				self.pkeywordsdict.setdefault(k.cp, {})[k] = v
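Worth noting in the user_config branch above: an atom listed with no keywords falls back to accept_keywords_defaults, which is built by prefixing "~" to every stable keyword in the global ACCEPT_KEYWORDS value. A small sketch of that defaulting rule, with an illustrative keywords string:

global_accept_keywords = "amd64 ~x86 -sparc"   # illustrative value
accept_keywords_defaults = tuple(
    "~" + keyword
    for keyword in global_accept_keywords.split()
    if keyword[:1] not in "~-")
print(accept_keywords_defaults)  # ('~amd64',) -- only stable keywords gain "~"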
Example #6
 def __init__(self):
     self._atoms = set()
     self._atommap = ExtendedAtomDict(set)
     self._loaded = False
     self._loading = False
     self.errors = []
     self._nonatoms = set()
     self.world_candidate = False
Example #7
	def testExtendedAtomDict(self):
		d = ExtendedAtomDict(dict)
		d["*/*"] = { "test1": "x" }
		d["dev-libs/*"] = { "test2": "y" }
		d.setdefault("sys-apps/portage", {})["test3"] = "z"
		self.assertEqual(d.get("dev-libs/A"), { "test1": "x", "test2": "y" })
		self.assertEqual(d.get("sys-apps/portage"), { "test1": "x", "test3": "z" })
		self.assertEqual(d["dev-libs/*"], { "test2": "y" })
		self.assertEqual(d["sys-apps/portage"], {'test1': 'x', 'test3': 'z'})
Example #8
	def _parse_user_files_to_extatomdict(self, file_name, location, user_config):
		ret = ExtendedAtomDict(dict)
		if user_config:
			pusedict = grabdict_package(
				os.path.join(location, file_name), recursive=1, allow_wildcard=True, allow_repo=True, verify_eapi=False)
			for k, v in pusedict.items():
				ret.setdefault(k.cp, {})[k] = tuple(v)

		return ret
Example #9
	def _parse_user_files_to_extatomdict(self, file_name, location, user_config):
		ret = ExtendedAtomDict(dict)
		if user_config:
			pusedict = grabdict_package(
				os.path.join(location, file_name), recursive=1, allow_wildcard=True, allow_repo=True, verify_eapi=False)
			for k, v in pusedict.items():
				ret.setdefault(k.cp, {})[k] = tuple(v)

		return ret
Example #10
 def __init__(self, allow_wildcard=False, allow_repo=False):
     self._atoms = set()
     self._atommap = ExtendedAtomDict(set)
     self._loaded = False
     self._loading = False
     self.errors = []
     self._nonatoms = set()
     self.world_candidate = False
     self._allow_wildcard = allow_wildcard
     self._allow_repo = allow_repo
Example #11
 def _parse_file_to_dict(self,
                         file_name,
                         juststrings=False,
                         recursive=True,
                         eapi_filter=None,
                         user_config=False):
     ret = {}
     location_dict = {}
     eapi = read_corresponding_eapi_file(file_name, default=None)
     if eapi is None and not user_config:
         eapi = "0"
     if eapi is None:
         ret = ExtendedAtomDict(dict)
     else:
         ret = {}
     file_dict = grabdict_package(file_name,
                                  recursive=recursive,
                                  allow_wildcard=(eapi is None),
                                  allow_repo=(eapi is None),
                                  verify_eapi=(eapi is not None))
     if eapi is not None and eapi_filter is not None and not eapi_filter(
             eapi):
         if file_dict:
             writemsg(_("--- EAPI '%s' does not support '%s': '%s'\n") %
                      (eapi, os.path.basename(file_name), file_name),
                      noiselevel=-1)
         return ret
     useflag_re = _get_useflag_re(eapi)
     for k, v in file_dict.items():
         useflags = []
         for prefixed_useflag in v:
             if prefixed_useflag[:1] == "-":
                 useflag = prefixed_useflag[1:]
             else:
                 useflag = prefixed_useflag
             if useflag_re.match(useflag) is None:
                 writemsg(
                     _("--- Invalid USE flag for '%s' in '%s': '%s'\n") %
                     (k, file_name, prefixed_useflag),
                     noiselevel=-1)
             else:
                 useflags.append(prefixed_useflag)
         location_dict.setdefault(k, []).extend(useflags)
     for k, v in location_dict.items():
         if juststrings:
             v = " ".join(v)
         else:
             v = tuple(v)
         ret.setdefault(k.cp, {})[k] = v
     return ret
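The validation loop above strips an optional leading "-" before matching the flag against the EAPI-specific regex from _get_useflag_re, warns on invalid names, and keeps the prefixed form otherwise. A self-contained sketch of the same flow, with an illustrative pattern standing in for the real EAPI-dependent regex:

import re

# Illustrative only: the real pattern comes from _get_useflag_re(eapi) and
# varies by EAPI; this just shows the "-" handling and filtering.
useflag_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9+_@-]*$")

def filter_flags(prefixed_flags):
    kept = []
    for prefixed in prefixed_flags:
        flag = prefixed[1:] if prefixed[:1] == "-" else prefixed
        if useflag_re.match(flag) is None:
            print("--- Invalid USE flag: %r" % (prefixed,))
        else:
            kept.append(prefixed)    # keep the original, prefix included
    return kept

print(filter_flags(["ssl", "-doc", "bad flag"]))  # ['ssl', '-doc']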
Example #12
	def __init__(self, license_group_locations, abs_user_config, user_config=True):

		self._accept_license_str = None
		self._accept_license = None
		self._license_groups = {}
		self._plicensedict = ExtendedAtomDict(dict)
		self._undef_lic_groups = set()

		if user_config:
			license_group_locations = list(license_group_locations) + [abs_user_config]

		self._read_license_groups(license_group_locations)

		if user_config:
			self._read_user_config(abs_user_config)
Example #13
 def _parse_file_to_dict(self, file_name, juststrings=False, recursive=True, eapi_filter=None, user_config=False):
     ret = {}
     location_dict = {}
     eapi = read_corresponding_eapi_file(file_name, default=None)
     if eapi is None and not user_config:
         eapi = "0"
     if eapi is None:
         ret = ExtendedAtomDict(dict)
     else:
         ret = {}
     file_dict = grabdict_package(
         file_name,
         recursive=recursive,
         allow_wildcard=(eapi is None),
         allow_repo=(eapi is None),
         verify_eapi=(eapi is not None),
     )
     if eapi is not None and eapi_filter is not None and not eapi_filter(eapi):
         if file_dict:
             writemsg(
                 _("--- EAPI '%s' does not support '%s': '%s'\n") % (eapi, os.path.basename(file_name), file_name),
                 noiselevel=-1,
             )
         return ret
     useflag_re = _get_useflag_re(eapi)
     for k, v in file_dict.items():
         useflags = []
         for prefixed_useflag in v:
             if prefixed_useflag[:1] == "-":
                 useflag = prefixed_useflag[1:]
             else:
                 useflag = prefixed_useflag
             if useflag_re.match(useflag) is None:
                 writemsg(
                     _("--- Invalid USE flag for '%s' in '%s': '%s'\n") % (k, file_name, prefixed_useflag),
                     noiselevel=-1,
                 )
             else:
                 useflags.append(prefixed_useflag)
         location_dict.setdefault(k, []).extend(useflags)
     for k, v in location_dict.items():
         if juststrings:
             v = " ".join(v)
         else:
             v = tuple(v)
         ret.setdefault(k.cp, {})[k] = v
     return ret
Example #14
	def __init__(self, profiles, abs_user_config, user_config=True,
				global_accept_keywords=""):
		self._pkeywords_list = []
		rawpkeywords = [grabdict_package(
			os.path.join(x.location, "package.keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True, eapi=x.eapi, eapi_default=None,
			allow_build_id=x.allow_build_id)
			for x in profiles]
		for pkeyworddict in rawpkeywords:
			if not pkeyworddict:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in pkeyworddict.items():
				cpdict.setdefault(k.cp, {})[k] = v
			self._pkeywords_list.append(cpdict)
		self._pkeywords_list = tuple(self._pkeywords_list)

		self._p_accept_keywords = []
		raw_p_accept_keywords = [grabdict_package(
			os.path.join(x.location, "package.accept_keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True, eapi=x.eapi, eapi_default=None)
			for x in profiles]
		for d in raw_p_accept_keywords:
			if not d:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in d.items():
				cpdict.setdefault(k.cp, {})[k] = tuple(v)
			self._p_accept_keywords.append(cpdict)
		self._p_accept_keywords = tuple(self._p_accept_keywords)

		self.pkeywordsdict = ExtendedAtomDict(dict)

		if user_config:
			pkgdict = grabdict_package(
				os.path.join(abs_user_config, "package.keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False, allow_build_id=True)

			for k, v in grabdict_package(
				os.path.join(abs_user_config, "package.accept_keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False, allow_build_id=True).items():
				pkgdict.setdefault(k, []).extend(v)

			accept_keywords_defaults = global_accept_keywords.split()
			accept_keywords_defaults = tuple('~' + keyword for keyword in \
				accept_keywords_defaults if keyword[:1] not in "~-")
			for k, v in pkgdict.items():
				# default to ~arch if no specific keyword is given
				if not v:
					v = accept_keywords_defaults
				else:
					v = tuple(v)
				self.pkeywordsdict.setdefault(k.cp, {})[k] = v
Example #15
	def __init__(self):
		self._atoms = set()
		self._atommap = ExtendedAtomDict(set)
		self._loaded = False
		self._loading = False
		self.errors = []
		self._nonatoms = set()
		self.world_candidate = False
Example #16
File: base.py Project: entoo/portage-src
 def __init__(self, allow_wildcard=False, allow_repo=False):
     self._atoms = set()
     self._atommap = ExtendedAtomDict(set)
     self._loaded = False
     self._loading = False
     self.errors = []
     self._nonatoms = set()
     self.world_candidate = False
     self._allow_wildcard = allow_wildcard
     self._allow_repo = allow_repo
Example #17
class MaskManager(object):

	def __init__(self, pmask_locations, abs_user_config, user_config=True):
		self._punmaskdict = ExtendedAtomDict(list)
		self._pmaskdict = ExtendedAtomDict(list)

		pkgmasklines = []
		pkgunmasklines = []
		for x in pmask_locations:
			pkgmasklines.append(grabfile_package(
				os.path.join(x, "package.mask"), recursive=1))
			pkgunmasklines.append(grabfile_package(
				os.path.join(x, "package.unmask"), recursive=1))

		if user_config:
			pkgmasklines.append(grabfile_package(
				os.path.join(abs_user_config, "package.mask"), recursive=1, allow_wildcard=True))
			pkgunmasklines.append(grabfile_package(
				os.path.join(abs_user_config, "package.unmask"), recursive=1, allow_wildcard=True))

		pkgmasklines = stack_lists(pkgmasklines, incremental=1)
		pkgunmasklines = stack_lists(pkgunmasklines, incremental=1)

		for x in pkgmasklines:
			self._pmaskdict.setdefault(x.cp, []).append(x)

		for x in pkgunmasklines:
			self._punmaskdict.setdefault(x.cp, []).append(x)

	def getMaskAtom(self, cpv, slot):
		"""
		Take a package and return a matching package.mask atom, or None if no
		such atom exists or it has been cancelled by package.unmask. PROVIDE
		is not checked, so atoms will not be found for old-style virtuals.

		@param cpv: The package name
		@type cpv: String
		@param slot: The package's slot
		@type slot: String
		@rtype: String
		@return: A matching atom string or None if one is not found.
		"""

		cp = cpv_getkey(cpv)
		mask_atoms = self._pmaskdict.get(cp)
		if mask_atoms:
			pkg_list = ["%s:%s" % (cpv, slot)]
			unmask_atoms = self._punmaskdict.get(cp)
			for x in mask_atoms:
				if not match_from_list(x, pkg_list):
					continue
				if unmask_atoms:
					for y in unmask_atoms:
						if match_from_list(y, pkg_list):
							return None
				return x
		return None
Example #18
	def __init__(self, license_group_locations, abs_user_config, user_config=True):

		self._accept_license_str = None
		self._accept_license = None
		self._license_groups = {}
		self._plicensedict = ExtendedAtomDict(dict)
		self._undef_lic_groups = set()

		if user_config:
			license_group_locations = list(license_group_locations) + [abs_user_config]

		self._read_license_groups(license_group_locations)

		if user_config:
			self._read_user_config(abs_user_config)
Example #19
 def testExtendedAtomDict(self):
     d = ExtendedAtomDict(dict)
     d["*/*"] = {"test1": "x"}
     d["dev-libs/*"] = {"test2": "y"}
     d.setdefault("sys-apps/portage", {})["test3"] = "z"
     self.assertEqual(d.get("dev-libs/A"), {"test1": "x", "test2": "y"})
     self.assertEqual(d.get("sys-apps/portage"), {
         "test1": "x",
         "test3": "z"
     })
     self.assertEqual(d["dev-libs/*"], {"test2": "y"})
     self.assertEqual(d["sys-apps/portage"], {'test1': 'x', 'test3': 'z'})
Example #20
File: base.py Project: entoo/portage-src
class PackageSet(object):
    # Set this to operations that are supported by your subclass. While
    # technically there is no difference between "merge" and "unmerge" regarding
    # package sets, the latter doesn't make sense for some sets like "system"
    # or "security" and therefore isn't supported by them.
    _operations = ["merge"]
    description = "generic package set"

    def __init__(self, allow_wildcard=False, allow_repo=False):
        self._atoms = set()
        self._atommap = ExtendedAtomDict(set)
        self._loaded = False
        self._loading = False
        self.errors = []
        self._nonatoms = set()
        self.world_candidate = False
        self._allow_wildcard = allow_wildcard
        self._allow_repo = allow_repo

    def __contains__(self, atom):
        self._load()
        return atom in self._atoms or atom in self._nonatoms

    def __iter__(self):
        self._load()
        for x in self._atoms:
            yield x
        for x in self._nonatoms:
            yield x

    def __bool__(self):
        self._load()
        return bool(self._atoms or self._nonatoms)

    if sys.hexversion < 0x3000000:
        __nonzero__ = __bool__

    def supportsOperation(self, op):
        if not op in OPERATIONS:
            raise ValueError(op)
        return op in self._operations

    def _load(self):
        if not (self._loaded or self._loading):
            self._loading = True
            self.load()
            self._loaded = True
            self._loading = False

    def getAtoms(self):
        self._load()
        return self._atoms.copy()

    def getNonAtoms(self):
        self._load()
        return self._nonatoms.copy()

    def _setAtoms(self, atoms):
        self._atoms.clear()
        self._nonatoms.clear()
        for a in atoms:
            if not isinstance(a, Atom):
                if isinstance(a, basestring):
                    a = a.strip()
                if not a:
                    continue
                try:
                    a = Atom(a, allow_wildcard=True, allow_repo=True)
                except InvalidAtom:
                    self._nonatoms.add(a)
                    continue
            if not self._allow_wildcard and a.extended_syntax:
                raise InvalidAtom("extended atom syntax not allowed here")
            if not self._allow_repo and a.repo:
                raise InvalidAtom("repository specification not allowed here")
            self._atoms.add(a)

        self._updateAtomMap()

    def load(self):
        # This method must be overwritten by subclasses
        # Editable sets should use the value of self._mtime to determine if they
        # need to reload themselves
        raise NotImplementedError()

    def containsCPV(self, cpv):
        self._load()
        for a in self._atoms:
            if match_from_list(a, [cpv]):
                return True
        return False

    def getMetadata(self, key):
        if hasattr(self, key.lower()):
            return getattr(self, key.lower())
        else:
            return ""

    def _updateAtomMap(self, atoms=None):
        """Update self._atommap for specific atoms or all atoms."""
        if not atoms:
            self._atommap.clear()
            atoms = self._atoms
        for a in atoms:
            self._atommap.setdefault(a.cp, set()).add(a)

    # Not sure if this one should really be in PackageSet
    def findAtomForPackage(self, pkg, modified_use=None):
        """Return the best match for a given package from the arguments, or
		None if there are no matches.  This matches virtual arguments against
		the PROVIDE metadata.  This can raise an InvalidDependString exception
		if an error occurs while parsing PROVIDE."""

        if modified_use is not None and modified_use is not pkg.use.enabled:
            pkg = pkg.copy()
            pkg._metadata["USE"] = " ".join(modified_use)

        # Atoms matched via PROVIDE must be temporarily transformed since
        # match_from_list() only works correctly when atom.cp == pkg.cp.
        rev_transform = {}
        for atom in self.iterAtomsForPackage(pkg):
            if atom.cp == pkg.cp:
                rev_transform[atom] = atom
            else:
                rev_transform[Atom(atom.replace(atom.cp, pkg.cp, 1), allow_wildcard=True, allow_repo=True)] = atom
        best_match = best_match_to_list(pkg, iter(rev_transform))
        if best_match:
            return rev_transform[best_match]
        return None

    def iterAtomsForPackage(self, pkg):
        """
		Find all matching atoms for a given package. This matches virtual
		arguments against the PROVIDE metadata.  This will raise an
		InvalidDependString exception if PROVIDE is invalid.
		"""
        cpv_slot_list = [pkg]
        cp = cpv_getkey(pkg.cpv)
        self._load()  # make sure the atoms are loaded

        atoms = self._atommap.get(cp)
        if atoms:
            for atom in atoms:
                if match_from_list(atom, cpv_slot_list):
                    yield atom
        provides = pkg._metadata["PROVIDE"]
        if not provides:
            return
        provides = provides.split()
        for provide in provides:
            try:
                provided_cp = Atom(provide).cp
            except InvalidAtom:
                continue
            atoms = self._atommap.get(provided_cp)
            if atoms:
                for atom in atoms:
                    if match_from_list(atom.replace(provided_cp, cp), cpv_slot_list):
                        yield atom
Example #21
	def __init__(self, repositories, profiles, abs_user_config,
		user_config=True, strict_umatched_removal=False):
		self._punmaskdict = ExtendedAtomDict(list)
		self._pmaskdict = ExtendedAtomDict(list)
		# Preserves atoms that are eliminated by negative
		# incrementals in user_pkgmasklines.
		self._pmaskdict_raw = ExtendedAtomDict(list)

		#Read profile/package.mask from every repo.
		#Repositories inherit masks from their parent profiles and
		#are able to remove mask from them with -atoms.
		#Such a removal affects only the current repo, but not the parent.
		#Add ::repo specs to every atom to make sure atoms only affect
		#packages from the current repo.

		# Cache the repository-wide package.mask files as a particular
		# repo may be often referenced by others as the master.
		pmask_cache = {}

		def grab_pmask(loc, repo_config):
			if loc not in pmask_cache:
				path = os.path.join(loc, 'profiles', 'package.mask')
				pmask_cache[loc] = grabfile_package(path,
						recursive=repo_config.portage1_profiles,
						remember_source_file=True, verify_eapi=True,
						eapi_default=repo_config.eapi,
						allow_build_id=("build-id"
						in repo_config.profile_formats))
				if repo_config.portage1_profiles_compat and os.path.isdir(path):
					warnings.warn(_("Repository '%(repo_name)s' is implicitly using "
						"'portage-1' profile format in its profiles/package.mask, but "
						"the repository profiles are not marked as that format.  This will break "
						"in the future.  Please either convert the following paths "
						"to files, or add\nprofile-formats = portage-1\nto the "
						"repository's layout.conf.\n")
						% dict(repo_name=repo_config.name))

			return pmask_cache[loc]

		repo_pkgmasklines = []
		for repo in repositories.repos_with_profiles():
			lines = []
			repo_lines = grab_pmask(repo.location, repo)
			removals = frozenset(line[0][1:] for line in repo_lines
				if line[0][:1] == "-")
			matched_removals = set()
			for master in repo.masters:
				master_lines = grab_pmask(master.location, master)
				for line in master_lines:
					if line[0] in removals:
						matched_removals.add(line[0])
				# Since we don't stack masters recursively, there aren't any
				# atoms earlier in the stack to be matched by negative atoms in
				# master_lines. Also, repo_lines may contain negative atoms
				# that are intended to negate atoms from a different master
				# than the one with which we are currently stacking. Therefore,
				# we disable warn_for_unmatched_removal here (see bug #386569).
				lines.append(stack_lists([master_lines, repo_lines], incremental=1,
					remember_source_file=True, warn_for_unmatched_removal=False))

			# It's safe to warn for unmatched removal if masters have not
			# been overridden by the user, which is guaranteed when
			# user_config is false (when called by repoman).
			if repo.masters:
				unmatched_removals = removals.difference(matched_removals)
				if unmatched_removals and not user_config:
					source_file = os.path.join(repo.location,
						"profiles", "package.mask")
					unmatched_removals = list(unmatched_removals)
					if len(unmatched_removals) > 3:
						writemsg(
							_("--- Unmatched removal atoms in %s: %s and %s more\n") %
							(source_file,
							", ".join("-" + x for x in unmatched_removals[:3]),
							len(unmatched_removals) - 3), noiselevel=-1)
					else:
						writemsg(
							_("--- Unmatched removal atom(s) in %s: %s\n") %
							(source_file,
							", ".join("-" + x for x in unmatched_removals)),
							noiselevel=-1)

			else:
				lines.append(stack_lists([repo_lines], incremental=1,
					remember_source_file=True, warn_for_unmatched_removal=not user_config,
					strict_warn_for_unmatched_removal=strict_umatched_removal))
			repo_pkgmasklines.extend(append_repo(stack_lists(lines), repo.name, remember_source_file=True))

		repo_pkgunmasklines = []
		for repo in repositories.repos_with_profiles():
			if not repo.portage1_profiles:
				continue
			repo_lines = grabfile_package(os.path.join(repo.location, "profiles", "package.unmask"), \
				recursive=1, remember_source_file=True,
				verify_eapi=True, eapi_default=repo.eapi,
				allow_build_id=("build-id" in repo.profile_formats))
			lines = stack_lists([repo_lines], incremental=1, \
				remember_source_file=True, warn_for_unmatched_removal=True,
				strict_warn_for_unmatched_removal=strict_umatched_removal)
			repo_pkgunmasklines.extend(append_repo(lines, repo.name, remember_source_file=True))

		#Read package.mask from the user's profile. Stack them in the end
		#to allow profiles to override masks from their parent profiles.
		profile_pkgmasklines = []
		profile_pkgunmasklines = []
		for x in profiles:
			profile_pkgmasklines.append(grabfile_package(
				os.path.join(x.location, "package.mask"),
				recursive=x.portage1_directories,
				remember_source_file=True, verify_eapi=True,
				eapi=x.eapi, eapi_default=None,
				allow_build_id=x.allow_build_id))
			if x.portage1_directories:
				profile_pkgunmasklines.append(grabfile_package(
					os.path.join(x.location, "package.unmask"),
					recursive=x.portage1_directories,
					remember_source_file=True, verify_eapi=True,
					eapi=x.eapi, eapi_default=None,
					allow_build_id=x.allow_build_id))
		profile_pkgmasklines = stack_lists(profile_pkgmasklines, incremental=1, \
			remember_source_file=True, warn_for_unmatched_removal=True,
			strict_warn_for_unmatched_removal=strict_umatched_removal)
		profile_pkgunmasklines = stack_lists(profile_pkgunmasklines, incremental=1, \
			remember_source_file=True, warn_for_unmatched_removal=True,
			strict_warn_for_unmatched_removal=strict_umatched_removal)

		#Read /etc/portage/package.mask. Don't stack it to allow the user to
		#remove mask atoms from everywhere with -atoms.
		user_pkgmasklines = []
		user_pkgunmasklines = []
		if user_config:
			user_pkgmasklines = grabfile_package(
				os.path.join(abs_user_config, "package.mask"), recursive=1, \
				allow_wildcard=True, allow_repo=True,
				remember_source_file=True, verify_eapi=False,
				allow_build_id=True)
			user_pkgunmasklines = grabfile_package(
				os.path.join(abs_user_config, "package.unmask"), recursive=1, \
				allow_wildcard=True, allow_repo=True,
				remember_source_file=True, verify_eapi=False,
				allow_build_id=True)

		#Stack everything together. At this point, only user_pkgmasklines may contain -atoms.
		#Don't warn for unmatched -atoms here, since we don't do it for any other user config file.
		raw_pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
		pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines, user_pkgmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
		pkgunmasklines = stack_lists([repo_pkgunmasklines, profile_pkgunmasklines, user_pkgunmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)

		for x, source_file in raw_pkgmasklines:
			self._pmaskdict_raw.setdefault(x.cp, []).append(x)

		for x, source_file in pkgmasklines:
			self._pmaskdict.setdefault(x.cp, []).append(x)

		for x, source_file in pkgunmasklines:
			self._punmaskdict.setdefault(x.cp, []).append(x)

		for d in (self._pmaskdict_raw, self._pmaskdict, self._punmaskdict):
			for k, v in d.items():
				d[k] = tuple(v)
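The per-repository stacking above hinges on the removals bookkeeping: leading "-" atoms in a repo's package.mask cancel atoms inherited from its masters, and any removal that never matched is reported when user_config is false. A small standalone sketch of that matching, using (atom, source_file) tuples as produced by remember_source_file=True and hypothetical file paths:

# Hypothetical inherited and local mask lines.
master_lines = [("sys-apps/broken-pkg", "master/profiles/package.mask")]
repo_lines = [("-sys-apps/broken-pkg", "repo/profiles/package.mask"),
              ("dev-libs/other", "repo/profiles/package.mask")]

removals = frozenset(line[0][1:] for line in repo_lines if line[0][:1] == "-")
matched_removals = set(line[0] for line in master_lines if line[0] in removals)
unmatched_removals = removals.difference(matched_removals)
print(sorted(unmatched_removals))  # [] -- every "-atom" cancelled something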
Example #22
class LicenseManager:
    def __init__(self,
                 license_group_locations,
                 abs_user_config,
                 user_config=True):

        self._accept_license_str = None
        self._accept_license = None
        self._license_groups = {}
        self._plicensedict = ExtendedAtomDict(dict)
        self._undef_lic_groups = set()

        if user_config:
            license_group_locations = list(license_group_locations) + [
                abs_user_config
            ]

        self._read_license_groups(license_group_locations)

        if user_config:
            self._read_user_config(abs_user_config)

    def _read_user_config(self, abs_user_config):
        licdict = grabdict_package(os.path.join(abs_user_config,
                                                "package.license"),
                                   recursive=1,
                                   allow_wildcard=True,
                                   allow_repo=True,
                                   verify_eapi=False)
        for k, v in licdict.items():
            self._plicensedict.setdefault(k.cp, {})[k] = \
             self.expandLicenseTokens(v)

    def _read_license_groups(self, locations):
        for loc in locations:
            for k, v in grabdict(os.path.join(loc, "license_groups")).items():
                self._license_groups.setdefault(k, []).extend(v)

        for k, v in self._license_groups.items():
            self._license_groups[k] = frozenset(v)

    def extract_global_changes(self, old=""):
        ret = old
        atom_license_map = self._plicensedict.get("*/*")
        if atom_license_map is not None:
            v = atom_license_map.pop("*/*", None)
            if v is not None:
                ret = " ".join(v)
                if old:
                    ret = old + " " + ret
                if not atom_license_map:
                    #No tokens left in atom_license_map, remove it.
                    del self._plicensedict["*/*"]
        return ret

    def expandLicenseTokens(self, tokens):
        """ Take a token from ACCEPT_LICENSE or package.license and expand it
		if it's a group token (indicated by @) or just return it if it's not a
		group.  If a group is negated then negate all group elements."""
        expanded_tokens = []
        for x in tokens:
            expanded_tokens.extend(self._expandLicenseToken(x, None))
        return expanded_tokens

    def _expandLicenseToken(self, token, traversed_groups):
        negate = False
        rValue = []
        if token.startswith("-"):
            negate = True
            license_name = token[1:]
        else:
            license_name = token
        if not license_name.startswith("@"):
            rValue.append(token)
            return rValue
        group_name = license_name[1:]
        if traversed_groups is None:
            traversed_groups = set()
        license_group = self._license_groups.get(group_name)
        if group_name in traversed_groups:
            writemsg(_("Circular license group reference"
                       " detected in '%s'\n") % group_name,
                     noiselevel=-1)
            rValue.append("@" + group_name)
        elif license_group:
            traversed_groups.add(group_name)
            for l in license_group:
                if l.startswith("-"):
                    writemsg(_("Skipping invalid element %s"
                               " in license group '%s'\n") % (l, group_name),
                             noiselevel=-1)
                else:
                    rValue.extend(self._expandLicenseToken(
                        l, traversed_groups))
        else:
            if self._license_groups and \
             group_name not in self._undef_lic_groups:
                self._undef_lic_groups.add(group_name)
                writemsg(_("Undefined license group '%s'\n") % group_name,
                         noiselevel=-1)
            rValue.append("@" + group_name)
        if negate:
            rValue = ["-" + token for token in rValue]
        return rValue

    def _getPkgAcceptLicense(self, cpv, slot, repo):
        """
		Get an ACCEPT_LICENSE list, accounting for package.license.
		"""
        accept_license = self._accept_license
        cp = cpv_getkey(cpv)
        cpdict = self._plicensedict.get(cp)
        if cpdict:
            if not hasattr(cpv, "slot"):
                cpv = _pkg_str(cpv, slot=slot, repo=repo)
            plicence_list = ordered_by_atom_specificity(cpdict, cpv)
            if plicence_list:
                accept_license = list(self._accept_license)
                for x in plicence_list:
                    accept_license.extend(x)
        return accept_license

    def get_prunned_accept_license(self, cpv, use, lic, slot, repo):
        """
		Generate a pruned version of ACCEPT_LICENSE, by intersection with
		LICENSE. This is required since otherwise ACCEPT_LICENSE might be
		too big (bigger than ARG_MAX), causing execve() calls to fail with
		E2BIG errors as in bug #262647.
		"""
        try:
            licenses = set(use_reduce(lic, uselist=use, flat=True))
        except InvalidDependString:
            licenses = set()
        licenses.discard('||')

        accept_license = self._getPkgAcceptLicense(cpv, slot, repo)

        if accept_license:
            acceptable_licenses = set()
            for x in accept_license:
                if x == '*':
                    acceptable_licenses.update(licenses)
                elif x == '-*':
                    acceptable_licenses.clear()
                elif x[:1] == '-':
                    acceptable_licenses.discard(x[1:])
                elif x in licenses:
                    acceptable_licenses.add(x)

            licenses = acceptable_licenses
        return ' '.join(sorted(licenses))

    def getMissingLicenses(self, cpv, use, lic, slot, repo):
        """
		Take a LICENSE string and return a list of any licenses that the user
		may need to accept for the given package.  The returned list will not
		contain any licenses that have already been accepted.  This method
		can throw an InvalidDependString exception.

		@param cpv: The package name (for package.license support)
		@type cpv: String
		@param use: "USE" from the cpv's metadata
		@type use: String
		@param lic: "LICENSE" from the cpv's metadata
		@type lic: String
		@param slot: "SLOT" from the cpv's metadata
		@type slot: String
		@rtype: List
		@return: A list of licenses that have not been accepted.
		"""

        licenses = set(use_reduce(lic, matchall=1, flat=True))
        licenses.discard('||')

        acceptable_licenses = set()
        for x in self._getPkgAcceptLicense(cpv, slot, repo):
            if x == '*':
                acceptable_licenses.update(licenses)
            elif x == '-*':
                acceptable_licenses.clear()
            elif x[:1] == '-':
                acceptable_licenses.discard(x[1:])
            else:
                acceptable_licenses.add(x)

        license_str = lic
        if "?" in license_str:
            use = use.split()
        else:
            use = []

        license_struct = use_reduce(license_str, uselist=use, opconvert=True)
        return self._getMaskedLicenses(license_struct, acceptable_licenses)

    def _getMaskedLicenses(self, license_struct, acceptable_licenses):
        if not license_struct:
            return []
        if license_struct[0] == "||":
            ret = []
            for element in license_struct[1:]:
                if isinstance(element, list):
                    if element:
                        tmp = self._getMaskedLicenses(element,
                                                      acceptable_licenses)
                        if not tmp:
                            return []
                        ret.extend(tmp)
                else:
                    if element in acceptable_licenses:
                        return []
                    ret.append(element)
            # Return all masked licenses, since we don't know which combination
            # (if any) the user will decide to unmask.
            return ret

        ret = []
        for element in license_struct:
            if isinstance(element, list):
                if element:
                    ret.extend(
                        self._getMaskedLicenses(element, acceptable_licenses))
            else:
                if element not in acceptable_licenses:
                    ret.append(element)
        return ret

    def set_accept_license_str(self, accept_license_str):
        if accept_license_str != self._accept_license_str:
            self._accept_license_str = accept_license_str
            self._accept_license = tuple(
                self.expandLicenseTokens(accept_license_str.split()))
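The group expansion in _expandLicenseToken is the subtle part: an "@group" token expands to its members (recursively, with circular-reference detection), and a negated "-@group" negates every expanded member. A condensed sketch of the non-recursive case, with a hypothetical license_groups table:

# Hypothetical group table; in portage it comes from license_groups files.
license_groups = {"EULA": frozenset({"google-chrome", "skype-eula"})}

def expand(token):
    negate = token.startswith("-")
    name = token[1:] if negate else token
    if not name.startswith("@"):
        return [token]                         # plain license, returned as-is
    members = sorted(license_groups.get(name[1:], ()))
    expanded = members if members else [name]  # unknown group kept verbatim
    return ["-" + m for m in expanded] if negate else list(expanded)

print(expand("-@EULA"))  # ['-google-chrome', '-skype-eula']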
Example #23
	def _parse_file_to_dict(self, file_name, juststrings=False, recursive=True,
		eapi_filter=None, user_config=False, eapi=None, eapi_default="0"):
		"""
		@param file_name: input file name
		@type file_name: str
		@param juststrings: store dict values as space-delimited strings
			instead of tuples
		@type juststrings: bool
		@param recursive: triggers recursion if the input file is a
			directory
		@type recursive: bool
		@param eapi_filter: a function that accepts a single eapi
			argument, and returns true if the current file type
			is supported by the given EAPI
		@type eapi_filter: callable
		@param user_config: current file is part of the local
			configuration (not repository content)
		@type user_config: bool
		@param eapi: the EAPI of the current profile node, which allows
			a call to read_corresponding_eapi_file to be skipped
		@type eapi: str
		@param eapi_default: the default EAPI which applies if the
			current profile node does not define a local EAPI
		@type eapi_default: str
		@rtype: tuple
		@return: collection of USE flags
		"""
		ret = {}
		location_dict = {}
		if eapi is None:
			eapi = read_corresponding_eapi_file(file_name,
				default=eapi_default)
		extended_syntax = eapi is None and user_config
		if extended_syntax:
			ret = ExtendedAtomDict(dict)
		else:
			ret = {}
		file_dict = grabdict_package(file_name, recursive=recursive,
			allow_wildcard=extended_syntax, allow_repo=extended_syntax,
			verify_eapi=(not extended_syntax), eapi=eapi,
			eapi_default=eapi_default)
		if eapi is not None and eapi_filter is not None and not eapi_filter(eapi):
			if file_dict:
				writemsg(_("--- EAPI '%s' does not support '%s': '%s'\n") %
					(eapi, os.path.basename(file_name), file_name),
					noiselevel=-1)
			return ret
		useflag_re = _get_useflag_re(eapi)
		for k, v in file_dict.items():
			useflags = []
			for prefixed_useflag in v:
				if prefixed_useflag[:1] == "-":
					useflag = prefixed_useflag[1:]
				else:
					useflag = prefixed_useflag
				if useflag_re.match(useflag) is None:
					writemsg(_("--- Invalid USE flag for '%s' in '%s': '%s'\n") %
						(k, file_name, prefixed_useflag), noiselevel=-1)
				else:
					useflags.append(prefixed_useflag)
			location_dict.setdefault(k, []).extend(useflags)
		for k, v in location_dict.items():
			if juststrings:
				v = " ".join(v)
			else:
				v = tuple(v)
			ret.setdefault(k.cp, {})[k] = v
		return ret
Example #24
class MaskManager(object):

	def __init__(self, repositories, profiles, abs_user_config,
		user_config=True, strict_umatched_removal=False):
		self._punmaskdict = ExtendedAtomDict(list)
		self._pmaskdict = ExtendedAtomDict(list)
		# Preserves atoms that are eliminated by negative
		# incrementals in user_pkgmasklines.
		self._pmaskdict_raw = ExtendedAtomDict(list)

		#Read profile/package.mask from every repo.
		#Repositories inherit masks from their parent profiles and
		#are able to remove mask from them with -atoms.
		#Such a removal affects only the current repo, but not the parent.
		#Add ::repo specs to every atom to make sure atoms only affect
		#packages from the current repo.

		# Cache the repository-wide package.mask files as a particular
		# repo may be often referenced by others as the master.
		pmask_cache = {}

		def grab_pmask(loc, repo_config):
			if loc not in pmask_cache:
				path = os.path.join(loc, 'profiles', 'package.mask')
				pmask_cache[loc] = grabfile_package(path,
						recursive=repo_config.portage1_profiles,
						remember_source_file=True, verify_eapi=True)
				if repo_config.portage1_profiles_compat and os.path.isdir(path):
					warnings.warn(_("Repository '%(repo_name)s' is implicitly using "
						"'portage-1' profile format in its profiles/package.mask, but "
						"the repository profiles are not marked as that format.  This will break "
						"in the future.  Please either convert the following paths "
						"to files, or add\nprofile-formats = portage-1\nto the "
						"repositories layout.conf.\n")
						% dict(repo_name=repo_config.name))

			return pmask_cache[loc]

		repo_pkgmasklines = []
		for repo in repositories.repos_with_profiles():
			lines = []
			repo_lines = grab_pmask(repo.location, repo)
			removals = frozenset(line[0][1:] for line in repo_lines
				if line[0][:1] == "-")
			matched_removals = set()
			for master in repo.masters:
				master_lines = grab_pmask(master.location, master)
				for line in master_lines:
					if line[0] in removals:
						matched_removals.add(line[0])
				# Since we don't stack masters recursively, there aren't any
				# atoms earlier in the stack to be matched by negative atoms in
				# master_lines. Also, repo_lines may contain negative atoms
				# that are intended to negate atoms from a different master
				# than the one with which we are currently stacking. Therefore,
				# we disable warn_for_unmatched_removal here (see bug #386569).
				lines.append(stack_lists([master_lines, repo_lines], incremental=1,
					remember_source_file=True, warn_for_unmatched_removal=False))

			# It's safe to warn for unmatched removal if masters have not
			# been overridden by the user, which is guaranteed when
			# user_config is false (when called by repoman).
			if repo.masters:
				unmatched_removals = removals.difference(matched_removals)
				if unmatched_removals and not user_config:
					source_file = os.path.join(repo.location,
						"profiles", "package.mask")
					unmatched_removals = list(unmatched_removals)
					if len(unmatched_removals) > 3:
						writemsg(
							_("--- Unmatched removal atoms in %s: %s and %s more\n") %
							(source_file,
							", ".join("-" + x for x in unmatched_removals[:3]),
							len(unmatched_removals) - 3), noiselevel=-1)
					else:
						writemsg(
							_("--- Unmatched removal atom(s) in %s: %s\n") %
							(source_file,
							", ".join("-" + x for x in unmatched_removals)),
							noiselevel=-1)

			else:
				lines.append(stack_lists([repo_lines], incremental=1,
					remember_source_file=True, warn_for_unmatched_removal=not user_config,
					strict_warn_for_unmatched_removal=strict_umatched_removal))
			repo_pkgmasklines.extend(append_repo(stack_lists(lines), repo.name, remember_source_file=True))

		repo_pkgunmasklines = []
		for repo in repositories.repos_with_profiles():
			if not repo.portage1_profiles:
				continue
			repo_lines = grabfile_package(os.path.join(repo.location, "profiles", "package.unmask"), \
				recursive=1, remember_source_file=True, verify_eapi=True)
			lines = stack_lists([repo_lines], incremental=1, \
				remember_source_file=True, warn_for_unmatched_removal=True,
				strict_warn_for_unmatched_removal=strict_umatched_removal)
			repo_pkgunmasklines.extend(append_repo(lines, repo.name, remember_source_file=True))

		#Read package.mask from the user's profile. Stack them in the end
		#to allow profiles to override masks from their parent profiles.
		profile_pkgmasklines = []
		profile_pkgunmasklines = []
		for x in profiles:
			profile_pkgmasklines.append(grabfile_package(
				os.path.join(x.location, "package.mask"),
				recursive=x.portage1_directories,
				remember_source_file=True, verify_eapi=True))
			if x.portage1_directories:
				profile_pkgunmasklines.append(grabfile_package(
					os.path.join(x.location, "package.unmask"),
					recursive=x.portage1_directories,
					remember_source_file=True, verify_eapi=True))
		profile_pkgmasklines = stack_lists(profile_pkgmasklines, incremental=1, \
			remember_source_file=True, warn_for_unmatched_removal=True,
			strict_warn_for_unmatched_removal=strict_umatched_removal)
		profile_pkgunmasklines = stack_lists(profile_pkgunmasklines, incremental=1, \
			remember_source_file=True, warn_for_unmatched_removal=True,
			strict_warn_for_unmatched_removal=strict_umatched_removal)

		#Read /etc/portage/package.mask. Don't stack it to allow the user to
		#remove mask atoms from everywhere with -atoms.
		user_pkgmasklines = []
		user_pkgunmasklines = []
		if user_config:
			user_pkgmasklines = grabfile_package(
				os.path.join(abs_user_config, "package.mask"), recursive=1, \
				allow_wildcard=True, allow_repo=True, remember_source_file=True, verify_eapi=False)
			user_pkgunmasklines = grabfile_package(
				os.path.join(abs_user_config, "package.unmask"), recursive=1, \
				allow_wildcard=True, allow_repo=True, remember_source_file=True, verify_eapi=False)

		#Stack everything together. At this point, only user_pkgmasklines may contain -atoms.
		#Don't warn for unmatched -atoms here, since we don't do it for any other user config file.
		raw_pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
		pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines, user_pkgmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
		pkgunmasklines = stack_lists([repo_pkgunmasklines, profile_pkgunmasklines, user_pkgunmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)

		for x, source_file in raw_pkgmasklines:
			self._pmaskdict_raw.setdefault(x.cp, []).append(x)

		for x, source_file in pkgmasklines:
			self._pmaskdict.setdefault(x.cp, []).append(x)

		for x, source_file in pkgunmasklines:
			self._punmaskdict.setdefault(x.cp, []).append(x)

		for d in (self._pmaskdict_raw, self._pmaskdict, self._punmaskdict):
			for k, v in d.items():
				d[k] = tuple(v)

	def _getMaskAtom(self, cpv, slot, repo, unmask_atoms=None):
		"""
		Take a package and return a matching package.mask atom, or None if no
		such atom exists or it has been cancelled by package.unmask. PROVIDE
		is not checked, so atoms will not be found for old-style virtuals.

		@param cpv: The package name
		@type cpv: String
		@param slot: The package's slot
		@type slot: String
		@param repo: The package's repository [optional]
		@type repo: String
		@param unmask_atoms: if desired pass in self._punmaskdict.get(cp)
		@type unmask_atoms: list
		@rtype: String
		@return: A matching atom string or None if one is not found.
		"""
		cp = cpv_getkey(cpv)
		mask_atoms = self._pmaskdict.get(cp)
		if mask_atoms:
			pkg = "".join((cpv, _slot_separator, slot))
			if repo and repo != Package.UNKNOWN_REPO:
				pkg = "".join((pkg, _repo_separator, repo))
			pkg_list = [pkg]
			for x in mask_atoms:
				if not match_from_list(x, pkg_list):
					continue
				if unmask_atoms:
					for y in unmask_atoms:
						if match_from_list(y, pkg_list):
							return None
				return x
		return None


	def getMaskAtom(self, cpv, slot, repo):
		"""
		Take a package and return a matching package.mask atom, or None if no
		such atom exists or it has been cancelled by package.unmask. PROVIDE
		is not checked, so atoms will not be found for old-style virtuals.

		@param cpv: The package name
		@type cpv: String
		@param slot: The package's slot
		@type slot: String
		@param repo: The package's repository [optional]
		@type repo: String
		@rtype: String
		@return: A matching atom string or None if one is not found.
		"""
		cp = cpv_getkey(cpv)
		return self._getMaskAtom(cpv, slot, repo, self._punmaskdict.get(cp))


	def getRawMaskAtom(self, cpv, slot, repo):
		"""
		Take a package and return a matching package.mask atom, or None if no
		such atom exists. It HAS NOT! been cancelled by any package.unmask.
		PROVIDE is not checked, so atoms will not be found for old-style
		virtuals.

		@param cpv: The package name
		@type cpv: String
		@param slot: The package's slot
		@type slot: String
		@param repo: The package's repository [optional]
		@type repo: String
		@rtype: String
		@return: A matching atom string or None if one is not found.
		"""

		return self._getMaskAtom(cpv, slot, repo)
Example #25
    def _parse_file_to_dict(self,
                            file_name,
                            juststrings=False,
                            recursive=True,
                            eapi_filter=None,
                            user_config=False,
                            eapi=None,
                            eapi_default="0",
                            allow_build_id=False):
        """
		@param file_name: input file name
		@type file_name: str
		@param juststrings: store dict values as space-delimited strings
			instead of tuples
		@type juststrings: bool
		@param recursive: triggers recursion if the input file is a
			directory
		@type recursive: bool
		@param eapi_filter: a function that accepts a single eapi
			argument, and returns true if the current file type
			is supported by the given EAPI
		@type eapi_filter: callable
		@param user_config: current file is part of the local
			configuration (not repository content)
		@type user_config: bool
		@param eapi: the EAPI of the current profile node, which allows
			a call to read_corresponding_eapi_file to be skipped
		@type eapi: str
		@param eapi_default: the default EAPI which applies if the
			current profile node does not define a local EAPI
		@type eapi_default: str
		@param allow_build_id: allow atoms to specify a particular
			build-id
		@type allow_build_id: bool
		@rtype: tuple
		@return: collection of USE flags
		"""
        ret = {}
        location_dict = {}
        if eapi is None:
            eapi = read_corresponding_eapi_file(file_name,
                                                default=eapi_default)
        extended_syntax = eapi is None and user_config
        if extended_syntax:
            ret = ExtendedAtomDict(dict)
        else:
            ret = {}
        file_dict = grabdict_package(file_name,
                                     recursive=recursive,
                                     allow_wildcard=extended_syntax,
                                     allow_repo=extended_syntax,
                                     verify_eapi=(not extended_syntax),
                                     eapi=eapi,
                                     eapi_default=eapi_default,
                                     allow_build_id=allow_build_id,
                                     allow_use=False)
        if eapi is not None and eapi_filter is not None and not eapi_filter(
                eapi):
            if file_dict:
                writemsg(_("--- EAPI '%s' does not support '%s': '%s'\n") %
                         (eapi, os.path.basename(file_name), file_name),
                         noiselevel=-1)
            return ret
        useflag_re = _get_useflag_re(eapi)
        for k, v in file_dict.items():
            useflags = []
            use_expand_prefix = ''
            for prefixed_useflag in v:
                if extended_syntax and prefixed_useflag == "\n":
                    use_expand_prefix = ""
                    continue
                if extended_syntax and prefixed_useflag[-1] == ":":
                    use_expand_prefix = prefixed_useflag[:-1].lower() + "_"
                    continue

                if prefixed_useflag[:1] == "-":
                    useflag = use_expand_prefix + prefixed_useflag[1:]
                    prefixed_useflag = "-" + useflag
                else:
                    useflag = use_expand_prefix + prefixed_useflag
                    prefixed_useflag = useflag
                if useflag_re.match(useflag) is None:
                    writemsg(
                        _("--- Invalid USE flag for '%s' in '%s': '%s'\n") %
                        (k, file_name, prefixed_useflag),
                        noiselevel=-1)
                else:
                    useflags.append(prefixed_useflag)
            location_dict.setdefault(k, []).extend(useflags)
        for k, v in location_dict.items():
            if juststrings:
                v = " ".join(v)
            else:
                v = tuple(v)
            ret.setdefault(k.cp, {})[k] = v
        return ret
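The final aggregation loop is shared by all _parse_file_to_dict variants on this page: per-atom flag lists are stored under the atom's cp key, either as tuples or as space-joined strings when juststrings is true. A minimal sketch of that step, assuming portage is importable and using a hypothetical atom:

from portage.dep import Atom, ExtendedAtomDict

location_dict = {Atom("dev-libs/A"): ["ssl", "-doc"]}   # hypothetical input
for juststrings in (False, True):
    ret = ExtendedAtomDict(dict)
    for k, v in location_dict.items():
        v = " ".join(v) if juststrings else tuple(v)
        ret.setdefault(k.cp, {})[k] = v
    # First pass stores ('ssl', '-doc'); second pass stores 'ssl -doc'.
    print(ret.get("dev-libs/A"))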
Example #26
class PackageSet(object):
    # Set this to operations that are supported by your subclass. While
    # technically there is no difference between "merge" and "unmerge" regarding
    # package sets, the latter doesn't make sense for some sets like "system"
    # or "security" and therefore isn't supported by them.
    _operations = ["merge"]
    description = "generic package set"

    def __init__(self, allow_wildcard=False, allow_repo=False):
        self._atoms = set()
        self._atommap = ExtendedAtomDict(set)
        self._loaded = False
        self._loading = False
        self.errors = []
        self._nonatoms = set()
        self.world_candidate = False
        self._allow_wildcard = allow_wildcard
        self._allow_repo = allow_repo

    def __contains__(self, atom):
        self._load()
        return atom in self._atoms or atom in self._nonatoms

    def __iter__(self):
        self._load()
        for x in self._atoms:
            yield x
        for x in self._nonatoms:
            yield x

    def __bool__(self):
        self._load()
        return bool(self._atoms or self._nonatoms)

    if sys.hexversion < 0x3000000:
        __nonzero__ = __bool__

    def supportsOperation(self, op):
        if not op in OPERATIONS:
            raise ValueError(op)
        return op in self._operations

    def _load(self):
        if not (self._loaded or self._loading):
            self._loading = True
            self.load()
            self._loaded = True
            self._loading = False

    def getAtoms(self):
        self._load()
        return self._atoms.copy()

    def getNonAtoms(self):
        self._load()
        return self._nonatoms.copy()

    def _setAtoms(self, atoms):
        self._atoms.clear()
        self._nonatoms.clear()
        for a in atoms:
            if not isinstance(a, Atom):
                if isinstance(a, basestring):
                    a = a.strip()
                if not a:
                    continue
                try:
                    a = Atom(a, allow_wildcard=True, allow_repo=True)
                except InvalidAtom:
                    self._nonatoms.add(a)
                    continue
            if not self._allow_wildcard and a.extended_syntax:
                raise InvalidAtom("extended atom syntax not allowed here")
            if not self._allow_repo and a.repo:
                raise InvalidAtom("repository specification not allowed here")
            self._atoms.add(a)

        self._updateAtomMap()

    def load(self):
        # This method must be overwritten by subclasses
        # Editable sets should use the value of self._mtime to determine if they
        # need to reload themselves
        raise NotImplementedError()

    def containsCPV(self, cpv):
        self._load()
        for a in self._atoms:
            if match_from_list(a, [cpv]):
                return True
        return False

    def getMetadata(self, key):
        if hasattr(self, key.lower()):
            return getattr(self, key.lower())
        else:
            return ""

    def _updateAtomMap(self, atoms=None):
        """Update self._atommap for specific atoms or all atoms."""
        if not atoms:
            self._atommap.clear()
            atoms = self._atoms
        for a in atoms:
            self._atommap.setdefault(a.cp, set()).add(a)

    # Not sure if this one should really be in PackageSet
    def findAtomForPackage(self, pkg, modified_use=None):
        """Return the best match for a given package from the arguments, or
		None if there are no matches.  This matches virtual arguments against
		the PROVIDE metadata.  This can raise an InvalidDependString exception
		if an error occurs while parsing PROVIDE."""

        if modified_use is not None and modified_use is not pkg.use.enabled:
            pkg = pkg.copy()
            pkg._metadata["USE"] = " ".join(modified_use)

        # Atoms matched via PROVIDE must be temporarily transformed since
        # match_from_list() only works correctly when atom.cp == pkg.cp.
        rev_transform = {}
        for atom in self.iterAtomsForPackage(pkg):
            if atom.cp == pkg.cp:
                rev_transform[atom] = atom
            else:
                rev_transform[Atom(atom.replace(atom.cp, pkg.cp, 1),
                                   allow_wildcard=True,
                                   allow_repo=True)] = atom
        best_match = best_match_to_list(pkg, iter(rev_transform))
        if best_match:
            return rev_transform[best_match]
        return None

    def iterAtomsForPackage(self, pkg):
        """
		Find all matching atoms for a given package. This matches virtual
		arguments against the PROVIDE metadata.  This will raise an
		InvalidDependString exception if PROVIDE is invalid.
		"""
        cpv_slot_list = [pkg]
        cp = cpv_getkey(pkg.cpv)
        self._load()  # make sure the atoms are loaded

        atoms = self._atommap.get(cp)
        if atoms:
            for atom in atoms:
                if match_from_list(atom, cpv_slot_list):
                    yield atom
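
PackageSet above is an abstract base: a subclass only has to override load() and pass its raw entries to _setAtoms(), which parses valid atoms into self._atoms, collects everything else in self._nonatoms, and rebuilds the atom map. Below is a hypothetical minimal subclass, assuming the class and its imports above are available; the set name and atom strings are invented.

class HypotheticalStaticSet(PackageSet):
    _operations = ["merge", "unmerge"]
    description = "atoms taken from an in-memory list"

    def __init__(self, atom_strings):
        # Allow wildcard and ::repo atoms, matching what _setAtoms() can parse.
        super(HypotheticalStaticSet, self).__init__(allow_wildcard=True, allow_repo=True)
        self._atom_strings = atom_strings

    def load(self):
        # Called lazily by _load() the first time the set is iterated or queried.
        self._setAtoms(self._atom_strings)

# Hypothetical usage (requires portage's Atom machinery at runtime):
# s = HypotheticalStaticSet([">=dev-lang/python-3.11", "not an atom"])
# list(s)  # yields the parsed atom plus the invalid entry from _nonatoms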
Example #27
class KeywordsManager(object):
	"""Manager class to handle keywords processing and validation"""

	def __init__(self, profiles, abs_user_config, user_config=True,
				global_accept_keywords=""):
		self._pkeywords_list = []
		rawpkeywords = [grabdict_package(
			os.path.join(x.location, "package.keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True, eapi=x.eapi, eapi_default=None,
			allow_build_id=x.allow_build_id)
			for x in profiles]
		for pkeyworddict in rawpkeywords:
			if not pkeyworddict:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in pkeyworddict.items():
				cpdict.setdefault(k.cp, {})[k] = v
			self._pkeywords_list.append(cpdict)
		self._pkeywords_list = tuple(self._pkeywords_list)

		self._p_accept_keywords = []
		raw_p_accept_keywords = [grabdict_package(
			os.path.join(x.location, "package.accept_keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True, eapi=x.eapi, eapi_default=None)
			for x in profiles]
		for d in raw_p_accept_keywords:
			if not d:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in d.items():
				cpdict.setdefault(k.cp, {})[k] = tuple(v)
			self._p_accept_keywords.append(cpdict)
		self._p_accept_keywords = tuple(self._p_accept_keywords)

		self.pkeywordsdict = ExtendedAtomDict(dict)

		if user_config:
			pkgdict = grabdict_package(
				os.path.join(abs_user_config, "package.keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False, allow_build_id=True)

			for k, v in grabdict_package(
				os.path.join(abs_user_config, "package.accept_keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False, allow_build_id=True).items():
				pkgdict.setdefault(k, []).extend(v)

			accept_keywords_defaults = global_accept_keywords.split()
			accept_keywords_defaults = tuple('~' + keyword for keyword in \
				accept_keywords_defaults if keyword[:1] not in "~-")
			for k, v in pkgdict.items():
				# default to ~arch if no specific keyword is given
				if not v:
					v = accept_keywords_defaults
				else:
					v = tuple(v)
				self.pkeywordsdict.setdefault(k.cp, {})[k] = v


	def getKeywords(self, cpv, slot, keywords, repo):
		try:
			cpv.slot
		except AttributeError:
			pkg = _pkg_str(cpv, slot=slot, repo=repo)
		else:
			pkg = cpv
		cp = pkg.cp
		keywords = [[x for x in keywords.split() if x != "-*"]]
		for pkeywords_dict in self._pkeywords_list:
			cpdict = pkeywords_dict.get(cp)
			if cpdict:
				pkg_keywords = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_keywords:
					keywords.extend(pkg_keywords)
		return stack_lists(keywords, incremental=True)

	def isStable(self, pkg, global_accept_keywords, backuped_accept_keywords):
		mygroups = self.getKeywords(pkg, None, pkg._metadata["KEYWORDS"], None)
		pgroups = global_accept_keywords.split()

		unmaskgroups = self.getPKeywords(pkg, None, None,
			global_accept_keywords)
		pgroups.extend(unmaskgroups)

		egroups = backuped_accept_keywords.split()

		if unmaskgroups or egroups:
			pgroups = self._getEgroups(egroups, pgroups)
		else:
			pgroups = set(pgroups)

		if self._getMissingKeywords(pkg, pgroups, mygroups):
			return False

		# If replacing all keywords with unstable variants would mask the
		# package, then it's considered stable for the purposes of
		# use.stable.mask/force interpretation. For unstable configurations,
		# this guarantees that the effective use.force/mask settings for a
		# particular ebuild do not change when that ebuild is stabilized.
		unstable = []
		for kw in mygroups:
			if kw[:1] != "~":
				kw = "~" + kw
			unstable.append(kw)

		return bool(self._getMissingKeywords(pkg, pgroups, set(unstable)))

	def getMissingKeywords(self,
							cpv,
							slot,
							keywords,
							repo,
							global_accept_keywords,
							backuped_accept_keywords):
		"""
		Take a package and return a list of any KEYWORDS that the user may
		need to accept for the given package. If the KEYWORDS are empty
		and the ** keyword has not been accepted, the returned list will
		contain ** alone (in order to distinguish from the case of "none
		missing").

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param keywords: The 'KEYWORDS' key from the raw package metadata
		@type keywords: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@param backuped_accept_keywords: ACCEPT_KEYWORDS from the backup env
		@type backuped_accept_keywords: String
		@rtype: List
		@return: A list of KEYWORDS that have not been accepted.
		"""

		mygroups = self.getKeywords(cpv, slot, keywords, repo)
		# Repoman may modify this attribute as necessary.
		pgroups = global_accept_keywords.split()

		unmaskgroups = self.getPKeywords(cpv, slot, repo,
				global_accept_keywords)
		pgroups.extend(unmaskgroups)

		# Hack: Need to check the env directly here as otherwise stacking
		# doesn't work properly as negative values are lost in the config
		# object (bug #139600)
		egroups = backuped_accept_keywords.split()

		if unmaskgroups or egroups:
			pgroups = self._getEgroups(egroups, pgroups)
		else:
			pgroups = set(pgroups)

		return self._getMissingKeywords(cpv, pgroups, mygroups)


	def getRawMissingKeywords(self,
							cpv,
							slot,
							keywords,
							repo,
							global_accept_keywords):
		"""
		Take a package and return a list of any KEYWORDS that the user may
		need to accept for the given package. If the KEYWORDS are empty,
		the returned list will contain ** alone (in order to distinguish
		from the case of "none missing").  This DOES NOT apply any user config
		package.accept_keywords acceptance.

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param keywords: The 'KEYWORDS' key from the raw package metadata
		@type keywords: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@rtype: List
		@return: lists of KEYWORDS that have not been accepted
		and the keywords it looked for.
		"""

		mygroups = self.getKeywords(cpv, slot, keywords, repo)
		pgroups = global_accept_keywords.split()
		pgroups = set(pgroups)
		return self._getMissingKeywords(cpv, pgroups, mygroups)


	@staticmethod
	def _getEgroups(egroups, mygroups):
		"""gets any keywords defined in the environment

		@param egroups: ACCEPT_KEYWORDS from the backup environment
		@type egroups: List
		@param mygroups: the keywords accepted so far
		@type mygroups: List
		@rtype: Set
		@return: the set of keywords accepted after incremental stacking
		"""
		mygroups = list(mygroups)
		mygroups.extend(egroups)
		inc_pgroups = set()
		for x in mygroups:
			if x[:1] == "-":
				if x == "-*":
					inc_pgroups.clear()
				else:
					inc_pgroups.discard(x[1:])
			else:
				inc_pgroups.add(x)
		return inc_pgroups


	@staticmethod
	def _getMissingKeywords(cpv, pgroups, mygroups):
		"""Determines the missing keywords

		@param pgroups: The pkg keywords accepted
		@type pgroups: list
		@param mygroups: The ebuild keywords
		@type mygroups: list
		"""
		match = False
		hasstable = False
		hastesting = False
		for gp in mygroups:
			if gp == "*":
				match = True
				break
			elif gp == "~*":
				hastesting = True
				for x in pgroups:
					if x[:1] == "~":
						match = True
						break
				if match:
					break
			elif gp in pgroups:
				match = True
				break
			elif gp.startswith("~"):
				hastesting = True
			elif not gp.startswith("-"):
				hasstable = True
		if not match and \
			((hastesting and "~*" in pgroups) or \
			(hasstable and "*" in pgroups) or "**" in pgroups):
			match = True
		if match:
			missing = []
		else:
			if not mygroups:
				# If KEYWORDS is empty then we still have to return something
				# in order to distinguish from the case of "none missing".
				mygroups = ["**"]
			missing = mygroups
		return missing


	def getPKeywords(self, cpv, slot, repo, global_accept_keywords):
		"""Gets any package.keywords settings for cp for the given
		cpv, slot and repo

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param repo: The repository name of the package
		@type repo: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@rtype: List
		@return: list of KEYWORDS that have been accepted
		"""

		pgroups = global_accept_keywords.split()
		try:
			cpv.slot
		except AttributeError:
			cpv = _pkg_str(cpv, slot=slot, repo=repo)
		cp = cpv.cp

		unmaskgroups = []
		if self._p_accept_keywords:
			accept_keywords_defaults = tuple('~' + keyword for keyword in \
				pgroups if keyword[:1] not in "~-")
			for d in self._p_accept_keywords:
				cpdict = d.get(cp)
				if cpdict:
					pkg_accept_keywords = \
						ordered_by_atom_specificity(cpdict, cpv)
					if pkg_accept_keywords:
						for x in pkg_accept_keywords:
							if not x:
								x = accept_keywords_defaults
							unmaskgroups.extend(x)

		pkgdict = self.pkeywordsdict.get(cp)
		if pkgdict:
			pkg_accept_keywords = \
				ordered_by_atom_specificity(pkgdict, cpv)
			if pkg_accept_keywords:
				for x in pkg_accept_keywords:
					unmaskgroups.extend(x)
		return unmaskgroups
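
Both getPKeywords() and _getEgroups() above feed into an incremental stacking of accepted keywords: "-*" clears everything accepted so far, "-kw" drops a single keyword, and a plain keyword is added. A standalone sketch of that stacking with invented inputs:

def stack_accept_keywords(groups):
    # Mirrors the loop in _getEgroups(): incremental set construction.
    accepted = set()
    for kw in groups:
        if kw == "-*":
            accepted.clear()
        elif kw.startswith("-"):
            accepted.discard(kw[1:])
        else:
            accepted.add(kw)
    return accepted

print(stack_accept_keywords(["amd64", "~amd64", "-~amd64"]))  # {'amd64'}
print(stack_accept_keywords(["amd64", "-*", "~arm64"]))       # {'~arm64'}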
Example #28
    def __init__(self,
                 repositories,
                 profiles,
                 abs_user_config,
                 user_config=True,
                 strict_umatched_removal=False):
        self._punmaskdict = ExtendedAtomDict(list)
        self._pmaskdict = ExtendedAtomDict(list)
        # Preserves atoms that are eliminated by negative
        # incrementals in user_pkgmasklines.
        self._pmaskdict_raw = ExtendedAtomDict(list)

        #Read profile/package.mask from every repo.
        #Repositories inherit masks from their parent profiles and
        #are able to remove mask from them with -atoms.
        #Such a removal affects only the current repo, but not the parent.
        #Add ::repo specs to every atom to make sure atoms only affect
        #packages from the current repo.

        # Cache the repository-wide package.mask files as a particular
        # repo may be often referenced by others as the master.
        pmask_cache = {}

        def grab_pmask(loc, repo_config):
            if loc not in pmask_cache:
                path = os.path.join(loc, 'profiles', 'package.mask')
                pmask_cache[loc] = grabfile_package(
                    path,
                    recursive=repo_config.portage1_profiles,
                    remember_source_file=True,
                    verify_eapi=True)
                if repo_config.portage1_profiles_compat and os.path.isdir(
                        path):
                    warnings.warn(
                        _("Repository '%(repo_name)s' is implicitly using "
                          "'portage-1' profile format in its profiles/package.mask, but "
                          "the repository profiles are not marked as that format.  This will break "
                          "in the future.  Please either convert the following paths "
                          "to files, or add\nprofile-formats = portage-1\nto the "
                          "repositories layout.conf.\n") %
                        dict(repo_name=repo_config.name))

            return pmask_cache[loc]

        repo_pkgmasklines = []
        for repo in repositories.repos_with_profiles():
            lines = []
            repo_lines = grab_pmask(repo.location, repo)
            removals = frozenset(line[0][1:] for line in repo_lines
                                 if line[0][:1] == "-")
            matched_removals = set()
            for master in repo.masters:
                master_lines = grab_pmask(master.location, master)
                for line in master_lines:
                    if line[0] in removals:
                        matched_removals.add(line[0])
                # Since we don't stack masters recursively, there aren't any
                # atoms earlier in the stack to be matched by negative atoms in
                # master_lines. Also, repo_lines may contain negative atoms
                # that are intended to negate atoms from a different master
                # than the one with which we are currently stacking. Therefore,
                # we disable warn_for_unmatched_removal here (see bug #386569).
                lines.append(
                    stack_lists([master_lines, repo_lines],
                                incremental=1,
                                remember_source_file=True,
                                warn_for_unmatched_removal=False))

            # It's safe to warn for unmatched removal if masters have not
            # been overridden by the user, which is guaranteed when
            # user_config is false (when called by repoman).
            if repo.masters:
                unmatched_removals = removals.difference(matched_removals)
                if unmatched_removals and not user_config:
                    source_file = os.path.join(repo.location, "profiles",
                                               "package.mask")
                    unmatched_removals = list(unmatched_removals)
                    if len(unmatched_removals) > 3:
                        writemsg(_(
                            "--- Unmatched removal atoms in %s: %s and %s more\n"
                        ) % (source_file, ", ".join(
                            "-" + x for x in unmatched_removals[:3]),
                             len(unmatched_removals) - 3),
                                 noiselevel=-1)
                    else:
                        writemsg(
                            _("--- Unmatched removal atom(s) in %s: %s\n") %
                            (source_file, ", ".join(
                                "-" + x for x in unmatched_removals)),
                            noiselevel=-1)

            else:
                lines.append(
                    stack_lists([repo_lines],
                                incremental=1,
                                remember_source_file=True,
                                warn_for_unmatched_removal=not user_config,
                                strict_warn_for_unmatched_removal=
                                strict_umatched_removal))
            repo_pkgmasklines.extend(
                append_repo(stack_lists(lines),
                            repo.name,
                            remember_source_file=True))

        repo_pkgunmasklines = []
        for repo in repositories.repos_with_profiles():
            if not repo.portage1_profiles:
                continue
            repo_lines = grabfile_package(os.path.join(repo.location, "profiles", "package.unmask"), \
             recursive=1, remember_source_file=True, verify_eapi=True)
            lines = stack_lists([repo_lines], incremental=1, \
             remember_source_file=True, warn_for_unmatched_removal=True,
             strict_warn_for_unmatched_removal=strict_umatched_removal)
            repo_pkgunmasklines.extend(
                append_repo(lines, repo.name, remember_source_file=True))

        #Read package.mask from the user's profile. Stack them in the end
        #to allow profiles to override masks from their parent profiles.
        profile_pkgmasklines = []
        profile_pkgunmasklines = []
        for x in profiles:
            profile_pkgmasklines.append(
                grabfile_package(os.path.join(x.location, "package.mask"),
                                 recursive=x.portage1_directories,
                                 remember_source_file=True,
                                 verify_eapi=True))
            if x.portage1_directories:
                profile_pkgunmasklines.append(
                    grabfile_package(os.path.join(x.location,
                                                  "package.unmask"),
                                     recursive=x.portage1_directories,
                                     remember_source_file=True,
                                     verify_eapi=True))
        profile_pkgmasklines = stack_lists(profile_pkgmasklines, incremental=1, \
         remember_source_file=True, warn_for_unmatched_removal=True,
         strict_warn_for_unmatched_removal=strict_umatched_removal)
        profile_pkgunmasklines = stack_lists(profile_pkgunmasklines, incremental=1, \
         remember_source_file=True, warn_for_unmatched_removal=True,
         strict_warn_for_unmatched_removal=strict_umatched_removal)

        #Read /etc/portage/package.mask. Don't stack it to allow the user to
        #remove mask atoms from everywhere with -atoms.
        user_pkgmasklines = []
        user_pkgunmasklines = []
        if user_config:
            user_pkgmasklines = grabfile_package(
             os.path.join(abs_user_config, "package.mask"), recursive=1, \
             allow_wildcard=True, allow_repo=True, remember_source_file=True, verify_eapi=False)
            user_pkgunmasklines = grabfile_package(
             os.path.join(abs_user_config, "package.unmask"), recursive=1, \
             allow_wildcard=True, allow_repo=True, remember_source_file=True, verify_eapi=False)

        #Stack everything together. At this point, only user_pkgmasklines may contain -atoms.
        #Don't warn for unmatched -atoms here, since we don't do it for any other user config file.
        raw_pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines], \
         incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
        pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines, user_pkgmasklines], \
         incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
        pkgunmasklines = stack_lists([repo_pkgunmasklines, profile_pkgunmasklines, user_pkgunmasklines], \
         incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)

        for x, source_file in raw_pkgmasklines:
            self._pmaskdict_raw.setdefault(x.cp, []).append(x)

        for x, source_file in pkgmasklines:
            self._pmaskdict.setdefault(x.cp, []).append(x)

        for x, source_file in pkgunmasklines:
            self._punmaskdict.setdefault(x.cp, []).append(x)

        for d in (self._pmaskdict_raw, self._pmaskdict, self._punmaskdict):
            for k, v in d.items():
                d[k] = tuple(v)
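
As the comments above explain, a repository may cancel a mask inherited from one of its masters by listing the atom with a leading "-", and the cancellation affects only that repository. Below is a simplified standalone illustration of the negation that the stack_lists() call performs; real dependency atoms, ::repo tagging and source-file tracking are omitted, and the file contents are invented.

def stack_mask_lines(master_lines, repo_lines):
    # Later entries are stacked on top of earlier ones; "-atom" removes a
    # previously stacked atom instead of adding one.
    masked = []
    for line in master_lines + repo_lines:
        if line.startswith("-"):
            target = line[1:]
            masked = [x for x in masked if x != target]
        else:
            masked.append(line)
    return masked

master = ["dev-libs/foo", ">=net-misc/bar-2"]
repo = ["-dev-libs/foo", "sys-apps/baz"]      # the repo unmasks foo and adds baz
print(stack_mask_lines(master, repo))         # ['>=net-misc/bar-2', 'sys-apps/baz']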
Example #29
class KeywordsManager(object):
    """Manager class to handle keywords processing and validation"""
    def __init__(self,
                 profiles,
                 abs_user_config,
                 user_config=True,
                 global_accept_keywords=""):
        self._pkeywords_list = []
        rawpkeywords = [grabdict_package(
         os.path.join(x.location, "package.keywords"),
         recursive=x.portage1_directories,
         verify_eapi=True) \
         for x in profiles]
        for pkeyworddict in rawpkeywords:
            if not pkeyworddict:
                # Omit non-existent files from the stack.
                continue
            cpdict = {}
            for k, v in pkeyworddict.items():
                cpdict.setdefault(k.cp, {})[k] = v
            self._pkeywords_list.append(cpdict)
        self._pkeywords_list = tuple(self._pkeywords_list)

        self._p_accept_keywords = []
        raw_p_accept_keywords = [grabdict_package(
         os.path.join(x.location, "package.accept_keywords"),
         recursive=x.portage1_directories,
         verify_eapi=True) \
         for x in profiles]
        for d in raw_p_accept_keywords:
            if not d:
                # Omit non-existent files from the stack.
                continue
            cpdict = {}
            for k, v in d.items():
                cpdict.setdefault(k.cp, {})[k] = tuple(v)
            self._p_accept_keywords.append(cpdict)
        self._p_accept_keywords = tuple(self._p_accept_keywords)

        self.pkeywordsdict = ExtendedAtomDict(dict)

        if user_config:
            pkgdict = grabdict_package(os.path.join(abs_user_config,
                                                    "package.keywords"),
                                       recursive=1,
                                       allow_wildcard=True,
                                       allow_repo=True,
                                       verify_eapi=False)

            for k, v in grabdict_package(os.path.join(
                    abs_user_config, "package.accept_keywords"),
                                         recursive=1,
                                         allow_wildcard=True,
                                         allow_repo=True,
                                         verify_eapi=False).items():
                pkgdict.setdefault(k, []).extend(v)

            accept_keywords_defaults = global_accept_keywords.split()
            accept_keywords_defaults = tuple('~' + keyword for keyword in \
             accept_keywords_defaults if keyword[:1] not in "~-")
            for k, v in pkgdict.items():
                # default to ~arch if no specific keyword is given
                if not v:
                    v = accept_keywords_defaults
                else:
                    v = tuple(v)
                self.pkeywordsdict.setdefault(k.cp, {})[k] = v

    def getKeywords(self, cpv, slot, keywords, repo):
        try:
            cpv.slot
        except AttributeError:
            pkg = _pkg_str(cpv, slot=slot, repo=repo)
        else:
            pkg = cpv
        cp = pkg.cp
        keywords = [[x for x in keywords.split() if x != "-*"]]
        for pkeywords_dict in self._pkeywords_list:
            cpdict = pkeywords_dict.get(cp)
            if cpdict:
                pkg_keywords = ordered_by_atom_specificity(cpdict, pkg)
                if pkg_keywords:
                    keywords.extend(pkg_keywords)
        return stack_lists(keywords, incremental=True)

    def isStable(self, pkg, global_accept_keywords, backuped_accept_keywords):
        mygroups = self.getKeywords(pkg, None, pkg._metadata["KEYWORDS"], None)
        pgroups = global_accept_keywords.split()

        unmaskgroups = self.getPKeywords(pkg, None, None,
                                         global_accept_keywords)
        pgroups.extend(unmaskgroups)

        egroups = backuped_accept_keywords.split()

        if unmaskgroups or egroups:
            pgroups = self._getEgroups(egroups, pgroups)
        else:
            pgroups = set(pgroups)

        if self._getMissingKeywords(pkg, pgroups, mygroups):
            return False

        if pkg.cpv._settings.local_config:
            # If replacing all keywords with unstable variants would mask the
            # package, then it's considered stable.
            unstable = []
            for kw in mygroups:
                if kw[:1] != "~":
                    kw = "~" + kw
                unstable.append(kw)

            return bool(self._getMissingKeywords(pkg, pgroups, set(unstable)))
        else:
            # For repoman, if the package has an effective stable keyword that
            # intersects with the effective ACCEPT_KEYWORDS for the current
            # profile, then consider it stable.
            for kw in pgroups:
                if kw[:1] != "~":
                    if kw in mygroups or '*' in mygroups:
                        return True
                    if kw == '*':
                        for x in mygroups:
                            if x[:1] != "~":
                                return True
            return False

    def getMissingKeywords(self, cpv, slot, keywords, repo,
                           global_accept_keywords, backuped_accept_keywords):
        """
		Take a package and return a list of any KEYWORDS that the user may
		need to accept for the given package. If the KEYWORDS are empty
		and the ** keyword has not been accepted, the returned list will
		contain ** alone (in order to distinguish from the case of "none
		missing").

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param keywords: The 'KEYWORDS' key from the raw package metadata
		@type keywords: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@param backuped_accept_keywords: ACCEPT_KEYWORDS from the backup env
		@type backuped_accept_keywords: String
		@rtype: List
		@return: A list of KEYWORDS that have not been accepted.
		"""

        mygroups = self.getKeywords(cpv, slot, keywords, repo)
        # Repoman may modify this attribute as necessary.
        pgroups = global_accept_keywords.split()

        unmaskgroups = self.getPKeywords(cpv, slot, repo,
                                         global_accept_keywords)
        pgroups.extend(unmaskgroups)

        # Hack: Need to check the env directly here as otherwise stacking
        # doesn't work properly as negative values are lost in the config
        # object (bug #139600)
        egroups = backuped_accept_keywords.split()

        if unmaskgroups or egroups:
            pgroups = self._getEgroups(egroups, pgroups)
        else:
            pgroups = set(pgroups)

        return self._getMissingKeywords(cpv, pgroups, mygroups)

    def getRawMissingKeywords(self, cpv, slot, keywords, repo,
                              global_accept_keywords):
        """
		Take a package and return a list of any KEYWORDS that the user may
		need to accept for the given package. If the KEYWORDS are empty,
		the returned list will contain ** alone (in order to distinguish
		from the case of "none missing").  This DOES NOT apply any user config
		package.accept_keywords acceptance.

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param keywords: The 'KEYWORDS' key from the raw package metadata
		@type keywords: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@rtype: List
		@return: lists of KEYWORDS that have not been accepted
		and the keywords it looked for.
		"""

        mygroups = self.getKeywords(cpv, slot, keywords, repo)
        pgroups = global_accept_keywords.split()
        pgroups = set(pgroups)
        return self._getMissingKeywords(cpv, pgroups, mygroups)

    @staticmethod
    def _getEgroups(egroups, mygroups):
        """gets any keywords defined in the environment

		@param egroups: ACCEPT_KEYWORDS from the backup environment
		@type egroups: List
		@param mygroups: the keywords accepted so far
		@type mygroups: List
		@rtype: Set
		@return: the set of keywords accepted after incremental stacking
		"""
        mygroups = list(mygroups)
        mygroups.extend(egroups)
        inc_pgroups = set()
        for x in mygroups:
            if x[:1] == "-":
                if x == "-*":
                    inc_pgroups.clear()
                else:
                    inc_pgroups.discard(x[1:])
            else:
                inc_pgroups.add(x)
        return inc_pgroups

    @staticmethod
    def _getMissingKeywords(cpv, pgroups, mygroups):
        """Determines the missing keywords

		@param pgroups: The pkg keywords accepted
		@type pgroups: list
		@param mygroups: The ebuild keywords
		@type mygroups: list
		"""
        match = False
        hasstable = False
        hastesting = False
        for gp in mygroups:
            if gp == "*":
                match = True
                break
            elif gp == "~*":
                hastesting = True
                for x in pgroups:
                    if x[:1] == "~":
                        match = True
                        break
                if match:
                    break
            elif gp in pgroups:
                match = True
                break
            elif gp.startswith("~"):
                hastesting = True
            elif not gp.startswith("-"):
                hasstable = True
        if not match and \
         ((hastesting and "~*" in pgroups) or \
         (hasstable and "*" in pgroups) or "**" in pgroups):
            match = True
        if match:
            missing = []
        else:
            if not mygroups:
                # If KEYWORDS is empty then we still have to return something
                # in order to distinguish from the case of "none missing".
                mygroups = ["**"]
            missing = mygroups
        return missing

    def getPKeywords(self, cpv, slot, repo, global_accept_keywords):
        """Gets any package.keywords settings for cp for the given
		cpv, slot and repo

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param repo: The repository name of the package
		@type repo: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@rtype: List
		@return: list of KEYWORDS that have been accepted
		"""

        pgroups = global_accept_keywords.split()
        try:
            cpv.slot
        except AttributeError:
            cpv = _pkg_str(cpv, slot=slot, repo=repo)
        cp = cpv.cp

        unmaskgroups = []
        if self._p_accept_keywords:
            accept_keywords_defaults = tuple('~' + keyword for keyword in \
             pgroups if keyword[:1] not in "~-")
            for d in self._p_accept_keywords:
                cpdict = d.get(cp)
                if cpdict:
                    pkg_accept_keywords = \
                     ordered_by_atom_specificity(cpdict, cpv)
                    if pkg_accept_keywords:
                        for x in pkg_accept_keywords:
                            if not x:
                                x = accept_keywords_defaults
                            unmaskgroups.extend(x)

        pkgdict = self.pkeywordsdict.get(cp)
        if pkgdict:
            pkg_accept_keywords = \
             ordered_by_atom_specificity(pkgdict, cpv)
            if pkg_accept_keywords:
                for x in pkg_accept_keywords:
                    unmaskgroups.extend(x)
        return unmaskgroups
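
The else branch of isStable() above covers the repoman case: without local configuration, a package counts as stable when some accepted stable keyword intersects the ebuild's stable keywords, with "*" acting as a wildcard on either side. A standalone sketch of that branch with invented inputs:

def repoman_is_stable(accepted, ebuild_keywords):
    for kw in accepted:
        if kw[:1] != "~":
            if kw in ebuild_keywords or "*" in ebuild_keywords:
                return True
            if kw == "*" and any(x[:1] != "~" for x in ebuild_keywords):
                return True
    return False

print(repoman_is_stable({"amd64"}, ["amd64", "~arm64"]))  # True
print(repoman_is_stable({"~amd64"}, ["~amd64"]))          # False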
Example #30
class MaskManager(object):
    def __init__(self, pmask_locations, abs_user_config, user_config=True):
        self._punmaskdict = ExtendedAtomDict(list)
        self._pmaskdict = ExtendedAtomDict(list)

        pkgmasklines = []
        pkgunmasklines = []
        for x in pmask_locations:
            pkgmasklines.append(
                grabfile_package(os.path.join(x, "package.mask"), recursive=1))
            pkgunmasklines.append(
                grabfile_package(os.path.join(x, "package.unmask"),
                                 recursive=1))

        if user_config:
            pkgmasklines.append(
                grabfile_package(os.path.join(abs_user_config, "package.mask"),
                                 recursive=1,
                                 allow_wildcard=True))
            pkgunmasklines.append(
                grabfile_package(os.path.join(abs_user_config,
                                              "package.unmask"),
                                 recursive=1,
                                 allow_wildcard=True))

        pkgmasklines = stack_lists(pkgmasklines, incremental=1)
        pkgunmasklines = stack_lists(pkgunmasklines, incremental=1)

        for x in pkgmasklines:
            self._pmaskdict.setdefault(x.cp, []).append(x)

        for x in pkgunmasklines:
            self._punmaskdict.setdefault(x.cp, []).append(x)

    def getMaskAtom(self, cpv, slot):
        """
		Take a package and return a matching package.mask atom, or None if no
		such atom exists or it has been cancelled by package.unmask. PROVIDE
		is not checked, so atoms will not be found for old-style virtuals.

		@param cpv: The package name
		@type cpv: String
		@param slot: The package's slot
		@type slot: String
		@rtype: String
		@return: A matching atom string or None if one is not found.
		"""

        cp = cpv_getkey(cpv)
        mask_atoms = self._pmaskdict.get(cp)
        if mask_atoms:
            pkg_list = ["%s:%s" % (cpv, slot)]
            unmask_atoms = self._punmaskdict.get(cp)
            for x in mask_atoms:
                if not match_from_list(x, pkg_list):
                    continue
                if unmask_atoms:
                    for y in unmask_atoms:
                        if match_from_list(y, pkg_list):
                            return None
                return x
        return None
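
getMaskAtom() above looks up the mask atoms recorded for the package's category/package key and returns the first one that matches the cpv, unless a package.unmask atom also matches and cancels it. Below is a standalone sketch of that lookup; atom_matches() is a hypothetical stand-in for match_from_list(), and the atoms are plain prefixes rather than real dependency atoms.

def atom_matches(atom, cpv_slot):
    # Stand-in for match_from_list(); real atoms support version operators,
    # slots, wildcards and ::repo suffixes.
    return cpv_slot.startswith(atom)

def get_mask_atom(pmaskdict, punmaskdict, cp, cpv_slot):
    for atom in pmaskdict.get(cp, []):
        if not atom_matches(atom, cpv_slot):
            continue
        for unmask in punmaskdict.get(cp, []):
            if atom_matches(unmask, cpv_slot):
                return None                   # mask cancelled by package.unmask
        return atom                           # first matching, uncancelled mask
    return None

masks = {"dev-libs/foo": ["dev-libs/foo"]}
unmasks = {"dev-libs/foo": []}
print(get_mask_atom(masks, unmasks, "dev-libs/foo", "dev-libs/foo-1.0:0"))  # dev-libs/foo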
Example #31
    def __init__(self,
                 profiles,
                 abs_user_config,
                 user_config=True,
                 global_accept_keywords=""):
        self._pkeywords_list = []
        rawpkeywords = [
            grabdict_package(
                os.path.join(x.location, "package.keywords"),
                recursive=x.portage1_directories,
                verify_eapi=True,
                eapi=x.eapi,
                eapi_default=None,
                allow_repo=allow_profile_repo_deps(x),
                allow_build_id=x.allow_build_id,
            ) for x in profiles
        ]
        for pkeyworddict in rawpkeywords:
            if not pkeyworddict:
                # Omit non-existent files from the stack.
                continue
            cpdict = {}
            for k, v in pkeyworddict.items():
                cpdict.setdefault(k.cp, {})[k] = v
            self._pkeywords_list.append(cpdict)
        self._pkeywords_list = tuple(self._pkeywords_list)

        self._p_accept_keywords = []
        raw_p_accept_keywords = [
            grabdict_package(
                os.path.join(x.location, "package.accept_keywords"),
                recursive=x.portage1_directories,
                verify_eapi=True,
                eapi=x.eapi,
                eapi_default=None,
                allow_repo=allow_profile_repo_deps(x),
            ) for x in profiles
        ]
        for d in raw_p_accept_keywords:
            if not d:
                # Omit non-existent files from the stack.
                continue
            cpdict = {}
            for k, v in d.items():
                cpdict.setdefault(k.cp, {})[k] = tuple(v)
            self._p_accept_keywords.append(cpdict)
        self._p_accept_keywords = tuple(self._p_accept_keywords)

        self.pkeywordsdict = ExtendedAtomDict(dict)

        if user_config:
            user_accept_kwrds_path = os.path.join(abs_user_config,
                                                  "package.accept_keywords")
            user_kwrds_path = os.path.join(abs_user_config, "package.keywords")
            pkgdict = grabdict_package(
                user_kwrds_path,
                recursive=1,
                allow_wildcard=True,
                allow_repo=True,
                verify_eapi=False,
                allow_build_id=True,
            )

            if pkgdict and portage._internal_caller:
                warnings.warn(
                    _("%s is deprecated, use %s instead") %
                    (user_kwrds_path, user_accept_kwrds_path),
                    UserWarning,
                )

            for k, v in grabdict_package(
                    user_accept_kwrds_path,
                    recursive=1,
                    allow_wildcard=True,
                    allow_repo=True,
                    verify_eapi=False,
                    allow_build_id=True,
            ).items():
                pkgdict.setdefault(k, []).extend(v)

            accept_keywords_defaults = global_accept_keywords.split()
            accept_keywords_defaults = tuple(
                "~" + keyword for keyword in accept_keywords_defaults
                if keyword[:1] not in "~-")
            for k, v in pkgdict.items():
                # default to ~arch if no specific keyword is given
                if not v:
                    v = accept_keywords_defaults
                else:
                    v = tuple(v)
                self.pkeywordsdict.setdefault(k.cp, {})[k] = v
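
When a package.accept_keywords entry lists no keywords at all, the loop above falls back to the testing ("~arch") variants of the globally accepted architectures. A standalone sketch of that default; the ACCEPT_KEYWORDS value and entries are invented.

def default_accept_keywords(global_accept_keywords, per_package_value):
    # Same rule as accept_keywords_defaults above: prepend "~" to every plain
    # arch keyword, skipping entries that are already "~arch" or negations.
    defaults = tuple("~" + kw for kw in global_accept_keywords.split()
                     if kw[:1] not in "~-")
    return tuple(per_package_value) if per_package_value else defaults

print(default_accept_keywords("amd64 arm64", []))          # ('~amd64', '~arm64')
print(default_accept_keywords("amd64", ["**", "~amd64"]))  # ('**', '~amd64')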
Example #32
class LicenseManager(object):

	def __init__(self, license_group_locations, abs_user_config, user_config=True):

		self._accept_license_str = None
		self._accept_license = None
		self._license_groups = {}
		self._plicensedict = ExtendedAtomDict(dict)
		self._undef_lic_groups = set()

		if user_config:
			license_group_locations = list(license_group_locations) + [abs_user_config]

		self._read_license_groups(license_group_locations)

		if user_config:
			self._read_user_config(abs_user_config)

	def _read_user_config(self, abs_user_config):
		licdict = grabdict_package(os.path.join(
			abs_user_config, "package.license"), recursive=1, allow_wildcard=True)
		for k, v in licdict.items():
			self._plicensedict.setdefault(k.cp, {})[k] = \
				self.expandLicenseTokens(v)

	def _read_license_groups(self, locations):
		for loc in locations:
			for k, v in grabdict(
				os.path.join(loc, "license_groups")).items():
				self._license_groups.setdefault(k, []).extend(v)

	def extract_global_changes(self, old=""):
		ret = old
		atom_license_map = self._plicensedict.get("*/*")
		if atom_license_map is not None:
			v = atom_license_map.pop("*/*", None)
			if v is not None:
				ret = " ".join(v)
				if old:
					ret = old + " " + ret
				if not atom_license_map:
					#No tokens left in atom_license_map, remove it.
					del self._plicensedict["*/*"]
		return ret
	
	def expandLicenseTokens(self, tokens):
		""" Take a token from ACCEPT_LICENSE or package.license and expand it
		if it's a group token (indicated by @) or just return it if it's not a
		group.  If a group is negated then negate all group elements."""
		expanded_tokens = []
		for x in tokens:
			expanded_tokens.extend(self._expandLicenseToken(x, None))
		return expanded_tokens

	def _expandLicenseToken(self, token, traversed_groups):
		negate = False
		rValue = []
		if token.startswith("-"):
			negate = True
			license_name = token[1:]
		else:
			license_name = token
		if not license_name.startswith("@"):
			rValue.append(token)
			return rValue
		group_name = license_name[1:]
		if traversed_groups is None:
			traversed_groups = set()
		license_group = self._license_groups.get(group_name)
		if group_name in traversed_groups:
			writemsg(_("Circular license group reference"
				" detected in '%s'\n") % group_name, noiselevel=-1)
			rValue.append("@"+group_name)
		elif license_group:
			traversed_groups.add(group_name)
			for l in license_group:
				if l.startswith("-"):
					writemsg(_("Skipping invalid element %s"
						" in license group '%s'\n") % (l, group_name),
						noiselevel=-1)
				else:
					rValue.extend(self._expandLicenseToken(l, traversed_groups))
		else:
			if self._license_groups and \
				group_name not in self._undef_lic_groups:
				self._undef_lic_groups.add(group_name)
				writemsg(_("Undefined license group '%s'\n") % group_name,
					noiselevel=-1)
			rValue.append("@"+group_name)
		if negate:
			rValue = ["-" + token for token in rValue]
		return rValue

	def _getPkgAcceptLicense(self, cpv, slot):
		"""
		Get an ACCEPT_LICENSE list, accounting for package.license.
		"""
		accept_license = self._accept_license
		cp = cpv_getkey(cpv)
		cpdict = self._plicensedict.get(cp)
		if cpdict:
			cpv_slot = "%s:%s" % (cpv, slot)
			plicence_list = ordered_by_atom_specificity(cpdict, cpv_slot)
			if plicence_list:
				accept_license = list(self._accept_license)
				for x in plicence_list:
					accept_license.extend(x)
		return accept_license

	def get_prunned_accept_license(self, cpv, use, lic, slot):
		"""
		Generate a pruned version of ACCEPT_LICENSE, by intersection with
		LICENSE. This is required since otherwise ACCEPT_LICENSE might be
		too big (bigger than ARG_MAX), causing execve() calls to fail with
		E2BIG errors as in bug #262647.
		"""
		try:
			licenses = set(use_reduce(lic, uselist=use, flat=True))
		except InvalidDependString:
			licenses = set()
		licenses.discard('||')

		accept_license = self._getPkgAcceptLicense(cpv, slot)

		if accept_license:
			acceptable_licenses = set()
			for x in accept_license:
				if x == '*':
					acceptable_licenses.update(licenses)
				elif x == '-*':
					acceptable_licenses.clear()
				elif x[:1] == '-':
					acceptable_licenses.discard(x[1:])
				elif x in licenses:
					acceptable_licenses.add(x)

			licenses = acceptable_licenses
		return ' '.join(sorted(licenses))

	def getMissingLicenses(self, cpv, use, lic, slot):
		"""
		Take a LICENSE string and return a list of any licenses that the user
		may need to accept for the given package.  The returned list will not
		contain any licenses that have already been accepted.  This method
		can throw an InvalidDependString exception.

		@param cpv: The package name (for package.license support)
		@type cpv: String
		@param use: "USE" from the cpv's metadata
		@type use: String
		@param lic: "LICENSE" from the cpv's metadata
		@type lic: String
		@param slot: "SLOT" from the cpv's metadata
		@type slot: String
		@rtype: List
		@return: A list of licenses that have not been accepted.
		"""

		licenses = set(use_reduce(lic, matchall=1, flat=True))
		licenses.discard('||')

		acceptable_licenses = set()
		for x in self._getPkgAcceptLicense(cpv, slot):
			if x == '*':
				acceptable_licenses.update(licenses)
			elif x == '-*':
				acceptable_licenses.clear()
			elif x[:1] == '-':
				acceptable_licenses.discard(x[1:])
			else:
				acceptable_licenses.add(x)

		license_str = lic
		if "?" in license_str:
			use = use.split()
		else:
			use = []

		license_struct = use_reduce(license_str, uselist=use, opconvert=True)
		return self._getMaskedLicenses(license_struct, acceptable_licenses)

	def _getMaskedLicenses(self, license_struct, acceptable_licenses):
		if not license_struct:
			return []
		if license_struct[0] == "||":
			ret = []
			for element in license_struct[1:]:
				if isinstance(element, list):
					if element:
						tmp = self._getMaskedLicenses(element, acceptable_licenses)
						if not tmp:
							return []
						ret.extend(tmp)
				else:
					if element in acceptable_licenses:
						return []
					ret.append(element)
			# Return all masked licenses, since we don't know which combination
			# (if any) the user will decide to unmask.
			return ret

		ret = []
		for element in license_struct:
			if isinstance(element, list):
				if element:
					ret.extend(self._getMaskedLicenses(element,
						acceptable_licenses))
			else:
				if element not in acceptable_licenses:
					ret.append(element)
		return ret

	def set_accept_license_str(self, accept_license_str):
		if accept_license_str != self._accept_license_str:
			self._accept_license_str = accept_license_str
			self._accept_license = tuple(self.expandLicenseTokens(accept_license_str.split()))
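
expandLicenseTokens() above replaces "@group" tokens with the group's member licenses, negates every member when the group token itself is negated, and guards against circular group references. Below is a simplified standalone sketch of that expansion; the warning messages and the skipping of "-" members inside groups are left out, and the group contents are invented.

def _expand_token(tok, groups, seen):
    negate = tok.startswith("-")
    name = tok[1:] if negate else tok
    if not name.startswith("@"):
        return [tok]
    group = name[1:]
    if group in seen or group not in groups:
        # Circular reference or undefined group: keep the token as-is.
        return [("-" if negate else "") + "@" + group]
    seen.add(group)
    out = []
    for member in groups[group]:
        out.extend(_expand_token(member, groups, seen))
    return ["-" + x for x in out] if negate else out

def expand_license_tokens(tokens, groups):
    expanded = []
    for tok in tokens:
        expanded.extend(_expand_token(tok, groups, set()))
    return expanded

groups = {"FSF-APPROVED": ["GPL-2", "GPL-3", "@OSI-APPROVED"],
          "OSI-APPROVED": ["MIT", "BSD"]}
print(expand_license_tokens(["-@OSI-APPROVED", "@FSF-APPROVED"], groups))
# ['-MIT', '-BSD', 'GPL-2', 'GPL-3', 'MIT', 'BSD']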
Example #33
class KeywordsManager(object):
	"""Manager class to handle keywords processing and validation"""

	def __init__(self, profiles, abs_user_config, user_config=True,
				global_accept_keywords=""):
		self._pkeywords_list = []
		rawpkeywords = [grabdict_package(
			os.path.join(x.location, "package.keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True) \
			for x in profiles]
		for pkeyworddict in rawpkeywords:
			if not pkeyworddict:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in pkeyworddict.items():
				cpdict.setdefault(k.cp, {})[k] = v
			self._pkeywords_list.append(cpdict)
		self._pkeywords_list = tuple(self._pkeywords_list)

		self._p_accept_keywords = []
		raw_p_accept_keywords = [grabdict_package(
			os.path.join(x.location, "package.accept_keywords"),
			recursive=x.portage1_directories,
			verify_eapi=True) \
			for x in profiles]
		for d in raw_p_accept_keywords:
			if not d:
				# Omit non-existent files from the stack.
				continue
			cpdict = {}
			for k, v in d.items():
				cpdict.setdefault(k.cp, {})[k] = tuple(v)
			self._p_accept_keywords.append(cpdict)
		self._p_accept_keywords = tuple(self._p_accept_keywords)

		self.pkeywordsdict = ExtendedAtomDict(dict)

		if user_config:
			pkgdict = grabdict_package(
				os.path.join(abs_user_config, "package.keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False)

			for k, v in grabdict_package(
				os.path.join(abs_user_config, "package.accept_keywords"),
				recursive=1, allow_wildcard=True, allow_repo=True,
				verify_eapi=False).items():
				pkgdict.setdefault(k, []).extend(v)

			accept_keywords_defaults = global_accept_keywords.split()
			accept_keywords_defaults = tuple('~' + keyword for keyword in \
				accept_keywords_defaults if keyword[:1] not in "~-")
			for k, v in pkgdict.items():
				# default to ~arch if no specific keyword is given
				if not v:
					v = accept_keywords_defaults
				else:
					v = tuple(v)
				self.pkeywordsdict.setdefault(k.cp, {})[k] = v


	def getKeywords(self, cpv, slot, keywords, repo):
		cp = cpv_getkey(cpv)
		pkg = "".join((cpv, _slot_separator, slot))
		if repo and repo != Package.UNKNOWN_REPO:
			pkg = "".join((pkg, _repo_separator, repo))
		keywords = [[x for x in keywords.split() if x != "-*"]]
		for pkeywords_dict in self._pkeywords_list:
			cpdict = pkeywords_dict.get(cp)
			if cpdict:
				pkg_keywords = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_keywords:
					keywords.extend(pkg_keywords)
		return stack_lists(keywords, incremental=True)


	def getMissingKeywords(self,
							cpv,
							slot,
							keywords,
							repo,
							global_accept_keywords,
							backuped_accept_keywords):
		"""
		Take a package and return a list of any KEYWORDS that the user may
		need to accept for the given package. If the KEYWORDS are empty
		and the ** keyword has not been accepted, the returned list will
		contain ** alone (in order to distinguish from the case of "none
		missing").

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param keywords: The 'KEYWORDS' key from the raw package metadata
		@type keywords: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@param backuped_accept_keywords: ACCEPT_KEYWORDS from the backup env
		@type backuped_accept_keywords: String
		@rtype: List
		@return: A list of KEYWORDS that have not been accepted.
		"""

		mygroups = self.getKeywords(cpv, slot, keywords, repo)
		# Repoman may modify this attribute as necessary.
		pgroups = global_accept_keywords.split()

		unmaskgroups = self.getPKeywords(cpv, slot, repo,
				global_accept_keywords)
		pgroups.extend(unmaskgroups)

		# Hack: Need to check the env directly here as otherwise stacking
		# doesn't work properly as negative values are lost in the config
		# object (bug #139600)
		egroups = backuped_accept_keywords.split()

		if unmaskgroups or egroups:
			pgroups = self._getEgroups(egroups, pgroups)
		else:
			pgroups = set(pgroups)

		return self._getMissingKeywords(cpv, pgroups, mygroups)


	def getRawMissingKeywords(self,
							cpv,
							slot,
							keywords,
							repo,
							global_accept_keywords):
		"""
		Take a package and return a list of any KEYWORDS that the user may
		need to accept for the given package. If the KEYWORDS are empty,
		the returned list will contain ** alone (in order to distinguish
		from the case of "none missing").  This DOES NOT apply any user config
		package.accept_keywords acceptance.

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param keywords: The 'KEYWORDS' key from the raw package metadata
		@type keywords: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@rtype: List
		@return: lists of KEYWORDS that have not been accepted
		and the keywords it looked for.
		"""

		mygroups = self.getKeywords(cpv, slot, keywords, repo)
		pgroups = global_accept_keywords.split()
		pgroups = set(pgroups)
		return self._getMissingKeywords(cpv, pgroups, mygroups)


	@staticmethod
	def _getEgroups(egroups, mygroups):
		"""gets any keywords defined in the environment

		@param egroups: ACCEPT_KEYWORDS from the backup environment
		@type egroups: List
		@param mygroups: the keywords accepted so far
		@type mygroups: List
		@rtype: Set
		@return: the set of keywords accepted after incremental stacking
		"""
		mygroups = list(mygroups)
		mygroups.extend(egroups)
		inc_pgroups = set()
		for x in mygroups:
			if x[:1] == "-":
				if x == "-*":
					inc_pgroups.clear()
				else:
					inc_pgroups.discard(x[1:])
			else:
				inc_pgroups.add(x)
		return inc_pgroups


	@staticmethod
	def _getMissingKeywords(cpv, pgroups, mygroups):
		"""Determines the missing keywords

		@param pgroups: The pkg keywords accepted
		@type pgroups: list
		@param mygroups: The ebuild keywords
		@type mygroups: list
		"""
		match = False
		hasstable = False
		hastesting = False
		for gp in mygroups:
			if gp == "*":
				match = True
				break
			elif gp == "~*":
				hastesting = True
				for x in pgroups:
					if x[:1] == "~":
						match = True
						break
				if match:
					break
			elif gp in pgroups:
				match = True
				break
			elif gp.startswith("~"):
				hastesting = True
			elif not gp.startswith("-"):
				hasstable = True
		if not match and \
			((hastesting and "~*" in pgroups) or \
			(hasstable and "*" in pgroups) or "**" in pgroups):
			match = True
		if match:
			missing = []
		else:
			if not mygroups:
				# If KEYWORDS is empty then we still have to return something
				# in order to distinguish from the case of "none missing".
				mygroups = ["**"]
			missing = mygroups
		return missing

	def getPKeywords(self, cpv, slot, repo, global_accept_keywords):
		"""Gets any package.keywords settings for cp for the given
		cpv, slot and repo

		@param cpv: The package name (for package.keywords support)
		@type cpv: String
		@param slot: The 'SLOT' key from the raw package metadata
		@type slot: String
		@param repo: The repository name of the package
		@type repo: String
		@param global_accept_keywords: The current value of ACCEPT_KEYWORDS
		@type global_accept_keywords: String
		@rtype: List
		@return: list of KEYWORDS that have been accepted
		"""

		pgroups = global_accept_keywords.split()
		cp = cpv_getkey(cpv)

		unmaskgroups = []
		if self._p_accept_keywords:
			cpv_slot = "%s:%s" % (cpv, slot)
			accept_keywords_defaults = tuple('~' + keyword for keyword in \
				pgroups if keyword[:1] not in "~-")
			for d in self._p_accept_keywords:
				cpdict = d.get(cp)
				if cpdict:
					pkg_accept_keywords = \
						ordered_by_atom_specificity(cpdict, cpv_slot)
					if pkg_accept_keywords:
						for x in pkg_accept_keywords:
							if not x:
								x = accept_keywords_defaults
							unmaskgroups.extend(x)

		pkgdict = self.pkeywordsdict.get(cp)
		if pkgdict:
			cpv_slot = "%s:%s" % (cpv, slot)
			pkg_accept_keywords = \
				ordered_by_atom_specificity(pkgdict, cpv_slot, repo=repo)
			if pkg_accept_keywords:
				for x in pkg_accept_keywords:
					unmaskgroups.extend(x)
		return unmaskgroups
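
_getMissingKeywords() above decides whether any accepted keyword satisfies the ebuild's KEYWORDS, treating "*", "~*" and "**" specially, and returns ["**"] for an empty KEYWORDS string so callers can tell that case apart from "nothing missing". A standalone sketch of that check with invented inputs:

def missing_keywords(ebuild_keywords, accepted):
    match = False
    has_stable = has_testing = False
    for kw in ebuild_keywords:
        if kw == "*":
            match = True
            break
        if kw == "~*":
            has_testing = True
            if any(a.startswith("~") for a in accepted):
                match = True
                break
        elif kw in accepted:
            match = True
            break
        elif kw.startswith("~"):
            has_testing = True
        elif not kw.startswith("-"):
            has_stable = True
    if not match and ((has_testing and "~*" in accepted)
                      or (has_stable and "*" in accepted)
                      or "**" in accepted):
        match = True
    if match:
        return []
    return list(ebuild_keywords) or ["**"]

print(missing_keywords(["~amd64"], {"amd64"}))   # ['~amd64']
print(missing_keywords(["~amd64"], {"~amd64"}))  # []
print(missing_keywords([], {"amd64"}))           # ['**']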
Example #34
class MaskManager:
    def __init__(
        self,
        repositories,
        profiles,
        abs_user_config,
        user_config=True,
        strict_umatched_removal=False,
    ):
        self._punmaskdict = ExtendedAtomDict(list)
        self._pmaskdict = ExtendedAtomDict(list)
        # Preserves atoms that are eliminated by negative
        # incrementals in user_pkgmasklines.
        self._pmaskdict_raw = ExtendedAtomDict(list)

        # Read profile/package.mask from every repo.
        # Repositories inherit masks from their parent profiles and
        # are able to remove mask from them with -atoms.
        # Such a removal affects only the current repo, but not the parent.
        # Add ::repo specs to every atom to make sure atoms only affect
        # packages from the current repo.

        # Cache the repository-wide package.mask files as a particular
        # repo may be often referenced by others as the master.
        pmask_cache = {}

        def grab_pmask(loc, repo_config):
            if loc not in pmask_cache:
                path = os.path.join(loc, "profiles", "package.mask")
                pmask_cache[loc] = grabfile_package(
                    path,
                    recursive=repo_config.portage1_profiles,
                    remember_source_file=True,
                    verify_eapi=True,
                    eapi_default=repo_config.eapi,
                    allow_repo=allow_profile_repo_deps(repo_config),
                    allow_build_id=("build-id" in repo_config.profile_formats),
                )
                if repo_config.portage1_profiles_compat and os.path.isdir(path):
                    warnings.warn(
                        _("Repository '%(repo_name)s' is implicitly using "
                          "'portage-1' profile format in its profiles/package.mask, but "
                          "the repository profiles are not marked as that format.  This will break "
                          "in the future.  Please either convert the following paths "
                          "to files, or add\nprofile-formats = portage-1\nto the "
                          "repository's layout.conf.\n") %
                        dict(repo_name=repo_config.name))

            return pmask_cache[loc]

        repo_pkgmasklines = []
        for repo in repositories.repos_with_profiles():
            lines = []
            repo_lines = grab_pmask(repo.location, repo)
            removals = frozenset(line[0][1:] for line in repo_lines
                                 if line[0][:1] == "-")
            matched_removals = set()
            for master in repo.masters:
                master_lines = grab_pmask(master.location, master)
                for line in master_lines:
                    if line[0] in removals:
                        matched_removals.add(line[0])
                # Since we don't stack masters recursively, there aren't any
                # atoms earlier in the stack to be matched by negative atoms in
                # master_lines. Also, repo_lines may contain negative atoms
                # that are intended to negate atoms from a different master
                # than the one with which we are currently stacking. Therefore,
                # we disable warn_for_unmatched_removal here (see bug #386569).
                lines.append(
                    stack_lists(
                        [master_lines, repo_lines],
                        incremental=1,
                        remember_source_file=True,
                        warn_for_unmatched_removal=False,
                    ))

            # It's safe to warn for unmatched removal if masters have not
            # been overridden by the user, which is guaranteed when
            # user_config is false (when called by repoman).
            if repo.masters:
                unmatched_removals = removals.difference(matched_removals)
                if unmatched_removals and not user_config:
                    source_file = os.path.join(repo.location, "profiles",
                                               "package.mask")
                    unmatched_removals = list(unmatched_removals)
                    if len(unmatched_removals) > 3:
                        writemsg(
                            _("--- Unmatched removal atoms in %s: %s and %s more\n"
                              ) % (
                                  source_file,
                                  ", ".join("-" + x
                                            for x in unmatched_removals[:3]),
                                  len(unmatched_removals) - 3,
                              ),
                            noiselevel=-1,
                        )
                    else:
                        writemsg(
                            _("--- Unmatched removal atom(s) in %s: %s\n") % (
                                source_file,
                                ", ".join("-" + x for x in unmatched_removals),
                            ),
                            noiselevel=-1,
                        )

            else:
                lines.append(
                    stack_lists(
                        [repo_lines],
                        incremental=1,
                        remember_source_file=True,
                        warn_for_unmatched_removal=not user_config,
                        strict_warn_for_unmatched_removal=strict_umatched_removal,
                    ))
            repo_pkgmasklines.extend(
                append_repo(stack_lists(lines),
                            repo.name,
                            remember_source_file=True))

        repo_pkgunmasklines = []
        for repo in repositories.repos_with_profiles():
            if not repo.portage1_profiles:
                continue
            repo_lines = grabfile_package(
                os.path.join(repo.location, "profiles", "package.unmask"),
                recursive=1,
                remember_source_file=True,
                verify_eapi=True,
                eapi_default=repo.eapi,
                allow_repo=allow_profile_repo_deps(repo),
                allow_build_id=("build-id" in repo.profile_formats),
            )
            lines = stack_lists(
                [repo_lines],
                incremental=1,
                remember_source_file=True,
                warn_for_unmatched_removal=True,
                strict_warn_for_unmatched_removal=strict_umatched_removal,
            )
            repo_pkgunmasklines.extend(
                append_repo(lines, repo.name, remember_source_file=True))

        # Read package.mask from the user's profile. Stack them in the end
        # to allow profiles to override masks from their parent profiles.
        profile_pkgmasklines = []
        profile_pkgunmasklines = []
        for x in profiles:
            profile_pkgmasklines.append(
                grabfile_package(
                    os.path.join(x.location, "package.mask"),
                    recursive=x.portage1_directories,
                    remember_source_file=True,
                    verify_eapi=True,
                    eapi=x.eapi,
                    eapi_default=None,
                    allow_repo=allow_profile_repo_deps(x),
                    allow_build_id=x.allow_build_id,
                ))
            if x.portage1_directories:
                profile_pkgunmasklines.append(
                    grabfile_package(
                        os.path.join(x.location, "package.unmask"),
                        recursive=x.portage1_directories,
                        remember_source_file=True,
                        verify_eapi=True,
                        eapi=x.eapi,
                        eapi_default=None,
                        allow_repo=allow_profile_repo_deps(x),
                        allow_build_id=x.allow_build_id,
                    ))
        profile_pkgmasklines = stack_lists(
            profile_pkgmasklines,
            incremental=1,
            remember_source_file=True,
            warn_for_unmatched_removal=True,
            strict_warn_for_unmatched_removal=strict_umatched_removal,
        )
        profile_pkgunmasklines = stack_lists(
            profile_pkgunmasklines,
            incremental=1,
            remember_source_file=True,
            warn_for_unmatched_removal=True,
            strict_warn_for_unmatched_removal=strict_umatched_removal,
        )

        # Read /etc/portage/package.mask. Don't stack it to allow the user to
        # remove mask atoms from everywhere with -atoms.
        user_pkgmasklines = []
        user_pkgunmasklines = []
        if user_config:
            user_pkgmasklines = grabfile_package(
                os.path.join(abs_user_config, "package.mask"),
                recursive=1,
                allow_wildcard=True,
                allow_repo=True,
                remember_source_file=True,
                verify_eapi=False,
                allow_build_id=True,
            )
            user_pkgunmasklines = grabfile_package(
                os.path.join(abs_user_config, "package.unmask"),
                recursive=1,
                allow_wildcard=True,
                allow_repo=True,
                remember_source_file=True,
                verify_eapi=False,
                allow_build_id=True,
            )

        # Stack everything together. At this point, only user_pkgmasklines may contain -atoms.
        # Don't warn for unmatched -atoms here, since we don't do it for any other user config file.
        raw_pkgmasklines = stack_lists(
            [repo_pkgmasklines, profile_pkgmasklines],
            incremental=1,
            remember_source_file=True,
            warn_for_unmatched_removal=False,
            ignore_repo=True,
        )
        pkgmasklines = stack_lists(
            [repo_pkgmasklines, profile_pkgmasklines, user_pkgmasklines],
            incremental=1,
            remember_source_file=True,
            warn_for_unmatched_removal=False,
            ignore_repo=True,
        )
        pkgunmasklines = stack_lists(
            [repo_pkgunmasklines, profile_pkgunmasklines, user_pkgunmasklines],
            incremental=1,
            remember_source_file=True,
            warn_for_unmatched_removal=False,
            ignore_repo=True,
        )

        for x, source_file in raw_pkgmasklines:
            self._pmaskdict_raw.setdefault(x.cp, []).append(x)

        for x, source_file in pkgmasklines:
            self._pmaskdict.setdefault(x.cp, []).append(x)

        for x, source_file in pkgunmasklines:
            self._punmaskdict.setdefault(x.cp, []).append(x)

        for d in (self._pmaskdict_raw, self._pmaskdict, self._punmaskdict):
            for k, v in d.items():
                d[k] = tuple(v)
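        # Hedged illustration (not from the original source): after the loops
        # above, each dict maps a category/package key to a tuple of atoms,
        # e.g. (with a made-up entry)
        #
        #   self._pmaskdict["dev-libs/bar"] == (">=dev-libs/bar-2::gentoo",)
        #
        # _pmaskdict_raw has the same shape but is built without the user's
        # package.mask, so it preserves atoms that the user's -atoms removed.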

    def _getMaskAtom(self, cpv, slot, repo, unmask_atoms=None):
        """
        Take a package and return a matching package.mask atom, or None if no
        such atom exists or it has been cancelled by package.unmask.

        @param cpv: The package name
        @type cpv: String
        @param slot: The package's slot
        @type slot: String
        @param repo: The package's repository [optional]
        @type repo: String
        @param unmask_atoms: If desired, pass in self._punmaskdict.get(cp)
        @type unmask_atoms: list
        @rtype: String
        @return: A matching atom string or None if one is not found.
        """

        try:
            cpv.slot
        except AttributeError:
            pkg = _pkg_str(cpv, slot=slot, repo=repo)
        else:
            pkg = cpv

        mask_atoms = self._pmaskdict.get(pkg.cp)
        if mask_atoms:
            pkg_list = [pkg]
            for x in mask_atoms:
                if not match_from_list(x, pkg_list):
                    continue
                if unmask_atoms:
                    for y in unmask_atoms:
                        if match_from_list(y, pkg_list):
                            return None
                return x
        return None
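
    # Hedged illustration (not from the original source): _getMaskAtom()
    # returns the first mask atom in self._pmaskdict[pkg.cp] that matches the
    # package, unless an atom in unmask_atoms also matches, in which case the
    # mask is treated as cancelled and None is returned. For example, with a
    # hypothetical mask ">=dev-libs/bar-2" and unmask ">=dev-libs/bar-2.5",
    # dev-libs/bar-2.6 would not be reported as masked, while dev-libs/bar-2.1
    # would.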

    def getMaskAtom(self, cpv, slot, repo):
        """
        Take a package and return a matching package.mask atom, or None if no
        such atom exists or it has been cancelled by package.unmask.

        @param cpv: The package name
        @type cpv: String
        @param slot: The package's slot
        @type slot: String
        @param repo: The package's repository [optional]
        @type repo: String
        @rtype: String
        @return: A matching atom string or None if one is not found.
        """

        try:
            cpv.slot
        except AttributeError:
            pkg = _pkg_str(cpv, slot=slot, repo=repo)
        else:
            pkg = cpv

        return self._getMaskAtom(pkg, slot, repo,
                                 self._punmaskdict.get(pkg.cp))

    def getRawMaskAtom(self, cpv, slot, repo):
        """
        Take a package and return a matching package.mask atom, or None if no
        such atom exists. Unlike getMaskAtom(), the returned atom is NOT
        cancelled by any package.unmask entry.

        @param cpv: The package name
        @type cpv: String
        @param slot: The package's slot
        @type slot: String
        @param repo: The package's repository [optional]
        @type repo: String
        @rtype: String
        @return: A matching atom string or None if one is not found.
        """

        return self._getMaskAtom(cpv, slot, repo)
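
# Hedged usage sketch (not from the original source): how the MaskManager
# public API might be queried once an instance exists. "mask_manager" is an
# assumed, already-constructed instance; the cpv/slot/repo values are made up.
#
#   atom = mask_manager.getMaskAtom("dev-libs/bar-2.1", "0", "gentoo")
#   if atom is not None:
#       print("masked by %s" % atom)
#
#   # getRawMaskAtom() answers the same question while ignoring
#   # package.unmask, which is useful for reporting why a package would be
#   # masked even though the user has unmasked it.
#   raw_atom = mask_manager.getRawMaskAtom("dev-libs/bar-2.1", "0", "gentoo")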