Example #1
	def getUseMask(self, pkg=None, stable=None):
		if pkg is None:
			return frozenset(stack_lists(
				self._usemask_list, incremental=True))

		slot = None
		cp = getattr(pkg, "cp", None)
		if cp is None:
			slot = dep_getslot(pkg)
			repo = dep_getrepo(pkg)
			pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
			cp = pkg.cp

		if stable is None:
			stable = self._isStable(pkg)

		usemask = []

		if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
			repos = []
			try:
				repos.extend(repo.name for repo in
					self.repositories[pkg.repo].masters)
			except KeyError:
				pass
			repos.append(pkg.repo)
			for repo in repos:
				usemask.append(self._repo_usemask_dict.get(repo, {}))
				if stable:
					usemask.append(self._repo_usestablemask_dict.get(repo, {}))
				cpdict = self._repo_pusemask_dict.get(repo, {}).get(cp)
				if cpdict:
					pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
					if pkg_usemask:
						usemask.extend(pkg_usemask)
				if stable:
					cpdict = self._repo_pusestablemask_dict.get(repo, {}).get(cp)
					if cpdict:
						pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
						if pkg_usemask:
							usemask.extend(pkg_usemask)

		for i, pusemask_dict in enumerate(self._pusemask_list):
			if self._usemask_list[i]:
				usemask.append(self._usemask_list[i])
			if stable and self._usestablemask_list[i]:
				usemask.append(self._usestablemask_list[i])
			cpdict = pusemask_dict.get(cp)
			if cpdict:
				pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_usemask:
					usemask.extend(pkg_usemask)
			if stable:
				cpdict = self._pusestablemask_list[i].get(cp)
				if cpdict:
					pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
					if pkg_usemask:
						usemask.extend(pkg_usemask)

		return frozenset(stack_lists(usemask, incremental=True))
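The incremental stacking behavior that getUseMask() relies on here is exercised directly by the testStackLists cases further below (Examples #11, #13, #14 and #17). A minimal sketch of those semantics, assuming a working portage installation so that portage.util is importable:

from portage.util import stack_lists

# "-flag" entries remove a previously stacked flag...
assert stack_lists([["a"], ["-a"]], incremental=True) == []
# ...and "-*" clears everything stacked so far.
assert stack_lists([["a", "b", "c"], ["-*"]], incremental=True) == []
# Without removals the lists are merged and duplicates collapse;
# the return order is not guaranteed, so compare as sets.
assert set(stack_lists([["a", "x"], ["b", "x"]], incremental=True)) == {"a", "x", "b"}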
Example #2
    def __init__(self, pmask_locations, abs_user_config, user_config=True):
        self._punmaskdict = ExtendedAtomDict(list)
        self._pmaskdict = ExtendedAtomDict(list)

        pkgmasklines = []
        pkgunmasklines = []
        for x in pmask_locations:
            pkgmasklines.append(
                grabfile_package(os.path.join(x, "package.mask"), recursive=1))
            pkgunmasklines.append(
                grabfile_package(os.path.join(x, "package.unmask"),
                                 recursive=1))

        if user_config:
            pkgmasklines.append(
                grabfile_package(os.path.join(abs_user_config, "package.mask"),
                                 recursive=1,
                                 allow_wildcard=True))
            pkgunmasklines.append(
                grabfile_package(os.path.join(abs_user_config,
                                              "package.unmask"),
                                 recursive=1,
                                 allow_wildcard=True))

        pkgmasklines = stack_lists(pkgmasklines, incremental=1)
        pkgunmasklines = stack_lists(pkgunmasklines, incremental=1)

        for x in pkgmasklines:
            self._pmaskdict.setdefault(x.cp, []).append(x)

        for x in pkgunmasklines:
            self._punmaskdict.setdefault(x.cp, []).append(x)
Example #3
	def getUseForce(self, pkg=None):
		if pkg is None:
			return frozenset(stack_lists(
				self._useforce_list, incremental=True))

		cp = getattr(pkg, "cp", None)
		if cp is None:
			cp = cpv_getkey(remove_slot(pkg))
		useforce = []
		if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
			repos = []
			try:
				repos.extend(repo.name for repo in
					self.repositories[pkg.repo].masters)
			except KeyError:
				pass
			repos.append(pkg.repo)
			for repo in repos:
				useforce.append(self._repo_useforce_dict.get(repo, {}))
				cpdict = self._repo_puseforce_dict.get(repo, {}).get(cp)
				if cpdict:
					pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
					if pkg_useforce:
						useforce.extend(pkg_useforce)
		for i, puseforce_dict in enumerate(self._puseforce_list):
			if self._useforce_list[i]:
				useforce.append(self._useforce_list[i])
			cpdict = puseforce_dict.get(cp)
			if cpdict:
				pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_useforce:
					useforce.extend(pkg_useforce)
		return frozenset(stack_lists(useforce, incremental=True))
Example #4
	def __init__(self, pmask_locations, abs_user_config, user_config=True):
		self._punmaskdict = ExtendedAtomDict(list)
		self._pmaskdict = ExtendedAtomDict(list)

		pkgmasklines = []
		pkgunmasklines = []
		for x in pmask_locations:
			pkgmasklines.append(grabfile_package(
				os.path.join(x, "package.mask"), recursive=1))
			pkgunmasklines.append(grabfile_package(
				os.path.join(x, "package.unmask"), recursive=1))

		if user_config:
			pkgmasklines.append(grabfile_package(
				os.path.join(abs_user_config, "package.mask"), recursive=1, allow_wildcard=True))
			pkgunmasklines.append(grabfile_package(
				os.path.join(abs_user_config, "package.unmask"), recursive=1, allow_wildcard=True))

		pkgmasklines = stack_lists(pkgmasklines, incremental=1)
		pkgunmasklines = stack_lists(pkgunmasklines, incremental=1)

		for x in pkgmasklines:
			self._pmaskdict.setdefault(x.cp, []).append(x)

		for x in pkgunmasklines:
			self._punmaskdict.setdefault(x.cp, []).append(x)
Example #5
    def getUseMask(self, pkg=None, stable=None):
        if pkg is None:
            return frozenset(stack_lists(self._usemask_list, incremental=True))

        slot = None
        cp = getattr(pkg, "cp", None)
        if cp is None:
            slot = dep_getslot(pkg)
            repo = dep_getrepo(pkg)
            pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
            cp = pkg.cp

        if stable is None:
            stable = self._isStable(pkg)

        usemask = []

        if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
            repos = []
            try:
                repos.extend(repo.name
                             for repo in self.repositories[pkg.repo].masters)
            except KeyError:
                pass
            repos.append(pkg.repo)
            for repo in repos:
                usemask.append(self._repo_usemask_dict.get(repo, {}))
                if stable:
                    usemask.append(self._repo_usestablemask_dict.get(repo, {}))
                cpdict = self._repo_pusemask_dict.get(repo, {}).get(cp)
                if cpdict:
                    pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
                    if pkg_usemask:
                        usemask.extend(pkg_usemask)
                if stable:
                    cpdict = self._repo_pusestablemask_dict.get(repo,
                                                                {}).get(cp)
                    if cpdict:
                        pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
                        if pkg_usemask:
                            usemask.extend(pkg_usemask)

        for i, pusemask_dict in enumerate(self._pusemask_list):
            if self._usemask_list[i]:
                usemask.append(self._usemask_list[i])
            if stable and self._usestablemask_list[i]:
                usemask.append(self._usestablemask_list[i])
            cpdict = pusemask_dict.get(cp)
            if cpdict:
                pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
                if pkg_usemask:
                    usemask.extend(pkg_usemask)
            if stable:
                cpdict = self._pusestablemask_list[i].get(cp)
                if cpdict:
                    pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
                    if pkg_usemask:
                        usemask.extend(pkg_usemask)

        return frozenset(stack_lists(usemask, incremental=True))
Example #6
	def load(self):
		self._setAtoms(x for x in stack_lists(
			[grabfile_package(os.path.join(y.location, "packages"),
			verify_eapi=True, eapi=y.eapi, eapi_default=None)
			for y in self._profiles
			if "profile-set" in y.profile_formats],
			incremental=1) if x[:1] != "*")
Example #7
    def load(self):
        debug = self._debug
        if debug:
            writemsg_level("\nPackagesSystemSet: profiles: %s\n" %
                           (self._profiles, ),
                           level=logging.DEBUG,
                           noiselevel=-1)

        mylist = [
            grabfile_package(os.path.join(x.location, "packages"),
                             verify_eapi=True,
                             eapi=x.eapi,
                             eapi_default=None,
                             allow_build_id=x.allow_build_id)
            for x in self._profiles
        ]

        if debug:
            writemsg_level("\nPackagesSystemSet: raw packages: %s\n" % \
             (mylist,), level=logging.DEBUG, noiselevel=-1)

        mylist = stack_lists(mylist, incremental=1)

        if debug:
            writemsg_level("\nPackagesSystemSet: stacked packages: %s\n" % \
             (mylist,), level=logging.DEBUG, noiselevel=-1)

        self._setAtoms([x[1:] for x in mylist if x[0] == "*"])
Example #8
	def __init__(self, configpaths, valid_versions=None, repository_modules=False):
		'''Module init

		@param configpaths: ordered list of filepaths to load
		'''
		if repository_modules:
			self.configpaths = [os.path.join(path, 'repository.yaml') for path in configpaths]
		elif _not_installed:
			self.configpaths = [os.path.realpath(os.path.join(os.path.dirname(
				os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(
				os.path.dirname(__file__)))))), 'repoman/cnf/repository/repository.yaml'))]
		else:
			self.configpaths = [os.path.join(portage.const.EPREFIX or '/',
				'usr/share/repoman/repository/repository.yaml')]
		logging.debug("ModuleConfig; configpaths: %s", self.configpaths)

		self.controller = Modules(path=MODULES_PATH, namepath="repoman.modules.scan")
		logging.debug("ModuleConfig; module_names: %s", self.controller.module_names)

		self._configs = None
		self.enabled = []
		self.pkgs_loop = []
		self.ebuilds_loop = []
		self.final_loop = []
		self.modules_forced = ['ebuild', 'mtime']
		self.load_configs(valid_versions=valid_versions)
		for loop in ['pkgs', 'ebuilds', 'final']:
			logging.debug("ModuleConfig; Processing loop %s", loop)
			setattr(self, '%s_loop' % loop, self._determine_list(loop))
		self.linechecks = stack_lists(c['linechecks_modules'].split() for c in self._configs)
Example #9
    def getUseMask(self, pkg=None):
        if pkg is None:
            return frozenset(stack_lists(self._usemask_list, incremental=True))

        cp = getattr(pkg, "cp", None)
        if cp is None:
            cp = cpv_getkey(remove_slot(pkg))
        usemask = []
        for i, pusemask_dict in enumerate(self._pusemask_list):
            if self._usemask_list[i]:
                usemask.append(self._usemask_list[i])
            cpdict = pusemask_dict.get(cp)
            if cpdict:
                pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
                if pkg_usemask:
                    usemask.extend(pkg_usemask)
        return frozenset(stack_lists(usemask, incremental=True))
Example #10
	def getUseForce(self, pkg=None):
		if pkg is None:
			return frozenset(stack_lists(
				self._useforce_list, incremental=True))

		cp = getattr(pkg, "cp", None)
		if cp is None:
			cp = cpv_getkey(remove_slot(pkg))
		useforce = []
		for i, puseforce_dict in enumerate(self._puseforce_list):
			if self._useforce_list[i]:
				useforce.append(self._useforce_list[i])
			cpdict = puseforce_dict.get(cp)
			if cpdict:
				pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_useforce:
					useforce.extend(pkg_useforce)
		return frozenset(stack_lists(useforce, incremental=True))
Example #11
	def testStackLists(self):
		
		tests = [ ( [ ['a','b','c'], ['d','e','f'] ], ['a','c','b','e','d','f'], False ),
			  ( [ ['a','x'], ['b','x'] ], ['a','x','b'], False ),
			  ( [ ['a','b','c'], ['-*'] ], [], True ),
			  ( [ ['a'], ['-a'] ], [], True ) ]

		for test in tests:
			result = stack_lists( test[0], test[2] )
			self.assertEqual( set(result) , set(test[1]) )
Example #12
 def load(self):
     self._setAtoms(x for x in stack_lists([
         grabfile_package(os.path.join(y.location, "packages"),
                          verify_eapi=True,
                          eapi=y.eapi,
                          eapi_default=None,
                          allow_build_id=y.allow_build_id)
         for y in self._profiles if "profile-set" in y.profile_formats
     ],
                                           incremental=1) if x[:1] != "*")
Example #13
    def testStackLists(self):

        tests = [([['a', 'b', 'c'],
                   ['d', 'e', 'f']], ['a', 'c', 'b', 'e', 'd', 'f'], False),
                 ([['a', 'x'], ['b', 'x']], ['a', 'x', 'b'], False),
                 ([['a', 'b', 'c'], ['-*']], [], True),
                 ([['a'], ['-a']], [], True)]

        for test in tests:
            result = stack_lists(test[0], test[2])
            self.assertEqual(result, test[1])
Example #14
    def testStackLists(self):

        tests = [
            ([["a", "b", "c"], ["d", "e", "f"]], ["a", "c", "b", "e", "d", "f"], False),
            ([["a", "x"], ["b", "x"]], ["a", "x", "b"], False),
            ([["a", "b", "c"], ["-*"]], [], True),
            ([["a"], ["-a"]], [], True),
        ]

        for test in tests:
            result = stack_lists(test[0], test[2])
            self.assertEqual(result, test[1])
Example #15
 def getKeywords(self, cpv, slot, keywords, repo):
     cp = cpv_getkey(cpv)
     pkg = "".join((cpv, _slot_separator, slot))
     if repo and repo != Package.UNKNOWN_REPO:
         pkg = "".join((pkg, _repo_separator, repo))
     keywords = [[x for x in keywords.split() if x != "-*"]]
     for pkeywords_dict in self._pkeywords_list:
         cpdict = pkeywords_dict.get(cp)
         if cpdict:
             pkg_keywords = ordered_by_atom_specificity(cpdict, pkg)
             if pkg_keywords:
                 keywords.extend(pkg_keywords)
     return stack_lists(keywords, incremental=True)
Example #16
	def getKeywords(self, cpv, slot, keywords, repo):
		cp = cpv_getkey(cpv)
		pkg = "".join((cpv, _slot_separator, slot))
		if repo and repo != Package.UNKNOWN_REPO:
			pkg = "".join((pkg, _repo_separator, repo))
		keywords = [[x for x in keywords.split() if x != "-*"]]
		for pkeywords_dict in self._pkeywords_list:
			cpdict = pkeywords_dict.get(cp)
			if cpdict:
				pkg_keywords = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_keywords:
					keywords.extend(pkg_keywords)
		return stack_lists(keywords, incremental=True)
Example #17
    def testStackLists(self):

        tests = [
            ([["a", "b", "c"], ["d", "e",
                                "f"]], ["a", "c", "b", "e", "d", "f"], False),
            ([["a", "x"], ["b", "x"]], ["a", "x", "b"], False),
            ([["a", "b", "c"], ["-*"]], [], True),
            ([["a"], ["-a"]], [], True),
        ]

        for test in tests:
            result = stack_lists(test[0], test[2])
            self.assertEqual(set(result), set(test[1]))
Example #18
    def __init__(self,
                 configpaths,
                 valid_versions=None,
                 repository_modules=False):
        """Module init

        @param configpaths: ordered list of filepaths to load
        """
        if repository_modules:
            self.configpaths = [
                os.path.join(path, "repository.yaml") for path in configpaths
            ]
        elif _not_installed:
            self.configpaths = [
                os.path.realpath(
                    os.path.join(
                        os.path.dirname(
                            os.path.dirname(
                                os.path.dirname(
                                    os.path.dirname(
                                        os.path.dirname(__file__))))),
                        "cnf/repository/repository.yaml",
                    ))
            ]
        else:
            self.configpaths = [
                os.path.join(
                    portage.const.EPREFIX or "/",
                    "usr/share/repoman/repository/repository.yaml",
                )
            ]
        logging.debug("ModuleConfig; configpaths: %s", self.configpaths)

        self.controller = Modules(path=MODULES_PATH,
                                  namepath="repoman.modules.scan")
        logging.debug("ModuleConfig; module_names: %s",
                      self.controller.module_names)

        self._configs = None
        self.enabled = []
        self.pkgs_loop = []
        self.ebuilds_loop = []
        self.final_loop = []
        self.modules_forced = ["ebuild", "mtime"]
        self.load_configs(valid_versions=valid_versions)
        for loop in ["pkgs", "ebuilds", "final"]:
            logging.debug("ModuleConfig; Processing loop %s", loop)
            setattr(self, "%s_loop" % loop, self._determine_list(loop))
        self.linechecks = stack_lists(c["linechecks_modules"].split()
                                      for c in self._configs)
Example #19
 def getKeywords(self, cpv, slot, keywords, repo):
     try:
         cpv.slot
     except AttributeError:
         pkg = _pkg_str(cpv, slot=slot, repo=repo)
     else:
         pkg = cpv
     cp = pkg.cp
     keywords = [[x for x in keywords.split() if x != "-*"]]
     for pkeywords_dict in self._pkeywords_list:
         cpdict = pkeywords_dict.get(cp)
         if cpdict:
             pkg_keywords = ordered_by_atom_specificity(cpdict, pkg)
             if pkg_keywords:
                 keywords.extend(pkg_keywords)
     return stack_lists(keywords, incremental=True)
Example #20
	def getKeywords(self, cpv, slot, keywords, repo):
		try:
			cpv.slot
		except AttributeError:
			pkg = _pkg_str(cpv, slot=slot, repo=repo)
		else:
			pkg = cpv
		cp = pkg.cp
		keywords = [[x for x in keywords.split() if x != "-*"]]
		for pkeywords_dict in self._pkeywords_list:
			cpdict = pkeywords_dict.get(cp)
			if cpdict:
				pkg_keywords = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_keywords:
					keywords.extend(pkg_keywords)
		return stack_lists(keywords, incremental=True)
Example #21
	def load(self):
		debug = self._debug
		if debug:
			writemsg_level("\nPackagesSystemSet: profile paths: %s\n" % \
				(self._profile_paths,), level=logging.DEBUG, noiselevel=-1)

		mylist = [grabfile_package(os.path.join(x, "packages")) for x in self._profile_paths]

		if debug:
			writemsg_level("\nPackagesSystemSet: raw packages: %s\n" % \
				(mylist,), level=logging.DEBUG, noiselevel=-1)

		mylist = stack_lists(mylist, incremental=1)

		if debug:
			writemsg_level("\nPackagesSystemSet: stacked packages: %s\n" % \
				(mylist,), level=logging.DEBUG, noiselevel=-1)

		self._setAtoms([x[1:] for x in mylist if x[0] == "*"])
Example #22
	def load(self):
		debug = self._debug
		if debug:
			writemsg_level("\nPackagesSystemSet: profile paths: %s\n" % \
				(self._profile_paths,), level=logging.DEBUG, noiselevel=-1)

		mylist = [grabfile_package(os.path.join(x, "packages"), verify_eapi=True) for x in self._profile_paths]

		if debug:
			writemsg_level("\nPackagesSystemSet: raw packages: %s\n" % \
				(mylist,), level=logging.DEBUG, noiselevel=-1)

		mylist = stack_lists(mylist, incremental=1)

		if debug:
			writemsg_level("\nPackagesSystemSet: stacked packages: %s\n" % \
				(mylist,), level=logging.DEBUG, noiselevel=-1)

		self._setAtoms([x[1:] for x in mylist if x[0] == "*"])
Example #23
	def _determine_list(self, loop):
		'''Determine the ordered list from the config data and
		the module_runsIn value in the module_spec

		@returns: list of modules
		'''
		lists = [c['scan_modules'].split() for c in self._configs]
		stacked = self.modules_forced + stack_lists(lists)
		mlist = []
		try:
			for mod in stacked:
				logging.debug("ModuleConfig; checking loop %s, module: %s, in: %s",
					loop, mod, self.controller.get_spec(mod, 'module_runsIn'))
				if loop in self.controller.get_spec(mod, 'module_runsIn'):
					mlist.append(mod)
		except InvalidModuleName:
			logging.error("ModuleConfig; unknown module: %s, skipping", mod)

		logging.debug("ModuleConfig; mlist: %s", mlist)
		return mlist
Example #24
	def load(self):
		debug = self._debug
		if debug:
			writemsg_level("\nPackagesSystemSet: profiles: %s\n" %
				(self._profiles,), level=logging.DEBUG, noiselevel=-1)

		mylist = [grabfile_package(os.path.join(x.location, "packages"),
			verify_eapi=True, eapi=x.eapi, eapi_default=None)
			for x in self._profiles]

		if debug:
			writemsg_level("\nPackagesSystemSet: raw packages: %s\n" % \
				(mylist,), level=logging.DEBUG, noiselevel=-1)

		mylist = stack_lists(mylist, incremental=1)

		if debug:
			writemsg_level("\nPackagesSystemSet: stacked packages: %s\n" % \
				(mylist,), level=logging.DEBUG, noiselevel=-1)

		self._setAtoms([x[1:] for x in mylist if x[0] == "*"])
Example #25
    def getUseForce(self, pkg=None):
        if pkg is None:
            return frozenset(stack_lists(self._useforce_list,
                                         incremental=True))

        cp = getattr(pkg, "cp", None)
        if cp is None:
            slot = dep_getslot(pkg)
            repo = dep_getrepo(pkg)
            pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
            cp = pkg.cp

        try:
            stable = pkg.stable
        except AttributeError:
            # KEYWORDS is unavailable (prior to "depend" phase)
            stable = False

        useforce = []

        if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
            repos = []
            try:
                repos.extend(repo.name
                             for repo in self.repositories[pkg.repo].masters)
            except KeyError:
                pass
            repos.append(pkg.repo)
            for repo in repos:
                useforce.append(self._repo_useforce_dict.get(repo, {}))
                if stable:
                    useforce.append(
                        self._repo_usestableforce_dict.get(repo, {}))
                cpdict = self._repo_puseforce_dict.get(repo, {}).get(cp)
                if cpdict:
                    pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
                    if pkg_useforce:
                        useforce.extend(pkg_useforce)
                if stable:
                    cpdict = self._repo_pusestableforce_dict.get(repo,
                                                                 {}).get(cp)
                    if cpdict:
                        pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
                        if pkg_useforce:
                            useforce.extend(pkg_useforce)

        for i, puseforce_dict in enumerate(self._puseforce_list):
            if self._useforce_list[i]:
                useforce.append(self._useforce_list[i])
            if stable and self._usestableforce_list[i]:
                useforce.append(self._usestableforce_list[i])
            cpdict = puseforce_dict.get(cp)
            if cpdict:
                pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
                if pkg_useforce:
                    useforce.extend(pkg_useforce)
            if stable:
                cpdict = self._pusestableforce_list[i].get(cp)
                if cpdict:
                    pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
                    if pkg_useforce:
                        useforce.extend(pkg_useforce)

        return frozenset(stack_lists(useforce, incremental=True))
Example #26
	def __init__(self, repositories, profiles, abs_user_config,
		user_config=True, strict_umatched_removal=False):
		self._punmaskdict = ExtendedAtomDict(list)
		self._pmaskdict = ExtendedAtomDict(list)
		# Preserves atoms that are eliminated by negative
		# incrementals in user_pkgmasklines.
		self._pmaskdict_raw = ExtendedAtomDict(list)

		#Read profile/package.mask from every repo.
		#Repositories inherit masks from their parent profiles and
		#are able to remove mask from them with -atoms.
		#Such a removal affects only the current repo, but not the parent.
		#Add ::repo specs to every atom to make sure atoms only affect
		#packages from the current repo.

		# Cache the repository-wide package.mask files as a particular
		# repo may be often referenced by others as the master.
		pmask_cache = {}

		def grab_pmask(loc, repo_config):
			if loc not in pmask_cache:
				path = os.path.join(loc, 'profiles', 'package.mask')
				pmask_cache[loc] = grabfile_package(path,
						recursive=repo_config.portage1_profiles,
						remember_source_file=True, verify_eapi=True,
						eapi_default=repo_config.eapi,
						allow_build_id=("build-id"
						in repo_config.profile_formats))
				if repo_config.portage1_profiles_compat and os.path.isdir(path):
					warnings.warn(_("Repository '%(repo_name)s' is implicitly using "
						"'portage-1' profile format in its profiles/package.mask, but "
						"the repository profiles are not marked as that format.  This will break "
						"in the future.  Please either convert the following paths "
						"to files, or add\nprofile-formats = portage-1\nto the "
						"repository's layout.conf.\n")
						% dict(repo_name=repo_config.name))

			return pmask_cache[loc]

		repo_pkgmasklines = []
		for repo in repositories.repos_with_profiles():
			lines = []
			repo_lines = grab_pmask(repo.location, repo)
			removals = frozenset(line[0][1:] for line in repo_lines
				if line[0][:1] == "-")
			matched_removals = set()
			for master in repo.masters:
				master_lines = grab_pmask(master.location, master)
				for line in master_lines:
					if line[0] in removals:
						matched_removals.add(line[0])
				# Since we don't stack masters recursively, there aren't any
				# atoms earlier in the stack to be matched by negative atoms in
				# master_lines. Also, repo_lines may contain negative atoms
				# that are intended to negate atoms from a different master
				# than the one with which we are currently stacking. Therefore,
				# we disable warn_for_unmatched_removal here (see bug #386569).
				lines.append(stack_lists([master_lines, repo_lines], incremental=1,
					remember_source_file=True, warn_for_unmatched_removal=False))

			# It's safe to warn for unmatched removal if masters have not
			# been overridden by the user, which is guaranteed when
			# user_config is false (when called by repoman).
			if repo.masters:
				unmatched_removals = removals.difference(matched_removals)
				if unmatched_removals and not user_config:
					source_file = os.path.join(repo.location,
						"profiles", "package.mask")
					unmatched_removals = list(unmatched_removals)
					if len(unmatched_removals) > 3:
						writemsg(
							_("--- Unmatched removal atoms in %s: %s and %s more\n") %
							(source_file,
							", ".join("-" + x for x in unmatched_removals[:3]),
							len(unmatched_removals) - 3), noiselevel=-1)
					else:
						writemsg(
							_("--- Unmatched removal atom(s) in %s: %s\n") %
							(source_file,
							", ".join("-" + x for x in unmatched_removals)),
							noiselevel=-1)

			else:
				lines.append(stack_lists([repo_lines], incremental=1,
					remember_source_file=True, warn_for_unmatched_removal=not user_config,
					strict_warn_for_unmatched_removal=strict_umatched_removal))
			repo_pkgmasklines.extend(append_repo(stack_lists(lines), repo.name, remember_source_file=True))

		repo_pkgunmasklines = []
		for repo in repositories.repos_with_profiles():
			if not repo.portage1_profiles:
				continue
			repo_lines = grabfile_package(os.path.join(repo.location, "profiles", "package.unmask"), \
				recursive=1, remember_source_file=True,
				verify_eapi=True, eapi_default=repo.eapi,
				allow_build_id=("build-id" in repo.profile_formats))
			lines = stack_lists([repo_lines], incremental=1, \
				remember_source_file=True, warn_for_unmatched_removal=True,
				strict_warn_for_unmatched_removal=strict_umatched_removal)
			repo_pkgunmasklines.extend(append_repo(lines, repo.name, remember_source_file=True))

		#Read package.mask from the user's profile. Stack them in the end
		#to allow profiles to override masks from their parent profiles.
		profile_pkgmasklines = []
		profile_pkgunmasklines = []
		for x in profiles:
			profile_pkgmasklines.append(grabfile_package(
				os.path.join(x.location, "package.mask"),
				recursive=x.portage1_directories,
				remember_source_file=True, verify_eapi=True,
				eapi=x.eapi, eapi_default=None,
				allow_build_id=x.allow_build_id))
			if x.portage1_directories:
				profile_pkgunmasklines.append(grabfile_package(
					os.path.join(x.location, "package.unmask"),
					recursive=x.portage1_directories,
					remember_source_file=True, verify_eapi=True,
					eapi=x.eapi, eapi_default=None,
					allow_build_id=x.allow_build_id))
		profile_pkgmasklines = stack_lists(profile_pkgmasklines, incremental=1, \
			remember_source_file=True, warn_for_unmatched_removal=True,
			strict_warn_for_unmatched_removal=strict_umatched_removal)
		profile_pkgunmasklines = stack_lists(profile_pkgunmasklines, incremental=1, \
			remember_source_file=True, warn_for_unmatched_removal=True,
			strict_warn_for_unmatched_removal=strict_umatched_removal)

		#Read /etc/portage/package.mask. Don't stack it to allow the user to
		#remove mask atoms from everywhere with -atoms.
		user_pkgmasklines = []
		user_pkgunmasklines = []
		if user_config:
			user_pkgmasklines = grabfile_package(
				os.path.join(abs_user_config, "package.mask"), recursive=1, \
				allow_wildcard=True, allow_repo=True,
				remember_source_file=True, verify_eapi=False,
				allow_build_id=True)
			user_pkgunmasklines = grabfile_package(
				os.path.join(abs_user_config, "package.unmask"), recursive=1, \
				allow_wildcard=True, allow_repo=True,
				remember_source_file=True, verify_eapi=False,
				allow_build_id=True)

		#Stack everything together. At this point, only user_pkgmasklines may contain -atoms.
		#Don't warn for unmatched -atoms here, since we don't do it for any other user config file.
		raw_pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
		pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines, user_pkgmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
		pkgunmasklines = stack_lists([repo_pkgunmasklines, profile_pkgunmasklines, user_pkgunmasklines], \
			incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)

		for x, source_file in raw_pkgmasklines:
			self._pmaskdict_raw.setdefault(x.cp, []).append(x)

		for x, source_file in pkgmasklines:
			self._pmaskdict.setdefault(x.cp, []).append(x)

		for x, source_file in pkgunmasklines:
			self._punmaskdict.setdefault(x.cp, []).append(x)

		for d in (self._pmaskdict_raw, self._pmaskdict, self._punmaskdict):
			for k, v in d.items():
				d[k] = tuple(v)
Example #27
	def getUseForce(self, pkg=None):
		if pkg is None:
			return frozenset(stack_lists(
				self._useforce_list, incremental=True))

		cp = getattr(pkg, "cp", None)
		if cp is None:
			slot = dep_getslot(pkg)
			repo = dep_getrepo(pkg)
			pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
			cp = pkg.cp

		try:
			stable = pkg.stable
		except AttributeError:
			# KEYWORDS is unavailable (prior to "depend" phase)
			stable = False

		useforce = []

		if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
			repos = []
			try:
				repos.extend(repo.name for repo in
					self.repositories[pkg.repo].masters)
			except KeyError:
				pass
			repos.append(pkg.repo)
			for repo in repos:
				useforce.append(self._repo_useforce_dict.get(repo, {}))
				if stable:
					useforce.append(self._repo_usestableforce_dict.get(repo, {}))
				cpdict = self._repo_puseforce_dict.get(repo, {}).get(cp)
				if cpdict:
					pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
					if pkg_useforce:
						useforce.extend(pkg_useforce)
				if stable:
					cpdict = self._repo_pusestableforce_dict.get(repo, {}).get(cp)
					if cpdict:
						pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
						if pkg_useforce:
							useforce.extend(pkg_useforce)

		for i, puseforce_dict in enumerate(self._puseforce_list):
			if self._useforce_list[i]:
				useforce.append(self._useforce_list[i])
			if stable and self._usestableforce_list[i]:
				useforce.append(self._usestableforce_list[i])
			cpdict = puseforce_dict.get(cp)
			if cpdict:
				pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
				if pkg_useforce:
					useforce.extend(pkg_useforce)
			if stable:
				cpdict = self._pusestableforce_list[i].get(cp)
				if cpdict:
					pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
					if pkg_useforce:
						useforce.extend(pkg_useforce)

		return frozenset(stack_lists(useforce, incremental=True))
Example #28
def scan_profile(path):
    return stack_lists([
        grabfile_package(os.path.join(x, path))
        for x in portage.settings.profiles
    ],
                       incremental=1)
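A hypothetical invocation of the scan_profile() helper above, assuming an installed and configured portage so that portage.settings.profiles is populated (the system-set entries in "packages" files carry a "*" prefix, as the load() examples above show):

# Hypothetical usage sketch: print the stacked entries of every
# active profile's "packages" file.
for atom in scan_profile("packages"):
    print(atom)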
Example #29
    def __init__(self,
                 repositories,
                 profiles,
                 abs_user_config,
                 user_config=True,
                 strict_umatched_removal=False):
        self._punmaskdict = ExtendedAtomDict(list)
        self._pmaskdict = ExtendedAtomDict(list)
        # Preserves atoms that are eliminated by negative
        # incrementals in user_pkgmasklines.
        self._pmaskdict_raw = ExtendedAtomDict(list)

        #Read profile/package.mask from every repo.
        #Repositories inherit masks from their parent profiles and
        #are able to remove mask from them with -atoms.
        #Such a removal affects only the current repo, but not the parent.
        #Add ::repo specs to every atom to make sure atoms only affect
        #packages from the current repo.

        # Cache the repository-wide package.mask files as a particular
        # repo may be often referenced by others as the master.
        pmask_cache = {}

        def grab_pmask(loc, repo_config):
            if loc not in pmask_cache:
                path = os.path.join(loc, 'profiles', 'package.mask')
                pmask_cache[loc] = grabfile_package(
                    path,
                    recursive=repo_config.portage1_profiles,
                    remember_source_file=True,
                    verify_eapi=True)
                if repo_config.portage1_profiles_compat and os.path.isdir(
                        path):
                    warnings.warn(
                        _("Repository '%(repo_name)s' is implicitly using "
                          "'portage-1' profile format in its profiles/package.mask, but "
                          "the repository profiles are not marked as that format.  This will break "
                          "in the future.  Please either convert the following paths "
                          "to files, or add\nprofile-formats = portage-1\nto the "
                          "repositories layout.conf.\n") %
                        dict(repo_name=repo_config.name))

            return pmask_cache[loc]

        repo_pkgmasklines = []
        for repo in repositories.repos_with_profiles():
            lines = []
            repo_lines = grab_pmask(repo.location, repo)
            removals = frozenset(line[0][1:] for line in repo_lines
                                 if line[0][:1] == "-")
            matched_removals = set()
            for master in repo.masters:
                master_lines = grab_pmask(master.location, master)
                for line in master_lines:
                    if line[0] in removals:
                        matched_removals.add(line[0])
                # Since we don't stack masters recursively, there aren't any
                # atoms earlier in the stack to be matched by negative atoms in
                # master_lines. Also, repo_lines may contain negative atoms
                # that are intended to negate atoms from a different master
                # than the one with which we are currently stacking. Therefore,
                # we disable warn_for_unmatched_removal here (see bug #386569).
                lines.append(
                    stack_lists([master_lines, repo_lines],
                                incremental=1,
                                remember_source_file=True,
                                warn_for_unmatched_removal=False))

            # It's safe to warn for unmatched removal if masters have not
            # been overridden by the user, which is guaranteed when
            # user_config is false (when called by repoman).
            if repo.masters:
                unmatched_removals = removals.difference(matched_removals)
                if unmatched_removals and not user_config:
                    source_file = os.path.join(repo.location, "profiles",
                                               "package.mask")
                    unmatched_removals = list(unmatched_removals)
                    if len(unmatched_removals) > 3:
                        writemsg(_(
                            "--- Unmatched removal atoms in %s: %s and %s more\n"
                        ) % (source_file, ", ".join(
                            "-" + x for x in unmatched_removals[:3]),
                             len(unmatched_removals) - 3),
                                 noiselevel=-1)
                    else:
                        writemsg(
                            _("--- Unmatched removal atom(s) in %s: %s\n") %
                            (source_file, ", ".join(
                                "-" + x for x in unmatched_removals)),
                            noiselevel=-1)

            else:
                lines.append(
                    stack_lists([repo_lines],
                                incremental=1,
                                remember_source_file=True,
                                warn_for_unmatched_removal=not user_config,
                                strict_warn_for_unmatched_removal=
                                strict_umatched_removal))
            repo_pkgmasklines.extend(
                append_repo(stack_lists(lines),
                            repo.name,
                            remember_source_file=True))

        repo_pkgunmasklines = []
        for repo in repositories.repos_with_profiles():
            if not repo.portage1_profiles:
                continue
            repo_lines = grabfile_package(os.path.join(repo.location, "profiles", "package.unmask"), \
             recursive=1, remember_source_file=True, verify_eapi=True)
            lines = stack_lists([repo_lines], incremental=1, \
             remember_source_file=True, warn_for_unmatched_removal=True,
             strict_warn_for_unmatched_removal=strict_umatched_removal)
            repo_pkgunmasklines.extend(
                append_repo(lines, repo.name, remember_source_file=True))

        #Read package.mask from the user's profile. Stack them in the end
        #to allow profiles to override masks from their parent profiles.
        profile_pkgmasklines = []
        profile_pkgunmasklines = []
        for x in profiles:
            profile_pkgmasklines.append(
                grabfile_package(os.path.join(x.location, "package.mask"),
                                 recursive=x.portage1_directories,
                                 remember_source_file=True,
                                 verify_eapi=True))
            if x.portage1_directories:
                profile_pkgunmasklines.append(
                    grabfile_package(os.path.join(x.location,
                                                  "package.unmask"),
                                     recursive=x.portage1_directories,
                                     remember_source_file=True,
                                     verify_eapi=True))
        profile_pkgmasklines = stack_lists(profile_pkgmasklines, incremental=1, \
         remember_source_file=True, warn_for_unmatched_removal=True,
         strict_warn_for_unmatched_removal=strict_umatched_removal)
        profile_pkgunmasklines = stack_lists(profile_pkgunmasklines, incremental=1, \
         remember_source_file=True, warn_for_unmatched_removal=True,
         strict_warn_for_unmatched_removal=strict_umatched_removal)

        #Read /etc/portage/package.mask. Don't stack it to allow the user to
        #remove mask atoms from everywhere with -atoms.
        user_pkgmasklines = []
        user_pkgunmasklines = []
        if user_config:
            user_pkgmasklines = grabfile_package(
             os.path.join(abs_user_config, "package.mask"), recursive=1, \
             allow_wildcard=True, allow_repo=True, remember_source_file=True, verify_eapi=False)
            user_pkgunmasklines = grabfile_package(
             os.path.join(abs_user_config, "package.unmask"), recursive=1, \
             allow_wildcard=True, allow_repo=True, remember_source_file=True, verify_eapi=False)

        #Stack everything together. At this point, only user_pkgmasklines may contain -atoms.
        #Don't warn for unmatched -atoms here, since we don't do it for any other user config file.
        raw_pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines], \
         incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
        pkgmasklines = stack_lists([repo_pkgmasklines, profile_pkgmasklines, user_pkgmasklines], \
         incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)
        pkgunmasklines = stack_lists([repo_pkgunmasklines, profile_pkgunmasklines, user_pkgunmasklines], \
         incremental=1, remember_source_file=True, warn_for_unmatched_removal=False, ignore_repo=True)

        for x, source_file in raw_pkgmasklines:
            self._pmaskdict_raw.setdefault(x.cp, []).append(x)

        for x, source_file in pkgmasklines:
            self._pmaskdict.setdefault(x.cp, []).append(x)

        for x, source_file in pkgunmasklines:
            self._punmaskdict.setdefault(x.cp, []).append(x)

        for d in (self._pmaskdict_raw, self._pmaskdict, self._punmaskdict):
            for k, v in d.items():
                d[k] = tuple(v)
Example #30
	def __init__(self, paths, settings):
		"""Load config from files in paths"""

		prepos = {}
		location_map = {}
		treemap = {}
		ignored_map = {}
		ignored_location_map = {}

		portdir = settings.get('PORTDIR', '')
		portdir_overlay = settings.get('PORTDIR_OVERLAY', '')

		self._parse(paths, prepos, ignored_map, ignored_location_map)

		# If PORTDIR_OVERLAY contains a repo with the same repo_name as
		# PORTDIR, then PORTDIR is overridden.
		portdir = self._add_overlays(portdir, portdir_overlay, prepos,
			ignored_map, ignored_location_map)
		if portdir and portdir.strip():
			portdir = os.path.realpath(portdir)

		ignored_repos = tuple((repo_name, tuple(paths)) \
			for repo_name, paths in ignored_map.items())

		self.missing_repo_names = frozenset(repo.location
			for repo in prepos.values()
			if repo.location is not None and repo.missing_repo_name)

		#Parse layout.conf and read masters key.
		for repo in prepos.values():
			if not repo.location:
				continue
			layout_filename = os.path.join(repo.location, "metadata", "layout.conf")
			layout_data, layout_errors = parse_layout_conf(repo.location, repo.name)

			# layout.conf masters may be overridden here if we have a masters
			# setting from the user's repos.conf
			if repo.masters is None:
				repo.masters = layout_data['masters']

			if layout_data['aliases']:
				aliases = repo.aliases
				if aliases is None:
					aliases = ()
				# repos.conf aliases come after layout.conf aliases, giving
				# them the ability to do incremental overrides
				repo.aliases = layout_data['aliases'] + tuple(aliases)

			for value in ('allow-missing-manifest', 'cache-formats',
				'create-manifest', 'disable-manifest', 'manifest-hashes',
				'sign-manifest', 'thin-manifest', 'update-changelog'):
				setattr(repo, value.lower().replace("-", "_"), layout_data[value])

			repo.portage1_profiles = any(x.startswith("portage-1") \
				for x in layout_data['profile-formats'])
			repo.portage1_profiles_compat = layout_data['profile-formats'] == ('portage-1-compat',)

		#Take aliases into account.
		new_prepos = {}
		for repo_name, repo in prepos.items():
			names = set()
			names.add(repo_name)
			if repo.aliases:
				aliases = stack_lists([repo.aliases], incremental=True)
				names.update(aliases)

			for name in names:
				if name in new_prepos:
					writemsg_level(_("!!! Repository name or alias '%s', " + \
						"defined for repository '%s', overrides " + \
						"existing alias or repository.\n") % (name, repo_name), level=logging.WARNING, noiselevel=-1)
				new_prepos[name] = repo
		prepos = new_prepos

		for (name, r) in prepos.items():
			if r.location is not None:
				location_map[r.location] = name
				treemap[name] = r.location

		# filter duplicates from aliases, by only including
		# items where repo.name == key

		prepos_order = sorted(prepos.items(), key=lambda r:r[1].priority or 0)

		prepos_order = [repo.name for (key, repo) in prepos_order
			if repo.name == key and repo.location is not None]

		if portdir in location_map:
			portdir_repo = prepos[location_map[portdir]]
			portdir_sync = settings.get('SYNC', '')
			#if SYNC variable is set and not overwritten by repos.conf
			if portdir_sync and not portdir_repo.sync:
				portdir_repo.sync = portdir_sync

		if prepos['DEFAULT'].main_repo is None or \
			prepos['DEFAULT'].main_repo not in prepos:
			#setting main_repo if it was not set in repos.conf
			if portdir in location_map:
				prepos['DEFAULT'].main_repo = location_map[portdir]
			elif portdir in ignored_location_map:
				prepos['DEFAULT'].main_repo = ignored_location_map[portdir]
			else:
				prepos['DEFAULT'].main_repo = None
				writemsg(_("!!! main-repo not set in DEFAULT and PORTDIR is empty. \n"), noiselevel=-1)

		self.prepos = prepos
		self.prepos_order = prepos_order
		self.ignored_repos = ignored_repos
		self.location_map = location_map
		self.treemap = treemap
		self._prepos_changed = True
		self._repo_location_list = []

		#The 'masters' key currently contains repo names. Replace them with the matching RepoConfig.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue
			if repo.masters is None:
				if self.mainRepo() and repo_name != self.mainRepo().name:
					repo.masters = self.mainRepo(),
				else:
					repo.masters = ()
			else:
				if repo.masters and isinstance(repo.masters[0], RepoConfig):
					# This one has already been processed
					# because it has an alias.
					continue
				master_repos = []
				for master_name in repo.masters:
					if master_name not in prepos:
						layout_filename = os.path.join(repo.user_location,
							"metadata", "layout.conf")
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by masters entry in '%s'\n") % \
							(master_name, layout_filename),
							level=logging.ERROR, noiselevel=-1)
					else:
						master_repos.append(prepos[master_name])
				repo.masters = tuple(master_repos)

		#The 'eclass_overrides' key currently contains repo names. Replace them with the matching repo paths.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			eclass_locations = []
			eclass_locations.extend(master_repo.location for master_repo in repo.masters)
			eclass_locations.append(repo.location)

			if repo.eclass_overrides:
				for other_repo_name in repo.eclass_overrides:
					if other_repo_name in self.treemap:
						eclass_locations.append(self.get_location_for_name(other_repo_name))
					else:
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by eclass-overrides entry for " \
							"'%s'\n") % (other_repo_name, repo_name), \
							level=logging.ERROR, noiselevel=-1)
			repo.eclass_locations = tuple(eclass_locations)

		self._prepos_changed = True
		self._repo_location_list = []

		self._check_locations()
Example #31
	def __init__(self, paths, settings):
		"""Load config from files in paths"""

		prepos = {}
		location_map = {}
		treemap = {}
		ignored_map = {}
		default_opts = {
			"EPREFIX" : settings["EPREFIX"],
			"EROOT" : settings["EROOT"],
			"PORTAGE_CONFIGROOT" : settings["PORTAGE_CONFIGROOT"],
			"ROOT" : settings["ROOT"],
		}

		if "PORTAGE_REPOSITORIES" in settings:
			portdir = ""
			portdir_overlay = ""
			# deprecated portdir_sync
			portdir_sync = ""
		else:
			portdir = settings.get("PORTDIR", "")
			portdir_overlay = settings.get("PORTDIR_OVERLAY", "")
			# deprecated portdir_sync
			portdir_sync = settings.get("SYNC", "")

		default_opts['sync-rsync-extra-opts'] = \
			settings.get("PORTAGE_RSYNC_EXTRA_OPTS", "")

		try:
			self._parse(paths, prepos, settings.local_config, default_opts)
		except ConfigParserError as e:
			writemsg(
				_("!!! Error while reading repo config file: %s\n") % e,
				noiselevel=-1)
			# The configparser state is unreliable (prone to quirky
			# exceptions) after it has thrown an error, so use empty
			# config and try to fall back to PORTDIR{,_OVERLAY}.
			prepos.clear()
			prepos['DEFAULT'] = RepoConfig('DEFAULT',
				{}, local_config=settings.local_config)
			location_map.clear()
			treemap.clear()

		default_portdir = os.path.join(os.sep,
			settings['EPREFIX'].lstrip(os.sep), 'usr', 'portage')

		# If PORTDIR_OVERLAY contains a repo with the same repo_name as
		# PORTDIR, then PORTDIR is overridden.
		portdir = self._add_repositories(portdir, portdir_overlay, prepos,
			ignored_map, settings.local_config,
			default_portdir)
		if portdir and portdir.strip():
			portdir = os.path.realpath(portdir)

		ignored_repos = tuple((repo_name, tuple(paths)) \
			for repo_name, paths in ignored_map.items())

		self.missing_repo_names = frozenset(repo.location
			for repo in prepos.values()
			if repo.location is not None and repo.missing_repo_name)

		# Do this before expanding aliases, so that location_map and
		# treemap consistently map unaliased names whenever available.
		for repo_name, repo in list(prepos.items()):
			if repo.location is None:
				if repo_name != 'DEFAULT':
					# Skip this warning for repoman (bug #474578).
					if settings.local_config and paths:
						writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf is missing location attribute") %
							repo.name, level=logging.ERROR, noiselevel=-1)
					del prepos[repo_name]
					continue
			else:
				if not portage._sync_mode:
					if not isdir_raise_eaccess(repo.location):
						writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf has location attribute set "
							"to nonexistent directory: '%s'") %
							(repo_name, repo.location), level=logging.ERROR, noiselevel=-1)

						# Ignore missing directory for 'gentoo' so that
						# first sync with emerge-webrsync is possible.
						if repo.name != 'gentoo':
							del prepos[repo_name]
							continue

					# After removing support for PORTDIR_OVERLAY, the following check can be:
					# if repo.missing_repo_name:
					if repo.missing_repo_name and repo.name != repo_name:
						writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf refers to repository "
							"without repository name set in '%s'") %
							(repo_name, os.path.join(repo.location, REPO_NAME_LOC)), level=logging.ERROR, noiselevel=-1)
						del prepos[repo_name]
						continue

					if repo.name != repo_name:
						writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf has name different "
							"from repository name '%s' set inside repository") %
							(repo_name, repo.name), level=logging.ERROR, noiselevel=-1)
						del prepos[repo_name]
						continue

				location_map[repo.location] = repo_name
				treemap[repo_name] = repo.location

		# Add alias mappings, but never replace unaliased mappings.
		for repo_name, repo in list(prepos.items()):
			names = set()
			names.add(repo_name)
			if repo.aliases:
				aliases = stack_lists([repo.aliases], incremental=True)
				names.update(aliases)

			for name in names:
				if name in prepos and prepos[name].location is not None:
					if name == repo_name:
						# unaliased names already handled earlier
						continue
					writemsg_level(_("!!! Repository name or alias '%s', " + \
						"defined for repository '%s', overrides " + \
						"existing alias or repository.\n") % (name, repo_name), level=logging.WARNING, noiselevel=-1)
					# Never replace an unaliased mapping with
					# an aliased mapping.
					continue
				prepos[name] = repo
				if repo.location is not None:
					if repo.location not in location_map:
						# Never replace an unaliased mapping with
						# an aliased mapping.
						location_map[repo.location] = name
					treemap[name] = repo.location

		main_repo = prepos['DEFAULT'].main_repo
		if main_repo is None or main_repo not in prepos:
			#setting main_repo if it was not set in repos.conf
			main_repo = location_map.get(portdir)
			if main_repo is not None:
				prepos['DEFAULT'].main_repo = main_repo
			else:
				prepos['DEFAULT'].main_repo = None
				if portdir and not portage._sync_mode:
					writemsg(_("!!! main-repo not set in DEFAULT and PORTDIR is empty.\n"), noiselevel=-1)

		if main_repo is not None and prepos[main_repo].priority is None:
			# This happens if main-repo has been set in repos.conf.
			prepos[main_repo].priority = -1000

		# DEPRECATED Backward compatible SYNC support for old mirrorselect.
		# Feb. 2, 2015.  Version 2.2.16
		if portdir_sync and main_repo is not None:
			writemsg(_("!!! SYNC setting found in make.conf.\n    "
				"This setting is Deprecated and no longer used.  "
				"Please ensure your 'sync-type' and 'sync-uri' are set correctly"
				" in /etc/portage/repos.conf/gentoo.conf\n"),
				noiselevel=-1)


		# Include repo.name in sort key, for predictable sorting
		# even when priorities are equal.
		prepos_order = sorted(prepos.items(),
			key=lambda r:(r[1].priority or 0, r[1].name))

		# filter duplicates from aliases, by only including
		# items where repo.name == key
		prepos_order = [repo.name for (key, repo) in prepos_order
			if repo.name == key and key != 'DEFAULT' and
			repo.location is not None]

		self.prepos = prepos
		self.prepos_order = prepos_order
		self.ignored_repos = ignored_repos
		self.location_map = location_map
		self.treemap = treemap
		self._prepos_changed = True
		self._repo_location_list = []

		#The 'masters' key currently contains repo names. Replace them with the matching RepoConfig.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue
			if repo.masters is None:
				if self.mainRepo() and repo_name != self.mainRepo().name:
					repo.masters = self.mainRepo(),
				else:
					repo.masters = ()
			else:
				if repo.masters and isinstance(repo.masters[0], RepoConfig):
					# This one has already been processed
					# because it has an alias.
					continue
				master_repos = []
				for master_name in repo.masters:
					if master_name not in prepos:
						layout_filename = os.path.join(repo.location,
							"metadata", "layout.conf")
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by masters entry in '%s'\n") % \
							(master_name, layout_filename),
							level=logging.ERROR, noiselevel=-1)
					else:
						master_repos.append(prepos[master_name])
				repo.masters = tuple(master_repos)

		#The 'eclass_overrides' key currently contains repo names. Replace them with the matching repo paths.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			eclass_locations = []
			eclass_locations.extend(master_repo.location for master_repo in repo.masters)
			# Only append the current repo to eclass_locations if it's not
			# there already. This allows masters to have more control over
			# eclass override order, which may be useful for scenarios in
			# which there is a plan to migrate eclasses to a master repo.
			if repo.location not in eclass_locations:
				eclass_locations.append(repo.location)

			if repo.eclass_overrides:
				for other_repo_name in repo.eclass_overrides:
					if other_repo_name in self.treemap:
						eclass_locations.append(self.get_location_for_name(other_repo_name))
					else:
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by eclass-overrides entry for " \
							"'%s'\n") % (other_repo_name, repo_name), \
							level=logging.ERROR, noiselevel=-1)
			repo.eclass_locations = tuple(eclass_locations)

		eclass_dbs = {}
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			eclass_db = None
			for eclass_location in repo.eclass_locations:
				tree_db = eclass_dbs.get(eclass_location)
				if tree_db is None:
					tree_db = eclass_cache.cache(eclass_location)
					eclass_dbs[eclass_location] = tree_db
				if eclass_db is None:
					eclass_db = tree_db.copy()
				else:
					eclass_db.append(tree_db)
			repo.eclass_db = eclass_db

		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			if repo._masters_orig is None and self.mainRepo() and \
				repo.name != self.mainRepo().name and not portage._sync_mode:
				# TODO: Delete masters code in pym/portage/tests/resolver/ResolverPlayground.py when deleting this warning.
				writemsg_level("!!! %s\n" % _("Repository '%s' is missing masters attribute in '%s'") %
					(repo.name, os.path.join(repo.location, "metadata", "layout.conf")) +
					"!!! %s\n" % _("Set 'masters = %s' in this file for future compatibility") %
					self.mainRepo().name, level=logging.WARNING, noiselevel=-1)

		self._prepos_changed = True
		self._repo_location_list = []

		self._check_locations()
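
These __init__ variants look like portage's RepoConfigLoader (portage.repository.config), which builds the repository configuration from repos.conf plus the legacy PORTDIR/PORTDIR_OVERLAY variables. A minimal, hypothetical usage sketch under that assumption; the config path is illustrative, and only attributes the snippet itself exposes (prepos_order, treemap, mainRepo()) are touched:

import portage
from portage.repository.config import RepoConfigLoader

# Clone the live Portage settings object; /etc/portage/repos.conf is the
# conventional config location, assumed here for illustration.
settings = portage.config(clone=portage.settings)
loader = RepoConfigLoader(["/etc/portage/repos.conf"], settings)

print(loader.prepos_order)   # repo names sorted by priority, then name
print(loader.treemap)        # repo name -> repository location
main = loader.mainRepo()
print(main.name if main else None)   # typically 'gentoo' when main-repo is set
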
Example #32
    def __init__(self, paths, settings):
        """Load config from files in paths"""

        prepos = {}
        location_map = {}
        treemap = {}
        ignored_map = {}
        default_opts = {
            "EPREFIX": settings["EPREFIX"],
            "EROOT": settings["EROOT"],
            "PORTAGE_CONFIGROOT": settings["PORTAGE_CONFIGROOT"],
            "ROOT": settings["ROOT"],
        }

        if "PORTAGE_REPOSITORIES" in settings:
            portdir = ""
            portdir_overlay = ""
            # deprecated portdir_sync
            portdir_sync = ""
        else:
            portdir = settings.get("PORTDIR", "")
            portdir_overlay = settings.get("PORTDIR_OVERLAY", "")
            # deprecated portdir_sync
            portdir_sync = settings.get("SYNC", "")

        default_opts['sync-rsync-extra-opts'] = \
         settings.get("PORTAGE_RSYNC_EXTRA_OPTS", None)

        try:
            self._parse(paths, prepos, settings.local_config, default_opts)
        except ConfigParserError as e:
            writemsg(_("!!! Error while reading repo config file: %s\n") % e,
                     noiselevel=-1)
            # The configparser state is unreliable (prone to quirky
            # exceptions) after it has thrown an error, so use empty
            # config and try to fall back to PORTDIR{,_OVERLAY}.
            prepos.clear()
            prepos['DEFAULT'] = RepoConfig('DEFAULT', {},
                                           local_config=settings.local_config)
            location_map.clear()
            treemap.clear()

        default_portdir = os.path.join(os.sep,
                                       settings['EPREFIX'].lstrip(os.sep),
                                       'usr', 'portage')

        # If PORTDIR_OVERLAY contains a repo with the same repo_name as
        # PORTDIR, then PORTDIR is overridden.
        portdir = self._add_repositories(portdir, portdir_overlay, prepos,
                                         ignored_map, settings.local_config,
                                         default_portdir)
        if portdir and portdir.strip():
            portdir = os.path.realpath(portdir)

        ignored_repos = tuple((repo_name, tuple(paths)) \
         for repo_name, paths in ignored_map.items())

        self.missing_repo_names = frozenset(
            repo.location for repo in prepos.values()
            if repo.location is not None and repo.missing_repo_name)

        # Do this before expanding aliases, so that location_map and
        # treemap consistently map unaliased names whenever available.
        for repo_name, repo in list(prepos.items()):
            if repo.location is None:
                if repo_name != 'DEFAULT':
                    # Skip this warning for repoman (bug #474578).
                    if settings.local_config and paths:
                        writemsg_level("!!! %s\n" % _(
                            "Section '%s' in repos.conf is missing location attribute"
                        ) % repo.name,
                                       level=logging.ERROR,
                                       noiselevel=-1)
                    del prepos[repo_name]
                    continue
            else:
                if not portage._sync_mode:
                    if not isdir_raise_eaccess(repo.location):
                        writemsg_level("!!! %s\n" % _(
                            "Section '%s' in repos.conf has location attribute set "
                            "to nonexistent directory: '%s'") %
                                       (repo_name, repo.location),
                                       level=logging.ERROR,
                                       noiselevel=-1)

                        # Ignore missing directory for 'gentoo' so that
                        # first sync with emerge-webrsync is possible.
                        if repo.name != 'gentoo':
                            del prepos[repo_name]
                            continue

                    # After removing support for PORTDIR_OVERLAY, the following check can be:
                    # if repo.missing_repo_name:
                    if repo.missing_repo_name and repo.name != repo_name:
                        writemsg_level("!!! %s\n" % _(
                            "Section '%s' in repos.conf refers to repository "
                            "without repository name set in '%s'") %
                                       (repo_name,
                                        os.path.join(repo.location,
                                                     REPO_NAME_LOC)),
                                       level=logging.ERROR,
                                       noiselevel=-1)
                        del prepos[repo_name]
                        continue

                    if repo.name != repo_name:
                        writemsg_level("!!! %s\n" % _(
                            "Section '%s' in repos.conf has name different "
                            "from repository name '%s' set inside repository")
                                       % (repo_name, repo.name),
                                       level=logging.ERROR,
                                       noiselevel=-1)
                        del prepos[repo_name]
                        continue

                location_map[repo.location] = repo_name
                treemap[repo_name] = repo.location

        # Add alias mappings, but never replace unaliased mappings.
        for repo_name, repo in list(prepos.items()):
            names = set()
            names.add(repo_name)
            if repo.aliases:
                aliases = stack_lists([repo.aliases], incremental=True)
                names.update(aliases)

            for name in names:
                if name in prepos and prepos[name].location is not None:
                    if name == repo_name:
                        # unaliased names already handled earlier
                        continue
                    writemsg_level(_("!!! Repository name or alias '%s', " + \
                     "defined for repository '%s', overrides " + \
                     "existing alias or repository.\n") % (name, repo_name), level=logging.WARNING, noiselevel=-1)
                    # Never replace an unaliased mapping with
                    # an aliased mapping.
                    continue
                prepos[name] = repo
                if repo.location is not None:
                    if repo.location not in location_map:
                        # Never replace an unaliased mapping with
                        # an aliased mapping.
                        location_map[repo.location] = name
                    treemap[name] = repo.location

        main_repo = prepos['DEFAULT'].main_repo
        if main_repo is None or main_repo not in prepos:
            #setting main_repo if it was not set in repos.conf
            main_repo = location_map.get(portdir)
            if main_repo is not None:
                prepos['DEFAULT'].main_repo = main_repo
            else:
                prepos['DEFAULT'].main_repo = None
                if portdir and not portage._sync_mode:
                    writemsg(_(
                        "!!! main-repo not set in DEFAULT and PORTDIR is empty.\n"
                    ),
                             noiselevel=-1)

        if main_repo is not None and prepos[main_repo].priority is None:
            # This happens if main-repo has been set in repos.conf.
            prepos[main_repo].priority = -1000

        # DEPRECATED Backward compatible SYNC support for old mirrorselect.
        # Feb. 2, 2015.  Version 2.2.16
        if portdir_sync and main_repo is not None:
            writemsg(_(
                "!!! SYNC setting found in make.conf.\n    "
                "This setting is Deprecated and no longer used.  "
                "Please ensure your 'sync-type' and 'sync-uri' are set correctly"
                " in /etc/portage/repos.conf/gentoo.conf\n"),
                     noiselevel=-1)

        # Include repo.name in sort key, for predictable sorting
        # even when priorities are equal.
        prepos_order = sorted(prepos.items(),
                              key=lambda r: (r[1].priority or 0, r[1].name))

        # filter duplicates from aliases, by only including
        # items where repo.name == key
        prepos_order = [
            repo.name for (key, repo) in prepos_order if repo.name == key
            and key != 'DEFAULT' and repo.location is not None
        ]

        self.prepos = prepos
        self.prepos_order = prepos_order
        self.ignored_repos = ignored_repos
        self.location_map = location_map
        self.treemap = treemap
        self._prepos_changed = True
        self._repo_location_list = []

        #The 'masters' key currently contains repo names. Replace them with the matching RepoConfig.
        for repo_name, repo in prepos.items():
            if repo_name == "DEFAULT":
                continue
            if repo.masters is None:
                if self.mainRepo() and repo_name != self.mainRepo().name:
                    repo.masters = self.mainRepo(),
                else:
                    repo.masters = ()
            else:
                if repo.masters and isinstance(repo.masters[0], RepoConfig):
                    # This one has already been processed
                    # because it has an alias.
                    continue
                master_repos = []
                for master_name in repo.masters:
                    if master_name not in prepos:
                        layout_filename = os.path.join(repo.location,
                                                       "metadata",
                                                       "layout.conf")
                        writemsg_level(_("Unavailable repository '%s' " \
                         "referenced by masters entry in '%s'\n") % \
                         (master_name, layout_filename),
                         level=logging.ERROR, noiselevel=-1)
                    else:
                        master_repos.append(prepos[master_name])
                repo.masters = tuple(master_repos)

        #The 'eclass_overrides' key currently contains repo names. Replace them with the matching repo paths.
        for repo_name, repo in prepos.items():
            if repo_name == "DEFAULT":
                continue

            eclass_locations = []
            eclass_locations.extend(master_repo.location
                                    for master_repo in repo.masters)
            # Only append the current repo to eclass_locations if it's not
            # there already. This allows masters to have more control over
            # eclass override order, which may be useful for scenarios in
            # which there is a plan to migrate eclasses to a master repo.
            if repo.location not in eclass_locations:
                eclass_locations.append(repo.location)

            if repo.eclass_overrides:
                for other_repo_name in repo.eclass_overrides:
                    if other_repo_name in self.treemap:
                        eclass_locations.append(
                            self.get_location_for_name(other_repo_name))
                    else:
                        writemsg_level(_("Unavailable repository '%s' " \
                         "referenced by eclass-overrides entry for " \
                         "'%s'\n") % (other_repo_name, repo_name), \
                         level=logging.ERROR, noiselevel=-1)
            repo.eclass_locations = tuple(eclass_locations)

        eclass_dbs = {}
        for repo_name, repo in prepos.items():
            if repo_name == "DEFAULT":
                continue

            eclass_db = None
            for eclass_location in repo.eclass_locations:
                tree_db = eclass_dbs.get(eclass_location)
                if tree_db is None:
                    tree_db = eclass_cache.cache(eclass_location)
                    eclass_dbs[eclass_location] = tree_db
                if eclass_db is None:
                    eclass_db = tree_db.copy()
                else:
                    eclass_db.append(tree_db)
            repo.eclass_db = eclass_db

        for repo_name, repo in prepos.items():
            if repo_name == "DEFAULT":
                continue

            if repo._masters_orig is None and self.mainRepo() and \
             repo.name != self.mainRepo().name and not portage._sync_mode:
                # TODO: Delete masters code in pym/portage/tests/resolver/ResolverPlayground.py when deleting this warning.
                writemsg_level(
                    "!!! %s\n" %
                    _("Repository '%s' is missing masters attribute in '%s'") %
                    (repo.name,
                     os.path.join(repo.location, "metadata", "layout.conf")) +
                    "!!! %s\n" %
                    _("Set 'masters = %s' in this file for future compatibility"
                      ) % self.mainRepo().name,
                    level=logging.WARNING,
                    noiselevel=-1)

        self._prepos_changed = True
        self._repo_location_list = []

        self._check_locations()
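
The deprecation branch above tells users to stop setting SYNC in make.conf and to configure 'sync-type' and 'sync-uri' in /etc/portage/repos.conf/gentoo.conf instead. A small sketch of writing such a section with the standard-library configparser; the location, sync URI, and output filename are typical, illustrative values rather than anything taken from the snippet:

import configparser

cfg = configparser.ConfigParser()
cfg["gentoo"] = {
    "location": "/usr/portage",                             # matches default_portdir above (empty EPREFIX)
    "sync-type": "rsync",
    "sync-uri": "rsync://rsync.gentoo.org/gentoo-portage",  # a typical rsync mirror
    "auto-sync": "yes",
}

# Write an example file; in practice this content would live in
# /etc/portage/repos.conf/gentoo.conf.
with open("gentoo.conf.example", "w") as f:
    cfg.write(f)
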
Example #33
	def __init__(self, paths, settings):
		"""Load config from files in paths"""

		prepos = {}
		location_map = {}
		treemap = {}
		ignored_map = {}
		ignored_location_map = {}

		portdir = settings.get('PORTDIR', '')
		portdir_overlay = settings.get('PORTDIR_OVERLAY', '')

		self._parse(paths, prepos, ignored_map, ignored_location_map)

		# If PORTDIR_OVERLAY contains a repo with the same repo_name as
		# PORTDIR, then PORTDIR is overridden.
		portdir = self._add_repositories(portdir, portdir_overlay, prepos,
			ignored_map, ignored_location_map)
		if portdir and portdir.strip():
			portdir = os.path.realpath(portdir)

		ignored_repos = tuple((repo_name, tuple(paths)) \
			for repo_name, paths in ignored_map.items())

		self.missing_repo_names = frozenset(repo.location
			for repo in prepos.values()
			if repo.location is not None and repo.missing_repo_name)

		#Take aliases into account.
		new_prepos = {}
		for repo_name, repo in prepos.items():
			names = set()
			names.add(repo_name)
			if repo.aliases:
				aliases = stack_lists([repo.aliases], incremental=True)
				names.update(aliases)

			for name in names:
				if name in new_prepos:
					writemsg_level(_("!!! Repository name or alias '%s', " + \
						"defined for repository '%s', overrides " + \
						"existing alias or repository.\n") % (name, repo_name), level=logging.WARNING, noiselevel=-1)
				new_prepos[name] = repo
		prepos = new_prepos

		for (name, r) in prepos.items():
			if r.location is not None:
				location_map[r.location] = name
				treemap[name] = r.location

		# filter duplicates from aliases, by only including
		# items where repo.name == key

		prepos_order = sorted(prepos.items(), key=lambda r:r[1].priority or 0)

		prepos_order = [repo.name for (key, repo) in prepos_order
			if repo.name == key and repo.location is not None]

		if prepos['DEFAULT'].main_repo is None or \
			prepos['DEFAULT'].main_repo not in prepos:
			#setting main_repo if it was not set in repos.conf
			if portdir in location_map:
				prepos['DEFAULT'].main_repo = location_map[portdir]
			elif portdir in ignored_location_map:
				prepos['DEFAULT'].main_repo = ignored_location_map[portdir]
			else:
				prepos['DEFAULT'].main_repo = None
				writemsg(_("!!! main-repo not set in DEFAULT and PORTDIR is empty. \n"), noiselevel=-1)

		self.prepos = prepos
		self.prepos_order = prepos_order
		self.ignored_repos = ignored_repos
		self.location_map = location_map
		self.treemap = treemap
		self._prepos_changed = True
		self._repo_location_list = []

		#The 'masters' key currently contains repo names. Replace them with the matching RepoConfig.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue
			if repo.masters is None:
				if self.mainRepo() and repo_name != self.mainRepo().name:
					repo.masters = self.mainRepo(),
				else:
					repo.masters = ()
			else:
				if repo.masters and isinstance(repo.masters[0], RepoConfig):
					# This one has already been processed
					# because it has an alias.
					continue
				master_repos = []
				for master_name in repo.masters:
					if master_name not in prepos:
						layout_filename = os.path.join(repo.user_location,
							"metadata", "layout.conf")
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by masters entry in '%s'\n") % \
							(master_name, layout_filename),
							level=logging.ERROR, noiselevel=-1)
					else:
						master_repos.append(prepos[master_name])
				repo.masters = tuple(master_repos)

		#The 'eclass_overrides' key currently contains repo names. Replace them with the matching repo paths.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			eclass_locations = []
			eclass_locations.extend(master_repo.location for master_repo in repo.masters)
			# Only append the current repo to eclass_locations if it's not
			# there already. This allows masters to have more control over
			# eclass override order, which may be useful for scenarios in
			# which there is a plan to migrate eclasses to a master repo.
			if repo.location not in eclass_locations:
				eclass_locations.append(repo.location)

			if repo.eclass_overrides:
				for other_repo_name in repo.eclass_overrides:
					if other_repo_name in self.treemap:
						eclass_locations.append(self.get_location_for_name(other_repo_name))
					else:
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by eclass-overrides entry for " \
							"'%s'\n") % (other_repo_name, repo_name), \
							level=logging.ERROR, noiselevel=-1)
			repo.eclass_locations = tuple(eclass_locations)

		eclass_dbs = {}
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			eclass_db = None
			for eclass_location in repo.eclass_locations:
				tree_db = eclass_dbs.get(eclass_location)
				if tree_db is None:
					tree_db = eclass_cache.cache(eclass_location)
					eclass_dbs[eclass_location] = tree_db
				if eclass_db is None:
					eclass_db = tree_db.copy()
				else:
					eclass_db.append(tree_db)
			repo.eclass_db = eclass_db

		self._prepos_changed = True
		self._repo_location_list = []

		self._check_locations()
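
In the eclass_db loop above, one eclass_cache.cache instance is built per eclass location, the first is copied, and each further one is appended, so a repo's own eclasses end up shadowing those inherited from its masters. A small sketch of that stacking pattern in isolation; both locations are hypothetical:

from portage import eclass_cache

master_db = eclass_cache.cache("/usr/portage")           # master repo's eclass/ directory
overlay_db = eclass_cache.cache("/var/lib/my-overlay")   # hypothetical overlay

combined = master_db.copy()
combined.append(overlay_db)        # entries appended later take precedence
print(sorted(combined.eclasses))   # merged mapping of eclass name -> cache entry
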
Example #34
	def __init__(self, paths, settings):
		"""Load config from files in paths"""

		prepos = {}
		location_map = {}
		treemap = {}
		ignored_map = {}
		ignored_location_map = {}

		portdir = settings.get('PORTDIR', '')
		portdir_overlay = settings.get('PORTDIR_OVERLAY', '')

		self._parse(paths, prepos, ignored_map, ignored_location_map)

		# If PORTDIR_OVERLAY contains a repo with the same repo_name as
		# PORTDIR, then PORTDIR is overridden.
		portdir = self._add_repositories(portdir, portdir_overlay, prepos,
			ignored_map, ignored_location_map)
		if portdir and portdir.strip():
			portdir = os.path.realpath(portdir)

		ignored_repos = tuple((repo_name, tuple(paths)) \
			for repo_name, paths in ignored_map.items())

		self.missing_repo_names = frozenset(repo.location
			for repo in prepos.values()
			if repo.location is not None and repo.missing_repo_name)

		#Take aliases into account.
		new_prepos = {}
		for repo_name, repo in prepos.items():
			names = set()
			names.add(repo_name)
			if repo.aliases:
				aliases = stack_lists([repo.aliases], incremental=True)
				names.update(aliases)

			for name in names:
				if name in new_prepos:
					writemsg_level(_("!!! Repository name or alias '%s', " + \
						"defined for repository '%s', overrides " + \
						"existing alias or repository.\n") % (name, repo_name), level=logging.WARNING, noiselevel=-1)
				new_prepos[name] = repo
		prepos = new_prepos

		for (name, r) in prepos.items():
			if r.location is not None:
				location_map[r.location] = name
				treemap[name] = r.location

		# filter duplicates from aliases, by only including
		# items where repo.name == key

		prepos_order = sorted(prepos.items(), key=lambda r:r[1].priority or 0)

		prepos_order = [repo.name for (key, repo) in prepos_order
			if repo.name == key and repo.location is not None]

		if prepos['DEFAULT'].main_repo is None or \
			prepos['DEFAULT'].main_repo not in prepos:
			#setting main_repo if it was not set in repos.conf
			if portdir in location_map:
				prepos['DEFAULT'].main_repo = location_map[portdir]
			elif portdir in ignored_location_map:
				prepos['DEFAULT'].main_repo = ignored_location_map[portdir]
			else:
				prepos['DEFAULT'].main_repo = None
				if not portage._sync_disabled_warnings:
					writemsg(_("Portage repository is currently empty.\n"), noiselevel=-1)

		self.prepos = prepos
		self.prepos_order = prepos_order
		self.ignored_repos = ignored_repos
		self.location_map = location_map
		self.treemap = treemap
		self._prepos_changed = True
		self._repo_location_list = []

		#The 'masters' key currently contains repo names. Replace them with the matching RepoConfig.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue
			if repo.masters is None:
				if self.mainRepo() and repo_name != self.mainRepo().name:
					repo.masters = self.mainRepo(),
				else:
					repo.masters = ()
			else:
				if repo.masters and isinstance(repo.masters[0], RepoConfig):
					# This one has already been processed
					# because it has an alias.
					continue
				master_repos = []
				for master_name in repo.masters:
					if master_name not in prepos:
						layout_filename = os.path.join(repo.user_location,
							"metadata", "layout.conf")
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by masters entry in '%s'\n") % \
							(master_name, layout_filename),
							level=logging.ERROR, noiselevel=0)
					else:
						master_repos.append(prepos[master_name])
				repo.masters = tuple(master_repos)

		#The 'eclass_overrides' key currently contains repo names. Replace them with the matching repo paths.
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			eclass_locations = []
			eclass_locations.extend(master_repo.location for master_repo in repo.masters)
			# Only append the current repo to eclass_locations if it's not
			# there already. This allows masters to have more control over
			# eclass override order, which may be useful for scenarios in
			# which there is a plan to migrate eclasses to a master repo.
			if repo.location not in eclass_locations:
				eclass_locations.append(repo.location)

			if repo.eclass_overrides:
				for other_repo_name in repo.eclass_overrides:
					if other_repo_name in self.treemap:
						eclass_locations.append(self.get_location_for_name(other_repo_name))
					else:
						writemsg_level(_("Unavailable repository '%s' " \
							"referenced by eclass-overrides entry for " \
							"'%s'\n") % (other_repo_name, repo_name), \
							level=logging.ERROR, noiselevel=0)
			repo.eclass_locations = tuple(eclass_locations)

		eclass_dbs = {}
		for repo_name, repo in prepos.items():
			if repo_name == "DEFAULT":
				continue

			eclass_db = None
			for eclass_location in repo.eclass_locations:
				tree_db = eclass_dbs.get(eclass_location)
				if tree_db is None:
					tree_db = eclass_cache.cache(eclass_location)
					eclass_dbs[eclass_location] = tree_db
				if eclass_db is None:
					eclass_db = tree_db.copy()
				else:
					eclass_db.append(tree_db)
			repo.eclass_db = eclass_db

		self._prepos_changed = True
		self._repo_location_list = []

		self._check_locations()
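
Across these variants, prepos_order is built the same way: sort the name -> RepoConfig mapping by priority (newer versions add repo.name as a tiebreaker), then drop alias keys by keeping only entries whose key equals repo.name and whose location is set. A toy sketch of that filtering with hypothetical data:

class Repo:
	def __init__(self, name, priority=None, location=None):
		self.name, self.priority, self.location = name, priority, location

gentoo = Repo("gentoo", priority=-1000, location="/usr/portage")
local = Repo("local", priority=10, location="/var/db/repos/local")

# "portage" is an alias key pointing at the same object as "gentoo".
prepos = {"gentoo": gentoo, "portage": gentoo, "local": local}

prepos_order = sorted(prepos.items(), key=lambda r: (r[1].priority or 0, r[1].name))
prepos_order = [repo.name for key, repo in prepos_order
	if repo.name == key and repo.location is not None]
print(prepos_order)   # ['gentoo', 'local'] -- the alias entry is dropped
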