def setup_repos(namespace, attr):
    """Resolve which repo(s) to operate on and bind them to *attr* on *namespace*."""
    if namespace.repo:
        # The store repo machinery handles --raw and --unfiltered for
        # us, thus it being the first check.
        selected = [namespace.repo]
    elif (namespace.contents or namespace.size or namespace._owns
            or namespace._owns_re or namespace.installed):
        selected = namespace.domain.installed_repos
    elif namespace.unfiltered:
        domain = namespace.domain
        if namespace.all_repos:
            selected = list(domain.installed_repos)
            selected.extend(domain.unfiltered_repos)
        elif namespace.ebuild_repos:
            selected = domain.ebuild_repos_raw
        elif namespace.binary_repos:
            selected = domain.binary_repos_raw
        else:
            selected = domain.unfiltered_repos
    elif namespace.all_repos:
        selected = namespace.domain.repos
    elif namespace.ebuild_repos:
        selected = namespace.domain.ebuild_repos
    elif namespace.binary_repos:
        selected = namespace.domain.binary_repos
    else:
        selected = namespace.domain.source_repos

    # --raw/--virtuals post-process whatever set was chosen above.
    if namespace.raw or namespace.virtuals:
        selected = get_raw_repos(selected)
    if namespace.virtuals:
        selected = get_virtual_repos(selected, namespace.virtuals == 'only')
    setattr(namespace, attr, selected)
def setup_repos(namespace, attr):
    """Pick the repo set matching the parsed options and store it on *attr*."""
    if namespace.repo:
        # The store repo machinery handles --raw and --unfiltered for
        # us, thus it being the first check.
        repos = [namespace.repo]
    elif (namespace.contents or namespace.size or namespace._owns or
            namespace._owns_re or namespace.installed):
        repos = namespace.domain.installed_repos
    elif namespace.unfiltered:
        if namespace.all_repos:
            repos = list(namespace.domain.installed_repos)
            repos.extend(namespace.domain.unfiltered_repos)
        elif namespace.ebuild_repos:
            repos = namespace.domain.ebuild_repos_raw
        elif namespace.binary_repos:
            repos = namespace.domain.binary_repos_raw
        else:
            repos = namespace.domain.unfiltered_repos
    else:
        # Filtered variants only differ by which domain attribute is read.
        if namespace.all_repos:
            domain_attr = 'repos'
        elif namespace.ebuild_repos:
            domain_attr = 'ebuild_repos'
        elif namespace.binary_repos:
            domain_attr = 'binary_repos'
        else:
            domain_attr = 'source_repos'
        repos = getattr(namespace.domain, domain_attr)

    if namespace.raw or namespace.virtuals:
        repos = get_raw_repos(repos)
    if namespace.virtuals:
        repos = get_virtual_repos(repos, namespace.virtuals == 'only')
    setattr(namespace, attr, repos)
def check_args(parser, namespace):
    """Post-parse hook: resolve the repo(s) to operate on onto ``namespace.repos``."""
    # A single explicit repo wins; otherwise fall back to every domain repo.
    repos = (namespace.repo,) if namespace.repo else namespace.domain.repos
    raw = get_raw_repos(repos)
    namespace.repos = get_virtual_repos(raw, False)
def _dist_validate_args(parser, namespace):
    """Validate distfile-cleaning args and queue matching files for removal.

    Builds the set of files in the domain's distdir matched by
    ``namespace.restrict`` (all of them when no restriction is given) and
    stores an iterator of ``(removal_func, path)`` pairs on
    ``namespace.remove`` for the caller to execute.
    """
    distdir = namespace.domain.fetcher.distdir
    repo = multiplex.tree(*get_virtual_repos(namespace.domain.repos, False))
    all_dist_files = set(os.path.basename(f) for f in listdir_files(distdir))
    target_files = set()

    if namespace.restrict:
        for pkg in repo.itermatch(namespace.restrict, sorter=sorted):
            # Skip packages the user asked to preserve: installed and/or
            # fetch-restricted ones.
            if ((namespace.installed and
                    pkg.versioned_atom in namespace.livefs_repo) or
                    (namespace.fetch_restricted and 'fetch' in pkg.restrict)):
                continue
            try:
                target_files.update(
                    fetchable.filename for fetchable in
                    iflatten_instance(pkg.fetchables, fetch.fetchable))
            except errors.MetadataException as e:
                # Corrupt metadata is fatal unless failures are ignored.
                if not namespace.ignore_failures:
                    dist.error(
                        "got corruption error '%s', with package %s " %
                        (e, pkg.cpvstr))
            except Exception as e:
                dist.error(
                    "got error '%s', parsing package %s in repo '%s'" %
                    (e, pkg.cpvstr, pkg.repo))
    else:
        target_files = all_dist_files

    targets = (pjoin(distdir, f) for f in
               sorted(all_dist_files.intersection(target_files)))
    removal_func = partial(os.remove)
    # Bug fix: `ifilter` is the Python 2 itertools name, removed in Python 3;
    # use the builtin `filter`, matching the rest of the file.
    namespace.remove = (
        (removal_func, f) for f in filter(namespace.filters.run, targets))
def __init__(self, src):
    """
    :param src: where to get the glsa from
    :type src: must be either full path to glsa dir, or a repo object
        to pull it from
    """
    if isinstance(src, str):
        # A plain path: wrap it so self.paths is always iterable.
        paths = [src]
    else:
        # A repo object: collect each member repo's metadata/glsa dir,
        # keeping only directories that actually exist on disk.
        candidates = (
            pjoin(repo.base, 'metadata', 'glsa')
            for repo in get_virtual_repos(src, False)
            if hasattr(repo, 'base'))
        paths = tuple(sorted(d for d in candidates if os.path.isdir(d)))
    self.paths = paths
def _dist_validate_args(parser, namespace):
    """Validate distfile-cleaning args and queue files for removal.

    Determines which files in the domain's distdir should be removed --
    honoring the various ``exclude_*`` options and an optional package
    restriction -- and stores ``(removal_func, path)`` pairs on
    ``namespace.remove`` for the caller to execute.
    """
    distdir = namespace.domain.fetcher.distdir
    repo = namespace.repo
    if repo is None:
        # No explicit repo given: operate across all source repos.
        repo = multiplex.tree(
            *get_virtual_repos(namespace.domain.source_repos, False))
    all_dist_files = set(os.path.basename(f) for f in listdir_files(distdir))
    target_files = set()
    installed_dist = set()
    exists_dist = set()
    excludes_dist = set()
    restricted_dist = set()

    # exclude distfiles used by installed packages -- note that this uses the
    # distfiles attr with USE settings bound to it
    if namespace.exclude_installed:
        for pkg in namespace.domain.all_installed_repos:
            installed_dist.update(iflatten_instance(pkg.distfiles))

    # exclude distfiles for existing ebuilds or fetch restrictions
    if namespace.exclude_fetch_restricted or (namespace.exclude_exists and not namespace.restrict):
        for pkg in repo:
            # _raw_pkg (when present) gives the unconfigured pkg view.
            exists_dist.update(
                iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
            if 'fetch' in pkg.restrict:
                restricted_dist.update(
                    iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))

    # exclude distfiles from specified restrictions
    if namespace.exclude_restrict:
        for pkg in repo.itermatch(namespace.exclude_restrict, sorter=sorted):
            excludes_dist.update(
                iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))

    # determine dist files for custom restrict targets
    if namespace.restrict:
        # catpn atom -> pkg -> set of that pkg's distfiles
        target_dist = defaultdict(lambda: defaultdict(set))
        for pkg in repo.itermatch(namespace.restrict, sorter=sorted):
            s = set(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
            target_dist[pkg.unversioned_atom][pkg].update(s)
            if namespace.exclude_exists:
                exists_dist.update(s)

        # Accumulate regex prefixes so distfiles of versions no longer in
        # the tree can still be matched below.
        extra_regex_prefixes = defaultdict(set)
        pkg_regex_prefixes = set()
        for catpn, pkgs in target_dist.items():
            # Package name with every non-word char relaxed to \W.
            pn_regex = r'\W'.join(re.split(r'\W', catpn.package))
            pkg_regex = re.compile(
                r'(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*' % pn_regex,
                re.IGNORECASE)
            pkg_regex_prefixes.add(pn_regex)
            for pkg, files in pkgs.items():
                files = sorted(files)
                for f in files:
                    if (pkg_regex.match(f) or
                            (extra_regex_prefixes and re.match(
                                r'(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+' %
                                '|'.join(extra_regex_prefixes[catpn]), f))):
                        continue
                    else:
                        # Unmatched filename: remember its leading piece as
                        # an extra prefix for this catpn.
                        pieces = re.split(r'([\W?(0-9)+])+(\W\w+)*(\.\w+)+', f)
                        if pieces[-1] == '':
                            pieces.pop()
                        if len(pieces) > 1:
                            extra_regex_prefixes[catpn].add(pieces[0])

        if target_dist:
            regexes = []
            # build regexes to match distfiles for older ebuilds no longer in the tree
            if pkg_regex_prefixes:
                pkg_regex_prefixes_str = '|'.join(sorted(pkg_regex_prefixes))
                regexes.append(
                    re.compile(r'(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*' %
                               (pkg_regex_prefixes_str, )))
            if extra_regex_prefixes:
                extra_regex_prefixes_str = '|'.join(
                    sorted(
                        chain.from_iterable(
                            v for k, v in extra_regex_prefixes.items())))
                regexes.append(
                    re.compile(r'(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+' %
                               (extra_regex_prefixes_str, )))

            if regexes:
                for f in all_dist_files:
                    if any(r.match(f) for r in regexes):
                        target_files.add(f)
    else:
        # No restriction: every distdir file is a candidate.
        target_files = all_dist_files

    # exclude files tagged for saving
    saving_files = installed_dist | exists_dist | excludes_dist | restricted_dist
    target_files.difference_update(saving_files)

    targets = (pjoin(distdir, f) for f in
               sorted(all_dist_files.intersection(target_files)))
    removal_func = partial(os.remove)
    namespace.remove = ((removal_func, f)
                        for f in filter(namespace.file_filters.run, targets))
def _dist_validate_args(parser, namespace):
    """Validate distfile-cleaning args and queue files for removal.

    NOTE(review): near-identical copy of the ``_dist_validate_args``
    defined earlier in this file; only the line wrapping differs.

    Determines which distdir files should be removed -- honoring the
    ``exclude_*`` options and an optional restriction -- and stores
    ``(removal_func, path)`` pairs on ``namespace.remove``.
    """
    distdir = namespace.domain.fetcher.distdir
    repo = namespace.repo
    if repo is None:
        # No explicit repo given: operate across all source repos.
        repo = multiplex.tree(*get_virtual_repos(namespace.domain.source_repos, False))
    all_dist_files = set(os.path.basename(f) for f in listdir_files(distdir))
    target_files = set()
    installed_dist = set()
    exists_dist = set()
    excludes_dist = set()
    restricted_dist = set()

    # exclude distfiles used by installed packages -- note that this uses the
    # distfiles attr with USE settings bound to it
    if namespace.exclude_installed:
        for pkg in namespace.domain.all_installed_repos:
            installed_dist.update(iflatten_instance(pkg.distfiles))

    # exclude distfiles for existing ebuilds or fetch restrictions
    if namespace.exclude_fetch_restricted or (namespace.exclude_exists and not namespace.restrict):
        for pkg in repo:
            # _raw_pkg (when present) gives the unconfigured pkg view.
            exists_dist.update(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
            if 'fetch' in pkg.restrict:
                restricted_dist.update(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))

    # exclude distfiles from specified restrictions
    if namespace.exclude_restrict:
        for pkg in repo.itermatch(namespace.exclude_restrict, sorter=sorted):
            excludes_dist.update(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))

    # determine dist files for custom restrict targets
    if namespace.restrict:
        # catpn atom -> pkg -> set of that pkg's distfiles
        target_dist = defaultdict(lambda: defaultdict(set))
        for pkg in repo.itermatch(namespace.restrict, sorter=sorted):
            s = set(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
            target_dist[pkg.unversioned_atom][pkg].update(s)
            if namespace.exclude_exists:
                exists_dist.update(s)

        # Accumulate regex prefixes so distfiles of versions no longer in
        # the tree can still be matched below.
        extra_regex_prefixes = defaultdict(set)
        pkg_regex_prefixes = set()
        for catpn, pkgs in target_dist.items():
            # Package name with every non-word char relaxed to \W.
            pn_regex = r'\W'.join(re.split(r'\W', catpn.package))
            pkg_regex = re.compile(r'(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*' % pn_regex,
                                   re.IGNORECASE)
            pkg_regex_prefixes.add(pn_regex)
            for pkg, files in pkgs.items():
                files = sorted(files)
                for f in files:
                    if (pkg_regex.match(f) or (
                            extra_regex_prefixes and re.match(
                                r'(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+' %
                                '|'.join(extra_regex_prefixes[catpn]), f))):
                        continue
                    else:
                        # Unmatched filename: remember its leading piece as
                        # an extra prefix for this catpn.
                        pieces = re.split(r'([\W?(0-9)+])+(\W\w+)*(\.\w+)+', f)
                        if pieces[-1] == '':
                            pieces.pop()
                        if len(pieces) > 1:
                            extra_regex_prefixes[catpn].add(pieces[0])

        if target_dist:
            regexes = []
            # build regexes to match distfiles for older ebuilds no longer in the tree
            if pkg_regex_prefixes:
                pkg_regex_prefixes_str = '|'.join(sorted(pkg_regex_prefixes))
                regexes.append(re.compile(r'(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*' % (
                    pkg_regex_prefixes_str,)))
            if extra_regex_prefixes:
                extra_regex_prefixes_str = '|'.join(sorted(chain.from_iterable(
                    v for k, v in extra_regex_prefixes.items())))
                regexes.append(re.compile(r'(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+' % (
                    extra_regex_prefixes_str,)))

            if regexes:
                for f in all_dist_files:
                    if any(r.match(f) for r in regexes):
                        target_files.add(f)
    else:
        # No restriction: every distdir file is a candidate.
        target_files = all_dist_files

    # exclude files tagged for saving
    saving_files = installed_dist | exists_dist | excludes_dist | restricted_dist
    target_files.difference_update(saving_files)

    targets = (pjoin(distdir, f) for f in
               sorted(all_dist_files.intersection(target_files)))
    removal_func = partial(os.remove)
    namespace.remove = (
        (removal_func, f) for f in filter(namespace.file_filters.run, targets))