def main():
    """Print the newest packages' relative paths from the repo named in sys.argv[1].

    Disables every configured repository, re-enables only the requested one(s),
    sets up the package sack for x86_64 + ppc64 and prints the sorted relative
    paths of the newest package per (name, arch).
    """
    base = yum.YumBase()
    # Return values of doConfigSetup()/doRepoSetup() were assigned but never
    # used; the calls are kept for their side effects.
    base.doConfigSetup()
    base.setCacheDir(force=True)
    base.doLock()
    try:
        base.doRepoSetup()
        myrepos = []
        repo_pattern = sys.argv[1]
        myrepos.extend(base.repos.findRepos(repo_pattern))
        # Disable everything, then re-enable only the repos matching argv[1].
        for repo in base.repos.repos.values():
            repo.disable()
        for repo in myrepos:
            repo.enable()
        arches = rpmUtils.arch.getArchList('x86_64')
        arches.extend(rpmUtils.arch.getArchList('ppc64'))
        base.doSackSetup(arches)
        for repo in base.repos.listEnabled():
            reposack = ListPackageSack(base.pkgSack.returnPackages(repoid=repo.id))
            download_list = reposack.returnNewestByNameArch()
            for path in sorted(p.relativepath for p in download_list):
                print(path)
    finally:
        base.closeRpmDB()
        # FIX: the yum lock taken by doLock() was never released before.
        base.doUnlock()
def __whatProvides(self, name, flags, version):
    """Return a ListPackageSack of packages providing (name, flags, version).

    Newer yum accepts a (name, flags, version) tuple for searchProvides();
    older releases only accept the bare provide name, so fall back to the
    old-style call if the tuple form blows up.
    """
    try:
        return ListPackageSack(
            self._repostore.pkgSack.searchProvides((name, flags, version)))
    except Exception:
        # FIX: narrowed from a bare 'except:' so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        # Perhaps we're on an older version of yum -- try old style.
        return ListPackageSack(self._repostore.pkgSack.searchProvides(name))
def list_packages(self, filters, latest):
    """Return the repository's packages as ContentPackage objects.

    filters -- list of ('+'|'-', [pattern]) include/exclude tuples; if empty,
               the repo config's includepkgs/exclude lists are appended to it
               (note: the caller's list is mutated).
    latest  -- if true, keep only the newest package per (name, arch).

    Raises ChannelTimeoutException when a "No more mirrors" RepoError names a
    missing file; re-raises any other RepoError.  Sets self.num_packages and
    (when filtering) self.num_excluded as side effects.
    """
    try:
        self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
    except yum.Errors.RepoError as e:
        if "No more mirrors" in str(e):
            # FIX: raw string for the regex (avoids the invalid '\s' escape
            # under Python 3) and guard against re.search() returning None
            # for an unexpected message format -- previously that crashed
            # with AttributeError instead of surfacing the real error.
            match = re.search(r'failure:\s+(.+)\s+from', str(e))
            if match is None:
                raise
            reqFile = match.groups()[0]
            raise ChannelTimeoutException(
                "Retrieving '%s' failed: File not found in repository '%s'"
                % (reqFile, self.repo))
        else:
            raise
    pkglist = ListPackageSack(self.repo.getPackageSack().returnPackages())
    self.num_packages = len(pkglist)
    if latest:
        pkglist = pkglist.returnNewestByNameArch()
    pkglist = yum.misc.unique(pkglist)
    # NOTE: cmp-style sort argument -- Python 2 list.sort() usage.
    pkglist.sort(self._sort_packages)
    if not filters:
        # if there's no include/exclude filter on command line or in database
        # check repository config file
        for p in self.repo.includepkgs:
            filters.append(('+', [p]))
        for p in self.repo.exclude:
            filters.append(('-', [p]))
    filters = self._expand_package_groups(filters)
    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        pkglist = self._get_package_dependencies(
            self.repo.getPackageSack(), pkglist)
        # do not pull in dependencies if they're explicitly excluded
        pkglist = self._filter_packages(pkglist, filters, True)
        self.num_excluded = self.num_packages - len(pkglist)
    to_return = []
    for pack in pkglist:
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = pack
        # First checksum entry is the primary one; yum's legacy 'sha'
        # really means sha1.
        new_pack.checksum_type = pack.checksums[0][0]
        if new_pack.checksum_type == 'sha':
            new_pack.checksum_type = 'sha1'
        new_pack.checksum = pack.checksums[0][1]
        for cs in pack.checksums:
            new_pack.checksums[cs[0]] = cs[1]
        to_return.append(new_pack)
    return to_return
def list_packages(self, filters, latest):
    """Return the repository's non-source packages as ContentPackage objects.

    filters -- list of ('+'|'-', [pattern]) include/exclude tuples; if empty,
               the repo config's includepkgs/exclude lists are appended to it
               (note: the caller's list is mutated).
    latest  -- if true, keep only the newest package per (name, arch).

    Side effects: sets self.num_packages and (when filtering)
    self.num_excluded.
    """
    # Download/parse the repo's primary metadata into the package sack.
    self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
    pkglist = ListPackageSack(self.repo.getPackageSack().returnPackages())
    self.num_packages = len(pkglist)
    if latest:
        pkglist = pkglist.returnNewestByNameArch()
    pkglist = yum.misc.unique(pkglist)
    # NOTE: cmp-style sort argument -- Python 2 list.sort() usage.
    pkglist.sort(self._sort_packages)
    if not filters:
        # if there's no include/exclude filter on command line or in database
        # check repository config file
        for p in self.repo.includepkgs:
            filters.append(('+', [p]))
        for p in self.repo.exclude:
            filters.append(('-', [p]))
    filters = self._expand_package_groups(filters)
    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        # Pull dependencies of the surviving packages back in ...
        pkglist = self._get_package_dependencies(
            self.repo.getPackageSack(), pkglist)
        # do not pull in dependencies if they're explicitly excluded
        pkglist = self._filter_packages(pkglist, filters, True)
        self.num_excluded = self.num_packages - len(pkglist)
    to_return = []
    for pack in pkglist:
        # Source RPMs are not listed.
        if pack.arch == 'src':
            continue
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = pack
        # First checksum entry is the primary one; yum's legacy 'sha'
        # really means sha1.
        new_pack.checksum_type = pack.checksums[0][0]
        if new_pack.checksum_type == 'sha':
            new_pack.checksum_type = 'sha1'
        new_pack.checksum = pack.checksums[0][1]
        to_return.append(new_pack)
    return to_return
def list_packages(self, filters, latest):
    """Build ContentPackage objects for every non-source package in the repo.

    filters -- list of ('+'|'-', [pattern]) include/exclude tuples; when
               empty, the repo config's includepkgs/exclude lists are
               appended to it (the caller's list is mutated).
    latest  -- keep only the newest package per (name, arch) when true.

    Also records self.num_packages and, when filters apply,
    self.num_excluded.
    """
    self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
    pkglist = ListPackageSack(self.repo.getPackageSack().returnPackages())
    self.num_packages = len(pkglist)
    if latest:
        pkglist = pkglist.returnNewestByNameArch()
    pkglist = yum.misc.unique(pkglist)
    pkglist.sort(self._sort_packages)
    if not filters:
        # No filters from the command line or database: fall back to the
        # include/exclude lists in the repository config file.
        for name in self.repo.includepkgs:
            filters.append(('+', [name]))
        for name in self.repo.exclude:
            filters.append(('-', [name]))
    filters = self._expand_package_groups(filters)
    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        pkglist = self._get_package_dependencies(self.repo.getPackageSack(),
                                                 pkglist)
        # Explicitly excluded packages must not sneak back in as deps.
        pkglist = self._filter_packages(pkglist, filters, True)
        self.num_excluded = self.num_packages - len(pkglist)
    result = []
    for pkg in pkglist:
        if pkg.arch == 'src':
            continue
        item = ContentPackage()
        item.setNVREA(pkg.name, pkg.version, pkg.release, pkg.epoch, pkg.arch)
        item.unique_id = pkg
        # yum's legacy 'sha' checksum type is really sha1.
        ctype = pkg.checksums[0][0]
        item.checksum_type = 'sha1' if ctype == 'sha' else ctype
        item.checksum = pkg.checksums[0][1]
        result.append(item)
    return result
def getBrokenDeps(self, newest=False):
    """Return a dict mapping each package with unsatisfiable requirements
    to the list of (name, flags, version) requirements it cannot resolve.

    newest -- restrict the check to the newest package per (name, arch),
              dropping obsoleted packages first, and only count a
              requirement as resolved when a newest-set package provides it.
    """
    unresolved = {}
    resolved = {}
    pkgs = self.pkgSack
    if newest:
        pkgs = self.pkgSack.returnNewestByNameArch()
        mypkgSack = ListPackageSack(pkgs)
        pkgtuplist = mypkgSack.simplePkgList()
        # toss out any of the obsoleted pkgs so we can't depsolve with them
        self.up = rpmUtils.updates.Updates([], pkgtuplist)
        self.up.rawobsoletes = mypkgSack.returnObsoletes()
        for pkg in pkgs:
            fo = self.up.checkForObsolete([pkg.pkgtup])
            if fo:
                # useful debug to make sure the obsoletes is sane
                #print "ignoring obsolete pkg %s" % pkg
                #for i in fo[pkg.pkgtup]:
                #    print i
                self.pkgSack.delPackage(pkg)
        # we've deleted items so remake the pkgs
        # NOTE(review): pkgtuplist is rebuilt from the pre-deletion
        # mypkgSack, so it may still contain the obsoleted tuples -- confirm.
        pkgs = self.pkgSack.returnNewestByNameArch()
        pkgtuplist = mypkgSack.simplePkgList()
    if self.builddeps:
        # Only source packages matter when checking build dependencies.
        pkgs = filter(lambda x: x.arch == 'src', pkgs)
    pkglist = self.pkgonly
    if self.grouponly:
        if not pkglist:
            pkglist = []
        for group in self.grouponly:
            groupobj = self.comps.return_group(group)
            if not groupobj:
                continue
            pkglist.extend(groupobj.packages)
    if pkglist:
        # Restrict the check to explicitly requested package names.
        pkgs = filter(lambda x: x.name in pkglist, pkgs)
    for pkg in pkgs:
        if pkg.repoid in self.lookaside:
            # don't attempt to resolve dependency issues for
            # packages from lookaside repositories
            continue
        for (req, flags, (reqe, reqv, reqr)) in pkg.returnPrco('requires'):
            if req.startswith('rpmlib'):
                continue  # ignore rpmlib deps
            ver = self.evrTupletoVer((reqe, reqv, reqr))
            if (req, flags, ver) in resolved:
                # Already proven resolvable for an earlier package.
                continue
            resolve_sack = []  # make it empty
            try:
                resolve_sack = self.whatProvides(req, flags, ver)
            except yum.Errors.RepoError, e:
                # Best-effort: a repo error here just leaves the sack empty.
                pass
            if len(resolve_sack) < 1:
                if pkg not in unresolved:
                    unresolved[pkg] = []
                unresolved[pkg].append((req, flags, ver))
                continue
            if newest:
                resolved_by_newest = False
                for po in resolve_sack:
                    # look through and make sure all our answers are newest-only
                    if po.pkgtup in pkgtuplist:
                        resolved_by_newest = True
                        break
                if resolved_by_newest:
                    resolved[(req, flags, ver)] = 1
                else:
                    if pkg not in unresolved:
                        unresolved[pkg] = []
                    unresolved[pkg].append((req, flags, ver))
my.repos.setProgressBar(TextMeter(fo=sys.stdout)) my.doRpmDBSetup() my.doRepoSetup() try: arches = rpmUtils.arch.getArchList(opts.arch) if opts.source: arches += ['src'] my.doSackSetup(arches) except yum.Errors.RepoError, e: print >> sys.stderr, "Error setting up repositories: %s" % e # maybe this shouldn't be entirely fatal sys.exit(1) for repo in my.repos.listEnabled(): reposack = ListPackageSack(my.pkgSack.returnPackages(repoid=repo.id)) if opts.newest: download_list = reposack.returnNewestByNameArch() else: download_list = list(reposack) if opts.norepopath: local_repo_path = opts.destdir else: local_repo_path = opts.destdir + '/' + repo.id if opts.delete and os.path.exists(local_repo_path): current_pkgs = localpkgs(local_repo_path) download_set = {}
def getBrokenDeps(self, newest=False):
    """Return a dict mapping each package with unsatisfiable requirements
    to the list of (name, flags, version) requirements it cannot resolve.

    newest -- restrict the check to the newest package per (name, arch),
              dropping obsoleted packages first, and only count a
              requirement as resolved when a newest-set package provides it.
    """
    unresolved = {}
    resolved = {}
    pkgs = self.pkgSack
    if newest:
        pkgs = self.pkgSack.returnNewestByNameArch()
        mypkgSack = ListPackageSack(pkgs)
        pkgtuplist = mypkgSack.simplePkgList()
        # toss out any of the obsoleted pkgs so we can't depsolve with them
        self.up = rpmUtils.updates.Updates([], pkgtuplist)
        self.up.rawobsoletes = mypkgSack.returnObsoletes()
        for pkg in pkgs:
            fo = self.up.checkForObsolete([pkg.pkgtup])
            if fo:
                # useful debug to make sure the obsoletes is sane
                #print "ignoring obsolete pkg %s" % pkg
                #for i in fo[pkg.pkgtup]:
                #    print i
                self.pkgSack.delPackage(pkg)
        # we've deleted items so remake the pkgs
        # NOTE(review): pkgtuplist is rebuilt from the pre-deletion
        # mypkgSack, so it may still contain the obsoleted tuples -- confirm.
        pkgs = self.pkgSack.returnNewestByNameArch()
        pkgtuplist = mypkgSack.simplePkgList()
    if self.builddeps:
        # Only source packages matter when checking build dependencies.
        pkgs = filter(lambda x: x.arch == 'src', pkgs)
    pkglist = self.pkgonly
    if self.grouponly:
        if not pkglist:
            pkglist = []
        for group in self.grouponly:
            groupobj = self.comps.return_group(group)
            if not groupobj:
                continue
            pkglist.extend(groupobj.packages)
    if pkglist:
        # Restrict the check to explicitly requested package names.
        pkgs = filter(lambda x: x.name in pkglist, pkgs)
    for pkg in pkgs:
        if pkg.repoid in self.lookaside:
            # don't attempt to resolve dependency issues for
            # packages from lookaside repositories
            continue
        for (req, flags, (reqe, reqv, reqr)) in pkg.returnPrco('requires'):
            if req.startswith('rpmlib'):
                continue  # ignore rpmlib deps
            ver = self.evrTupletoVer((reqe, reqv, reqr))
            if (req, flags, ver) in resolved:
                # Already proven resolvable for an earlier package.
                continue
            resolve_sack = []  # make it empty
            try:
                resolve_sack = self.whatProvides(req, flags, ver)
            except yum.Errors.RepoError, e:
                # Best-effort: a repo error here just leaves the sack empty.
                pass
            if len(resolve_sack) < 1:
                if pkg not in unresolved:
                    unresolved[pkg] = []
                unresolved[pkg].append((req, flags, ver))
                continue
            if newest:
                resolved_by_newest = False
                for po in resolve_sack:
                    # look through and make sure all our answers are newest-only
                    if po.pkgtup in pkgtuplist:
                        resolved_by_newest = True
                        break
                if resolved_by_newest:
                    resolved[(req, flags, ver)] = 1
                else:
                    if pkg not in unresolved:
                        unresolved[pkg] = []
                    unresolved[pkg].append((req, flags, ver))
"Error: Can't use --norepopath with multiple repositories") sys.exit(1) try: arches = rpmUtils.arch.getArchList(opts.arch) if opts.source: arches += ['src'] my.doSackSetup(arches) except yum.Errors.RepoError, e: print >> sys.stderr, _("Error setting up repositories: %s") % e # maybe this shouldn't be entirely fatal sys.exit(1) exit_code = 0 for repo in my.repos.listEnabled(): reposack = ListPackageSack(my.pkgSack.returnPackages(repoid=repo.id)) if opts.newest: download_list = reposack.returnNewestByNameArch() else: download_list = list(reposack) if opts.norepopath: local_repo_path = opts.destdir else: local_repo_path = opts.destdir + '/' + repo.id # Ensure we don't traverse out of local_repo_path by dropping any # packages whose remote_path is absolute or contains up-level # references (unless explicitly allowed). # See RHBZ#1600221 for details.
my._getSacks(archlist=archlist, thisrepo=repo.id) my.doRepoSetup() my._getSacks(archlist=archlist) unprocessed_pkgs = {} final_pkgs = {} pkg_list = [] avail = my.pkgSack.returnPackages() for item in user_pkg_list: exactmatch, matched, unmatched = parsePackages(avail, [item]) pkg_list.extend(exactmatch) pkg_list.extend(matched) if opts.newest: this_sack = ListPackageSack() this_sack.addList(pkg_list) pkg_list = this_sack.returnNewestByNameArch() del this_sack if len(pkg_list) == 0: print >> sys.stderr, "Nothing found to download matching packages specified" sys.exit(1) for po in pkg_list: unprocessed_pkgs[po.pkgtup] = po while more_to_check(unprocessed_pkgs): for pkgtup in unprocessed_pkgs.keys():
my._getSacks(archlist=archlist) except yum.Errors.RepoError, e: my.logger.error(e) sys.exit(1) unprocessed_pkgs = {} final_pkgs = {} pkg_list = [] avail = my.pkgSack.returnPackages() for item in user_pkg_list: exactmatch, matched, unmatched = parsePackages(avail, [item]) pkg_list.extend(exactmatch) pkg_list.extend(matched) if opts.newest: this_sack = ListPackageSack() this_sack.addList(pkg_list) pkg_list = this_sack.returnNewestByNameArch() del this_sack if len(pkg_list) == 0: print >> sys.stderr, "Nothing found to download matching packages specified" sys.exit(1) for po in pkg_list: unprocessed_pkgs[po.pkgtup] = po while more_to_check(unprocessed_pkgs): for pkgtup in unprocessed_pkgs.keys():
class ContentSource(object):
    """A yum-backed package source for syncing a repository's content.

    Wraps a yum YumRepository (either one found in the yum configuration or
    a freshly populated one), applies proxy/SSL/auth settings, and exposes
    package-listing helpers that return ContentPackage objects.
    """

    def __init__(self, url, name, insecure=False, interactive=True,
                 yumsrc_conf=YUMSRC_CONF, org="1", channel_label="",
                 no_mirrors=False, ca_cert_file=None, client_cert_file=None,
                 client_key_file=None):
        """Configure the yum repository object for *url* and fetch settings.

        url           -- repo URL; a bare path is turned into a file:// URL.
        name          -- repo id / label used for cache directories.
        yumsrc_conf   -- yum config file; /dev/null when it does not exist.
        org           -- organization id; "NULL" when falsy.
        channel_label -- alternative key to look the repo up in yum config.
        no_mirrors    -- do not expand baseurl to mirror lists.
        """
        # pylint can't see inside the SplitResult class
        # pylint: disable=E1103
        if urlparse.urlsplit(url).scheme:
            self.url = url
        else:
            self.url = "file://%s" % url
        self.name = name
        self.insecure = insecure
        self.interactive = interactive
        self.yumbase = yum.YumBase()
        self.yumbase.preconf.fn = yumsrc_conf
        if not os.path.exists(yumsrc_conf):
            self.yumbase.preconf.fn = '/dev/null'
        self.configparser = ConfigParser()
        if org:
            self.org = org
        else:
            self.org = "NULL"
        self.proxy_url = None
        self.proxy_user = None
        self.proxy_pass = None
        self.authtoken = None
        # read the proxy configuration
        # /etc/rhn/rhn.conf has more priority than yum.conf
        initCFG('server.satellite')
        # keep authtokens for mirroring
        (_scheme, _netloc, _path, query, _fragid) = urlparse.urlsplit(url)
        if query:
            self.authtoken = query
        if CFG.http_proxy:
            self.proxy_url, self.proxy_user, self.proxy_pass = get_proxy(
                self.url)
        else:
            # No server-wide proxy: fall back to the repo's own yum.conf
            # section, then to [main].
            yb_cfg = self.yumbase.conf.cfg
            section_name = None
            if yb_cfg.has_section(self.name):
                section_name = self.name
            elif yb_cfg.has_section('main'):
                section_name = 'main'
            if section_name:
                if yb_cfg.has_option(section_name, option='proxy'):
                    self.proxy_url = "http://%s" % yb_cfg.get(section_name,
                                                              option='proxy')
                if yb_cfg.has_option(section_name, 'proxy_username'):
                    self.proxy_user = yb_cfg.get(section_name,
                                                 'proxy_username')
                if yb_cfg.has_option(section_name, 'proxy_password'):
                    self.proxy_pass = yb_cfg.get(section_name,
                                                 'proxy_password')
        self._authenticate(url)
        # Check for settings in yum configuration files (for custom
        # repos/channels only)
        if org:
            repos = self.yumbase.repos.repos
        else:
            repos = None
        if repos and name in repos:
            repo = repos[name]
        elif repos and channel_label in repos:
            repo = repos[channel_label]
            # In case we are using a Repo object based on the channel
            # config, override its id to the name of the repo so we do not
            # create channel directories in the cache directory.
            repo.id = name
        else:
            # Not using values from config files
            repo = yum.yumRepo.YumRepository(name)
            repo.populate(self.configparser, name, self.yumbase.conf)
        self.repo = repo
        self.setup_repo(repo, no_mirrors, ca_cert_file, client_cert_file,
                        client_key_file)
        self.num_packages = 0
        self.num_excluded = 0
        self.gpgkey_autotrust = None
        # if '?' not in url:
        #     real_urls.append(url)
        #self.repo.urls = real_urls
        self.groupsfile = None

    def __del__(self):
        # close log files for yum plugin
        for handler in logging.getLogger("yum.filelogging").handlers:
            handler.close()
        self.repo.close()

    def _authenticate(self, url):
        # Hook for subclasses; the base source needs no authentication.
        pass

    @staticmethod
    def interrupt_callback(*args, **kwargs):
        # pylint: disable=W0613
        # Just re-raise the pending exception (e.g. KeyboardInterrupt).
        e = sys.exc_info()[1]
        raise e

    def setup_repo(self, repo, no_mirrors, ca_cert_file, client_cert_file,
                   client_key_file):
        """Fetch repository metadata"""
        repo.cache = 0
        repo.mirrorlist = self.url
        repo.baseurl = [self.url]
        repo.basecachedir = os.path.join(CACHE_DIR, self.org)
        repo.setAttribute('_override_sigchecks', False)
        # GPG-check repo metadata unless the source was marked insecure.
        if self.insecure:
            repo.repo_gpgcheck = False
        else:
            repo.repo_gpgcheck = True
        # base_persistdir have to be set before pkgdir
        if hasattr(repo, 'base_persistdir'):
            repo.base_persistdir = repo.basecachedir
        pkgdir = os.path.join(CFG.MOUNT_POINT, CFG.PREPENDED_DIR, self.org,
                              'stage')
        if not os.path.isdir(pkgdir):
            fileutils.makedirs(pkgdir, user='******', group='www')
        repo.pkgdir = pkgdir
        repo.sslcacert = ca_cert_file
        repo.sslclientcert = client_cert_file
        repo.sslclientkey = client_key_file
        repo.proxy = None
        repo.proxy_username = None
        repo.proxy_password = None
        if "file://" in self.url:
            repo.copy_local = 1
        if self.proxy_url is not None:
            repo.proxy = self.proxy_url
            repo.proxy_username = self.proxy_user
            repo.proxy_password = self.proxy_pass
        # Do not try to expand baseurl to other mirrors
        if no_mirrors:
            repo.urls = repo.baseurl
            # FIXME: SUSE
            # Make sure baseurl ends with / and urljoin will work correctly
            if repo.urls[0][-1] != '/':
                repo.urls[0] += '/'
        else:
            # Suppress yum warnings while mirror expansion runs.
            warnings = YumWarnings()
            warnings.disable()
            try:
                repo.baseurlSetup()
            except:
                warnings.restore()
                raise
            warnings.restore()
            # if self.url is metalink it will be expanded into
            # real urls in repo.urls and also save this metalink
            # in begin of the url list ("for repolist -v ... or anything
            # else wants to know the baseurl")
            # Remove it from the list, we don't need it to download content
            # of repo
            #
            # SUSE uses tokens which have ? and this must stay
            #
            #repo.urls = [url for url in repo.urls if '?' not in url]
        # Derive the GPG key URL from the (last) baseurl's repodata dir.
        for burl in repo.baseurl:
            (scheme, netloc, path, query, fragid) = urlparse.urlsplit(burl)
            repo.gpgkey = [urlparse.urlunsplit((scheme, netloc,
                                                path +
                                                '/repodata/repomd.xml.key',
                                                query, fragid))]
        repo.setup(0, None, gpg_import_func=self.getKeyForRepo,
                   confirm_func=self.askImportKey)
        # use a fix dir for repo metadata sig checks
        repo.gpgdir = GPG_DIR
        self.initgpgdir(repo.gpgdir)

    def get_md_checksum_type(self):
        """Return the checksum_type of primary.xml"""
        if 'primary' in self.repo.repoXML.repoData:
            checksum = self.repo.repoXML.repoData['primary'].checksum
            return checksum[0]  # tuple (checksum_type,checksum)
        else:
            return "sha1"

    def number_of_packages(self):
        """Return the package count, retrying metadata download up to 3x."""
        for dummy_index in range(3):
            try:
                self.repo.getPackageSack().populate(self.repo, 'metadata',
                                                    None, 0)
                break
            except YumErrors.RepoError:
                # Transient repo error: retry (errors on the last attempt
                # are swallowed and the sack may stay empty).
                pass
        return len(self.repo.getPackageSack().returnPackages())

    def raw_list_packages(self, filters=None):
        """Return raw yum package objects, filtered but not converted.

        filters -- optional list of ('+'|'-', [pattern]) tuples; when empty,
                   the repo config's includepkgs/exclude lists are used.
        Sets self.num_packages and (when filtering) self.num_excluded.
        """
        for dummy_index in range(3):
            try:
                self.repo.getPackageSack().populate(self.repo, 'metadata',
                                                    None, 0)
                break
            except YumErrors.RepoError:
                # Transient repo error: retry.
                pass
        rawpkglist = self.repo.getPackageSack().returnPackages()
        self.num_packages = len(rawpkglist)
        if not filters:
            filters = []
            # if there's no include/exclude filter on command line or in
            # database
            for p in self.repo.includepkgs:
                filters.append(('+', [p]))
            for p in self.repo.exclude:
                filters.append(('-', [p]))
        if filters:
            rawpkglist = self._filter_packages(rawpkglist, filters)
            rawpkglist = self._get_package_dependencies(
                self.repo.getPackageSack(), rawpkglist)
            # do not pull in dependencies if they're explicitly excluded
            rawpkglist = self._filter_packages(rawpkglist, filters, True)
            self.num_excluded = self.num_packages - len(rawpkglist)
        return rawpkglist

    def list_packages(self, filters, latest):
        """Return the repo's non-source packages as ContentPackage objects.

        filters -- list of ('+'|'-', [pattern]) tuples; if empty, the repo
                   config's includepkgs/exclude lists are appended to it.
        latest  -- if true, keep only the newest package per (name, arch).

        Raises ChannelTimeoutException for "No more mirrors" repo errors.
        """
        try:
            self.repo.getPackageSack().populate(self.repo, 'metadata', None,
                                                0)
        except yum.Errors.RepoError, e:
            if "No more mirrors" in str(e):
                reqFile = re.search('failure:\s+(.+)\s+from',
                                    str(e)).groups()[0]
                raise ChannelTimeoutException(
                    "Retrieving '%s' failed: File not found in repository "
                    "'%s'" % (reqFile, self.repo))
            else:
                raise
        pkglist = ListPackageSack(self.repo.getPackageSack().returnPackages())
        self.num_packages = len(pkglist)
        if latest:
            pkglist = pkglist.returnNewestByNameArch()
        pkglist = yum.misc.unique(pkglist)
        # NOTE: cmp-style sort argument -- Python 2 list.sort() usage.
        pkglist.sort(self._sort_packages)
        if not filters:
            # if there's no include/exclude filter on command line or in
            # database check repository config file
            for p in self.repo.includepkgs:
                filters.append(('+', [p]))
            for p in self.repo.exclude:
                filters.append(('-', [p]))
        filters = self._expand_package_groups(filters)
        if filters:
            pkglist = self._filter_packages(pkglist, filters)
            pkglist = self._get_package_dependencies(
                self.repo.getPackageSack(), pkglist)
            # do not pull in dependencies if they're explicitly excluded
            pkglist = self._filter_packages(pkglist, filters, True)
            self.num_excluded = self.num_packages - len(pkglist)
        to_return = []
        for pack in pkglist:
            # Source RPMs are not listed.
            if pack.arch == 'src':
                continue
            new_pack = ContentPackage()
            new_pack.setNVREA(pack.name, pack.version, pack.release,
                              pack.epoch, pack.arch)
            new_pack.unique_id = pack
            # First checksum entry is the primary one; yum's legacy 'sha'
            # really means sha1.
            new_pack.checksum_type = pack.checksums[0][0]
            if new_pack.checksum_type == 'sha':
                new_pack.checksum_type = 'sha1'
            new_pack.checksum = pack.checksums[0][1]
            for cs in pack.checksums:
                new_pack.checksums[cs[0]] = cs[1]
            to_return.append(new_pack)
        return to_return