def add(self, obj, mdtype='updateinfo'):
    """ Parse a metadata from a given YumRepository, file, or filename. """
    # 'obj' may be a path/string, a YumRepository, a FakeRepository, or an
    # already-open file object; each case is handled by a branch below.
    if not obj:
        raise UpdateNoticeException
    if type(obj) in (type(''), type(u'')):
        # str/unicode: treat as a filename; decompress() yields a plain-text
        # file we can stream-parse.
        unfile = decompress(obj)
        infile = open(unfile, 'rt')
    elif isinstance(obj, YumRepository):
        if obj.id not in self._repos:
            self._repos.append(obj.id)
            md = obj.retrieveMD(mdtype)
            if not md:
                raise UpdateNoticeException()
            unfile = repo_gen_decompress(md, 'updateinfo.xml')
            infile = open(unfile, 'rt')
        # NOTE(review): if obj.id was already in self._repos, 'infile' is
        # never bound and iterparse() below raises NameError — presumably
        # callers never add the same repo twice; confirm.
    elif isinstance(obj, FakeRepository):
        raise Errors.RepoMDError, "No updateinfo for local pkg"
    else:   # obj is a file object
        infile = obj

    # Stream-parse the updateinfo XML; each <update> element becomes one
    # UpdateNotice registered via add_notice().
    for event, elem in iterparse(infile):
        if elem.tag == 'update':
            try:
                un = UpdateNotice(elem)
            except UpdateNoticeException, e:
                # Broken notices are skipped, not fatal.
                print >> sys.stderr, "An update notice is broken, skipping."
                # what else should we do?
                continue
            self.add_notice(un)
def add(self, obj, mdtype='updateinfo'):
    """ Parse a metadata from a given YumRepository, file, or filename. """

    def _rid(repoid, fmt=_(' (from %s)')):
        # Render a repo-id suffix for user-facing messages; empty string
        # when the origin repo is unknown.
        if not repoid:
            return ''
        return fmt % repoid

    if not obj:
        raise UpdateNoticeException
    # repoid stays None unless the metadata comes from a (new) YumRepository;
    # it is threaded into UpdateNotice and all warnings below.
    repoid = None
    if type(obj) in (type(''), type(u'')):
        # str/unicode: treat as a filename; decompress to plain text.
        unfile = decompress(obj)
        infile = open(unfile, 'rt')
    elif isinstance(obj, YumRepository):
        if obj.id not in self._repos:
            repoid = obj.id
            self._repos.append(obj.id)
            md = obj.retrieveMD(mdtype)
            if not md:
                raise UpdateNoticeException()
            unfile = repo_gen_decompress(md, 'updateinfo.xml')
            infile = open(unfile, 'rt')
        # NOTE(review): a repo already in self._repos leaves 'infile' unbound
        # (NameError at the loop below) — presumably never hit; confirm.
    elif isinstance(obj, FakeRepository):
        raise Errors.RepoMDError, "No updateinfo for local pkg"
    else:   # obj is a file object
        infile = obj

    # have_dup: the long "report this to the repo owner" advice is only
    # appended to the first bad/duplicate notice per parse.
    have_dup = False
    for event, elem in safe_iterparse(infile, logger=self._logger):
        if elem.tag == 'update':
            try:
                un = UpdateNotice(elem, repoid, self._vlogger)
            except UpdateNoticeException, e:
                msg = _("An update notice%s is broken, skipping.") % _rid(
                    repoid)
                # Prefer the verbose logger when available; fall back to
                # stderr so the warning is never lost.
                if self._vlogger:
                    self._vlogger.log(logginglevels.DEBUG_1, "%s", msg)
                else:
                    print >> sys.stderr, msg
                continue

            # add_notice() returning false means the notice was rejected
            # (broken or a conflicting duplicate).
            if not self.add_notice(un):
                msg = _(
                    "Update notice %s%s is broken, or a bad duplicate, skipping."
                ) % (un['update_id'], _rid(repoid))
                if not have_dup:
                    msg += _(
                        '\nYou should report this problem to the owner of the %srepository.'
                    ) % _rid(repoid, "%s ")
                    msg += _(
                        '\nIf you are the owner, consider re-running the same command with --verbose to see the '
                        'exact data that caused the conflict.')
                have_dup = True
                if self._vlogger:
                    self._vlogger.warn("%s", msg)
                else:
                    print >> sys.stderr, msg
def add(self, obj, mdtype="updateinfo"): """ Parse a metadata from a given YumRepository, file, or filename. """ def _rid(repoid, fmt=_(" (from %s)")): if not repoid: return "" return fmt % repoid if not obj: raise UpdateNoticeException repoid = None if type(obj) in (type(""), type(u"")): unfile = decompress(obj) infile = open(unfile, "rt") elif isinstance(obj, YumRepository): if obj.id not in self._repos: repoid = obj.id self._repos.append(obj.id) md = obj.retrieveMD(mdtype) if not md: raise UpdateNoticeException() unfile = repo_gen_decompress(md, "updateinfo.xml") infile = open(unfile, "rt") elif isinstance(obj, FakeRepository): raise Errors.RepoMDError, "No updateinfo for local pkg" else: # obj is a file object infile = obj have_dup = False for event, elem in safe_iterparse(infile, logger=self._logger): if elem.tag == "update": try: un = UpdateNotice(elem) except UpdateNoticeException, e: msg = _("An update notice%s is broken, skipping.") % _rid(repoid) if self._vlogger: self._vlogger.log(logginglevels.DEBUG_1, "%s", msg) else: print >> sys.stderr, msg continue if not self.add_notice(un): msg = _("Update notice %s%s is broken, or a bad duplicate, skipping.") % ( un["update_id"], _rid(repoid), ) if not have_dup: msg += _("\nYou should report this problem to the owner of the %srepository.") % _rid( repoid, "%s " ) have_dup = True if self._vlogger: self._vlogger.warn("%s", msg) else: print >> sys.stderr, msg
# --- fragment: delta-metadata download + parse; continues a larger function.
# 'async', 'repo', 'name', 'failfunc', 'mdpath', 'pinfo', 'pkgs', 'oldrpms'
# and 'ayum' are bound by enclosing code not shown in this chunk. ---
kwargs = {}
# Queue the download asynchronously only when both the caller and the repo
# support it; failfunc then handles errors out-of-band.
if async and repo._async:
    kwargs["failfunc"] = failfunc
    kwargs["async"] = True
try:
    mdpath[repo] = repo._retrieveMD(name, **kwargs)
except RepoError, e:
    failfunc(e)
if async:
    # Block until every queued async download has finished.
    grabber.parallel_wait()

# parse metadata, create DeltaPackage instances
for repo, cpath in mdpath.items():
    pinfo_repo = pinfo[repo]
    path = repo_gen_decompress(cpath, "prestodelta.xml", cached=repo.cache)
    for ev, el in iterparse(path):
        if el.tag != "newpackage":
            continue
        name = el.get("name")
        arch = el.get("arch")
        # NEVRA tuple used to look the package up in this repo's index.
        new = name, arch, el.get("epoch"), el.get("version"), el.get("release")
        index = pinfo_repo.get(new)
        if index is not None:
            po = pkgs[index]
            perc = repo.deltarpm_percentage
            if perc is None:
                perc = ayum.conf.deltarpm_percentage
            # A delta is only worth using below this fraction of the full
            # package size; 'best' shrinks as smaller deltas are found.
            best = po.size * (perc / 100.0)
            have = oldrpms.get(repo, {}).get((name, arch), {})
            # NOTE(review): loop header shadows 'el'; its body lies beyond
            # this chunk.
            for el in el.findall("delta"):
# --- fragment: delta-metadata download + parse; continues a larger function.
# 'async', 'repo', 'name', 'failfunc', 'mdpath', 'pinfo', 'pkgs', 'oldrpms'
# and 'ayum' are bound by enclosing code not shown in this chunk. ---
kwargs = {}
# Queue the download asynchronously only when both the caller and the repo
# support it; failfunc then handles errors out-of-band.
if async and repo._async:
    kwargs['failfunc'] = failfunc
    kwargs['async'] = True
try:
    mdpath[repo] = repo._retrieveMD(name, **kwargs)
except RepoError, e:
    failfunc(e)
if async:
    # Block until every queued async download has finished.
    grabber.parallel_wait()

# parse metadata, create DeltaPackage instances
for repo, cpath in mdpath.items():
    pinfo_repo = pinfo[repo]
    path = repo_gen_decompress(cpath, 'prestodelta.xml', cached=repo.cache)
    for ev, el in iterparse(path):
        if el.tag != 'newpackage':
            continue
        name = el.get('name')
        arch = el.get('arch')
        # NEVRA tuple used to look the package up in this repo's index.
        new = name, arch, el.get('epoch'), el.get('version'), el.get(
            'release')
        index = pinfo_repo.get(new)
        if index is not None:
            po = pkgs[index]
            perc = repo.deltarpm_percentage
            if perc is None:
                perc = ayum.conf.deltarpm_percentage
            # A delta is only worth using below this fraction of the full
            # package size.
            best = po.size * (perc / 100.0)
            # Locally cached old rpms for this (name, arch); the per-delta
            # handling continues beyond this chunk.
            have = oldrpms.get(repo, {}).get((name, arch), {})
class DeltaInfo:
    """ Arrange delta-RPM downloads for a set of packages.

    Fetches each repo's presto/delta metadata and, where a usable delta is
    smaller than the configured percentage of the full package, replaces
    the entry in 'pkgs' in-place with a DeltaPackage wrapper.
    """

    def __init__(self, ayum, pkgs, adderror):
        # ayum: the running yum object (config, rpmdb, loggers).
        # pkgs: list of packages to download; entries may be replaced with
        #       DeltaPackage instances.
        # adderror: error-reporting callback kept for later use.
        self.verbose_logger = ayum.verbose_logger
        self.adderror = adderror
        self.jobs = {}
        self._future_jobs = []
        self.progress = None
        self.limit = ayum.conf.deltarpm
        if self.limit < 0:
            # Negative config value means "per online CPU": scale it.
            nprocs = _num_cpus_online()
            self.limit *= -nprocs
        if not self.limit:
            # Turned off.
            return

        # calculate update sizes
        oldrpms = {}    # repo -> {(name, arch): set of (ver, rel)} of cached rpms
        pinfo = {}      # repo -> {pkgtup: index into pkgs}
        reposize = {}   # repo -> total bytes of full rpms wanted from it
        for index, po in enumerate(pkgs):
            perc = po.repo.deltarpm_percentage
            if perc is None:
                urls = po.repo.urls
                perc = ayum.conf.deltarpm_percentage
                if len(urls) == 1 and urls[0].startswith('file:'):
                    perc = 0 # for local repos, default to off.
            if perc == 0:
                continue # Allow people to turn off a repo. (meh)
            if po.state == TS_UPDATE:
                pass
            elif po.name in ayum.conf.installonlypkgs:
                pass
            else:
                names = oldrpms.get(po.repo)
                if names is None:
                    # load all locally cached rpms
                    names = oldrpms[po.repo] = {}
                    for rpmfn in os.listdir(po.repo.pkgdir):
                        m = re.match('^(.+)-(.+)-(.+)\.(.+)\.rpm$', rpmfn)
                        if m:
                            n, v, r, a = m.groups()
                            names.setdefault((n, a), set()).add((v, r))
                # Without an old cached rpm there is nothing to rebuild from.
                if (po.name, po.arch) not in names:
                    continue
            pinfo.setdefault(po.repo, {})[po.pkgtup] = index
            reposize[po.repo] = reposize.get(po.repo, 0) + po.size

        # don't use deltas when deltarpm not installed
        if reposize and not os.access(APPLYDELTA, os.X_OK):
            self.verbose_logger.info(
                _('Delta RPMs disabled because %s not installed.'), APPLYDELTA)
            return

        # download delta metadata
        mdpath = {}
        for repo in reposize:
            # Prefer 'prestodelta', fall back to 'deltainfo'.
            for name in ('prestodelta', 'deltainfo'):
                try:
                    data = repo.repoXML.getData(name)
                    break
                except:
                    # NOTE(review): bare except — any getData failure is
                    # treated as "this repo has no such metadata".
                    pass
            else:
                self.verbose_logger.info(
                    _('No Presto metadata available for %s'), repo)
                continue
            path = repo.cachedir + '/' + os.path.basename(data.location[1])
            perc = repo.deltarpm_metadata_percentage
            data_size = int(data.size) * (perc / 100.0)
            # Skip fetching metadata that would cost more than the full rpms
            # it could possibly save (unless it is already cached).
            if perc and not os.path.exists(
                    path) and data_size > reposize[repo]:
                msg = _(
                    'Not downloading deltainfo for %s, MD is %s and rpms are %s'
                )
                self.verbose_logger.info(
                    msg, repo,
                    progress.format_number(data_size),
                    progress.format_number(reposize[repo]))
                continue

            def failfunc(e, name=name, repo=repo):
                # Drop the repo on download failure and log it; default
                # arguments bind the current loop values.
                mdpath.pop(repo, None)
                if hasattr(e, 'exception'):
                    e = e.exception
                self.verbose_logger.warn(
                    _('Failed to download %s for repository %s: %s'),
                    name, repo, exception2msg(e))
            kwargs = {}
            # NOTE(review): 'async' is not defined in this method —
            # presumably a module-level flag; confirm. (It is also a
            # reserved word from Python 3.7 on.)
            if async and repo._async:
                kwargs['failfunc'] = failfunc
                kwargs['async'] = True
            try:
                mdpath[repo] = repo._retrieveMD(name, **kwargs)
            except RepoError as e:
                failfunc(e)
        if async:
            # Block until every queued async download has finished.
            grabber.parallel_wait()

        # parse metadata, create DeltaPackage instances
        for repo, cpath in list(mdpath.items()):
            pinfo_repo = pinfo[repo]
            path = repo_gen_decompress(cpath, 'prestodelta.xml',
                                       cached=repo.cache)
            for ev, el in iterparse(path):
                if el.tag != 'newpackage':
                    continue
                name = el.get('name')
                arch = el.get('arch')
                # NEVRA tuple used to look the package up in this repo.
                new = name, arch, el.get('epoch'), el.get('version'), el.get(
                    'release')
                index = pinfo_repo.get(new)
                if index is not None:
                    po = pkgs[index]
                    perc = repo.deltarpm_percentage
                    if perc is None:
                        perc = ayum.conf.deltarpm_percentage
                    # 'best' is the size a delta must beat; it starts at the
                    # percentage threshold and shrinks per accepted delta.
                    best = po.size * (perc / 100.0)
                    have = oldrpms.get(repo, {}).get((name, arch), {})
                    # NOTE(review): the loop variable shadows the outer 'el'.
                    for el in el.findall('delta'):
                        size = int(el.find('size').text)
                        if size >= best:
                            continue
                        # can we use this delta?
                        epoch = el.get('oldepoch')
                        ver = el.get('oldversion')
                        rel = el.get('oldrelease')
                        if (ver, rel) in have:
                            # Rebuild from the locally cached old rpm.
                            oldrpm = '%s/%s-%s-%s.%s.rpm' % (repo.pkgdir,
                                                             name, ver, rel,
                                                             arch)
                        else:
                            # Otherwise the old version must be installed.
                            if not ayum.rpmdb.searchNevra(
                                    name, epoch, ver, rel, arch):
                                continue
                            oldrpm = None
                        best = size
                        remote = el.find('filename').text
                        csum = el.find('checksum')
                        csum = csum.get('type'), csum.text
                        pkgs[index] = DeltaPackage(po, size, remote, csum,
                                                   oldrpm)
                # Free the parsed element's memory as we stream.
                el.clear()