def add(self, obj, mdtype='updateinfo'): """ Parse a metadata from a given YumRepository, file, or filename. """ if not obj: raise UpdateNoticeException if type(obj) in (type(''), type(u'')): unfile = decompress(obj) infile = open(unfile, 'rt') elif isinstance(obj, YumRepository): if obj.id not in self._repos: self._repos.append(obj.id) md = obj.retrieveMD(mdtype) if not md: raise UpdateNoticeException() unfile = decompress(md) infile = open(unfile, 'rt') elif isinstance(obj, FakeRepository): raise Errors.RepoMDError, "No updateinfo for local pkg" else: # obj is a file object infile = obj for event, elem in iterparse(infile): if elem.tag == 'update': try: un = UpdateNotice(elem) except UpdateNoticeException, e: print >> sys.stderr, "An update notice is broken, skipping." # what else should we do? continue self.add_notice(un)
def add(self, obj, mdtype='updateinfo'): """ Parse a metadata from a given YumRepository, file, or filename. """ if not obj: raise UpdateNoticeException if type(obj) in (type(''), type(u'')): unfile = decompress(obj) infile = open(unfile, 'rt') elif isinstance(obj, YumRepository): if obj.id not in self._repos: self._repos.append(obj.id) md = obj.retrieveMD(mdtype) if not md: raise UpdateNoticeException() unfile = repo_gen_decompress(md, 'updateinfo.xml') infile = open(unfile, 'rt') elif isinstance(obj, FakeRepository): raise Errors.RepoMDError, "No updateinfo for local pkg" else: # obj is a file object infile = obj for event, elem in iterparse(infile): if elem.tag == 'update': try: un = UpdateNotice(elem) except UpdateNoticeException, e: print >> sys.stderr, "An update notice is broken, skipping." # what else should we do? continue self.add_notice(un)
def add(self, obj, mdtype='updateinfo'): """ Parse a metadata from a given YumRepository, file, or filename. """ def _rid(repoid, fmt=_('(from %s)'), unknown=_("<unknown>")): if not repoid: repoid = unknown return fmt % repoid if not obj: raise UpdateNoticeException repoid = None if type(obj) in (type(''), type('')): unfile = decompress(obj) infile = open(unfile, 'rt') elif isinstance(obj, YumRepository): if obj.id not in self._repos: repoid = obj.id self._repos.append(obj.id) md = obj.retrieveMD(mdtype) if not md: raise UpdateNoticeException() unfile = repo_gen_decompress(md, 'updateinfo.xml') infile = open(unfile, 'rt') elif isinstance(obj, FakeRepository): raise Errors.RepoMDError("No updateinfo for local pkg") else: # obj is a file object infile = obj have_dup = False for event, elem in safe_iterparse(infile, logger=self._logger): if elem.tag == 'update': try: un = UpdateNotice(elem, repoid, self._vlogger) except UpdateNoticeException as e: msg = _("An update notice %s is broken, skipping.") % _rid( repoid) if self._vlogger: self._vlogger.log(logginglevels.DEBUG_1, "%s", msg) else: print(msg, file=sys.stderr) continue if not self.add_notice(un): msg = _( "Update notice %s %s is broken, or a bad duplicate, skipping." ) % (un['update_id'], _rid(repoid)) if not have_dup: msg += _( '\nYou should report this problem to the owner of the %s repository.' ) % _rid(repoid, "%s") msg += _( '\nIf you are the owner, consider re-running the same command with --verbose to see the ' 'exact data that caused the conflict.') have_dup = True if self._vlogger: self._vlogger.warn("%s", msg) else: print(msg, file=sys.stderr)
def add(self, obj, mdtype="updateinfo"): """ Parse a metadata from a given YumRepository, file, or filename. """ def _rid(repoid, fmt=_(" (from %s)")): if not repoid: return "" return fmt % repoid if not obj: raise UpdateNoticeException repoid = None if type(obj) in (type(""), type(u"")): unfile = decompress(obj) infile = open(unfile, "rt") elif isinstance(obj, YumRepository): if obj.id not in self._repos: repoid = obj.id self._repos.append(obj.id) md = obj.retrieveMD(mdtype) if not md: raise UpdateNoticeException() unfile = repo_gen_decompress(md, "updateinfo.xml") infile = open(unfile, "rt") elif isinstance(obj, FakeRepository): raise Errors.RepoMDError, "No updateinfo for local pkg" else: # obj is a file object infile = obj have_dup = False for event, elem in safe_iterparse(infile, logger=self._logger): if elem.tag == "update": try: un = UpdateNotice(elem) except UpdateNoticeException, e: msg = _("An update notice%s is broken, skipping.") % _rid(repoid) if self._vlogger: self._vlogger.log(logginglevels.DEBUG_1, "%s", msg) else: print >> sys.stderr, msg continue if not self.add_notice(un): msg = _("Update notice %s%s is broken, or a bad duplicate, skipping.") % ( un["update_id"], _rid(repoid), ) if not have_dup: msg += _("\nYou should report this problem to the owner of the %srepository.") % _rid( repoid, "%s " ) have_dup = True if self._vlogger: self._vlogger.warn("%s", msg) else: print >> sys.stderr, msg
def sanity_check_repodata(myurl):
    """
    Sanity check the repodata for a given repository.
    Initial implementation by Seth Vidal.
    """
    myurl = str(myurl)
    tempdir = tempfile.mkdtemp()
    errorstrings = []
    if myurl[-1] != '/':
        myurl += '/'
    baseurl = myurl
    if not myurl.endswith('repodata/'):
        myurl += 'repodata/'
    else:
        baseurl = baseurl.replace('repodata/', '/')

    rf = myurl + 'repomd.xml'
    try:
        rm = urlgrabber.urlopen(rf)
        repomd = repoMDObject.RepoMD('foo', rm)
        for t in repomd.fileTypes():
            data = repomd.getData(t)
            base, href = data.location
            if base:
                loc = base + '/' + href
            else:
                loc = baseurl + href

            destfn = tempdir + '/' + os.path.basename(href)
            dest = urlgrabber.urlgrab(loc, destfn)
            ctype, known_csum = data.checksum
            csum = checksum(ctype, dest)
            if csum != known_csum:
                errorstrings.append("checksum: %s" % t)

            if href.find('xml') != -1:
                decompressed = decompress(dest)
                retcode = subprocess.call(['/usr/bin/xmllint', '--noout', decompressed])
                if retcode != 0:
                    errorstrings.append("failed xml read: %s" % t)

    except urlgrabber.grabber.URLGrabError, e:
        errorstrings.append('Error accessing repository %s' % e)
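# The checksum comparison above relies on the checksum() helper imported from
# yum (presumably yum.misc.checksum).  A rough stand-in, offered only as a
# sketch of the same verification step using hashlib, not yum's implementation:
import hashlib

def verify_file_checksum(ctype, path, expected, blocksize=65536):
    """Hash `path` with the algorithm named in repomd.xml (e.g. 'sha256')
       and compare the hexdigest with the value recorded in the metadata."""
    h = hashlib.new(ctype)
    fo = open(path, 'rb')
    try:
        block = fo.read(blocksize)
        while block:
            h.update(block)
            block = fo.read(blocksize)
    finally:
        fo.close()
    return h.hexdigest() == expected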