def add(self, obj, mdtype='updateinfo', all=False):
    """
    Parse update metadata from a given YumRepository, file, or filename.

    :param obj: a YumRepository instance, an open file object, or a
                filename/path to a (possibly compressed) updateinfo file
    :param mdtype: metadata type to retrieve when obj is a repository
    :param all: when true, key notices by "<update_id>-<version>" so every
                version of a notice is kept, not only the first one seen.
                (Name shadows the builtin but is part of the public API.)
    :raises UpdateNoticeException: if obj is falsy or metadata retrieval
                                   from the repository fails
    """
    if not obj:
        raise UpdateNoticeException
    # isinstance() instead of type() comparison; covers str and unicode
    if isinstance(obj, (type(''), type(u''))):
        infile = fileutils.decompress_open(obj)
    elif isinstance(obj, YumRepository):
        if obj.id not in self._repos:
            self._repos.append(obj.id)
        md = obj.retrieveMD(mdtype)
        if not md:
            raise UpdateNoticeException()
        infile = fileutils.decompress_open(md)
    else:
        # obj is a file object
        infile = obj

    for _event, elem in iterparse(infile):
        if elem.tag != 'update':
            continue
        un = UpdateNotice(elem)
        key = un['update_id']
        if all:
            key = "%s-%s" % (un['update_id'], un['version'])
        # dict.has_key() is deprecated and removed in Python 3; use 'in'
        if key not in self._notices:
            self._notices[key] = un
            # index the notice by each affected package's name-version-release
            for pkg in un['pkglist']:
                for pkgfile in pkg['packages']:
                    self._cache['%s-%s-%s' % (pkgfile['name'],
                                              pkgfile['version'],
                                              pkgfile['release'])] = un
                    no = self._no_cache.setdefault(pkgfile['name'], set())
                    no.add(un)
def add(self, obj, mdtype='updateinfo', all_versions=False):
    """
    Load update notices from a repository, an open file, or a path.

    :param obj: YumRepository instance, file-like object, or filename of
                a (possibly compressed) updateinfo document
    :param mdtype: metadata type fetched when obj is a repository
    :param all_versions: key notices by "<id>-<version>" instead of id
                         alone, so multiple versions of a notice coexist
    :raises UpdateNoticeException: on a falsy obj or a failed metadata fetch
    """
    if not obj:
        raise UpdateNoticeException
    if isinstance(obj, (type(''), type(u''))):
        # a plain (possibly compressed) file path
        fobj = fileutils.decompress_open(obj)
    elif isinstance(obj, YumRepository):
        if obj.id not in self._repos:
            self._repos.append(obj.id)
        metadata = obj.retrieveMD(mdtype)
        if not metadata:
            raise UpdateNoticeException()
        fobj = fileutils.decompress_open(metadata)
    else:
        # already an open file object
        fobj = obj

    for _event, node in iterparse(fobj):
        if node.tag != 'update':
            continue
        notice = UpdateNotice(node)
        if all_versions:
            notice_key = "%s-%s" % (notice['update_id'], notice['version'])
        else:
            notice_key = notice['update_id']
        if notice_key in self._notices:
            continue
        self._notices[notice_key] = notice
        # index the notice by each affected package's name-version-release
        for pkg in notice['pkglist']:
            for pkgfile in pkg['packages']:
                nvr = '%s-%s-%s' % (pkgfile['name'],
                                    pkgfile['version'],
                                    pkgfile['release'])
                self._cache[nvr] = notice
                self._no_cache.setdefault(pkgfile['name'], set()).add(notice)
def import_groups(self, plug, url):
    """
    Import the repo's comps (package groups) file into this channel.

    Decompresses the comps file under CFG.MOUNT_POINT and records its
    relative path in rhnChannelComps via an update-then-insert pair.

    :param plug: repo plugin; get_groups() returns the comps path or falsy
    :param url: repo URL, used only in the progress message
    """
    groupsfile = plug.get_groups()
    if not groupsfile:
        return
    basename = os.path.basename(groupsfile)
    self.print_msg("Repo %s has comps file %s." % (url, basename))
    relativedir = os.path.join(relative_comps_dir, self.channel_label)
    absdir = os.path.join(CFG.MOUNT_POINT, relativedir)
    if not os.path.exists(absdir):
        os.makedirs(absdir)
    relativepath = os.path.join(relativedir, basename)
    abspath = os.path.join(absdir, basename)
    for suffix in [".gz", ".bz", ".xz"]:
        if basename.endswith(suffix):
            # BUGFIX: rstrip(suffix) treated the suffix as a character SET,
            # e.g. "comps-debug.gz".rstrip(".gz") -> "comps-debu".
            # Slice the suffix off instead.
            abspath = abspath[:-len(suffix)]
            relativepath = relativepath[:-len(suffix)]
    # debug 'print src' removed; copy decompressed comps to its final place
    src = fileutils.decompress_open(groupsfile)
    dst = open(abspath, "w")
    try:
        shutil.copyfileobj(src, dst)
    finally:
        dst.close()
        src.close()
    # update or insert
    hu = rhnSQL.prepare(
        """update rhnChannelComps set relative_filename = :relpath, modified = current_timestamp where channel_id = :cid""")
    hu.execute(cid=self.channel["id"], relpath=relativepath)
    hi = rhnSQL.prepare(
        """insert into rhnChannelComps (id, channel_id, relative_filename) (select sequence_nextval('rhn_channelcomps_id_seq'), :cid, :relpath from dual where not exists (select 1 from rhnChannelComps where channel_id = :cid))""")
    hi.execute(cid=self.channel["id"], relpath=relativepath)
def import_groups(self, plug, url):
    """
    Import the repo's comps (package groups) file into this channel.

    Decompresses the comps file under CFG.MOUNT_POINT and records its
    relative path in rhnChannelComps via an update-then-insert pair.

    :param plug: repo plugin; get_groups() returns the comps path or falsy
    :param url: repo URL, used only in the progress message
    """
    groupsfile = plug.get_groups()
    if not groupsfile:
        return
    basename = os.path.basename(groupsfile)
    self.print_msg("Repo %s has comps file %s." % (url, basename))
    relativedir = os.path.join(relative_comps_dir, self.channel_label)
    absdir = os.path.join(CFG.MOUNT_POINT, relativedir)
    if not os.path.exists(absdir):
        os.makedirs(absdir)
    relativepath = os.path.join(relativedir, basename)
    abspath = os.path.join(absdir, basename)
    for suffix in ['.gz', '.bz', '.xz']:
        if basename.endswith(suffix):
            # BUGFIX: rstrip(suffix) treated the suffix as a character SET,
            # e.g. "comps-debug.gz".rstrip(".gz") -> "comps-debu".
            # Slice the suffix off instead.
            abspath = abspath[:-len(suffix)]
            relativepath = relativepath[:-len(suffix)]
    # debug 'print src' removed; copy decompressed comps to its final place
    src = fileutils.decompress_open(groupsfile)
    dst = open(abspath, "w")
    try:
        shutil.copyfileobj(src, dst)
    finally:
        dst.close()
        src.close()
    # update or insert
    hu = rhnSQL.prepare("""update rhnChannelComps set relative_filename = :relpath, modified = current_timestamp where channel_id = :cid""")
    hu.execute(cid=self.channel['id'], relpath=relativepath)
    hi = rhnSQL.prepare("""insert into rhnChannelComps (id, channel_id, relative_filename) (select sequence_nextval('rhn_channelcomps_id_seq'), :cid, :relpath from dual where not exists (select 1 from rhnChannelComps where channel_id = :cid))""")
    hi.execute(cid=self.channel['id'], relpath=relativepath)
def get_package_list(self):
    """
    Download and parse this repo's Packages index into DebPackage objects.

    Tries each compression extension in FORMAT_PRIORITY order and uses the
    first index that downloads successfully.

    :return: list of DebPackage instances that pass is_populated()
    """
    decompressed = None
    packages_raw = []
    to_return = []
    for extension in FORMAT_PRIORITY:
        url = self.url + '/Packages' + extension
        filename = self._download(url)
        if filename:
            decompressed = fileutils.decompress_open(filename)
            break
    if decompressed:
        # stanzas in a Packages index are separated by blank lines
        for pkg in decompressed.read().split("\n\n"):
            packages_raw.append(pkg)
        decompressed.close()
    else:
        print("ERROR: Download of package list failed.")

    # Parse and format package metadata
    for chunk in packages_raw:
        package = DebPackage()
        package.epoch = ""
        lines = chunk.split("\n")
        checksums = {}
        for line in lines:
            pair = line.split(" ", 1)
            if pair[0] == "Package:":
                package.name = pair[1]
            elif pair[0] == "Architecture:":
                package.arch = pair[1] + '-deb'
            elif pair[0] == "Version:":
                package['epoch'] = ''
                version = pair[1]
                if version.find(':') != -1:
                    # BUGFIX: split(':', 1) -- a Debian upstream version may
                    # itself contain ':' once an epoch is present; a bare
                    # split(':') would raise ValueError on unpacking
                    package['epoch'], version = version.split(':', 1)
                if version.find('-') != -1:
                    # Debian revision is everything after the LAST hyphen;
                    # the upstream version may contain hyphens itself
                    tmp = version.split('-')
                    package['version'] = '-'.join(tmp[:-1])
                    package['release'] = tmp[-1]
                else:
                    # native package: no revision; 'X' is the placeholder
                    package['version'] = version
                    package['release'] = 'X'
            elif pair[0] == "Filename:":
                package.relativepath = pair[1]
            elif pair[0] == "SHA256:":
                checksums['sha256'] = pair[1]
            elif pair[0] == "SHA1:":
                checksums['sha1'] = pair[1]
            elif pair[0] == "MD5sum:":
                checksums['md5'] = pair[1]
        # Pick best available checksum
        if 'sha256' in checksums:
            package.checksum_type = 'sha256'
            package.checksum = checksums['sha256']
        elif 'sha1' in checksums:
            package.checksum_type = 'sha1'
            package.checksum = checksums['sha1']
        elif 'md5' in checksums:
            package.checksum_type = 'md5'
            package.checksum = checksums['md5']
        if package.is_populated():
            to_return.append(package)
    return to_return
def get_package_list(self):
    """
    Download and parse this repo's Packages index into package dicts.

    Tries each compression extension in FORMAT_PRIORITY order and uses the
    first index that downloads successfully.

    :return: list of dicts with keys name, epoch, version, release, arch,
             path, checksum_type, checksum; incomplete stanzas are dropped
    """
    decompressed = None
    packages_raw = []
    to_return = []
    for extension in FORMAT_PRIORITY:
        url = self.url + '/Packages' + extension
        filename = self._download(url)
        if filename:
            decompressed = fileutils.decompress_open(filename)
            break
    if decompressed:
        # stanzas in a Packages index are separated by blank lines
        for pkg in decompressed.read().split("\n\n"):
            packages_raw.append(pkg)
        decompressed.close()
    else:
        print("ERROR: Download of package list failed.")

    # Parse and format package metadata
    for chunk in packages_raw:
        package = {}
        package['epoch'] = ""
        lines = chunk.split("\n")
        checksums = {}
        for line in lines:
            pair = line.split(" ", 1)
            if pair[0] == "Package:":
                package['name'] = pair[1]
            elif pair[0] == "Architecture:":
                package['arch'] = pair[1] + '-deb'
            elif pair[0] == "Version:":
                raw_version = pair[1]
                if ':' in raw_version:
                    # BUGFIX: strip the Debian epoch into its own field
                    # (matches the DebPackage-based parser) instead of
                    # leaving "1:2.3" in the version; split(':', 1) because
                    # upstream versions may contain ':' when an epoch exists
                    package['epoch'], raw_version = raw_version.split(':', 1)
                # BUGFIX: rsplit -- the Debian revision is after the LAST
                # hyphen; upstream versions may themselves contain hyphens
                version = raw_version.rsplit('-', 1)
                if len(version) == 1:
                    # native package: no revision; 'X' is the placeholder
                    package['version'] = version[0]
                    package['release'] = 'X'
                else:
                    package['version'] = version[0]
                    package['release'] = version[1]
            elif pair[0] == "Filename:":
                package['path'] = pair[1]
            elif pair[0] == "SHA256:":
                checksums['sha256'] = pair[1]
            elif pair[0] == "SHA1:":
                checksums['sha1'] = pair[1]
            elif pair[0] == "MD5sum:":
                checksums['md5'] = pair[1]
        # Pick best available checksum
        if 'sha256' in checksums:
            package['checksum_type'] = 'sha256'
            package['checksum'] = checksums['sha256']
        elif 'sha1' in checksums:
            package['checksum_type'] = 'sha1'
            package['checksum'] = checksums['sha1']
        elif 'md5' in checksums:
            package['checksum_type'] = 'md5'
            package['checksum'] = checksums['md5']
        if all(k in package for k in ('name', 'epoch', 'version', 'release',
                                      'arch', 'path', 'checksum_type',
                                      'checksum')):
            to_return.append(package)
    return to_return
def get_package_list(self):
    """
    Download and parse this repo's Packages index into package dicts.

    Tries each compression extension in FORMAT_PRIORITY order and uses the
    first index that downloads successfully.

    :return: list of dicts with keys name, epoch, version, release, arch,
             path, checksum_type, checksum; incomplete stanzas are dropped
    """
    decompressed = None
    packages_raw = []
    to_return = []
    for extension in FORMAT_PRIORITY:
        url = self.url + "/Packages" + extension
        filename = self._download(url)
        if filename:
            decompressed = fileutils.decompress_open(filename)
            break
    if decompressed:
        # stanzas in a Packages index are separated by blank lines
        for pkg in decompressed.read().split("\n\n"):
            packages_raw.append(pkg)
        decompressed.close()
    else:
        print ("ERROR: Download of package list failed.")

    # Parse and format package metadata
    for chunk in packages_raw:
        package = {}
        package["epoch"] = ""
        lines = chunk.split("\n")
        checksums = {}
        for line in lines:
            pair = line.split(" ", 1)
            if pair[0] == "Package:":
                package["name"] = pair[1]
            elif pair[0] == "Architecture:":
                package["arch"] = pair[1] + "-deb"
            elif pair[0] == "Version:":
                raw_version = pair[1]
                if ":" in raw_version:
                    # BUGFIX: strip the Debian epoch into its own field
                    # instead of leaving "1:2.3" in the version;
                    # split(":", 1) because upstream versions may contain
                    # ':' when an epoch exists
                    package["epoch"], raw_version = raw_version.split(":", 1)
                # BUGFIX: rsplit -- the Debian revision is after the LAST
                # hyphen; upstream versions may themselves contain hyphens
                version = raw_version.rsplit("-", 1)
                if len(version) == 1:
                    # native package: no revision; 'X' is the placeholder
                    package["version"] = version[0]
                    package["release"] = "X"
                else:
                    package["version"] = version[0]
                    package["release"] = version[1]
            elif pair[0] == "Filename:":
                package["path"] = pair[1]
            elif pair[0] == "SHA256:":
                checksums["sha256"] = pair[1]
            elif pair[0] == "SHA1:":
                checksums["sha1"] = pair[1]
            elif pair[0] == "MD5sum:":
                checksums["md5"] = pair[1]
        # Pick best available checksum
        if "sha256" in checksums:
            package["checksum_type"] = "sha256"
            package["checksum"] = checksums["sha256"]
        elif "sha1" in checksums:
            package["checksum_type"] = "sha1"
            package["checksum"] = checksums["sha1"]
        elif "md5" in checksums:
            package["checksum_type"] = "md5"
            package["checksum"] = checksums["md5"]
        if all(
            k in package
            for k in ("name", "epoch", "version", "release", "arch", "path", "checksum_type", "checksum")
        ):
            to_return.append(package)
    return to_return