def _tags2pkgs(tags):
    """Convert git tag objects into package objects.

    Only tags under "imports/c8" are considered.  Stream imports
    ("imports/c8s/...") get pkg.stream = True, plain "imports/c8/..."
    get pkg.stream = False; anything else is skipped.
    """
    pkgs = []
    prefix = "imports/c8"
    for tag in tags:
        name = str(tag)
        if not name.startswith(prefix):
            continue
        name = name[len(prefix):]
        # Some tags URL-encode the tilde.
        # Eg. See: https://git.centos.org/rpms/ongres-scram/releases
        name = name.replace('%7e', '~')
        if '%' in name:
            # Any other escape: just skip it. FIXME? panic?
            continue
        if name.startswith("s/"):
            stream, name = True, name[len("s/"):]
        elif name.startswith("/"):
            stream, name = False, name[len("/"):]
        else:
            continue
        # What's left is now N-V-R
        pkg = spkg.nvr2pkg(name)
        pkg.stream = stream
        pkgs.append(pkg)
    return pkgs
def koji_pkgs2archsigs(kapi, pkgs, filter=None):
    """Expand build packages into per-arch rpm packages, with sig. data.

    filter, when given, is a predicate: packages it returns true for are
    dropped from the result.  Queries are chunked to _koji_max_query.
    """
    # Too many builds for a single multicall: recurse over chunks.
    if len(pkgs) > _koji_max_query:
        out = []
        for beg in range(0, len(pkgs), _koji_max_query):
            chunk = pkgs[beg:beg + _koji_max_query]
            out.extend(koji_pkgs2archsigs(kapi, chunk, filter))
        return out

    if filter is None:
        filter = lambda x: False

    # Batch all the listRPMs queries into one koji multicall.
    kapi.multicall = True
    for bpkg in pkgs:
        kapi.listRPMs(buildID=bpkg._koji_build_id)
    answers = kapi.multiCall()

    out = []
    # Results come back in the same order the calls were queued.
    for ([rpms], bpkg) in zip(answers, pkgs):
        for rpm in rpms:
            apkg = spkg.nvr2pkg(rpm['nvr'], arch=rpm['arch'],
                                epoch=spkg.epochnum2epoch(rpm['epoch']))
            apkg._koji_rpm_id = rpm['id']
            apkg._koji_build_id = bpkg._koji_build_id
            if not filter(apkg):
                out.append(apkg)
    koji_archpkgs2sigs(kapi, out)
    return out
def _koji_buildinfo2pkg(kapi, binfo):
    """Turn a koji buildinfo dict into a package object.

    Records the koji build id (and task id, when present) and keeps a
    reference to the client session on the package.
    """
    pkg = spkg.nvr2pkg(binfo['nvr'],
                       epoch=spkg.epochnum2epoch(binfo['epoch']))
    pkg._koji_build_id = binfo['build_id']
    if 'task_id' in binfo:
        pkg._koji_task_id = binfo['task_id']
    pkg._kapi = kapi
    return pkg
def _builds2bpkgs(builds):
    """Convert koji build dicts into a sorted list of package objects.

    A '_git-branch' key, when present, is carried over onto the package.
    """
    bpkgs = []
    for build in builds:
        bpkg = spkg.nvr2pkg(build['nvr'])
        if '_git-branch' in build:
            bpkg._git_branch = build['_git-branch']
        bpkgs.append(bpkg)
    return sorted(bpkgs)
def koji_nvr2srpm(session, nvr):
    """ Given an rpm nvr, convert it into an srpm package for CVE checker.
        Also takes a build_id, due to API leakage.
        Returns None when koji has no build for the given nvr.
    """
    buildinfo = session.getBuild(nvr)
    if buildinfo is None:
        return None
    # The srpm shares the build's name/version/release (its arch is "src").
    snvr = '-'.join((buildinfo['package_name'],
                     buildinfo['version'],
                     buildinfo['release']))
    # NOTE(review): the original also built an epoch string here, but it was
    # never used and only mutated the local buildinfo dict — that dead code
    # is removed.  The returned package therefore has the default epoch;
    # confirm whether the epoch should be forwarded to nvr2pkg() like the
    # other helpers in this file do via spkg.epochnum2epoch().
    return spkg.nvr2pkg(snvr)
def koji_pkgs2archsigs(kapi, pkgs):
    """Expand build packages into per-arch rpm packages, with sig. data.

    NOTE(review): a same-named koji_pkgs2archsigs() (taking an extra
    filter argument) also appears earlier in this source — the later
    definition wins at import time; confirm which one is intended.
    """
    # Too many builds for a single multicall: recurse over chunks.
    if len(pkgs) > _koji_max_query:
        out = []
        for beg in range(0, len(pkgs), _koji_max_query):
            chunk = pkgs[beg:beg + _koji_max_query]
            out.extend(koji_pkgs2archsigs(kapi, chunk))
        return out

    # Batch all the listRPMs queries into one koji multicall.
    kapi.multicall = True
    for bpkg in pkgs:
        kapi.listRPMs(buildID=bpkg._koji_build_id)
    answers = kapi.multiCall()

    out = []
    # Results come back in the same order the calls were queued.
    for ([rpms], bpkg) in zip(answers, pkgs):
        for rpm in rpms:
            apkg = spkg.nvr2pkg(rpm['nvr'],
                                epoch=spkg.epochnum2epoch(rpm['epoch']))
            apkg.arch = rpm['arch']
            apkg._koji_rpm_id = rpm['id']
            apkg._koji_build_id = bpkg._koji_build_id
            out.append(apkg)
    koji_archpkgs2sigs(kapi, out)
    return out
def koji_tag2pkgs(kapi, tag):
    """ Return a list of latest builds that are tagged with certain tag """
    pkgs = []
    for binfo in kapi.listTagged(tag, latest=True):
        pkgs.append(spkg.nvr2pkg(binfo['nvr']))
    return pkgs
def main():
    """Entry point: parse CLI options and dispatch on the first argument.

    Subcommands (args[0]):
      force-push-module(s)  -- push the given N:S:V:C modules via alt-src
      force-push-package(s) -- push the given N-V-R packages via alt-src
      push                  -- sync tagged/composed packages and/or modules
      download              -- just download packages/modules
      download-synced       -- download only modules that are already synced
    With no arguments, only the setup/warning output happens.
    """
    # Timestamp markers for non-interactive (logged/cron) runs.
    if not sys.stdout.isatty():
        print(" -- Beg:", _curtime())

    parser = OptionParser()
    parser.add_option("", "--koji-host", dest="koji_host",
                      help="Host to connect to",
                      default="https://brewhub.engineering.redhat.com/brewhub/")
    parser.add_option("", "--sync-packages", dest="sync_packages",
                      help="Sync packages to streams", default=False,
                      action="store_true")
    parser.add_option("", "--sync-modules", dest="sync_modules",
                      help="Sync modules to streams", default=False,
                      action="store_true")
    parser.add_option("", "--summary-modules", dest="summary_modules",
                      help="Summary of sync modules to streams", default=False,
                      action="store_true")
    parser.add_option("", "--packages-tag", dest="packages_tag",
                      help="Specify package tag to sync",
                      default="rhel-8.2.0-candidate")
    parser.add_option("", "--modules-tag", dest="modules_tag",
                      help="Specify module tag to sync",
                      default="rhel-8.2.0-modules-candidate")
    parser.add_option("", "--packages-compose", dest="packages_compose",
                      help="Specify package compose to sync", default=None)
    parser.add_option("", "--modules-compose", dest="modules_compose",
                      help="Specify module compose to sync", default=None)
    parser.add_option("", "--download-only", dest="download_only",
                      help="Just download, always safe", default=False,
                      action="store_true")
    parser.add_option("", "--nocache", dest="nocache",
                      help="Don't cache any results", default=False,
                      action="store_true")

    (options, args) = parser.parse_args()

    # Internal brew has a non-external signed cert. and only does HTTPS now.
    kapi = brew.ClientSession(options.koji_host, opts={'no_ssl_verify': True})

    packages_to_track = load_package_list()
    modules_to_track = load_module_list()
    denylist = load_package_denylist()
    denylist = set(denylist)

    if options.nocache:
        global conf_cache_builds
        conf_cache_builds = False

    if options.download_only:
        global conf_data_downloadonly
        conf_data_downloadonly = True
    else:
        print(" ** Warning: This will run alt-src to push packages/modules.")

    if not args:
        pass
    elif args[0] in ('force-push-module', 'force-push-modules'):
        import compose

        builds = []
        for arg in args[1:]:
            nsvc = arg.split(':')
            if len(nsvc) != 4:
                print(" ** Module format is traditional (N:S:V:C), not compatible (N-S-V.C)")
                sys.exit(1)
            # Reuse the split we already did (was a redundant second split).
            n, s, v, c = nsvc
            mod = compose.Module()
            mod.name = n
            mod.stream = s
            mod.version = v
            mod.context = c
            ent = {'package_name': mod.name, 'nvr': mod.nsvc(),
                   # These aren't used atm.
                   'name': mod.name, 'version': mod.stream,
                   'release': mod.vc(), 'epoch': None}
            builds.append(ent)
        modules_to_track = set()
        for build in builds:
            modules_to_track.add(build['package_name'])
        unsynced_builds, extra_pkgs = check_unsynced_modules(kapi, builds,
                                                             modules_to_track)
        # Don't do CVE check here...
        extra_pkg2 = sync_modules_directly(kapi, unsynced_builds)
        # These are the extra rpms needed for already pushed modules...
        sync_directly(extra_pkgs)
        sync_directly(extra_pkg2)
    elif args[0] in ('force-push-package', 'force-push-pkg',
                     'force-push-packages', 'force-push-pkgs'):
        bpkgs = []
        for arg in args[1:]:
            # Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; only real parse errors should trigger
            # the usage hint.
            try:
                bpkg = spkg.nvr2pkg(arg)
            except Exception:
                print(" ** Package format is traditional (N-V-R)")
                sys.exit(1)
            bpkgs.append(bpkg)
        track = set()
        for bpkg in bpkgs:
            track.add(bpkg.name)
        bpkgs = check_denylist_builds(bpkgs, denylist)
        bpkgs = check_unsynced_builds(bpkgs, track)
        # Don't do CVE check here...
        sync_directly(bpkgs)
    elif args[0] in ('push', ):
        if options.sync_packages:
            tag = options.packages_tag
            comp = options.packages_compose
            sync_packages(tag, comp, kapi, packages_to_track, denylist)
        if options.sync_modules:
            tag = options.modules_tag
            comp = options.modules_compose
            sync_modules(tag, comp, kapi, modules_to_track,
                         options.summary_modules)
    elif args[0] in ('download', ):
        if options.sync_packages:
            tag = options.packages_tag
            comp = options.packages_compose
            download_packages(tag, comp, kapi, args[1:])
        if options.sync_modules:
            tag = options.modules_tag
            comp = options.modules_compose
            if comp is None:
                mods = get_tagged_modules(kapi, tag)
            else:
                mods = get_composed_modules(comp)
                mods = composed_modules2tagged_builds(mods)
            for mod in sorted(mods, key=lambda x: x['package_name']):
                if len(args) > 1:
                    # Any extra args act as fnmatch filters on module names.
                    import fnmatch
                    skip = True
                    for arg in args[1:]:
                        if fnmatch.fnmatch(mod['package_name'], arg):
                            skip = False
                            break
                    if skip:
                        continue
                tag, filename = download_module(mod)
                if filename is None:
                    continue
                print("Downloaded:", filename)
    elif args[0] in ('download-synced', ):
        if options.sync_packages:
            pass  # FIXME: If anyone cares?
            # Don't download things that aren't sync'd ... for migration.
        if options.sync_modules:
            tag = options.modules_tag
            comp = options.modules_compose
            if comp is None:
                mods = get_tagged_modules(kapi, tag)
            else:
                mods = get_composed_modules(comp)
                mods = composed_modules2tagged_builds(mods)
            mods = filter_synced_modules(kapi, mods)
            for mod in sorted(mods, key=lambda x: x['package_name']):
                if len(args) > 1:
                    # Any extra args act as fnmatch filters on module names.
                    import fnmatch
                    skip = True
                    for arg in args[1:]:
                        if fnmatch.fnmatch(mod['package_name'], arg):
                            skip = False
                            break
                    if skip:
                        continue
                tag, filename = download_module(mod)
                if filename is None:
                    continue
                print("Downloaded (synced):", filename)

    if not sys.stdout.isatty():
        print(" -- End:", _curtime())
def main():
    """Entry point: parse CLI options and dispatch on the first argument.

    Subcommands (args[0]): list-unsigned-pkgs, nvra-unsigned-pkgs,
    list-pkgs, summary-pkgs, check-nvr/check-nvra, build-nvr/build-nvra,
    build-name, bpids-list, bpids-wait, tag-hash/tag-rpms-hash/
    tag-srpms-hash, packages.
    """
    parser = OptionParser()
    parser.add_option("", "--koji-host", dest="koji_host",
                      help="Host to connect to",
                      default="https://koji.mbox.centos.org/kojihub")
    parser.add_option("", "--packages-tag", dest="packages_tag",
                      help="Specify package tag to sync",
                      default="dist-c8-stream")
    # parser.add_option("", "--modules-tag", dest="modules_tag",
    #                   help="Specify module tag to sync",
    #                   default="dist-c8-stream-module")
    parser.add_option("", "--packages-compose", dest="packages_compose",
                      help="Specify package compose to sync", default=None)
    # parser.add_option("", "--modules-compose", dest="modules_compose",
    #                   help="Specify module compose to sync", default=None)
    parser.add_option("", "--download-only", dest="download_only",
                      help="Just download, always safe", default=False,
                      action="store_true")
    parser.add_option("", "--nocache", dest="nocache",
                      help="Don't cache any results", default=False,
                      action="store_true")
    parser.add_option("", "--wait", dest="wait",
                      help="Wait time for tasks", default="")

    (options, args) = parser.parse_args()

    kapi = koji.ClientSession(options.koji_host)
    # Certificate login is only attempted for the default mbox host.
    if options.koji_host == "https://koji.mbox.centos.org/kojihub":
        kapi.ssl_login("/compose/.koji/mbox_admin.pem", None,
                       "/compose/.koji/ca.crt")

    # Called for their side effects; return values are ignored here.
    load_package_list()
    load_package_denylist()

    if options.nocache:
        global conf_cache_builds
        conf_cache_builds = False

    if options.download_only:
        global conf_data_downloadonly
        conf_data_downloadonly = True

    if not args:
        pass
    elif args[0] in ('list-unsigned-pkgs', 'list-unsigned-packages',
                     'ls-unsigned-pkgs', 'ls-unsigned-packages'):
        args = args[1:]
        tag = options.packages_tag

        def _slen(x):
            # Character width needed to print len(x).
            return len(str(len(x)))

        def _out_pkg(prefix, bpkgs):
            # Print each unsigned package with build-id columns.
            # NOTE(review): the passed-in prefix is overwritten below
            # before it is ever printed, so the argument is unused.
            bids = set()
            for bpkg in sorted(bpkgs):
                if hasattr(bpkg, 'signed'):
                    if bpkg.signed:
                        continue  # Signed: not listed here.
                suffix = ''
                bids.add(bpkg._koji_build_id)
                # "lenmax" comes from the enclosing scope at call time.
                prefix = "%*d | %*d |"
                prefix %= (lenmax, len(bids), 8, bpkg._koji_build_id)
                if hasattr(bpkg, 'stream') and bpkg.stream:
                    suffix += '(stream)'
                if spkg._is_branch_el8(bpkg):
                    suffix += '(branch)'
                if spkg._is_module(bpkg):
                    suffix += '(module)'
                if spkg._is_rebuild(bpkg):
                    suffix += '(rebuild)'
                print(prefix, bpkg, suffix)

        bpkgs = koji_tag2pkgs(kapi, tag)
        bpkgs = koji_pkgs2archsigs(kapi, bpkgs)
        lenmax = _slen(bpkgs)  # Max size of printed num
        print("%*s | %*s | pkg" % (lenmax, "bids", 8, "build_id"))
        _out_pkg("Tag:", spkg.match_pkgs(args, bpkgs))
    elif args[0] in ('nvra-unsigned-pkgs', 'nvra-unsigned-packages'):
        args = args[1:]
        tag = options.packages_tag

        def _out_pkg(prefix, bpkgs):
            # Plain nvra listing of unsigned packages.
            # NOTE(review): "prefix" and "bids" are unused in this variant.
            bids = set()
            for bpkg in sorted(bpkgs):
                if hasattr(bpkg, 'signed'):
                    if bpkg.signed:
                        continue
                print(bpkg)

        bpkgs = koji_tag2pkgs(kapi, tag)
        bpkgs = koji_pkgs2archsigs(kapi, bpkgs)
        _out_pkg("Tag:", spkg.match_pkgs(args, bpkgs))
        sys.exit(0)
    elif args[0] in ('list-packages', 'list-pkgs', 'ls-pkgs'):
        args = args[1:]
        tag = options.packages_tag
        comp = options.packages_compose

        def _out_pkg(prefix, bpkgs):
            # Print each package with flag suffixes describing its state.
            prefix = "%8s" % prefix
            for bpkg in sorted(bpkgs):
                suffix = ''
                if hasattr(bpkg, 'stream') and bpkg.stream:
                    suffix += '(stream)'
                if hasattr(bpkg, '_koji_build_id'):
                    suffix += '(bid:%d)' % bpkg._koji_build_id
                if hasattr(bpkg, 'signed'):
                    if bpkg.signed:
                        suffix += '(sig:%s)' % bpkg.signed
                    else:
                        suffix += '(unsigned)'
                if spkg._is_branch_el8(bpkg):
                    suffix += '(branch)'
                if spkg._is_module(bpkg):
                    suffix += '(module)'
                if spkg._is_rebuild(bpkg):
                    suffix += '(rebuild)'
                print(prefix, bpkg, suffix)

        bpkgs = koji_tag2pkgs(kapi, tag)
        bpkgs = koji_pkgs2archsigs(kapi, bpkgs)
        _out_pkg("Tag:", spkg.match_pkgs(args, bpkgs))
        if comp is not None:
            cpkgs = composed_url2pkgs(comp)
            _out_pkg("Compose:", spkg.match_pkgs(args, cpkgs))
    elif args[0] in ('summary-packages', 'summary-pkgs', 'sum-pkgs'):
        args = args[1:]
        tag = options.packages_tag
        comp = options.packages_compose
        bpkgs = koji_tag2pkgs(kapi, tag)
        print(" Tagged packages:", len(bpkgs))
        if args:
            print(" Matched:", len(spkg.match_pkgs(args, bpkgs)))
        if comp is not None:
            cpkgs = composed_url2pkgs(comp)
            print("Composed packages:", len(cpkgs))
            if args:
                print(" Matched:",
                      len(spkg.match_pkgs(args, cpkgs)))
    elif args[0] in ('check-nvr', 'check-nvra'):
        tag = options.packages_tag
        comp = options.packages_compose
        if args[0] == 'check-nvra':
            bpkg = spkg.nvra2pkg(args[1])
        else:
            bpkg = spkg.nvr2pkg(args[1])
        print("Pkg:", bpkg)
        if ml_pkgdeny.nvr(bpkg.name, bpkg.version, bpkg.release):
            print("Denied!")

        def _out_pkg(prefix, pkg, bpkgs, signed=False):
            # Compare every same-named package in bpkgs against pkg and
            # print whether it is newer/equal/older, with state flags.
            prefix = "%8s" % prefix
            tpkgs = []
            for bpkg in sorted(bpkgs):
                if bpkg.name != pkg.name:
                    continue
                tpkgs.append(bpkg)
            if signed:
                # Expand to per-arch packages so signature info is shown.
                tpkgs = sorted(koji_pkgs2archsigs(kapi, tpkgs))
            for bpkg in tpkgs:
                suffix = ''
                if hasattr(bpkg, 'stream') and bpkg.stream:
                    suffix += '(stream)'
                if hasattr(bpkg, '_koji_build_id'):
                    suffix += '(bid:%d)' % bpkg._koji_build_id
                if hasattr(bpkg, 'signed'):
                    if bpkg.signed:
                        suffix += '(sig:%s)' % bpkg.signed
                    else:
                        suffix += '(unsigned)'
                if spkg._is_branch_el8(bpkg):
                    suffix += '(branch)'
                if spkg._is_module(bpkg):
                    suffix += '(module)'
                if spkg._is_rebuild(bpkg):
                    suffix += '(rebuild)'
                if ml_gitdeny.nvr(bpkg.name, bpkg.version, bpkg.release):
                    suffix += '(git deny)'
                # No-op guard so each real case below can be an elif.
                if False:
                    pass
                elif bpkg.verGT(pkg):
                    print(prefix, "Newer:", bpkg, suffix)
                elif bpkg.verEQ(pkg):
                    print(prefix, " EQ:", bpkg, suffix)
                elif bpkg.verLT(pkg):
                    print(prefix, "Older:", bpkg, suffix)
                else:
                    print(prefix, "!!:", bpkg, suffix)

        bpkgs = koji_tag2pkgs(kapi, tag)
        bpkgs = koji_pkgs2archsigs(kapi, bpkgs)
        _out_pkg("Tag:", bpkg, bpkgs, signed=True)
        if comp is not None:
            cpkgs = composed_url2pkgs(comp)
            _out_pkg("Compose:", bpkg, cpkgs)
        # Check out the package's git repo into a throwaway directory to
        # compare against its git tags.
        tcoroot = tempfile.TemporaryDirectory(prefix="sync2build-chk-",
                                              dir="/tmp")
        corootdir = tcoroot.name + '/'
        codir = corootdir + bpkg.name
        tags = bpkg2git_tags(bpkg, codir)
        if os.path.exists(codir + '/README.debrand'):
            # Doesn't work
            print(" ** Debranding **")
        tpkgs = _tags2pkgs(tags)
        _out_pkg("GIT:", bpkg, tpkgs)
    elif args[0] in ('build-nvr', 'build-nvra'):
        if args[0] == 'build-nvra':
            pkg = spkg.nvra2pkg(args[1])
        else:
            pkg = spkg.nvr2pkg(args[1])
        print("Pkg:", pkg)
        if not check_denylist_builds([pkg]):
            print("Pkg in denylist:", pkg)
            sys.exit(1)  # Allow force?
        # Check out the package's git repo to find the version to build.
        tcoroot = tempfile.TemporaryDirectory(prefix="sync2build-chk-",
                                              dir="/tmp")
        corootdir = tcoroot.name + '/'
        codir = corootdir + pkg.name
        tags = bpkg2git_tags(pkg, codir)
        tpkgs = _tags2pkgs(tags)
        found = False
        for tpkg in sorted(tpkgs):
            if tpkg.name != pkg.name:
                continue
            suffix = ''
            if hasattr(tpkg, 'stream') and tpkg.stream:
                suffix = '(stream)'
            if tpkg.verGT(pkg):
                print("Newer version in GIT, building that!",
                      pkg, tpkg, suffix)
                pkg = tpkg
                found = True
            # Allow building older packages??
            elif tpkg.verEQ(pkg):
                pkg = tpkg
                found = True
                print("Found version in GIT:", tpkg, suffix)
        if not found:
            # NOTE(review): if tpkgs was empty (or had no same-named pkg),
            # tpkg/suffix are unbound here and this raises NameError —
            # confirm intended behaviour.
            print("Didn't find (so can't build):", tpkg, suffix)
        else:
            bts = build_packages(kapi, [pkg], options.packages_tag)
            bts, dbts = bpids_wait_packages(kapi, bts, options.wait)
            bpids_print(bts)
            for bt in dbts:
                # Keep everything around for logs
                bts.append(bt)
            bpids_save(bts)
        sys.exit(0)
    elif args[0] in ('build-n', 'build-name'):
        pkgid = kapi.getPackageID(args[1])
        pkgs = koji_pkgid2pkgs(kapi, pkgid)
        for pkg in sorted(pkgs):
            print(pkg, pkg._koji_task_state, pkg._koji_build_id)
        sys.exit(0)
    # NOTE(review): 'bipds' looks like a typo for 'bpids' — confirm.
    elif args[0] in ('bpids-list', 'bipds'):
        tids = bpids_load(kapi)
        bpids_print(tids)
        sys.exit(0)
    elif args[0] in ('bpids-wait',):
        bts = bpids_load(kapi)
        bts, dbts = bpids_wait_packages(kapi, bts, options.wait)
        bpids_print(bts)
        bpids_save(bts)
        sys.exit(0)
    elif args[0] in ('tag-hash', 'tag-rpms-hash', 'tag-srpms-hash'):
        tag = options.packages_tag
        csum = args[1]
        if args[0] == 'tag-srpms-hash':
            # srpm checksums are prefixed with 's' to distinguish them.
            data = 's' + koji_tag2srpms_checksum(kapi, tag, csum)
        else:
            data = koji_tag2checksum(kapi, tag, csum)
        print(data)
        sys.exit(0)
    elif args[0] in ('packages', 'pkgs'):
        if not options.download_only:
            print(" ** Warning: This will build pkgs/mods in koji.")
        tag = options.packages_tag
        comp = options.packages_compose
        bts = sync_packages(tag, comp, kapi)
        bts, dbts = bpids_wait_packages(kapi, bts, options.wait)
        bpids_print(bts)
        for bt in dbts:
            if bt.state == 'CLOSED':  # Drop successes
                continue
            # Keep failures around for a bit...
            if bt.since > conf_ttl_failed_builds:
                continue
            bts.append(bt)
        bpids_save(bts)

    if not sys.stdout.isatty():
        print(" -- Done --")