def download_rpms_from_scratch_tasks(output_directory, dist): """ Download all RPMS from the scratch tasks for a given distribution :param output_directory: The root directory for the distribution files to be saved in :type output_directory: str :param dist: The distribution to get the tasks from :type dist: str """ # Get the tasks for the distribution parent_tasks = DISTRIBUTION_INFO[dist].get('scratch_tasks', []) for parent_task_id in parent_tasks: descendants = mysession.getTaskDescendents(int(parent_task_id)) for task_id in descendants: # Get the task arch taskinfo = mysession.getTaskInfo(int(task_id)) arch = taskinfo.get('arch') target_arch_dir = os.path.join(output_directory, arch) # Remove any rpm in the output directory that matches an RPM built by the scratch tasks scratch_rpm_files = [f for f in mysession.listTaskOutput(int(task_id)) if f.endswith('.rpm')] old_rpm_files = [f for f in os.listdir(target_arch_dir) if f.endswith('.rpm')] for old_rpm in old_rpm_files: for scratch_rpm in scratch_rpm_files: if splitFilename(scratch_rpm)[0] == splitFilename(old_rpm)[0]: os.remove(os.path.join(target_arch_dir, old_rpm)) print 'The scratch build is replacing %s with %s' % (old_rpm, scratch_rpm) print 'Downloading %s to %s' % (task_id, target_arch_dir) download_rpms_from_task_to_dir(task_id, target_arch_dir)
def main(self, args):
    """Print the difference between two koji tags, either as RST sections
    (``args.rst``) or as a plain ``difflib.ndiff`` of the NVR lists."""
    src_name = "src: " + args.src_tag
    dst_name = "dst: " + args.dst_tag
    src, src_pkgs = zuul_koji_lib.list_tag(args.src_tag, self.log)
    dst, dst_pkgs = zuul_koji_lib.list_tag(args.dst_tag, self.log)
    if not args.rst:
        # plain mode: line-by-line diff of the two NVR lists
        for diff in difflib.ndiff(src, dst):
            print(diff)
        return
    # RST mode: classify dst NVRs as new or updated, and find src-only ones
    new_pkgs = [nvr for nvr in dst if splitFilename(nvr)[0] not in src_pkgs]
    updated_pkgs = [nvr for nvr in dst
                    if splitFilename(nvr)[0] in src_pkgs and nvr not in src]
    deleted_pkgs = [nvr for nvr in src
                    if splitFilename(nvr)[0] not in dst_pkgs]
    print(zuul_koji_lib.format_rst_header(
        "Updated Packages different in " + dst_name +
        " (package name is in src)"))
    for pkg in sorted(updated_pkgs):
        print("- %s" % pkg)
    print("")
    print(zuul_koji_lib.format_rst_header("Not in " + src_name))
    for pkg in sorted(new_pkgs):
        print("- %s" % pkg)
    print("")
    print(zuul_koji_lib.format_rst_header("Not in " + dst_name))
    for pkg in sorted(deleted_pkgs):
        print("- %s" % pkg)
def run(self):
    """Interactively verify that every rpm listed in ``self.all_pkg_file``
    exists under ``self.rpm_build_dir``; optionally regenerate the list,
    replacing missing entries with newer builds found in the build dir."""
    #for key, value in self.actions.iteritems():
    # Prompt for each configured path until the user supplies an existing one.
    for action in self.actions:
        answer = self.ui.promptConfirm(action[0])
        if answer:
            while True:
                res = self.ui.prompt(action[1])
                if os.path.exists(res):
                    # action[2] names the attribute this path belongs to
                    if action[2] == "self.rpm_build_dir":
                        self.rpm_build_dir = res
                    elif action[2] == "self.all_pkg_file":
                        self.all_pkg_file = res
                    break
                else:
                    print "!! %s does not exist, please input again !!" %(res)
    os.system("clear")
    if os.path.exists(self.all_pkg_file) and \
            os.path.exists(self.rpm_build_dir):
        print "### Starting Verifying ###"
        allPkgInISOOLD = list()
        not_match_list = list()
        f = open(self.all_pkg_file)
        allPkgInISOOLD = [i.strip('\n') for i in f.readlines()]
        f.close()
        # Collect every listed rpm that is missing from the build dir.
        for line in allPkgInISOOLD:
            if not os.path.exists(os.path.join(self.rpm_build_dir, line.strip())):
                not_match_list.append(line.strip())
        if len(not_match_list) > 0:
            print "### FAIL: Following RPMs Do Not Exist In %s: ###" %(self.rpm_build_dir)
            for i in not_match_list:
                print "### %s ###" %(i)
            answer = self.ui.promptConfirm(">>> Would you like to re-generate %s ?"\
                %(self.all_pkg_file))
            if answer:
                allPkgInISONew = list()
                for j in os.listdir(self.rpm_build_dir):
                    (n, v, r, e, a) = splitFilename(j)
                    for k in not_match_list:
                        (name, version, release, e, arch) = splitFilename(k)
                        # NOTE(review): version/release compared as strings
                        # (lexicographic), not rpm vercmp -- confirm intended.
                        if (a == arch and n == name and
                                ((v > version) or (v == version and r > release))):
                            allPkgInISONew.append(j)
                            break
                    else:
                        # for/else: no missing entry matched this file --
                        # keep it only if it was in the old list.
                        if j in allPkgInISOOLD:
                            allPkgInISONew.append(j)
                f = open(os.path.join(os.path.dirname(self.all_pkg_file),
                                      "all.pkg.ppc64le.new"), "w+")
                for line in allPkgInISONew:
                    f.write(line+'\n')
                f.close()
                print "### New all.pkg.ppc64le Has Been Generated (%s) ###" \
                    %(os.path.join(os.path.dirname(self.all_pkg_file), "all.pkg.ppc64le.new"))
        else:
            print "### PASS: Each Pkg in %s Do Exist In %s ###" %(self.all_pkg_file, self.rpm_build_dir)
    else:
        print "!! Error: file does not exist !!"
def run(self):
    """Detect duplicated rpms (same name, different version/release) under
    ``self.rpm_build_dir``, move the older copies into a user-chosen backup
    directory, and warn about backed-up names unknown to the build list."""
    print "### Starting Verifying ###"
    # Keep, per package name, the newest file seen so far; everything else
    # goes to dup_rpm_list.
    for files in os.listdir(self.rpm_build_dir):
        (n, v, r, e, a) = splitFilename(files)
        if n not in self.all_rpm_info.keys():
            self.all_rpm_info[n] = files
        else:
            (name, version, release, e,
             arch) = splitFilename(self.all_rpm_info[n])
            # NOTE(review): lexicographic compare, not rpm vercmp; also an
            # arch mismatch sends the new file straight to the dup list.
            if (a == arch and
                ((v > version) or (v == version and r > release))):
                self.dup_rpm_list.append(self.all_rpm_info[n])
                self.all_rpm_info[n] = files
            else:
                self.dup_rpm_list.append(files)
    if len(self.dup_rpm_list) > 0:
        print "### FAIL: Find Duplicated RPMs(see file (%s)) ..." % (
            self.dup_rpm_list)
        # Persist the duplicate list for later inspection.
        f = open(self.rpm_dup_file, "w+")
        for line in self.dup_rpm_list:
            f.write(line + '\n')
        f.close()
        # Ask for an existing backup directory, then move duplicates there.
        while True:
            res = self.ui.prompt(
                ">>> Please specify the locaiton of RPM remove target dir:"
            )
            if os.path.exists(res):
                self.backup_des_dir = res
                break
            else:
                print "!! %s does not exist, please input again !!" % (
                    res)
        print "### Moving Duplicated RPMs ###"
        for rpms in self.dup_rpm_list:
            shutil.move(os.path.join(self.rpm_build_dir, rpms),
                        self.backup_des_dir)
        print "### Moving Complete ###"
    else:
        print "### PASS: Do Not Find Any Duplicated RPMs ###"
    # Cross-check: warn about backed-up packages not in the build rpm set.
    self.get_all_build_rpm()
    for files in os.listdir(self.backup_des_dir):
        (n, v, r, e, a) = splitFilename(files)
        if n not in self.all_build_rpm:
            print "### WARN: %s Do Not Exist In %s" % (n, self.rpm_build_dir)
def bpkgs(self):
    """Return the rpm package names parsed from each line of ``self.urls``."""
    return [splitFilename(os.path.basename(url.strip()))[0]
            for url in self.urls.splitlines()]
def check(basedir): print("------------------------------") print("Recursively check for old rpms") print("------------------------------") duplicates = [] dirlist = [] for root, dirs, files in os.walk(basedir): for d in dirs: dirlist.append(os.path.join(root, d)) for wdir in dirlist: repo = collections.defaultdict(dict) for filename in os.listdir(wdir): if filename.endswith(".rpm"): (n, v, r, e, a) = splitFilename(filename) if not e: e = 'e' if a in repo and e in repo[a] and n in repo[a][e]: sv = repo[a][e][n][0] if LooseVersion(sv) > LooseVersion(v): d = os.path.join(wdir, filename) duplicates.append(d) else: d = os.path.join(wdir, repo[a][e][n][1]) duplicates.append(d) repo[a][e][n] = (v, filename) else: if e not in repo[a]: repo[a][e] = {} repo[a][e][n] = (v, filename) duplicates.sort() for d in duplicates: print d
def postresolve_hook(conduit):
    """yum plugin hook run after depsolving.

    For every package about to be updated, records the newest changelog
    timestamp of the currently installed version in the module-global
    ``origpkgs`` and, when ``updateinfo`` is set, the applicable update
    notices in the module-global ``orignots``.  Prints the changes right
    away when ``when=pre`` is configured.
    """
    _setup_changelog_from_cmdline(conduit)
    # nothing to do unless changelog/updateinfo output was requested
    if not (changelog or updateinfo) or conduit.resultcode == 1:
        return
    # Find currently installed versions of packages we're about to update
    ts = conduit.getTsInfo()
    rpmdb = conduit.getRpmDB()
    if updateinfo:
        repos = set()
        for tsmem in ts.getMembers():
            # skip the pseudo-repo of already-installed packages
            if tsmem.po.repoid == 'installed':
                continue
            repos.add(tsmem.po.repo)
        mdi = UpdateMetadata(repos=list(repos))
    for tsmem in ts.getMembers():
        for po in rpmdb.searchNevra(name=tsmem.po.name, arch=tsmem.po.arch):
            times = po['changelogtime']
            try:
                # only the source-rpm *name* is used below
                n,v,r,e,a = splitFilename(po.sourcerpm)
            except TypeError:
                # sourcerpm missing/unsplittable -- fall back to the pkg name
                n = po.name
            if len(times) == 0:
                # deal with packages without changelog
                origpkgs[n] = 0
            else:
                origpkgs[n] = times[0]
            if updateinfo:
                for (pkgtup, notice) in mdi.get_applicable_notices(po.pkgtup):
                    orignots.add(notice)
    if conduit.confString('main', 'when', default='post') == 'pre':
        show_changes(conduit, 'Changes in packages about to be updated:')
def postresolve_hook(conduit):
    """yum plugin hook run after dependency resolution (pep8 variant).

    Stores, per package being updated, the newest installed changelog
    timestamp in the module-global ``origpkgs`` and (with ``updateinfo``)
    the relevant update notices in ``orignots``; emits the report early
    when configured with ``when=pre``.
    """
    _setup_changelog_from_cmdline(conduit)
    # bail out unless changelog/updateinfo output was requested
    if not (changelog or updateinfo) or conduit.resultcode == 1:
        return
    # Find currently installed versions of packages we're about to update
    ts = conduit.getTsInfo()
    rpmdb = conduit.getRpmDB()
    if updateinfo:
        repos = set()
        for tsmem in ts.getMembers():
            # 'installed' is the pseudo-repo of already-installed packages
            if tsmem.po.repoid == 'installed':
                continue
            repos.add(tsmem.po.repo)
        mdi = UpdateMetadata(repos=list(repos))
    for tsmem in ts.getMembers():
        for po in rpmdb.searchNevra(name=tsmem.po.name, arch=tsmem.po.arch):
            times = po['changelogtime']
            try:
                # only the source-rpm *name* is actually used
                n, v, r, e, a = splitFilename(po.sourcerpm)
            except TypeError:
                # sourcerpm missing/unsplittable -- use the package name
                n = po.name
            if len(times) == 0:
                # deal with packages without changelog
                origpkgs[n] = 0
            else:
                origpkgs[n] = times[0]
            if updateinfo:
                for (pkgtup, notice) in mdi.get_applicable_notices(po.pkgtup):
                    orignots.add(notice)
    if conduit.confString('main', 'when', default='post') == 'pre':
        show_changes(conduit, 'Changes in packages about to be updated:')
def get_rpmvalues(self):
    """Run ``rpm -qa`` and return a dict mapping package name to
    ``"version-release"``.

    Returns an empty dict when the rpm subprocess cannot be spawned
    (the failure is logged).
    """
    # could possibly use rpm -qa --last
    rpm_dict = {}
    rpm_qa_command = [self.config['rpm_binary_location'], "-qa"]
    try:
        rpm_list = subprocess.Popen(
            rpm_qa_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except ValueError:
        self.log.error("Invalid arguments provided to rpm query all")
        # BUG FIX: the original fell through and hit a NameError on rpm_list
        return rpm_dict
    except OSError:
        self.log.error(
            "OSError while perfoming rpm query all, may have invalid arguments or locations")
        # BUG FIX: the original fell through and hit a NameError on rpm_list
        return rpm_dict
    if rpm_list.stderr.readlines():
        self.log.error("stderr reported from rpm query all")
    for rpm in rpm_list.stdout:
        rpm_stripped = rpm.rstrip()
        (name, version, release, epoch,
         arch) = rpm_utils.splitFilename(rpm_stripped)
        if name and version and release:
            rpm_dict[name] = "{0}-{1}".format(version, release)
    return rpm_dict
def __cleanup_arch(self, repos):
    """Walk all packages of the given repos ordered by source rpm, group
    them by (srpm version, srpm release), and clean up superseded version
    groups via ``__cleanup_previous_versions``.

    :param repos: dict mapping repo_id -> repo info dict (must have 'path')
    """
    query = 'SELECT * FROM packages WHERE repo_id IN ({0}) ' \
            ' ORDER BY sourcerpm'.\
        format(', '.join([str(repo_id) for repo_id in repos.keys()]))
    last_srpm_name = None
    versions = {}
    for pkg_row in self.__db.execute(query):
        try:
            srpm_name, srpm_version, srpm_release, _, _ = \
                splitFilename(pkg_row['sourcerpm'])
        except Exception:
            # TODO: usually a error here means that we're processing an
            # src-RPM package which shouldn't be present in a binary
            # repository. It makes sense to delete it automatically.
            repo = repos[pkg_row['repo_id']]
            pkg_path = os.path.join(repo['path'], pkg_row['location'])
            print >> sys.stderr, u'skipping invalid {0} package'.\
                format(pkg_path)
            continue
        # rows arrive sorted by sourcerpm, so a name change closes the
        # previous group
        if last_srpm_name != srpm_name:
            if len(versions) > 1:
                self.__cleanup_previous_versions(last_srpm_name, versions,
                                                 repos)
            last_srpm_name = srpm_name
            versions = {}
        version_key = (srpm_version, srpm_release)
        pkg = {k: pkg_row[k] for k in pkg_row.keys()}
        if version_key not in versions:
            versions[version_key] = [pkg]
        else:
            versions[version_key].append(pkg)
    # BUG FIX: flush the final source-rpm group -- the original never
    # processed the versions collected for the last srpm name.
    if len(versions) > 1:
        self.__cleanup_previous_versions(last_srpm_name, versions, repos)
def create_test_packages(self, test_pkgs):
    """Build a TestPackage object for every rpm filename in *test_pkgs*."""
    built = []
    for filename in test_pkgs:
        name, version, release, epoch, arch = rpmutils.splitFilename(filename)
        # version-release.arch string used by the TestPackage API
        vra = version + "-" + release + "." + arch
        nevra = (name, arch, epoch, version, release)
        built.append(TestPackage(version, release, vra, nevra))
    return built
def parsePackage(filename):
    """Split an rpm *filename* into a dict with its name/version/release/arch
    (the epoch is discarded)."""
    name, version, release, epoch, arch = splitFilename(filename)
    return {
        'name': name,
        'version': version,
        'release': release,
        'arch': arch,
    }
def ParseRpmList( rpmList ):
    """Map each rpm filename in *rpmList* to a dict of its
    version/release/epoch/arch, keyed by package name."""
    parsed = {}
    for entry in rpmList:
        name, version, release, epoch, arch = splitFilename( entry )
        parsed[ name ] = {
            'version': version,
            'release': release,
            'epoch': epoch,
            'arch': arch,
        }
    return parsed
def run(self):
    """Find duplicated rpms (same name) under ``self.rpm_build_dir``,
    record them, move the older copies to a user-supplied backup dir, and
    warn about backed-up names missing from the build rpm set."""
    print "### Starting Verifying ###"
    # Track the newest file per package name; older ones become duplicates.
    for files in os.listdir(self.rpm_build_dir):
        (n, v, r, e, a) = splitFilename(files)
        if n not in self.all_rpm_info.keys():
            self.all_rpm_info[n] = files
        else:
            (name, version, release, e, arch) = splitFilename(self.all_rpm_info[n])
            # NOTE(review): string (lexicographic) compare, not rpm vercmp;
            # an arch mismatch marks the *new* file as the duplicate.
            if (a == arch and ((v > version) or (v == version and r > release))):
                self.dup_rpm_list.append(self.all_rpm_info[n])
                self.all_rpm_info[n] = files
            else:
                self.dup_rpm_list.append(files)
    if len(self.dup_rpm_list) > 0:
        print "### FAIL: Find Duplicated RPMs(see file (%s)) ..." %(self.dup_rpm_list)
        # Save the duplicate list to self.rpm_dup_file for inspection.
        f = open(self.rpm_dup_file, "w+")
        for line in self.dup_rpm_list:
            f.write(line+'\n')
        f.close()
        # Prompt until an existing backup directory is supplied.
        while True:
            res = self.ui.prompt(">>> Please specify the locaiton of RPM remove target dir:")
            if os.path.exists(res):
                self.backup_des_dir = res
                break
            else:
                print "!! %s does not exist, please input again !!" %(res)
        print "### Moving Duplicated RPMs ###"
        for rpms in self.dup_rpm_list:
            shutil.move(os.path.join(self.rpm_build_dir, rpms), self.backup_des_dir)
        print "### Moving Complete ###"
    else:
        print "### PASS: Do Not Find Any Duplicated RPMs ###"
    # Cross-check backed-up packages against the known build rpm set.
    self.get_all_build_rpm()
    for files in os.listdir(self.backup_des_dir):
        (n, v, r, e, a) = splitFilename(files)
        if n not in self.all_build_rpm:
            print "### WARN: %s Do Not Exist In %s" %(n, self.rpm_build_dir)
def ParseRpmList(rpmList):
    """Return {package name: {'version', 'release', 'epoch', 'arch'}} for
    every rpm filename in *rpmList*."""
    return {
        fields[0]: {'version': fields[1],
                    'release': fields[2],
                    'epoch': fields[3],
                    'arch': fields[4]}
        for fields in (splitFilename(line) for line in rpmList)
    }
def check_rpm_name(rpm_name):
    """
    Checks whether the RPM with the given name has to be downloaded and
    processed.

    Accepts a URL, a local file path, or a bare yum package name; returns
    True when the rpm targets the module-global ``target_arhcitecture``
    (or noarch) and is not a debug package.

    @param rpm_name The name of RPM package
    """
    file_name = None
    # If the given name is remote location, then we analyze whether we should
    # download it:
    url_parsed = urlparse.urlparse(rpm_name)
    if url_parsed.netloc is not None and len(url_parsed.netloc) > 0:
        logging.debug("Name {0} is detected as a URL "
                      "location {1}.".format(rpm_name, url_parsed))
        # All non-RPM files should be downloaded:
        if not rpm_name.endswith(".rpm"):
            return True
        # Not all RPMs should be downloaded:
        else:
            # strip path, fragment and query string from the URL
            file_name = rpm_name.split('/')[-1].split('#')[0].split('?')[0]
    # The only other case when we process the file is the existing file in the
    # filesystem:
    elif os.path.isfile(rpm_name):
        file_name = os.path.basename(rpm_name)
    # In case if we are given RPM name from yum, we should make full name from
    # it before parsing:
    else:
        if not rpm_name.endswith(".rpm"):
            file_name = rpm_name + ".rpm"
        # NOTE(review): a non-existent local name that already ends in
        # ".rpm" leaves file_name as None and the basename call below
        # would raise -- confirm callers never pass that.
    if os.path.basename(file_name) != file_name:
        file_name = os.path.basename(file_name)
    logging.debug("Processing argument {0} with RPM name "
                  "{1} ...".format(rpm_name, file_name))
    components = splitFilename(file_name)
    # splitFilename yields None components on parse failure
    for component in components:
        if component is None:
            logging.error("Failed to parse argument {0} with RPM name "
                          "{1}, result is {2}!".format(rpm_name, file_name,
                                                       components))
            return False
    (name, version, release, epoch, architecture) = components
    if architecture not in [target_arhcitecture, "noarch"]:
        logging.debug("Target architecture is {0}".format(target_arhcitecture))
        logging.debug("It is indended for another architecture, skipping...")
        return False
    elif "debuginfo" in name or "debugsource" in name:
        logging.debug("It is debug package, skipping...")
        return False
    else:
        logging.debug("Passed...")
        return True
def parse_srpm_package(self):
    """For every not-found package of ``self.arch`` listed in
    ``self.all_pkg_arch_nofound``, look up its binary rpm on the RH cdrom,
    query its Source RPM via ``rpm -qpi``, and write the srpm names that
    exist in ``self.srpm_dir`` to ``self.pkg_not_build_srpm_file``."""
    print "### Generating the srpm package list ###"
    src_rpms = list()
    f = open(self.all_pkg_arch_nofound)
    # entries are wrapped as "xxx <name.arch> xxx" in the input file
    pattern = re.compile("xxx (.*) xxx")
    for line in f.readlines():
        line = pattern.findall(line)[0]
        arch = line.split('.')[-1]
        package_name = line.split('.')[0]
        if arch == self.arch:
            for files in os.listdir(self.rh_cdrom_dir):
                #(n, v, r, e, a) = splitFilename(os.path.join("/media/cdrom/Packages/", files))
                (n, v, r, e, a) = splitFilename(files)
                if a == self.arch and n == package_name:
                    # scrape the "Source RPM" field from rpm -qpi output
                    command = "rpm -qpi %s 2>/dev/null" % (os.path.join(
                        self.rh_cdrom_dir, files))
                    output = os.popen(command).read()
                    # NOTE(review): the loop variable shadows the outer
                    # ``line``; if no "Source RPM" field is found,
                    # ``srpmFile`` keeps its value from a previous match.
                    for line in output.split("\n"):
                        if line.split(" : ")[0].strip() == "Source RPM":
                            #srpmFile = os.path.join("/mnt/SRPMS", line.split(" : ")[1].strip())
                            srpmFile = line.split(" : ")[1].strip()
                            break
                    #print srpmFile
                    (rpmName, rpmVersion, rpmRealse, rpme, rpmArch) = splitFilename(srpmFile)
                    #print rpmName
                    # keep the srpm only if it exists in self.srpm_dir
                    for srpm in os.listdir(self.srpm_dir):
                        (n, v, r, e, a) = splitFilename(srpm)
                        if n == rpmName and srpmFile not in src_rpms:
                            src_rpms.append(srpmFile)
    f.close()
    print src_rpms
    f = file(self.pkg_not_build_srpm_file, "w+")
    for line in src_rpms:
        f.write(line + '\n')
    f.close()
def exclude_hook(conduit):
    """yum versionlock plugin hook: read the lock list and register
    whitelist/blacklist excluders.

    Entries starting with '!' are negative (blacklist) locks.  Relies on
    module globals: ``no_exclude``, ``fileurl``, ``follow_obsoletes``,
    ``show_hint`` and the ``_version_lock_excluder_*`` sets.
    """
    if no_exclude:
        return
    conduit.info(3, 'Reading version lock configuration')
    if not fileurl:
        raise PluginYumExit('Locklist not set')
    for ent in _read_locklist():
        neg = False
        # leading '!' marks a blacklist (negative) entry
        if ent and ent[0] == '!':
            ent = ent[1:]
            neg = True
        (n, v, r, e, a) = splitFilename(ent)
        n = n.lower()
        v = v.lower()
        r = r.lower()
        e = e.lower()
        if e == '':
            # empty epoch is canonicalised to '0'
            e = '0'
        if neg:
            _version_lock_excluder_B_nevr.add("%s-%s:%s-%s" % (n, e, v, r))
            continue
        _version_lock_excluder_n.add(n)
        _version_lock_excluder_nevr.add("%s-%s:%s-%s" % (n, e, v, r))
    if (_version_lock_excluder_n and follow_obsoletes):
        # If anything obsoletes something that we have versionlocked ... then
        # remove all traces of that too.
        for (pkgtup, instTup) in conduit._base.up.getObsoletesTuples():
            if instTup[0] not in _version_lock_excluder_n:
                continue
            _version_lock_excluder_n.add(pkgtup[0].lower())
    # optional user hint about how many updates the lock suppresses
    total = len(_get_updates(conduit._base)) if show_hint else 0
    if total:
        if total > 1:
            suffix = 's'
            what = 'them'
        else:
            suffix = ''
            what = 'it'
        conduit.info(2, 'Excluding %d update%s due to versionlock '
                     '(use "yum versionlock status" to show %s)' %
                     (total, suffix, what))
    if _version_lock_excluder_n:
        _add_versionlock_whitelist(conduit)
    if _version_lock_excluder_B_nevr:
        _add_versionlock_blacklist(conduit)
def transaction_exists(pkglist):
    """
    checks the package list to see if any packages are involved in an
    incomplete transaction
    """
    conflicts = []
    if not transaction_helpers:
        return conflicts
    # pre-split the requested packages into (n, v, r, e, a) tuples so each
    # unfinished-transaction step is compared cheaply by name and arch
    requested = [splitFilename(pkg) for pkg in pkglist]
    for trans in find_unfinished_transactions():
        for action, step_spec in find_ts_remaining(trans):
            # the action is install/erase/etc.; only the spec matters here
            n, v, r, e, a = splitFilename(step_spec)
            # name-arch label used purely for display purposes
            label = "%s-%s" % (n, a)
            if any(n == req[0] and a == req[4] for req in requested):
                if label not in conflicts:
                    conflicts.append(label)
    return conflicts
def parse_srpm_package(self):
    """Build the list of source rpms for the arch-specific packages that
    were reported as not found, by matching binary rpms on the RH cdrom,
    scraping their "Source RPM" field with ``rpm -qpi``, and keeping only
    srpms present in ``self.srpm_dir``; results go to
    ``self.pkg_not_build_srpm_file``."""
    print "### Generating the srpm package list ###"
    src_rpms = list()
    f = open(self.all_pkg_arch_nofound)
    # input lines wrap entries as "xxx <name.arch> xxx"
    pattern = re.compile("xxx (.*) xxx")
    for line in f.readlines():
        line = pattern.findall(line)[0]
        arch = line.split('.')[-1]
        package_name = line.split('.')[0]
        if arch == self.arch:
            for files in os.listdir(self.rh_cdrom_dir):
                #(n, v, r, e, a) = splitFilename(os.path.join("/media/cdrom/Packages/", files))
                (n, v, r, e, a) = splitFilename(files)
                if a == self.arch and n == package_name:
                    # query the rpm header for its source rpm
                    command = "rpm -qpi %s 2>/dev/null" %(os.path.join(self.rh_cdrom_dir, files))
                    output = os.popen(command).read()
                    # NOTE(review): inner ``line`` shadows the outer loop
                    # variable; srpmFile may be stale if no "Source RPM"
                    # field appears in the output.
                    for line in output.split("\n"):
                        if line.split(" : ")[0].strip() == "Source RPM":
                            #srpmFile = os.path.join("/mnt/SRPMS", line.split(" : ")[1].strip())
                            srpmFile = line.split(" : ")[1].strip()
                            break
                    #print srpmFile
                    (rpmName, rpmVersion, rpmRealse, rpme, rpmArch) = splitFilename(srpmFile)
                    #print rpmName
                    # only record srpms that actually exist in self.srpm_dir
                    for srpm in os.listdir(self.srpm_dir):
                        (n, v, r, e, a) = splitFilename(srpm)
                        if n == rpmName and srpmFile not in src_rpms:
                            src_rpms.append(srpmFile)
    f.close()
    print src_rpms
    f = file(self.pkg_not_build_srpm_file, "w+")
    for line in src_rpms:
        f.write(line+'\n')
    f.close()
def run(self):
    """Interactively collect the build and cdrom directories, then verify
    that every arch-matching rpm on the RH cdrom has a same-named rpm in
    the local build directory."""
    for action in self.actions:
        answer = self.ui.promptConfirm(action[0])
        if answer:
            # re-prompt until the supplied path exists
            while True:
                res = self.ui.prompt(action[1])
                if os.path.exists(res):
                    # action[2] names the attribute the path belongs to
                    if action[2] == "self.rpm_build_dir":
                        self.rpm_build_dir = res
                    elif action[2] == "self.rh_cdrom_dir":
                        self.rh_cdrom_dir = res
                    break
                else:
                    print "!! %s does not exist, please input again !!" %(res)
    os.system("clear")
    if os.path.exists(self.rh_cdrom_dir) and \
            os.path.exists(self.rpm_build_dir):
        print "### Starting Verifying ###"
        isoft_build_rpm_list = list()
        not_build = False
        # collect the names of locally built rpms for this arch
        for files in os.listdir(self.rpm_build_dir):
            (n, v, r, e, a) = splitFilename(files)
            if a == self.arch and n not in isoft_build_rpm_list:
                isoft_build_rpm_list.append(n)
        # flag every cdrom rpm of this arch that has no local build
        for files in os.listdir(self.rh_cdrom_dir):
            (n, v, r, e, a) = splitFilename(files)
            if a == self.arch and n not in isoft_build_rpm_list:
                not_build = True
                print "### FAIL: %s Has Not Been Built ###" %(files)
        if not not_build:
            print "### PASS: All Arch Related RPMs Have Been Built ###"
    else:
        print "!! Error: file does not exist !!"
def parse_package_name(pkg):
    """Best-effort extraction of the package name from an rpm name string.

    Full NVRA-looking strings are delegated to splitFilename; otherwise the
    name is guessed by collecting leading components that contain no digit.
    """
    if pkg.count(".") >= 3 and pkg.count("-") >= 2:
        return splitFilename(pkg)[0]
    # doesn"t seem like valid pkg string, try to guess package name
    stripped = pkg.replace(".rpm", "").replace(".src", "")
    for sep in ("-", "."):
        if sep not in stripped:
            continue
        collected = []
        for piece in stripped.split(sep):
            if any(ch.isdigit() for ch in piece):
                # first numeric component ends the name
                return "-".join(collected)
            collected.append(piece)
        return "-".join(collected)
    return stripped
def install_from_scm(self, force=False):
    """Build packages from SCM, then install the binary (non-src) rpms --
    via plain rpm when *force* is set, otherwise via yum."""
    built = self.build_from_scm()
    # Strip out any .src.rpm files
    binaries = [pkg for pkg in built if splitFilename(pkg)[4] != 'src']
    logging.info("Installing SCM-built packages for '%s'" % self.name)
    for pkg in binaries:
        logging.info("... %s" % pkg)
    # Install packages
    if force:
        rpm_install(binaries)
    else:
        yum_install(binaries)
def get_packages():
    """Dump the rpm database to a temp file and return a list of Package
    objects (name, version, release) parsed from it."""
    listing = "/tmp/packagesinfp"
    os.system("rpm -qa > " + listing)
    pkgs = []
    for line in Utils.get_content(listing):
        fields = splitFilename(line)
        pkg = Package(fields[0], fields[1], fields[2])
        print("Adding package " + str(pkg))
        pkgs.append(pkg)
    # clean up the temporary listing file
    os.system("rm -f " + listing)
    return pkgs
def _match(ent, patterns):
    """Return True when any pattern fnmatches any canonical spelling
    (n, n.a, n-v, n-v-r, n-v-r.a, e:n-v-r.a, n-e:v-r.a) of *ent*."""
    # there should be an API for this in Yum
    n, v, r, e, a = splitFilename(ent)
    spellings = (
        '%s' % n,
        '%s.%s' % (n, a),
        '%s-%s' % (n, v),
        '%s-%s-%s' % (n, v, r),
        '%s-%s-%s.%s' % (n, v, r, a),
        '%s:%s-%s-%s.%s' % (e, n, v, r, a),
        '%s-%s:%s-%s.%s' % (n, e, v, r, a),
    )
    return any(fnmatch.fnmatch(spelling, pat)
               for spelling in spellings
               for pat in patterns)
def get_srpm_for_package(source_query, pkg):
    """Return the single latest src-arch package matching *pkg*'s source
    rpm name; raise if the match is missing or ambiguous."""
    # Get just the base name of the SRPM
    try:
        sourcename = splitFilename(pkg.sourcerpm)[0]
    except Exception:
        print("Failure: %s(%s)" % (pkg.sourcerpm, pkg.name))
        raise
    hits = source_query.filter(name=sourcename, latest=True, arch='src')
    if len(hits) > 1:
        raise TooManyPackagesException(pkg.name)
    if len(hits) == 0:
        raise NoSuchPackageException(pkg.name)
    # Exactly one package matched
    return hits[0]
def discover_nvr_from_koji(self, packages, tag):
    """Fill in the ``nvr``/``valid_nvr`` fields of each package dict from
    the builds found in the given koji *tag*, then cross-check that the
    koji tag and the distro file agree in both directions."""
    self.log.info("===== Discovering package from koji tag %s", tag)
    tag_content = zuul_koji_lib.get_tag_content(tag, self.log)
    (_, koji_tag_pkgs) = zuul_koji_lib.list_tag_content(tag_content)
    koji_tag_pkgs_set = set(koji_tag_pkgs.keys())
    distro_pkgs_set = set()
    # Remove 9999 package.  BUG FIX: materialize as a list -- on Python 3
    # map()/filter() return one-shot iterators, and ``rpms`` is re-scanned
    # once per package in the loop below (a no-op change on Python 2).
    rpms = [(x, splitFilename(x)) for x in tag_content if "9999" not in x]
    for package in packages:
        name = os.path.basename(package["name"].replace(
            "rpms/python-", "rpms/python3-"))
        if package.get("scl"):
            # SCL packages are prefixed with their collection name
            name = "%s-%s" % (package["scl"], name)
        pkgs = [rpm for rpm in rpms if rpm[1][0] == name]
        if len(pkgs) > 1:
            pkg = most_recent(pkgs)
            self.log.info("Picked %s out of %s" % (pkg, pkgs))
        elif len(pkgs) == 1:
            pkg = pkgs[0]
        else:
            package["valid_nvr"] = False
            self.log.warning("Package %s doesn't exists in %s" % (name, tag))
            continue
        distro_pkgs_set.add(name)
        package["nvr"] = pkg[0]
        v = pkg[1][1]

        def invalid_nvr():
            # bootstrap-looking versions (0.0.0.0* or containing letters)
            return (name != "python3-devnest"  # It's ok, this one is on me
                    and (v.startswith("0.0.0.0")
                         or re.search("[a-zA-Z]", v)))
        if invalid_nvr():
            self.log.warning("Package %s doesn't look like to be "
                             "tagged: %s" % (name, package["nvr"]))
            package["valid_nvr"] = False
        else:
            package["valid_nvr"] = True
    zuul_koji_lib.diff_ensure(koji_tag_pkgs_set, distro_pkgs_set,
                              "Package in koji not in distro file", self.log)
    zuul_koji_lib.diff_ensure(distro_pkgs_set, koji_tag_pkgs_set,
                              "Package in distro not in koji", self.log)
def list_tag_content(tag_content, log=None):
    """Reduce a koji tag's NVR list to one (most recent) NVR per package.

    :param tag_content: iterable of NVR strings from a koji tag
    :param log: optional logger for reporting which NVR was picked
    :return: (sorted list of chosen NVRs, dict name -> last-seen split tuple)
    """
    # Remove 9999 package.  BUG FIX: materialize as a list -- on Python 3
    # map()/filter() return one-shot iterators, and this sequence is scanned
    # again once per package name below (no behavior change on Python 2).
    rpms = [(x, splitFilename(x)) for x in tag_content if "9999" not in x]
    names = set()
    pkgs_list = {}
    for _, inf in rpms:
        names.add(inf[0])
        # last-seen split tuple wins for each name, as before
        pkgs_list[inf[0]] = inf
    nvrs = []
    for name in sorted(list(names)):
        matches = [rpm for rpm in rpms if rpm[1][0] == name]
        if len(matches) > 1:
            pkg = most_recent(matches)[0]
            if log:
                log.info("Picked %s out of %s" % (pkg, matches))
        elif len(matches) == 1:
            pkg = matches[0][0]
        nvrs.append(pkg)
    return nvrs, pkgs_list
def get_packages():
    """
    Extract the installed rpm set via ``rpm -qa`` into a temp file and
    return it as a list of Package(name, version, release) objects.

    :return: list
    """
    results = []
    tmp_file = "/tmp/packagesinfp"
    os.system("rpm -qa > " + tmp_file)
    for entry in Utils.get_content(tmp_file):
        name, version, release = splitFilename(entry)[:3]
        current = Package(name, version, release)
        print("Adding package " + str(current))
        results.append(current)
    # remove the temporary listing once parsed
    os.system("rm -f " + tmp_file)
    return results
def spread_packages (sourcedir, destdir):
    """
    Sort downloaded gluster rpms into per-distribution/per-arch directories.

    :param sourcedir: The directory on which all the downloaded rpms are stored.
    :param destdir: The directory in which the packages will be kept based on fedora and epel version
    :return: True after copying and listing the tree
    """
    destdir=os.path.abspath(destdir)
    number = 0
    orig_path=sourcedir+'/'+'*.rpm'
    pack_list = glob.glob(orig_path)
    for pck in pack_list:
        number +=1
        (n, v, r, e, a) = splitFilename(pck)
        # last dotted component of the release encodes the dist tag (fcNN/elN)
        distribution = r.split('.')[-1]
        # validating whether its an fedora/epel rpm.
        if "fc" in distribution:
            print "\t Fedora rpm --> %s %s %s %s %s" %(n, v, r, e, a)
        if 'el' in distribution:
            print "\t EPEL package --> %s %s %s %s %s" %(n, v, r, e, a)
        try:
            # destination layout: <destdir>/<disttag>/<arch>/
            if not os.path.exists(destdir+'/'+distribution+'/'+a):
                os.makedirs(destdir+'/'+distribution+'/'+a)
            source_file = pck
            if "gluster" in pck:
                destination_file = destdir+'/'+distribution+'/'+a
                os.system("cp"+" " +source_file+" "+destination_file)
            else:
                print "Not a Gluster RPM :%s" %(pck)
        except:
            # re-raise unchanged; the try/except only surfaces the error
            raise
    post_spread(destdir)
    print " ******* LISTING PACKAGES FOR U!! ********"
    tree_it(destdir)
    return True
def handle_wi(self, wid):
    """Handle workitem: enrich each package's version info with the data of
    its matching OBS binary (version, release, epoch, arch, summary,
    description)."""
    if not wid.fields.versions:
        return
    versions = wid.fields.versions.as_dict()
    for pkg, info in versions.items():
        target = "%s/%s" % (info["repository"], info["arch"])
        # the binary to match defaults to the package name itself
        wanted = info.get("binary", pkg)
        for binary in self.obs.getBinaryList(info["project"], target, pkg):
            n, v, r, e, a = splitFilename(binary)
            if n != wanted:
                continue
            bininfo = self.obs.getBinaryInfo(info["project"], target,
                                             pkg, binary)
            versions[pkg].update({"version": v,
                                  "release": r.split("."),
                                  "epoch": e,
                                  "arch": a,
                                  "summary": bininfo["summary"],
                                  "description": bininfo["description"]})
    wid.fields.versions = versions
def latest_release_of_package(self, directory, package, version):
    """
    Return latest release of the given package/version found as a binary
    rpm directly under *directory* ("0" when none is found).
    """
    self.logger.debug("Looking for latest %s - %s under %s",
                      package, version, directory)
    # BUG FIX: the original initialised this to the integer 0 and compared
    # it against release strings -- that only "works" on Python 2 (ints
    # always sort below strings) and raises TypeError on Python 3.  Use
    # None as the "nothing found yet" sentinel instead.
    latest_found_release = None
    if os.path.isdir(directory):
        for occurence in os.listdir(directory):
            filefullpath = os.path.join(directory, occurence)
            if os.path.isfile(filefullpath) \
                    and filefullpath.endswith(".rpm") \
                    and not filefullpath.endswith(".src.rpm"):
                (rpmname, rpmversion, rpmrelease, _, _) = \
                    splitFilename(occurence)
                if rpmname == package and rpmversion == version:
                    self.logger.debug("Found rpm " + filefullpath)
                    # NOTE: releases are still compared lexicographically,
                    # matching the original behavior ("10" < "9") --
                    # TODO: switch to rpm version comparison.
                    if latest_found_release is None \
                            or latest_found_release < rpmrelease:
                        self.logger.debug(
                            "Found rpm to match and to be the latest")
                        latest_found_release = rpmrelease
    if latest_found_release is None:
        self.logger.debug("Did not find any previous releases of %s",
                          package)
        return "0"
    return str(latest_found_release)
def exclude_hook(conduit):
    """yum versionlock plugin hook (config-driven variant): read the lock
    list and register whitelist/blacklist excluders; entries beginning with
    '!' are negative (blacklist) locks.  Uses the module-global ``fileurl``
    and ``_version_lock_excluder_*`` sets.
    """
    conduit.info(3, 'Reading version lock configuration')
    if not fileurl:
        raise PluginYumExit('Locklist not set')
    for ent in _read_locklist():
        neg = False
        # leading '!' marks a blacklist (negative) entry
        if ent and ent[0] == '!':
            ent = ent[1:]
            neg = True
        (n, v, r, e, a) = splitFilename(ent)
        n = n.lower()
        v = v.lower()
        r = r.lower()
        e = e.lower()
        if e == '':
            # empty epoch is canonicalised to '0'
            e = '0'
        if neg:
            _version_lock_excluder_B_nevr.add("%s-%s:%s-%s" % (n, e, v, r))
            continue
        _version_lock_excluder_n.add(n)
        _version_lock_excluder_nevr.add("%s-%s:%s-%s" % (n, e, v, r))
    if (_version_lock_excluder_n and
            conduit.confBool('main', 'follow_obsoletes', default=False)):
        # If anything obsoletes something that we have versionlocked ... then
        # remove all traces of that too.
        for (pkgtup, instTup) in conduit._base.up.getObsoletesTuples():
            if instTup[0] not in _version_lock_excluder_n:
                continue
            _version_lock_excluder_n.add(pkgtup[0].lower())
    if _version_lock_excluder_n:
        _add_versionlock_whitelist(conduit)
    if _version_lock_excluder_B_nevr:
        _add_versionlock_blacklist(conduit)
def spread_packages(sourcedir, destdir):
    """
    Sort downloaded gluster rpms into per-distribution/per-arch
    subdirectories of *destdir* (pep8 variant).

    :param sourcedir: The directory on which all the downloaded rpms are stored.
    :param destdir: The directory in which the packages will be kept based on fedora and epel version
    :return: True after copying and listing the tree
    """
    destdir = os.path.abspath(destdir)
    number = 0
    orig_path = sourcedir + '/' + '*.rpm'
    pack_list = glob.glob(orig_path)
    for pck in pack_list:
        number += 1
        (n, v, r, e, a) = splitFilename(pck)
        # last dotted release component carries the dist tag (fcNN / elN)
        distribution = r.split('.')[-1]
        # validating whether its an fedora/epel rpm.
        if "fc" in distribution:
            print "\t Fedora rpm --> %s %s %s %s %s" % (n, v, r, e, a)
        if 'el' in distribution:
            print "\t EPEL package --> %s %s %s %s %s" % (n, v, r, e, a)
        try:
            # destination layout: <destdir>/<disttag>/<arch>/
            if not os.path.exists(destdir + '/' + distribution + '/' + a):
                os.makedirs(destdir + '/' + distribution + '/' + a)
            source_file = pck
            if "gluster" in pck:
                destination_file = destdir + '/' + distribution + '/' + a
                os.system("cp" + " " + source_file + " " + destination_file)
            else:
                print "Not a Gluster RPM :%s" % (pck)
        except:
            # re-raise unchanged; the try/except only surfaces the error
            raise
    post_spread(destdir)
    print " ******* LISTING PACKAGES FOR U!! ********"
    tree_it(destdir)
    return True
def str2NVR(s):
    '''Convenience method to convert an rpm filename to just NVR'''
    name, version, release = splitFilename(os.path.basename(s))[:3]
    return '%s-%s-%s' % (name, version, release)
def update_repodata(bucketName, key, operation):
    """Regenerate the yum repodata of an S3-hosted repository after a
    single rpm was added or removed.

    :param bucketName: S3 bucket holding the repository
    :param key: S3 key of the changed rpm (repo path + file name)
    :param operation: "add" to include the rpm, anything else to delete it
    """
    # split the key into the repo prefix and the bare rpm file name
    if key.rfind("/") > -1:
        fileName = key[key.rfind("/")+1:]
        repoPath = key[:key.rfind("/")]
    else:
        fileName = key
        repoPath = ""
    (name, version, release, epoch, arch) = splitFilename(fileName)
    logger.debug("fileName={0}".format(fileName))
    logger.debug("repoPath={0}".format(repoPath))
    tmpdir = tempfile.mkdtemp()
    s3base = urlparse.urlunsplit(("s3", bucketName, repoPath, "", ""))
    s3grabber = S3Grabber(s3base)
    # Set up temporary repo that will fetch repodata from s3
    yumbase = yum.YumBase()
    yumbase.preconf.disabled_plugins = '*'
    yumbase.conf.cachedir = os.path.join(tmpdir, 'cache')
    yumbase.repos.disableRepo('*')
    repo = yumbase.add_enable_repo('s3')
    repo._grab = s3grabber
    repo._urls = [os.path.join(s3base, '')]
    # Ensure that missing base path doesn't cause trouble
    repo._sack = yum.sqlitesack.YumSqlitePackageSack(
        createrepo.readMetadata.CreaterepoPkgOld)
    # Create metadata generator
    mdconf = createrepo.MetaDataConfig()
    mdconf.directory = tmpdir
    mdconf.pkglist = yum.packageSack.MetaSack()
    mdgen = createrepo.MetaDataGenerator(mdconf, LoggerCallback())
    mdgen.tempdir = tmpdir
    mdgen._grabber = s3grabber
    new_packages = yum.packageSack.PackageSack()
    if operation == "add":
        # Combine existing package sack with new rpm file list
        newpkg = mdgen.read_in_package(os.path.join(s3base, fileName))
        newpkg._baseurl = ''  # don't leave s3 base urls in primary metadata
        new_packages.addPackage(newpkg)
    else:
        # Remove deleted package
        logger.debug("Delete package {0}".format(key))
        older_pkgs = yumbase.pkgSack.searchNevra(name=name)
        for i, older in enumerate(older_pkgs, 1):
            # only drop the exact version-release that was deleted
            if older.version == version and older.release == release:
                yumbase.pkgSack.delPackage(older)
    mdconf.pkglist.addSack('existing', yumbase.pkgSack)
    mdconf.pkglist.addSack('new', new_packages)
    # Write out new metadata to tmpdir
    mdgen.doPkgMetadata()
    mdgen.doRepoMetadata()
    mdgen.doFinalMove()
    # Replace metadata on s3
    s3grabber.syncdir(os.path.join(tmpdir, 'repodata'), 'repodata')
    shutil.rmtree(tmpdir)
def update_repodata(bucketName, key, operation):
    """Rebuild the yum repodata of an S3-hosted repository after one change.

    This variant supports packages stored below the repository root: the key
    is split into a repo prefix and a repo-relative file path via
    ``extract_repo_file``, and the S3 grabber is given a key override so the
    package is materialized under ``tmpdir`` at its relative location.

    :param bucketName: name of the S3 bucket holding the repository
    :param key: full S3 key of the .rpm file that was added or removed
    :param operation: "add" indexes the new package; any other value removes
                      the matching name/version/release from the metadata
    """
    logger.debug("key={0}".format(key))
    if key.rfind("/") > -1:
        fileName = key[key.rfind("/") + 1:]
        # repoPath = repo root prefix; relativeFileName = path below it.
        (repoPath, relativeFileName) = extract_repo_file(key)
        packagePath = relativeFileName[:relativeFileName.rfind("/")]
    else:
        # Key has no directory component: repo root holds the package.
        fileName = key
        relativeFileName = fileName
        repoPath = ""
        packagePath = ''
    (name, version, release, epoch, arch) = splitFilename(fileName)
    logger.debug("fileName={0}".format(fileName))
    logger.debug("relativeFileName={0}".format(relativeFileName))
    logger.debug("packagePath={0}".format(packagePath))
    logger.debug("repoPath={0}".format(repoPath))
    tmpdir = tempfile.mkdtemp()
    # Pre-create the nested directory the override below will write into.
    os.makedirs(os.path.join(tmpdir, packagePath))
    s3base = urlparse.urlunsplit(("s3", bucketName, repoPath, "", ""))
    # Map the flat tmpdir key to its nested relative location for the grabber.
    overridekey = os.path.join(tmpdir, fileName)
    overrideval = os.path.join(tmpdir, relativeFileName)
    s3grabber = S3Grabber(s3base, {overridekey: overrideval})
    # Set up temporary repo that will fetch repodata from s3
    yumbase = yum.YumBase()
    yumbase.preconf.disabled_plugins = '*'
    yumbase.conf.cachedir = os.path.join(tmpdir, 'cache')
    yumbase.repos.disableRepo('*')
    repo = yumbase.add_enable_repo('s3')
    # NOTE(review): relies on private yum repo attributes (_grab/_urls/_sack);
    # tied to the specific yum version this was written against.
    repo._grab = s3grabber
    repo._urls = [os.path.join(s3base, '')]
    # Ensure that missing base path doesn't cause trouble
    repo._sack = yum.sqlitesack.YumSqlitePackageSack(
        createrepo.readMetadata.CreaterepoPkgOld)
    # Create metadata generator
    mdconf = createrepo.MetaDataConfig()
    mdconf.directory = tmpdir
    mdconf.pkglist = yum.packageSack.MetaSack()
    mdgen = createrepo.MetaDataGenerator(mdconf, LoggerCallback())
    mdgen.tempdir = tmpdir
    mdgen._grabber = s3grabber
    new_packages = yum.packageSack.PackageSack()
    if operation == "add":
        # Combine existing package sack with new rpm file list
        newpkg = mdgen.read_in_package(os.path.join(s3base, relativeFileName))
        newpkg._baseurl = ''  # don't leave s3 base urls in primary metadata
        new_packages.addPackage(newpkg)
    else:
        # Remove deleted package
        logger.debug("Delete package {0}".format(key))
        # Drop every indexed package whose name/version/release match the key.
        older_pkgs = yumbase.pkgSack.searchNevra(name=name)
        for i, older in enumerate(older_pkgs, 1):
            if older.version == version and older.release == release:
                yumbase.pkgSack.delPackage(older)
    # Merge the surviving existing entries with the (possibly empty) new one.
    mdconf.pkglist.addSack('existing', yumbase.pkgSack)
    mdconf.pkglist.addSack('new', new_packages)
    # Write out new metadata to tmpdir
    mdgen.doPkgMetadata()
    mdgen.doRepoMetadata()
    mdgen.doFinalMove()
    # Replace metadata on s3
    s3grabber.syncdir(os.path.join(tmpdir, 'repodata'), 'repodata')
    shutil.rmtree(tmpdir)
def get_all_build_rpm(self):
    """Collect the package name of every entry in ``self.rpm_build_dir``.

    Rebuilds ``self.all_build_rpm`` from scratch as a list holding the name
    component (``splitFilename(...)[0]``) of each directory entry.
    """
    # splitFilename returns (name, version, release, epoch, arch);
    # only the bare package name is kept here.
    self.all_build_rpm = [
        splitFilename(entry)[0] for entry in os.listdir(self.rpm_build_dir)
    ]
def install(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos, installroot='/', allow_downgrade=False):
    """Install (or downgrade) the requested package specs via yum.

    Each spec may be a local .rpm path, a URL, an ``@group`` name, or a
    package name/provides expression.  Specs already satisfied are skipped
    for idempotence; the rest are split into install vs. downgrade batches
    and handed to ``exec_install``.

    :param module: Ansible-style module object used for ``fail_json``
    :param items: list of package specs to process
    :param repoq: repoquery handle passed through to the query helpers
    :param yum_basecmd: base yum command passed to ``exec_install``
    :param conf_file: yum config file passed to the query helpers
    :param en_repos: repos to enable for the queries
    :param dis_repos: repos to disable for the queries
    :param installroot: alternate install root (default '/')
    :param allow_downgrade: when True, older-than-installed candidates are
                            installed with yum's downgrade command
    :return: result dict with 'results', 'msg', 'rc', 'changed'
    """
    pkgs = []
    downgrade_pkgs = []
    res = {}
    res['results'] = []
    res['msg'] = ''
    res['rc'] = 0
    res['changed'] = False

    for spec in items:
        pkg = None
        downgrade_candidate = False

        # check if pkgspec is installed (if possible for idempotence)
        # localpkg
        if spec.endswith('.rpm') and '://' not in spec:
            # get the pkg name-v-r.arch
            if not os.path.exists(spec):
                res['msg'] += "No RPM file matching '%s' found on system" % spec
                res['results'].append("No RPM file matching '%s' found on system" % spec)
                res['rc'] = 127  # Ensure the task fails in with-loop
                module.fail_json(**res)

            envra = local_envra(spec)

            # look for them in the rpmdb
            if is_installed(module, repoq, envra, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot):
                # if they are there, skip it
                continue
            pkg = spec

        # URL
        elif '://' in spec:
            # download package so that we can check if it's already installed
            package = fetch_rpm_from_url(spec, module=module)
            envra = local_envra(package)
            if is_installed(module, repoq, envra, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot):
                # if it's there, skip it
                continue
            pkg = package

        # groups
        elif spec.startswith('@'):
            if is_group_installed(spec):
                continue
            pkg = spec

        # range requires or file-requires or pkgname :(
        else:
            # most common case is the pkg is already installed and done
            # short circuit all the bs - and search for it as a pkg in is_installed
            # if you find it then we're done
            if not set(['*', '?']).intersection(set(spec)):
                installed_pkgs = is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, is_pkg=True, installroot=installroot)
                if installed_pkgs:
                    res['results'].append('%s providing %s is already installed' % (installed_pkgs[0], spec))
                    continue

            # look up what pkgs provide this
            pkglist = what_provides(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot)
            if not pkglist:
                res['msg'] += "No package matching '%s' found available, installed or updated" % spec
                res['results'].append("No package matching '%s' found available, installed or updated" % spec)
                res['rc'] = 126  # Ensure the task fails in with-loop
                module.fail_json(**res)

            # if any of the packages are involved in a transaction, fail now
            # so that we don't hang on the yum operation later
            conflicts = transaction_exists(pkglist)
            if conflicts:
                res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts)
                res['rc'] = 125  # Ensure the task fails in with-loop
                module.fail_json(**res)

            # if any of them are installed
            # then nothing to do
            found = False
            for this in pkglist:
                if is_installed(module, repoq, this, conf_file, en_repos=en_repos, dis_repos=dis_repos, is_pkg=True, installroot=installroot):
                    found = True
                    res['results'].append('%s providing %s is already installed' % (this, spec))
                    break

            # if the version of the pkg you have installed is not in ANY repo, but there are
            # other versions in the repos (both higher and lower) then the previous checks won't work.
            # so we check one more time. This really only works for pkgname - not for file provides or virt provides
            # but virt provides should be all caught in what_provides on its own.
            # highly irritating
            if not found:
                if is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot):
                    found = True
                    res['results'].append('package providing %s is already installed' % (spec))

            if found:
                continue

            # Downgrade - The yum install command will only install or upgrade to a spec version, it will
            # not install an older version of an RPM even if specified by the install spec. So we need to
            # determine if this is a downgrade, and then use the yum downgrade command to install the RPM.
            if allow_downgrade:
                for package in pkglist:
                    # Get the NEVRA of the requested package using pkglist instead of spec because pkglist
                    # contains consistently-formatted package names returned by yum, rather than user input
                    # that is often not parsed correctly by splitFilename().
                    (name, ver, rel, epoch, arch) = splitFilename(package)

                    # Check if any version of the requested package is installed
                    # NOTE(review): unlike the earlier is_installed calls, this one
                    # does not pass installroot — confirm that is intentional.
                    inst_pkgs = is_installed(module, repoq, name, conf_file, en_repos=en_repos, dis_repos=dis_repos, is_pkg=True)
                    if inst_pkgs:
                        (cur_name, cur_ver, cur_rel, cur_epoch, cur_arch) = splitFilename(inst_pkgs[0])
                        # compare > 0 means the installed EVR is newer than the
                        # candidate, i.e. this would be a downgrade.
                        compare = compareEVR((cur_epoch, cur_ver, cur_rel), (epoch, ver, rel))
                        if compare > 0:
                            downgrade_candidate = True
                        else:
                            downgrade_candidate = False
                            break

            # If package needs to be installed/upgraded/downgraded, then pass in the spec
            # we could get here if nothing provides it but that's not
            # the error we're catching here
            pkg = spec

        if downgrade_candidate and allow_downgrade:
            downgrade_pkgs.append(pkg)
        else:
            pkgs.append(pkg)

    # Run downgrades first, then plain installs.
    if downgrade_pkgs:
        res = exec_install(module, items, 'downgrade', downgrade_pkgs, res, yum_basecmd)

    if pkgs:
        res = exec_install(module, items, 'install', pkgs, res, yum_basecmd)

    return res
def parsePackage(filename):
    """Split an RPM filename into a dict of its parts (epoch is discarded)."""
    name, version, release, _epoch, arch = splitFilename(filename)
    return {
        'name': name,
        'version': version,
        'release': release,
        'arch': arch,
    }
def cve_scanimage(cve_data, image):
    """Match CVE feed entries against the packages found in an image analysis.

    :param cve_data: iterable of dicts, each with a 'Vulnerability' entry that
                     may carry 'FixedIn', 'Severity', 'Link', 'Description'
    :param image: analysis object providing get_analysis_report(),
                  get_distro() and get_distro_vers()
    :return: dict keyed by CVE name with package/version/severity details
    """
    if not cve_data:
        return ({})

    # Build a package-name -> version map from both package lists of the
    # analysis report; first occurrence of a name wins.
    all_packages = {}
    analysis_report = image.get_analysis_report().copy()
    idistro = image.get_distro()
    idistrovers = image.get_distro_vers()
    distrodict = get_distro_flavor(idistro, idistrovers)
    flavor = distrodict['flavor']

    thelist = []
    if 'package_list' in analysis_report and 'pkgs.all' in analysis_report['package_list']:
        thelist = analysis_report['package_list']['pkgs.all']
    for l in thelist:
        l = l.strip()
        (p, v) = l.split()
        if p not in all_packages:
            all_packages[p] = v

    thelist = []
    if 'package_list' in analysis_report and 'pkgs_plus_source.all' in analysis_report['package_list']:
        thelist = analysis_report['package_list']['pkgs_plus_source.all']
    for l in thelist:
        l = l.strip()
        (p, v) = l.split()
        if p not in all_packages:
            all_packages[p] = v

    results = {}
    for v in cve_data:
        # NOTE(review): outel is reset once per CVE and overwritten inside the
        # fixes loop, so only the LAST vulnerable package of a CVE survives
        # into results — confirm this is intended.
        outel = {}
        vuln = v['Vulnerability']
        print "cve-scan: CVE: " + vuln['Name']
        if 'FixedIn' in vuln:
            for fixes in vuln['FixedIn']:
                isvuln = False
                vpkg = fixes['Name']
                print "cve-scan: Vulnerable Package: " + vpkg
                if vpkg in all_packages:
                    ivers = all_packages[fixes['Name']]
                    # Strip any leading "epoch:" prefix from the fix version.
                    vvers = re.sub(r'^[0-9]*:', '', fixes['Version'])
                    print "cve-scan: " + vpkg + "\n\tfixed vulnerability package version: " + vvers + "\n\timage package version: " + ivers
                    if flavor == 'RHEL':
                        if vvers != 'None':
                            # Build synthetic filenames so splitFilename can
                            # separate version from release for labelCompare.
                            fixfile = vpkg + "-" + vvers + ".rpm"
                            imagefile = vpkg + "-" + ivers + ".rpm"
                            (n1, v1, r1, e1, a1) = splitFilename(imagefile)
                            (n2, v2, r2, e2, a2) = splitFilename(fixfile)
                            # Epochs are pinned to '1' on both sides, so epoch
                            # differences are deliberately ignored here.
                            if rpm.labelCompare(('1', v1, r1), ('1', v2, r2)) < 0:
                                isvuln = True
                        else:
                            # No fixed version known: package is vulnerable.
                            isvuln = True
                    elif flavor == 'DEB':
                        if vvers != 'None':
                            if ivers != vvers and deb_pkg_tools.version.compare_versions(ivers, '<', vvers):
                                isvuln = True
                        else:
                            isvuln = True

                    if isvuln:
                        print "cve-scan: Found vulnerable package: " + vpkg
                        severity = url = description = 'Not Available'
                        if 'Severity' in vuln:
                            severity = vuln['Severity']
                        if 'Link' in vuln:
                            url = vuln['Link']
                        if 'Description' in vuln:
                            description = vuln['Description']

                        outel = {'pkgName': vpkg, 'imageVers': ivers, 'fixVers': vvers, 'severity': severity, 'url': url, 'description': description}
        if outel:
            results[vuln['Name']] = outel
    return (results)
#!/usr/bin/python
from rpmUtils.miscutils import splitFilename

# print out only the rpm name, ignoring the x.y.z versions etc.
filepath = "rpm-list.txt"
# Use a context manager so the file is always closed, and strip the
# trailing newline — otherwise splitFilename sees "...rpm\n" and cannot
# recognize the ".rpm" suffix, corrupting the parsed fields.
with open(filepath, "r") as f:
    for x in f:
        (n, v, r, e, a) = splitFilename(x.strip())
        print(n)
def run(self):
    """Interactively collect paths, then verify that every package listed in
    ``self.all_pkg_file`` exists in ``self.rpm_build_dir``; on mismatch,
    optionally regenerate the package list from the build directory.
    """
    #for key, value in self.actions.iteritems():
    # Each action is (confirm-prompt, input-prompt, target-attribute-name);
    # re-prompt until the user supplies an existing path.
    for action in self.actions:
        answer = self.ui.promptConfirm(action[0])
        if answer:
            while True:
                res = self.ui.prompt(action[1])
                if os.path.exists(res):
                    if action[2] == "self.rpm_build_dir":
                        self.rpm_build_dir = res
                    elif action[2] == "self.all_pkg_file":
                        self.all_pkg_file = res
                    break
                else:
                    print "!! %s does not exist, please input again !!" % (
                        res)
    os.system("clear")
    if os.path.exists(self.all_pkg_file) and \
            os.path.exists(self.rpm_build_dir):
        print "### Starting Verifying ###"
        allPkgInISOOLD = list()
        not_match_list = list()
        f = open(self.all_pkg_file)
        allPkgInISOOLD = [i.strip('\n') for i in f.readlines()]
        f.close()
        # Collect listed packages that are missing from the build dir.
        for line in allPkgInISOOLD:
            if not os.path.exists(
                    os.path.join(self.rpm_build_dir, line.strip())):
                not_match_list.append(line.strip())
        if len(not_match_list) > 0:
            print "### FAIL: Following RPMs Do Not Exist In %s: ###" % (
                self.rpm_build_dir)
            for i in not_match_list:
                print "### %s ###" % (i)
            answer = self.ui.promptConfirm(">>> Would you like to re-generate %s ?"\
                %(self.all_pkg_file))
            if answer:
                allPkgInISONew = list()
                # For each RPM in the build dir: keep it if it is a newer
                # name/arch match for a missing entry, or (for/else) if it
                # was already in the old list.
                for j in os.listdir(self.rpm_build_dir):
                    (n, v, r, e, a) = splitFilename(j)
                    for k in not_match_list:
                        (name, version, release, e, arch) = splitFilename(k)
                        # NOTE(review): v/version and r/release are compared as
                        # strings, so ordering is lexicographic, not true RPM
                        # version ordering — confirm acceptable for this data.
                        if (a == arch and n == name and
                                ((v > version) or
                                 (v == version and r > release))):
                            allPkgInISONew.append(j)
                            break
                    else:
                        if j in allPkgInISOOLD:
                            allPkgInISONew.append(j)
                f = open(
                    os.path.join(os.path.dirname(self.all_pkg_file),
                                 "all.pkg.ppc64le.new"), "w+")
                for line in allPkgInISONew:
                    f.write(line + '\n')
                f.close()
                print "### New all.pkg.ppc64le Has Been Generated (%s) ###" \
                    %(os.path.join(os.path.dirname(self.all_pkg_file),
                                   "all.pkg.ppc64le.new"))
        else:
            print "### PASS: Each Pkg in %s Do Exist In %s ###" % (
                self.all_pkg_file, self.rpm_build_dir)
    else:
        print "!! Error: file does not exist !!"
import sys from rpmUtils.miscutils import splitFilename if len(sys.argv) < 2: print "No argument provided. Exiting." exit() filename = sys.argv[1] with open(filename, 'r') as handle: for line in handle: (n, v, r, e, a) = splitFilename(line) print n
def srpmname(pkg):
    """Return the name component of the source RPM that built *pkg*."""
    fields = splitFilename(pkg.returnSimple('sourcerpm'))
    return fields[0]
def srpmname(pkg):
    """Name of the source RPM *pkg* was built from."""
    name, _ver, _rel, _epoch, _arch = splitFilename(
        pkg.returnSimple('sourcerpm'))
    return name
return True return False else: return True rpmPkgs = GetRpmListFromRPM() # here we go. sorry y'all reportArray = {} for node in root: if node.tag == 'meta': continue if not IsAppliable( node, 'os_release', os_release ): continue if not IsAppliable( node, 'os_arch', os_arch ): continue for pkgLine in node.iter( 'packages' ): pkg = splitFilename( pkgLine.text ) if pkg[0] in rpmPkgs: insPkg = rpmPkgs[ pkg[0] ] insPkg = [ insPkg['epoch'], insPkg['version'], insPkg['release'] ] errPkg = [ pkg[3], pkg[1], pkg[2] ] if compareEVR( errPkg, insPkg ) > 0: if not pkg[0] in reportArray: reportArray[ pkg[0] ] = [] if ValueInDictList( reportArray[ pkg[0] ], 'err', node.tag ): continue reportArray[ pkg[0] ].append({ 'err': node.tag, 'severity': node.get('severity'), 'type': node.get('type'), 'description': node.get('description'), 'version': errPkg[1], 'release': errPkg[2]