def _cmp(self, other): """Compares against another apt.Version object or a version string. This method behaves like Python 2's cmp builtin and returns an integer according to the outcome. The return value is negative in case of self < other, zero if self == other and positive if self > other. The comparison includes the package name and architecture if other is an apt.Version object. If other isn't an apt.Version object it'll be assumed that other is a version string (without package name/arch). .. versionchanged:: 1.0.0 """ # Assume that other is an apt.Version object. try: self_name = self.package.fullname other_name = other.package.fullname if self_name < other_name: return -1 elif self_name > other_name: return 1 return apt_pkg.version_compare(self._cand.ver_str, other.version) except AttributeError: # Assume that other is a string that only contains the version. try: return apt_pkg.version_compare(self._cand.ver_str, other) except TypeError: return NotImplemented
def openstack_upgrade_available(package):
    """Determine whether an OpenStack upgrade is available.

    Compares the installed version of *package* against the version
    offered by the configured installation source ('openstack-origin').

    :param package: str: Name of installed package.
    :returns: bool: Returns True if configured installation source offers
              a newer version of package.
    """
    import apt_pkg as apt
    src = config('openstack-origin')
    cur_vers = get_os_version_package(package)
    if "swift" in package:
        # Swift versions its releases independently of OpenStack.
        codename = get_os_codename_install_source(src)
        avail_vers = get_os_version_codename_swift(codename)
    else:
        avail_vers = get_os_version_install_source(src)
    apt.init()
    if "swift" in package:
        major_cur_vers = cur_vers.split('.', 1)[0]
        major_avail_vers = avail_vers.split('.', 1)[0]
        major_diff = apt.version_compare(major_avail_vers, major_cur_vers)
        # apt_pkg.version_compare() only guarantees the *sign* of the
        # result, not a magnitude of exactly 1, so test the sign rather
        # than "== 1 or == 0" which can miss valid upgrades.
        return avail_vers > cur_vers and major_diff >= 0
    # Same here: any positive value means "newer", not just exactly 1.
    return apt.version_compare(avail_vers, cur_vers) > 0
def check_and_install_jinja2():
    """ We need Jinja 2.6+ for some `sort*()` functions and other enhancements.

        Version 2.6 is available as a package only on Ubuntu Precise and
        Debian Wheezy as of 20120227. Older DEBIAN distros will get it
        installed via PIP.

        Returns True when Jinja2 had to be installed via PIP.
    """
    installed_via_pip = False
    # Recent enough distros ship Jinja >= 2.6 as a system package.
    if (LMC.configuration.distro == distros.UBUNTU
            and version_compare(LMC.configuration.distro_version, '12.04') >= 0
            ) or (LMC.configuration.distro == distros.DEBIAN
                  and version_compare(LMC.configuration.distro_version, '7.0') >= 0):
        j2 = glob.glob('/usr/share/pyshared/jinja2')
        if j2 == []:
            packaging.install_packages(jinja2_packages)
    elif LMC.configuration.distro in (distros.UBUNTU, distros.DEBIAN):
        # Check the Jinja2 version installed to eventually trigger an
        # upgrade via PIP if an older version is installed. Glob Python
        # too, to be gentle with 2.5/2.6/2.7 distros.
        j2 = glob.glob('/usr/local/lib/python*/dist-packages/Jinja2-2.6*')
        if j2 == []:
            packaging.pip_install_packages([jinja2_packages[distros.UNKNOWN]])
            installed_via_pip = True
    else:
        # Unsupported distro: no known way to get a recent enough Jinja2.
        packaging.raise_not_installable(jinja2_packages[distros.UNKNOWN])
    return installed_via_pip
def check_and_install_django():
    """ We need Django 1.3 for shortcuts and some other features.

        Version 1.3 is packaged on Ubuntu Oneiric and Debian Wheezy. It's
        also available in ``squeeze-backports``, but testing if this
        source is active is a bit of overkill for the current `upgrades`
        mechanism. Perhaps in a future version if `foundations.apt` or
        `foundations.packaging` gets some dedicated function.

        Returns True when Django had to be installed via PIP.
    """
    installed_via_pip = False
    # Recent enough distros ship Django >= 1.3 as a system package.
    if (LMC.configuration.distro == distros.UBUNTU
            and version_compare(LMC.configuration.distro_version, '11.10') >= 0
            ) or (LMC.configuration.distro == distros.DEBIAN
                  and version_compare(LMC.configuration.distro_version, '7.0') >= 0):
        dj = glob.glob('/usr/share/pyshared/django*')
        if dj == []:
            packaging.install_packages(django_packages)
    elif LMC.configuration.distro in (distros.UBUNTU, distros.DEBIAN):
        # We need at least Django 1.3
        dj = glob.glob('/usr/local/lib/python*/dist-packages/Django-1.[3456]*')
        if dj == []:
            packaging.pip_install_packages([django_packages[distros.UNKNOWN]])
            installed_via_pip = True
    else:
        # Unsupported distro: no known way to get Django 1.3.
        packaging.raise_not_installable(django_packages[distros.UNKNOWN])
    return installed_via_pip
def check_and_install_pyudev():
    """ We need Pyudev to talk to udev and handle volumes.

        Returns True when pyudev had to be installed via PIP.
    """
    installed_via_pip = False
    # Recent enough distros ship pyudev as a system package.
    if (LMC.configuration.distro == distros.UBUNTU
            and version_compare(LMC.configuration.distro_version, '10.04') >= 0
            ) or (LMC.configuration.distro == distros.DEBIAN
                  and version_compare(LMC.configuration.distro_version, '7.0') >= 0):
        udev = glob.glob('/usr/share/pyshared/pyudev*')
        if udev == []:
            packaging.install_packages(pyudev_packages)
    elif (LMC.configuration.distro == distros.DEBIAN
            and version_compare(LMC.configuration.distro_version, '6.0') <= 0):
        # Older Debian (squeeze and earlier): fall back to PIP.
        udev = glob.glob('/usr/local/lib/python*/dist-packages/pyudev*')
        if udev == []:
            packaging.pip_install_packages([pyudev_packages[distros.UNKNOWN]])
            installed_via_pip = True
    else:
        # Anything else: no known install path for pyudev.
        packaging.raise_not_installable(pyudev_packages[distros.UNKNOWN])
    return installed_via_pip
def _version_checks(self, upload, suite, other_suite, op, op_name):
    """Enforce a version relation between *upload* and *other_suite*.

    ``op`` is a predicate applied to the result of ``version_compare``
    between the uploaded version and the highest existing version in
    ``other_suite``; ``op_name`` names that relation (used in the
    rejection message).  Raises ``Reject`` on the first source or
    binary package that violates the relation.
    """
    session = upload.session
    # Check the source package first, if the upload includes one.
    if upload.changes.source is not None:
        source_name = upload.changes.source.dsc['Source']
        source_version = upload.changes.source.dsc['Version']
        v = self._highest_source_version(session, source_name, other_suite)
        if v is not None and not op(version_compare(source_version, v)):
            raise Reject("Version check failed:\n"
                         "Your upload included the source package {0}, version {1},\n"
                         "however {3} already has version {2}.\n"
                         "Uploads to {5} must have a {4} version than present in {3}."
                         .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
    # Then every binary package, per architecture.
    for binary in upload.changes.binaries:
        binary_name = binary.control['Package']
        binary_version = binary.control['Version']
        architecture = binary.control['Architecture']
        v = self._highest_binary_version(session, binary_name, other_suite, architecture)
        if v is not None and not op(version_compare(binary_version, v)):
            raise Reject("Version check failed:\n"
                         "Your upload included the binary package {0}, version {1}, for {2},\n"
                         "however {4} already has version {3}.\n"
                         "Uploads to {6} must have a {5} version than present in {4}."
                         .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
def mark_logs_with_reported_bugs():
    """Move failed/untestable logs into bugged/affected state when a
    matching bug report exists in the BTS for the logged version.
    """
    for failed_log in find_logs("fail") + find_logs("untestable"):
        try:
            pname = package_name(failed_log)
            pversion = package_source_version(failed_log)
            try:
                failed_errors = extract_errors(failed_log)
            except IOError:
                print('IOError while processing %s' % failed_log)
                continue
            moved = False
            abugs = piupartsbts.bugs_affecting(pname)
            bugs = piupartsbts.bugs_in(pname)
            for bug in abugs + bugs:
                if moved:
                    break
                # "affected" bugs are filed against another package but
                # marked as affecting this one.
                if bug in abugs:
                    bugged = "affected"
                else:
                    bugged = "bugged"
                found_versions = piupartsbts.bug_versions(bug)
                if pversion in found_versions:
                    # Exact version match: move the log right away.
                    move_to_bugged(failed_log, bugged, bug)
                    moved = True
                    break
                for bug_version in found_versions:
                    # print('DEBUG: %s/%s #%d %s' % (pname, pversion, bug, bug_version))
                    if apt_pkg.version_compare(pversion, bug_version) > 0:  # pversion > bug_version
                        bugged_logs = find_bugged_logs(failed_log)
                        if not bugged_logs and not moved:
                            print('%s/%s: Maybe the bug was filed earlier: https://bugs.debian.org/%d against %s/%s'
                                  % (pname, pversion, bug, pname, bug_version))
                            break
                        for bugged_log in bugged_logs:
                            old_pversion = package_source_version(bugged_log)
                            bugged_errors = extract_errors(bugged_log)
                            if (apt_pkg.version_compare(old_pversion, bug_version) == 0  # old_pversion == bug_version
                                    and failed_errors == bugged_errors):
                                # a bug was filed for an old version of the package,
                                # and the errors were the same back then - assume it is the same bug.
                                if not moved:
                                    mark_bugged_version(failed_log, bugged_log)
                                    moved = True
                                    bts_update_found(bug, pversion)
                                    break
            if not moved:
                write_bug_file(failed_log, abugs + bugs)
        except KeyboardInterrupt:
            raise
        # NOTE(review): bare except silently swallows everything but
        # KeyboardInterrupt (including SystemExit) - consider narrowing
        # to "except Exception".
        except:
            print('ERROR processing %s' % failed_log)
            print(sys.exc_info()[0])
        # presumably cancels a watchdog alarm set elsewhere per log -
        # TODO confirm intended placement (loop body vs. after loop)
        alarm(0)
def check_pkg_status(package):
    """Classify *package* as "current", "upgradable" or "uninstalled".

    The newest entry of ``package.version_list`` is compared against the
    currently installed version (if any).
    """
    # Find the highest available version among all entries.
    best = package.version_list[0]
    for candidate in package.version_list[1:]:
        if apt_pkg.version_compare(best.ver_str, candidate.ver_str) < 0:
            best = candidate
    installed = package.current_ver
    if not installed:
        return "uninstalled"
    if apt_pkg.version_compare(installed.ver_str, best.ver_str) < 0:
        return "upgradable"
    return "current"
def add_trigger_to_results(self, trigger, src, ver, arch, run_id, seen, status):
    """Record one autopkgtest result for (trigger, src, arch).

    Invalid triggers, runs against older versions than the trigger
    requested, and (in reference-baseline mode) versions unknown to any
    suite are ignored.  Existing PASS results are not clobbered by
    later non-passing re-runs, except for reference runs.
    """
    # Ensure that we got a new enough version
    try:
        (trigsrc, trigver) = trigger.split('/', 1)
    except ValueError:
        self.logger.error('Ignoring invalid test trigger %s', trigger)
        return
    if trigsrc == src and apt_pkg.version_compare(ver, trigver) < 0:
        self.logger.error('test trigger %s, but run for older version %s, ignoring', trigger, ver)
        return
    if self.options.adt_baseline == 'reference' and \
            not self.test_version_in_any_suite(src, ver):
        self.logger.error(
            "Ignoring result for source %s and trigger %s as the tested version %s isn't found in any suite",
            src, trigger, ver)
        return
    # result is [status, version, run_id, seen]; default is a failure.
    result = self.test_results.setdefault(trigger, {}).setdefault(
        src, {}).setdefault(arch, [Result.FAIL, None, '', 0])
    # don't clobber existing passed results with non-passing ones from
    # re-runs, except for reference updates
    if status == Result.PASS or result[0] != Result.PASS or \
            (self.options.adt_baseline == 'reference' and trigger == REF_TRIG):
        result[0] = status
        result[1] = ver
        result[2] = run_id
        result[3] = seen
def _checkVersionedDependency(self, depname, depver, deptype):
    """Can this versioned dependency be satisfied with the current set
    of packages?"""
    if depname not in self.packages:
        return False
    # An empty relation means "any version".
    if deptype == "":
        return True
    rel = apt_pkg.version_compare(self.packages[depname]["Version"], depver)
    # Map each comparator onto the outcome of the comparison.
    outcomes = {
        "<=": rel <= 0,
        ">=": rel >= 0,
        "<": rel < 0,
        ">": rel > 0,
        "=": rel == 0,
        "!=": rel != 0,
    }
    if deptype in outcomes:
        return outcomes[deptype]
    logging.error("Unknown dependency comparator: %s" % deptype)
    return False
def compare_to_version_in_cache(self, use_installed=True):
    """Compare the package to the version available in the cache.

    Looks the package up in the apt cache and compares the .deb's
    version against the installed (or candidate) version, returning one
    of (VERSION_NONE, VERSION_OUTDATED, VERSION_SAME, VERSION_NEWER).
    """
    self._dbg(3, "compare_to_version_in_cache")
    pkgname = self._sections["Package"]
    debver = self._sections["Version"]
    self._dbg(1, "debver: %s" % debver)
    if pkgname not in self._cache:
        return self.VERSION_NONE
    cached = self._cache[pkgname]
    if use_installed and cached.installed:
        cachever = cached.installed.version
    elif not use_installed and cached.candidate:
        cachever = cached.candidate.version
    else:
        return self.VERSION_NONE
    if cachever is None:
        return self.VERSION_NONE
    rel = apt_pkg.version_compare(cachever, debver)
    self._dbg(1, "CompareVersion(debver,instver): %s" % rel)
    if rel == 0:
        return self.VERSION_SAME
    if rel < 0:
        return self.VERSION_NEWER
    return self.VERSION_OUTDATED
def _get_debcruft(self):
    """Collect packages removed from Debian that are cruft here.

    A source removed from Debian counts as cruft when our copy carries
    no distro-specific changes (distro name not in the version) and is
    not newer than the removed Debian version.

    :returns: list of PackageRemovalItem for the current suite.
    """
    debrm = DebianRemovals()
    debrm_list = debrm.get_removed_sources()
    cruftList = list()
    for rmitem in debrm_list:
        # we don't care about experimental
        if rmitem.suite == "experimental":
            continue
        if rmitem.pkgname in self._source_pkgs_full:
            pkg_item = self._source_pkgs_full[rmitem.pkgname]
            # the package is in Tanglu, check if it contains Tanglu changes.
            # if it does, we skip it here, else it apparently is cruft
            if self._distro_name not in pkg_item.version:
                # check for Tanglu autorebuilds (and don't count them
                # when comparing versions)
                p = re.compile(r"(.*)b\d$")
                m = p.match(pkg_item.version)
                if m is None:  # identity check; "== None" is unidiomatic
                    version_norebuild = pkg_item.version
                else:
                    version_norebuild = m.group(1)
                if apt_pkg.version_compare(version_norebuild, rmitem.version) > 0:
                    # the version in Tanglu is newer, we don't want to
                    # delete the package
                    continue
                tglpkgrm = PackageRemovalItem(self._current_suite, pkg_item.pkgname, pkg_item.version, rmitem.reason)
                cruftList.append(tglpkgrm)
    return cruftList
def _refresh_parents(self):
    """Rebuild reverse-dependency links: for every dependency edge,
    append (dependant name, version) to the matching candidate's
    ``parents`` list."""
    # Outcome test per supported version relation.
    relation_ok = {
        '=': lambda c: c == 0,
        '>=': lambda c: c >= 0,
        '<=': lambda c: c <= 0,
    }
    for pkg in self.packages:
        for dependency in pkg.dependencies:
            dep_name = dependency[0][0]
            dep_version = dependency[0][1]
            dep_relation = dependency[0][2]
            for candidate in self.packages:
                if candidate.name != dep_name:
                    continue
                if dep_relation == '':
                    # Unversioned dependency: any candidate matches.
                    candidate.parents.append((pkg.name, pkg.version))
                elif dep_relation in relation_ok:
                    rel = apt_pkg.version_compare(candidate.version, dep_version)
                    if relation_ok[dep_relation](rel):
                        candidate.parents.append((pkg.name, pkg.version))
def compare_to_version_in_cache(self, use_installed=True):
    # type: (bool) -> int
    """Compare the package to the version available in the cache.

    Looks the arch-qualified package up in the apt cache and compares
    the .deb's version against the installed (or candidate) version,
    returning one of (VERSION_NONE, VERSION_OUTDATED, VERSION_SAME,
    VERSION_NEWER).
    """
    self._dbg(3, "compare_to_version_in_cache")
    # Arch qualify the package name
    pkgname = ":".join([self._sections["Package"],
                        self._sections["Architecture"]])
    debver = self._sections["Version"]
    self._dbg(1, "debver: %s" % debver)
    if pkgname not in self._cache:
        return self.VERSION_NONE
    entry = self._cache[pkgname]
    if use_installed and entry.installed is not None:
        cachever = entry.installed.version
    elif not use_installed and entry.candidate is not None:
        cachever = entry.candidate.version
    else:
        return self.VERSION_NONE
    if cachever is None:
        return self.VERSION_NONE
    rel = apt_pkg.version_compare(cachever, debver)
    self._dbg(1, "CompareVersion(debver,instver): %s" % rel)
    if rel == 0:
        return self.VERSION_SAME
    if rel < 0:
        return self.VERSION_NEWER
    return self.VERSION_OUTDATED
def _checkDependencyVersion(self, available, required, relation): """Return True if the available version satisfies the context.""" # This dict maps the package version relationship syntax in lambda # functions which returns boolean according to the results of # apt_pkg.version_compare function (see the order above). # For further information about pkg relationship syntax see: # # http://www.debian.org/doc/debian-policy/ch-relationships.html # version_relation_map = { # any version is acceptable if no relationship is given '': lambda x: True, # strictly later '>>': lambda x: x == 1, # later or equal '>=': lambda x: x >= 0, # strictly equal '=': lambda x: x == 0, # earlier or equal '<=': lambda x: x <= 0, # strictly earlier '<<': lambda x: x == -1, } # Use apt_pkg function to compare versions # it behaves similar to cmp, i.e. returns negative # if first < second, zero if first == second and # positive if first > second. dep_result = apt_pkg.version_compare(available, required) return version_relation_map[relation](dep_result)
def check_version(self, pkg):
    """Compare a package file's version to the cache candidate version.

    Arguments:
        pkg - A string, filename of a package ("name_version...").

    Returns:
        A dict mapping the package's base filename to '>' if the file's
        version is newer than the cache candidate, '<' if it is older,
        and '=' if both versions are the same.  (The old docstring
        claimed 'Newer'/'Older'/'Same', which the code never returned.)
    """
    # 'base' used to be called 'dir', shadowing the builtin.
    base = os.path.splitext(pkg)[0]
    split = base.split("_")
    name = split[0]
    version = split[1]
    main_pkg = self.cache[name]
    sysversion = main_pkg.candidate.version
    version_compare = apt_pkg.version_compare(sysversion, version)
    check_version = {}
    if version_compare < 0:
        # cache candidate is older than the file's version
        check_version[base] = ">"
    elif version_compare > 0:
        check_version[base] = "<"
    else:
        check_version[base] = "="
    return check_version
def main(args):
    """Print the highest available repo version (stripped of epoch and
    revision) for each python module listed in args.python_modules_file.

    :param args: parsed CLI arguments with fuel_version,
                 sources_list_file and python_modules_file attributes.
    """
    packages = TrustyPackages()
    # Defined up-front so the final cleanup cannot raise NameError when
    # -f was not given (previously repo_cache was only bound inside the
    # "if args.fuel_version" branch).
    repo_cache = None
    if args.fuel_version:
        repo_cache = packages.prepare_apt(args.fuel_version, args.sources_list_file)
    if not args.fuel_version:
        print('Please specify Fuel version with -f option.')
        # Without a cache nothing below can work; bail out instead of
        # crashing on the unbound cache.
        return
    if args.python_modules_file and os.path.exists(args.python_modules_file):
        with open(args.python_modules_file, 'r') as python_modules_lines:
            for line in python_modules_lines:
                # raw string: '\s' in a plain literal is a deprecated escape
                package_from_file = re.sub(r'\s+', '', line)
                for package in repo_cache.keys():
                    if package == package_from_file:
                        cur_pkg = repo_cache[package]
                        highest_version = '0'
                        for pkg_version in cur_pkg.versions:
                            comparison_result = apt_pkg.version_compare(pkg_version.version, highest_version)
                            if comparison_result >= 0:
                                highest_version = pkg_version.version
                        # strip ~/-/+ suffixes and a leading epoch
                        v = highest_version.split('~')[0].split('-')[0].split('+')[0].split(':')[-1]
                        print("{package} {version}".format(package=package, version=v))
                        break
    else:
        print('Please specify path to python modules list.')
    if repo_cache:
        repo_cache.close()
def main(args):
    """Print "<package> ==> <highest version>" for each python module
    listed in args.python_modules_file, using the prepared apt cache.

    :param args: parsed CLI arguments with fuel_version,
                 sources_list_file and python_modules_file attributes.
    """
    packages = TrustyPackages()
    # Defined up-front so the final cleanup cannot raise NameError when
    # -f was not given (previously repo_cache was only bound inside the
    # "if args.fuel_version" branch).
    repo_cache = None
    if args.fuel_version:
        repo_cache = packages.prepare_apt(args.fuel_version, args.sources_list_file)
    if not args.fuel_version:
        print("Please specify Fuel version with -f option.")
        # Without a cache nothing below can work; bail out instead of
        # crashing on the unbound cache.
        return
    if args.python_modules_file and os.path.exists(args.python_modules_file):
        with open(args.python_modules_file, "r") as python_modules_lines:
            for line in python_modules_lines:
                # raw string: "\s" in a plain literal is a deprecated escape
                package_from_file = re.sub(r"\s+", "", line)
                for package in repo_cache.keys():
                    if package == package_from_file:
                        cur_pkg = repo_cache[package]
                        highest_version = "0"
                        for pkg_version in cur_pkg.versions:
                            comparison_result = apt_pkg.version_compare(pkg_version.version, highest_version)
                            if comparison_result >= 0:
                                highest_version = pkg_version.version
                        print("{package} ==> {version}".format(package=package, version=highest_version))
                        break
    else:
        print("Please specify path to python modules list.")
    if repo_cache:
        repo_cache.close()
def version_checks(package, architecture, target_suite, new_version, session, force=False):
    """Enforce MustBeNewerThan / MustBeOlderThan suite version checks.

    Warns about each violation; aborts (utils.fubar) when any violation
    occurred and *force* is False.

    :param package: source or binary package name
    :param architecture: "source" or a binary architecture
    :param target_suite: suite the upload targets
    :param new_version: version being uploaded
    :param session: database session
    :param force: continue with a warning instead of aborting
    """
    if architecture == "source":
        suite_version_list = get_suite_version_by_source(package, session)
    else:
        suite_version_list = get_suite_version_by_package(package, architecture, session)
    must_be_newer_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan")]
    must_be_older_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan")]
    # Must be newer than an existing version in target_suite
    if target_suite not in must_be_newer_than:
        must_be_newer_than.append(target_suite)
    violations = False
    for suite, version in suite_version_list:
        # renamed from "cmp" (shadowed the builtin)
        rel = apt_pkg.version_compare(new_version, version)
        # rel < 1 means rel <= 0: new_version is not strictly newer.
        if suite in must_be_newer_than and rel < 1:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* newer than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True
        # rel > -1 means rel >= 0: new_version is not strictly older.
        # The previous test "rel > 1" missed equal versions (rel == 0)
        # and any positive result of exactly 1.
        if suite in must_be_older_than and rel > -1:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* older than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True
    if violations:
        if force:
            utils.warn("Continuing anyway (forced)...")
        else:
            utils.fubar("Aborting. Version checks violated and not forced.")
def _lookup_package_state(self, package, use_cached_success, check_outdated):
    """Derive the test state of *package* from the log database.

    Returns one of "outdated", "waiting-to-be-tested", "unknown",
    "successfully-tested", "failed-testing" or "cannot-be-tested".
    """
    if check_outdated:
        # Check if dependency databases have a newer version of this package.
        # Use the actual package versions, not the target versions.
        curr_ver = package.version()
        for db in self._dependency_databases:
            dep_ver = db.get_version(package.name())
            if dep_ver is not None and apt_pkg.version_compare(curr_ver, dep_ver) < 0:
                #logging.info("[%s] outdated: %s %s < %s @[%s]" % (self.prefix, package.name(), curr_ver, dep_ver, db.prefix))
                return "outdated";
    if self._recycle_mode:
        # A reserved log wins over a recycle entry.
        if self._logdb.log_exists(package, [self._reserved]):
            return "waiting-to-be-tested"
        if self._logdb.log_exists(package, [self._recycle]):
            return "unknown"
    if self._logdb.log_exists(package, [self._ok]):
        success = True
        if not use_cached_success:
            # if a pass/ log exists but any dependency may be not
            # trivially satisfiable do not skip dependency resolution
            for dep in package.dependencies():
                if not self.get_package(dep, resolve_virtual=True):
                    success = False
                    break
        if success:
            return "successfully-tested"
    if self._logdb.log_exists(package, [self._fail] + self._morefail):
        return "failed-testing"
    if self._logdb.log_exists(package, [self._evil]):
        return "cannot-be-tested"
    if self._logdb.log_exists(package, [self._reserved]):
        return "waiting-to-be-tested"
    return "unknown"
def test_remove_package(self):
    """ Removes a package that has been uploaded to Debian.

    Versions strictly newer than the one in self.changes are kept; all
    older-or-equal versions (or the whole package, if nothing is newer)
    are deleted from the database.
    """
    source = self.changes['Source']
    log.debug('Checking whether package %s is in the repository' % source)
    package = meta.session.query(Package).filter_by(name=source).first()
    if package is None:
        # The package is not in the repository. This will happen the most often.
        log.debug('Package is not in the repository')
        return
    # Initialise apt_pkg
    apt_pkg.init()
    keep_package_versions = []
    for package_version in package.package_versions:
        # keep versions newer than the one being removed
        if apt_pkg.version_compare(self.changes['Version'], package_version.version) < 0:
            keep_package_versions.append(package_version.version)
    if len(keep_package_versions) == 0:
        # Get rid of the whole package.
        self._remove_package_versions(package.package_versions)
        log.debug('Deleting package database entry: %s' % package.name)
        meta.session.delete(package)
    else:
        # Only remove certain package versions.
        for package_version in package.package_versions:
            # Bug fix: keep_package_versions holds version *strings*, so
            # the old test "package_version not in ..." compared the ORM
            # object against strings and was always True, deleting every
            # version including the ones meant to be kept.
            if package_version.version not in keep_package_versions:
                meta.session.delete(package_version)
    meta.session.commit()
def build_sources_from_universe_and_inst_tester(policy, pkg_universe, inst_tester, suite='unstable'):
    """Populate policy.suite_info's target and source suites from a
    package universe (test helper).

    Every pkg_id in the universe becomes a synthetic source+binary
    package; packages present in the target suite (per inst_tester) go
    into the target suite, and the highest version of each package name
    goes into the given source *suite*.
    """
    suite_info = policy.suite_info
    src_universe = {}
    bin_universe = {}
    src_source = {}
    binaries_t = {}
    binaries_s = {}
    for pkg_id in pkg_universe:
        pkg_name = pkg_id.package_name
        src_universe[pkg_id] = create_source_package(pkg_id.version, binaries={pkg_id})
        bin_universe[pkg_id] = create_bin_package(pkg_id)
        if inst_tester.is_pkg_in_the_suite(pkg_id):
            if pkg_name in suite_info.target_suite.sources:
                # sanity check, this shouldn't happen
                raise(KeyError)
            suite_info.target_suite.sources[pkg_name] = src_universe[pkg_id]
            binaries_t.setdefault(ARCH, {}).setdefault(pkg_name, bin_universe[pkg_id])
        # We need to find the highest version of a package to add it to the
        # sources of the source suite
        if pkg_name not in src_source or \
                apt_pkg.version_compare(src_source[pkg_name].version, pkg_id.version) < 0:
            src_source[pkg_name] = pkg_id
    suite_info.target_suite.binaries = binaries_t
    # Register the winning (highest) version of each package name in the
    # source suite.
    for pkg_id in src_source.values():
        pkg_name = pkg_id.package_name
        suite_info[suite].sources[pkg_name] = src_universe[pkg_id]
        binaries_s.setdefault(ARCH, {}).setdefault(pkg_name, bin_universe[pkg_id])
    suite_info[suite].binaries = binaries_s
def install_puppet_modules():
    """Install or upgrade the puppet modules listed in MODULES_FILE_PATH.

    Modules already installed at the required version are skipped;
    otherwise they are upgraded, and unknown modules are installed.
    """
    modules_installed = get_modules_installed()
    with open(MODULES_FILE_PATH) as modules_file:
        modules_requirements = modules_file.read().replace("/", "-")
    for module in parse_requirements(modules_requirements):
        current_cmd, compare, version, version_comparison = "", "", "", None
        if module.project_name in modules_installed:
            if module.specs:
                compare, version = module.specs[0]
                version_comparison = apt_pkg.version_compare(modules_installed[module.project_name], version)
            else:
                continue
            # Bug fix: "compare is not '>'" tested object identity on a
            # string literal, which is undefined behaviour; use "!=".
            if version_comparison == 0 and compare != ">":
                # module version installed is equal version
                continue
            else:
                # module version installed is smaller or bigger than version
                current_cmd = "upgrade"
        else:
            current_cmd = "install"
        if version and compare and ">" not in compare:
            run(current_cmd, module.project_name, version)
        else:
            if not version_comparison or version_comparison < 0:
                run(current_cmd, module.project_name)
def __cmp__(self, other):
    """compare two changes files

    We sort by source name and version first.  If these are identical,
    we sort changes that include source before those without source (so
    that sourceful uploads get processed first), and finally fall back
    to the filename (this should really never happen).

    @rtype: number
    @return: n where n < 0 if self < other, n = 0 if self == other, n > 0 if self > other
    """
    # NOTE: relies on the Python 2 cmp() builtin; __cmp__ is not used
    # by Python 3 at all.
    ret = cmp(self.changes.get('Source'), other.changes.get('Source'))
    if ret == 0:
        # compare version
        ret = apt_pkg.version_compare(self.changes.get('Version', ''), other.changes.get('Version', ''))
    if ret == 0:
        # sort changes with source before changes without source
        if 'source' in self.architectures and 'source' not in other.architectures:
            ret = -1
        elif 'source' not in self.architectures and 'source' in other.architectures:
            ret = 1
        else:
            ret = 0
    if ret == 0:
        # fall back to filename
        ret = cmp(self.filename, other.filename)
    return ret
def run(options=None):
    """Count pending upgrades and security updates via the apt depcache.

    Returns (upgrades, security_updates), or (cache, depcache) when
    options.show_package_names is set, or the unattended-upgrade config
    value when options.security_updates_unattended is set.
    """
    # we are run in "are security updates installed automatically?"
    # question mode
    if options.security_updates_unattended:
        return apt_pkg.config.find_i("APT::Periodic::Unattended-Upgrade", 0)
    # get caches
    cache = apt_pkg.Cache(OpNullProgress())
    depcache = apt_pkg.DepCache(cache)
    # read the pin files
    depcache.read_pinfile()
    # read the synaptic pins too
    if os.path.exists(SYNAPTIC_PINFILE):
        depcache.read_pinfile(SYNAPTIC_PINFILE)
    # init the depcache
    depcache.init()
    if depcache.broken_count > 0:
        raise RuntimeError(_("Error: BrokenCount > 0"))
    saveDistUpgrade(cache,depcache)
    # analyze the upgrade
    upgrades = 0
    security_updates = 0
    for pkg in cache.packages:
        # skip packages that are not marked upgraded/installed
        if not (depcache.marked_install(pkg) or depcache.marked_upgrade(pkg)):
            continue
        # check if this is really an upgrade or a false positive
        # (workaround for ubuntu #7907)
        inst_ver = pkg.current_ver
        cand_ver = depcache.get_candidate_ver(pkg)
        if cand_ver == inst_ver:
            continue
        # check for security upgrades
        upgrades += 1
        if isSecurityUpgrade(cand_ver):
            security_updates += 1
            continue
        # now check for security updates that are masked by a
        # candidate version from another repo (-proposed or -updates)
        for ver in pkg.version_list:
            # only consider versions newer than the installed one
            if (inst_ver and apt_pkg.version_compare(ver.ver_str, inst_ver.ver_str) <= 0):
                #print "skipping '%s' " % ver.VerStr
                continue
            if isSecurityUpgrade(ver):
                security_updates += 1
                break
    if options.show_package_names:
        return (cache, depcache)
    return(upgrades, security_updates)
def read_packages_dict_from_file(archive_root, suite, component, arch):
    """Parse a Packages.gz index into a dict of package info dicts.

    :param archive_root: base path of the archive
    :param suite: distribution suite (e.g. "unstable")
    :param component: archive component (e.g. "main")
    :param arch: binary architecture
    :returns: {package_name: {'arch', 'version', 'name', 'filename',
               'maintainer'}} keeping only the highest version of each
               package; entries without a Filename field are skipped.
    """
    source_path = archive_root + "/dists/%s/%s/binary-%s/Packages.gz" % (suite, component, arch)
    package_dict = dict()
    # Context manager ensures the gzip handle is closed (the original
    # leaked the open file).
    with gzip.open(source_path, 'rb') as f:
        tagf = TagFile(f)
        for section in tagf:
            pkg = dict()
            pkg['arch'] = section['Architecture']
            pkg['version'] = section['Version']
            pkg['name'] = section['Package']
            if not section.get('Filename'):
                print("Package %s-%s has no filename specified." % (pkg['name'], pkg['version']))
                continue
            pkg['filename'] = section['Filename']
            pkg['maintainer'] = section['Maintainer']
            pkg2 = package_dict.get(pkg['name'])
            if pkg2:
                # keep only the highest version of each package
                compare = version_compare(pkg2['version'], pkg['version'])
                if compare >= 0:
                    continue
            package_dict[pkg['name']] = pkg
    return package_dict
def apply_srcarch_policy_impl(self, build_deps_info, item, arch, source_data_tdist, source_data_srcdist, excuse):
    """Validate the Built-Using relations of *item*'s binaries on *arch*.

    Each Built-Using reference must be satisfiable (at the required
    version or newer) by the target suite, the source suite, or the
    primary source suite.  Unsatisfiable references reject the item
    unless the architecture is in break_arches.
    """
    verdict = PolicyVerdict.PASS
    source_suite = item.suite
    target_suite = self.suite_info.target_suite
    binaries_s = source_suite.binaries
    sources_t = target_suite.sources

    def check_bu_in_suite(bu_source, bu_version, source_suite):
        # True when *source_suite* carries bu_source at >= bu_version;
        # also records the dependency on the excuse.
        found = False
        if bu_source not in source_suite.sources:
            return found
        s_source = source_suite.sources[bu_source]
        s_ver = s_source.version
        if apt_pkg.version_compare(s_ver, bu_version) >= 0:
            found = True
            item_name = compute_item_name(sources_t, source_suite.sources, bu_source, arch)
            if arch in self.options.break_arches:
                excuse.addhtml("Ignoring Built-Using for %s/%s on %s" % (pkg_name, arch, item_name))
            else:
                excuse.add_dependency(DependencyType.BUILT_USING, item_name, arch)
                excuse.addhtml("%s/%s has Built-Using on %s" % (pkg_name, arch, item_name))
        return found

    for pkg_id in sorted(x for x in source_data_srcdist.binaries if x.architecture == arch):
        pkg_name = pkg_id.package_name
        # retrieve the testing (if present) and unstable corresponding binary packages
        binary_s = binaries_s[arch][pkg_name]
        for bu in binary_s.builtusing:
            bu_source = bu[0]
            bu_version = bu[1]
            found = False
            # First try the target suite ...
            if bu_source in target_suite.sources:
                t_source = target_suite.sources[bu_source]
                t_ver = t_source.version
                if apt_pkg.version_compare(t_ver, bu_version) >= 0:
                    found = True
            # ... then the source suite ...
            if not found:
                found = check_bu_in_suite(bu_source, bu_version, source_suite)
            # ... and finally the primary source suite (for additional
            # source suites only).
            if not found and source_suite.suite_class.is_additional_source:
                found = check_bu_in_suite(bu_source, bu_version, self.suite_info.primary_source_suite)
            if not found:
                if arch in self.options.break_arches:
                    excuse.addhtml("Ignoring unsatisfiable Built-Using for %s/%s on %s %s" % (
                        pkg_name, arch, bu_source, bu_version))
                else:
                    excuse.addhtml("%s/%s has unsatisfiable Built-Using on %s %s" % (
                        pkg_name, arch, bu_source, bu_version))
                    # escalate the verdict, never downgrade it
                    if verdict.value < PolicyVerdict.REJECTED_PERMANENTLY.value:
                        verdict = PolicyVerdict.REJECTED_PERMANENTLY
    return verdict
def _build_installed_pkgs_cache(self, dist, component):
    """Fill self._installedPkgs with the highest installed version of
    every binary package in dist/component, per architecture.

    Keys are "<package>_<arch>"; when the binary references a source
    with a different version, the source version is recorded instead.
    """
    for arch in self._supportedArchs:
        source_path = self._archivePath + "/dists/%s/%s/binary-%s/Packages.gz" % (dist, component, arch)
        # Context manager ensures the gzip handle is closed (the
        # original leaked one handle per architecture).
        with gzip.open(source_path, 'rb') as f:
            tagf = TagFile(f)
            for section in tagf:
                # make sure we have the right arch (closes bug in installed-detection)
                if section['Architecture'] != arch:
                    continue
                pkgversion = section['Version']
                pkgname = section['Package']
                pkgsource = section.get('Source', '')
                # if source has different version, we cheat and set the binary pkg version
                # to the source package version
                if "(" in pkgsource:
                    m = re.search(r"\((.*)\)", pkgsource)
                    s = m.group(1).strip()
                    if s != "":
                        pkgversion = s
                pkid = "%s_%s" % (pkgname, arch)
                if pkid in self._installedPkgs:
                    regVersion = self._installedPkgs[pkid]
                    compare = version_compare(regVersion, pkgversion)
                    # already registered version is newer or equal
                    if compare >= 0:
                        continue
                self._installedPkgs[pkid] = pkgversion
def read_sources(mirror_dist, sources=None, intern=intern):
    """Parse the Sources index files of *mirror_dist* into a dict.

    :param mirror_dist: object exposing a ``sources_files`` iterable
    :param sources: optional dict to update (created when None)
    :param intern: string interning function (bound as a default for a
                   fast local lookup)
    :returns: {source_name: SourcePackage}, keeping only the highest
              version of each source; Extra-Source-Only entries are
              skipped.
    """
    if sources is None:
        sources = {}
    for filename in mirror_dist.sources_files:
        tag_file = apt_pkg.TagFile(filename)
        # bind hot-loop lookups to locals
        get_field = tag_file.section.get
        step = tag_file.step
        while step():
            if get_field('Extra-Source-Only', 'no') == 'yes':
                # Ignore sources only referenced by Built-Using
                continue
            pkg = intern(get_field('Package'))
            ver = intern(get_field('Version'))
            # keep the highest version seen so far
            if pkg in sources and apt_pkg.version_compare(sources[pkg].version, ver) > 0:
                continue
            binaries = frozenset(x.strip() for x in get_field('Binary').split(','))
            sources[pkg] = SourcePackage(
                source=pkg,
                package=pkg,
                version=ver,
                source_version=ver,
                binaries=binaries,
            )
    return sources
def list_available_updates(self): self.ensure_mirrors(self.config['upstream_repos']) # needed in order to use apt_pkg.version_compare apt_pkg.init() # Store newest version available for each package with updates package_updates = {} for package_name, package_version in self.packages_from_config(): updates = [v for v in self.available_versions_for_package( package_name) if apt_pkg.version_compare( package_version, v) < 0] updates = sorted(set(updates), cmp=apt_pkg.version_compare) if updates: package_updates[package_name] = updates[-1] LOGGER.info("Package %(name)s Config Version: %(current)s" " Updates: %(updates)s" % {'name': package_name, 'current': package_version, 'updates': updates}) # write new config file with updated versions, if requested if package_updates and 'new_config_path' in self.args: for package, version in package_updates.iteritems(): self.config['packages'][package] = version self.config.write(self.args.new_config_path) LOGGER.info("updated config written to %s" % self.args.new_config_path)
def transition_info(transitions):
    """ Print information about all defined transitions.

    Calls L{get_info} for every transition and then tells user if the
    transition is still ongoing or if the expected version already hit
    testing.

    @type transitions: dict
    @param transitions: defined transitions
    """
    session = DBConn().session()
    for trans in transitions:
        t = transitions[trans]
        source = t["source"]
        expected = t["new"]
        # Will be None if nothing is in testing.
        sourceobj = get_source_in_suite(source, "testing", session)
        print(get_info(trans, source, expected, t["rm"], t["reason"], t["packages"]))
        if sourceobj is None:
            # No package in testing
            print("Transition source %s not in testing, transition still ongoing." % (source))
        else:
            compare = apt_pkg.version_compare(sourceobj.version, expected)
            print("Apt compare says: %s" % (compare))
            if compare < 0:
                # This is still valid, the current version in database is older than
                # the new version we wait for
                print("This transition is still ongoing, we currently have version %s" % (sourceobj.version))
            else:
                print("This transition is over, the target package reached testing, should be removed")
                print("%s wanted version: %s, has %s" % (source, expected, sourceobj.version))
        print("-------------------------------------------------------------------------")
def version_checks(package, architecture, target_suite, new_version, session, force=False):
    """Check that new_version respects the version constraints of target_suite.

    Warns about every violated "MustBeNewerThan" / "MustBeOlderThan" suite
    relation and aborts via utils.fubar() unless ``force`` is true.

    :param package: source or binary package name
    :param architecture: "source" for source packages, else a binary arch
    :param target_suite: suite the upload targets
    :param new_version: version being introduced
    :param session: database session
    :param force: continue (warn only) even when checks are violated
    """
    if architecture == "source":
        suite_version_list = get_suite_version_by_source(package, session)
    else:
        suite_version_list = get_suite_version_by_package(
            package, architecture, session)

    must_be_newer_than = [
        vc.reference.suite_name
        for vc in get_version_checks(target_suite, "MustBeNewerThan")
    ]
    must_be_older_than = [
        vc.reference.suite_name
        for vc in get_version_checks(target_suite, "MustBeOlderThan")
    ]

    # Must be newer than an existing version in target_suite
    if target_suite not in must_be_newer_than:
        must_be_newer_than.append(target_suite)

    violations = False
    for suite, version in suite_version_list:
        # vercmp < 0: new_version older; 0: equal; > 0: newer.  Renamed
        # from ``cmp`` so the builtin is not shadowed.
        vercmp = apt_pkg.version_compare(new_version, version)
        # "Not newer" means older or equal, i.e. vercmp <= 0.
        if suite in must_be_newer_than and vercmp < 1:
            utils.warn(
                "%s (%s): version check violated: %s targeted at %s is *not* newer than %s in %s"
                % (package, architecture, new_version, target_suite, version,
                   suite))
            violations = True
        # BUG FIX: "not older" means newer *or equal* (vercmp >= 0).  The
        # previous test ``cmp > 1`` missed equal versions and any positive
        # return of exactly 1 — version_compare's magnitude is unspecified,
        # only its sign matters.
        if suite in must_be_older_than and vercmp > -1:
            utils.warn(
                "%s (%s): version check violated: %s targeted at %s is *not* older than %s in %s"
                % (package, architecture, new_version, target_suite, version,
                   suite))
            violations = True

    if violations:
        if force:
            utils.warn("Continuing anyway (forced)...")
        else:
            utils.fubar("Aborting. Version checks violated and not forced.")
def cache_applicable_hosts_for_advisory_package(sender, **kwargs):
    """
    When a new package is added to an advisory work out what hosts it applies to.
    """
    # Signal handler: ``instance`` is the advisory-package row that was saved.
    advisory_package = kwargs.get('instance')
    advisory = advisory_package.advisory
    print("Considering advisory package %s (%s)" % (advisory_package.package, advisory_package.architecture))
    # All installed packages with the advisory's name/arch on hosts running
    # the affected release.
    affected_packages = Package.objects.filter(name=advisory_package.package,
                                               architecture=advisory_package.architecture,
                                               host__release=advisory_package.release)
    for package in affected_packages:
        # Installed version strictly older than the safe version => vulnerable.
        unsafe = apt_pkg.version_compare(package.version, advisory_package.safe_version) < 0
        # NOTE(review): the message says "<=" but the check above is strict
        # "<" — an exactly-equal installed version is treated as safe.
        print("%s installed on %s is unsafe=%r due to installed version %s being <= %s"
              % (package.name, package.host, unsafe, package.version, advisory_package.safe_version))
        if unsafe:
            # Record (or find) an open Problem for this host/package.
            # NOTE(review): ``fixed__isnull=True`` as a get_or_create kwarg
            # works for the lookup but would raise on create — confirm the
            # Problem model tolerates this.
            Problem.objects.get_or_create(advisory=advisory,
                                          host=package.host,
                                          installed_package_name=package.name,
                                          installed_package_version=package.version,
                                          installed_package_architecture=package.architecture,
                                          safe_package=advisory_package,
                                          fixed__isnull=True)
        else:
            # remove any problems that might have existed due to older incarnations of this advisory
            Problem.objects.filter(advisory=advisory,
                                   host=package.host,
                                   installed_package_name=package.name,
                                   installed_package_version=package.version,
                                   installed_package_architecture=package.architecture,
                                   safe_package=advisory_package).delete()
def scan_oldkernel_packages(self):
    """Return installed kernel packages older than the running kernel.

    The running kernel version is derived from ``os.uname()`` (e.g.
    "4.15.0-42"); every installed linux* package matching ``self.flag``
    whose version compares lower is collected.
    """
    outdated = []
    running_version = '-'.join(os.uname()[2].split('-')[:2])
    cache = common.get_cache_list()
    if not cache:
        return outdated
    for pkg in cache:
        if not (pkg.is_installed and pkg.name.startswith('linux')):
            continue
        if not re.match(self.flag, pkg.name):
            continue
        if apt_pkg.version_compare(pkg.installed.version, running_version) < 0:
            outdated.append(pkg)
    return outdated
def _read_file(self, input, restrict_packages=None):
    """Parse a Packages file and add its packages to us-the-dict"""
    while True:
        headers = rfc822_like_header_parse(input)
        if not headers:
            break
        pkg = Package(headers)
        name = pkg["Package"]
        if name in self:
            # Keep the entry we already have unless this stanza is newer.
            if apt_pkg.version_compare(pkg["Version"],
                                       self[name]["Version"]) <= 0:
                continue
        if restrict_packages is not None and name not in restrict_packages:
            # unwanted package
            continue
        self[name] = pkg
def openstack_upgrade_available(package):
    """
    Determines if an OpenStack upgrade is available from installation
    source, based on version of installed package.

    :param package: str: Name of installed package.

    :returns: bool:    : Returns True if configured installation source offers
                         a newer version of package.
    """
    origin = config('openstack-origin')
    installed = get_os_version_package(package)
    candidate = get_os_version_install_source(origin)
    apt.init()
    # version_compare returns 1 only when the candidate is strictly newer.
    return apt.version_compare(candidate, installed) == 1
def _check_vuln_is_valid(vuln):
    # Returns True when at least one package entry in *vuln* still needs a
    # fix, i.e. its fixed version is strictly newer than its affected
    # version.  Entries missing either key are skipped.
    for pkg in vuln.get('pkg_vulnerabilities', []):
        if 'affected_package' in pkg and 'fixed_package' in pkg:
            # Parse the version out of the "package_name (version)" string.
            version_re = r'.*\((.*)\)'
            affected_version = re.match(
                version_re, pkg.get('affected_package')).groups()[0]
            fixed_version = re.match(version_re,
                                     pkg.get('fixed_package')).groups()[0]
            if apt_pkg.version_compare(fixed_version, affected_version) > 0:
                return True
            # Reached only when fixed <= affected for this entry; the loop
            # then moves on to the next package entry.
            logging.info(
                'Vulnerability %s is already fixed. '
                'The affected package: %s is greater '
                'than the fixed package: %s', vuln.get('vulnerability'),
                affected_version, fixed_version)
    return False
def cmp_pkgrevno(package, revno, pkgcache=None):
    """Compare supplied revno with the revno of the installed package.

    *  1 => Installed revno is greater than supplied arg
    *  0 => Installed revno is the same as supplied arg
    * -1 => Installed revno is less than supplied arg

    This function imports apt_cache function from charmhelpers.fetch if
    the pkgcache argument is None. Be sure to add charmhelpers.fetch if
    you call this function, or pass an apt_pkg.Cache() instance.
    """
    import apt_pkg
    if not pkgcache:
        # Lazily build an apt cache when the caller did not provide one.
        from charmhelpers.fetch import apt_cache
        pkgcache = apt_cache()
    installed = pkgcache[package].current_ver.ver_str
    return apt_pkg.version_compare(installed, revno)
def main():
    """Install or upgrade Puppet to the target version, stage hiera config,
    then install the Puppet modules."""
    # If the package is not found or the version is outdated, install puppet.
    if not pkg_available('puppet'):
        config_puppetlabs_repo()
    pkg = get_package('puppet')[0]
    if not pkg.is_installed:
        install_puppet()
    elif apt_pkg.version_compare(pkg.installed.version,
                                 PUPPET_TARGET_VERSION) < 0:
        install_puppet(upgrade=True)

    hiera_src = '/vagrant/puppet/hiera.yaml'
    if os.path.isfile(hiera_src):
        copyfile(hiera_src, '/etc/puppet/hiera.yaml')

    locale.setlocale(locale.LC_ALL, '')
    install_puppet_modules()
def cmp_pkgrevno(package, revno, pkgcache=None):
    '''Compare supplied revno with the revno of the installed package

    *  1 => Installed revno is greater than supplied arg
    *  0 => Installed revno is the same as supplied arg
    * -1 => Installed revno is less than supplied arg
    '''
    import apt_pkg
    if not pkgcache:
        apt_pkg.init()
        # Force Apt to build its cache in memory. That way we avoid race
        # conditions with other applications building the cache in the same
        # place.
        apt_pkg.config.set("Dir::Cache::pkgcache", "")
        pkgcache = apt_pkg.Cache()
    current = pkgcache[package].current_ver.ver_str
    return apt_pkg.version_compare(current, revno)
def current_release_tracking(self):
    """The PackageUpstreamTracking state for the current release."""
    upstream_release = self.context.productseries.getLatestRelease()
    current_release = self.context.currentrelease
    if upstream_release is None or current_release is None:
        # Launchpad is missing data; there is not enough information to
        # track releases.
        return PackageUpstreamTracking.NONE
    # Compare the upstream portion of the full debian version against the
    # latest upstream release's version.
    base_version = upstream_version(current_release.version)
    age = version_compare(upstream_release.version, base_version)
    if age > 0:
        return PackageUpstreamTracking.NEWER
    if age < 0:
        return PackageUpstreamTracking.OLDER
    # age == 0: the packaged version matches upstream.
    return PackageUpstreamTracking.CURRENT
def intersection(self, other):
    """Make the intersection of two specifiers

    Return a new `SpecifierSet` with version specifier(s) common to the
    specifier and the other.

    Example:
      >>> Specifier('>= 2.2') & '>> 2.2.1' == '>> 2.2.1'
      >>> Specifier('>= 2.2') & '<< 2.3' == '>= 2.2, << 2.3'
    """
    # Coerce a raw string into a Specifier; reject any other foreign type.
    # NOTE(review): ``basestring`` is Python 2 only.
    if isinstance(other, basestring):
        try:
            other = self.__class__(other)
        except InvalidSpecifier:
            return NotImplemented
    elif not isinstance(other, self.__class__):
        return NotImplemented

    # store spec parts for easy access
    rel1, v1 = self.relation, self.version
    rel2, v2 = other.relation, other.version
    result = []
    if other == self:
        result = [other]
    elif rel1 == '=':
        # Exact pin: keep it only if it satisfies the other side.
        # ``None`` marks an unsatisfiable (empty) intersection, distinct
        # from ``[]`` which the final line also maps to a SpecifierSet.
        result = [self] if v1 in other else None
    elif rel2 == '=':
        result = [other] if v2 in self else None
    elif v1 == v2:
        # Same version: the strict relation ('>>'/'<<') wins over the
        # inclusive one ('>='/'<=').
        result = [other if rel1[1] == '=' else self]
    elif v2 in self or v1 in other:
        # Overlapping ranges.
        is_self_greater = version_compare(v1, v2) > 0
        if rel1[0] == rel2[0]:
            # Same direction: keep the tighter bound.
            if rel1[0] == '>':
                result = [self if is_self_greater else other]
            else:
                result = [other if is_self_greater else self]
        else:
            # Opposite directions: both bounds are needed.
            result = [self, other]
    return SpecifierSet(result if result is not None else '')
def openstack_upgrade_available(self, package=None, snap=None):
    """Check if an OpenStack upgrade is available

    :param package: str Package name to use to check upgrade availability
    :param snap: str Snap name to use when a snap install was requested
    :returns: bool True when the configured source offers a newer version
    """
    if not package:
        package = self.release_pkg
    if not snap:
        snap = self.release_snap
    src = self.config[self.source_config_key]
    avail_vers = os_utils.get_os_version_install_source(src)
    # BUG FIX: cur_vers was previously computed unconditionally from the
    # package and then always overwritten by the branch below — the first
    # lookup was redundant (and potentially expensive), so it is dropped.
    if os_utils.snap_install_requested():
        cur_vers = self.get_os_version_snap(snap)
    else:
        cur_vers = self.get_os_version_package(package)
    apt.init()
    return apt.version_compare(avail_vers, cur_vers) == 1
def determine_reproducibility(status1, version1, status2, version2):
    """Merge two (status, version) test results into one.

    The strictly newer version wins outright; for identical versions the
    combined status is 'reproducible' only when both inputs are.
    """
    order = apt_pkg.version_compare(version1, version2)
    if order > 0:
        # version1 is newer: ignore the older result (version2).
        return status1, version1
    if order < 0:
        # version2 is newer: ignore the older result (version1).
        return status2, version2
    # Same (most recent) version on both sides: combine the statuses.
    if status1 == status2 == 'reproducible':
        return 'reproducible', version1
    return 'not_reproducible', version1
def query_new_versions(suite, arch, limit):
    """Select previously-tested packages that now have a newer version."""
    criteria = 'tested before, new version available, sorted by last build date'
    query = """SELECT s.id, s.name, s.version, r.version, max(r.build_date) max_date
               FROM sources AS s JOIN results AS r ON s.id = r.package_id
               WHERE s.suite='{suite}' AND s.architecture='{arch}'
               AND s.version != r.version
               AND r.status != 'blacklisted'
               AND s.id IN (SELECT package_id FROM results)
               AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
               GROUP BY s.id, s.name, s.version, r.version
               ORDER BY max_date
               LIMIT {limit}""".format(suite=suite, arch=arch, limit=limit)
    rows = query_db(query)
    # Avoid constant rescheduling: versions in our repository differ from the
    # official archive, so the query above re-selects those packages forever.
    # Only accept a package when its source version really is newer than the
    # version already tested.
    packages = [(row[0], row[1]) for row in rows
                if version_compare(row[2], row[3]) > 0]
    print_schedule_result(suite, arch, criteria, packages)
    return packages
def openstack_upgrade_available(package):
    """
    Determines if an OpenStack upgrade is available from installation
    source, based on version of installed package.

    :param package: str: Name of installed package.

    :returns: bool:    : Returns True if configured installation source offers
                         a newer version of package.
    """
    import apt_pkg as apt
    origin = config('openstack-origin')
    installed = get_os_version_package(package)
    if "swift" in package:
        # Swift versions do not map 1:1 onto OpenStack releases, so resolve
        # the candidate through the swift codename table.
        codename = get_os_codename_install_source(origin)
        candidate = get_os_version_codename(codename, SWIFT_CODENAMES)
    else:
        candidate = get_os_version_install_source(origin)
    apt.init()
    return apt.version_compare(candidate, installed) == 1
def load_sources_collection(item, map):
    """Fold one Sources index into *map*, keeping the highest version per
    package and recording which pocket it came from.

    ``item`` is a (tagfile, release, pocket, section) tuple; ``map`` is the
    release -> package -> info dict that is updated in place and returned.
    """
    tagfile, release, pocket, section = item
    parser = apt_pkg.TagFile(_get_apt_tags(tagfile))
    while parser.step():
        name = parser.section['Package']
        version = parser.section['Version']
        # '~' sorts below every real version, so the first stanza seen
        # always wins the initial comparison.
        entry = map.setdefault(release, dict()).setdefault(name, {
            'section': 'unset',
            'version': '~',
            'pocket': 'unset'
        })
        entry['section'] = section
        if not pocket:
            # The release pocket defines the baseline version.
            entry['release_version'] = version
        if apt_pkg.version_compare(version, entry['version']) > 0:
            entry['pocket'] = pocket
            entry['version'] = version
            entry['binaries'] = parser.section['Binary'].split(', ')
    return map
def find_nearly_finished_transitions(src_test, bin_test, stage):
    """Yield transitions where binaries in testing lag behind their source.

    A binary (arch:all excluded) is considered stale when its source is
    missing from testing or when the source in testing is newer than the
    binary's source version.  One tuple per affected source is yielded:
    (source, source, new_binaries, old_binaries, stage, extra_info).
    """
    src2bin = {}
    # BUG FIX: dict.itervalues() is Python 2 only; values() behaves
    # identically here and also works on Python 3.
    for bin_pkg in bin_test.values():
        if bin_pkg.architecture == 'all':
            continue
        source_pkg = src_test.get(bin_pkg.source, None)
        if source_pkg is None:
            src2bin.setdefault(bin_pkg.source, set())
            src2bin[bin_pkg.source].add(bin_pkg.package)
            continue
        if apt_pkg.version_compare(source_pkg.version, bin_pkg.source_version) > 0:
            src2bin.setdefault(bin_pkg.source, set())
            src2bin[bin_pkg.source].add(bin_pkg.package)

    for source in sorted(src2bin):
        extra_info = {}
        # NOTE(review): sources collected via the "source missing from
        # testing" branch above would KeyError here — confirm that branch
        # cannot fire for the inputs this is called with.
        source_pkg = src_test[source]
        new_bin = sorted(x for x in source_pkg.binaries - src2bin[source])
        old_bin = sorted(x for x in src2bin[source] - source_pkg.binaries)
        yield (source, source, new_bin, old_bin, stage, extra_info)
def isNewerThanParent(self, dsd):
    """Is the child's version of this package newer than the parent's?

    If it is, there's no point in offering to sync it.

    Any version is considered "newer" than a missing version.
    """
    # Debian versions are not totally ordered: two non-identical versions
    # may compare equal.  Only a conclusively newer child counts, so the
    # test below requires the parent to be strictly older.
    if dsd.parent_source_version is None:
        # Nothing to sync; if anything needs updating it's the parent.
        return True
    if dsd.source_version is None:
        # The child lacks the package entirely; treat the parent as newer.
        return False
    return apt_pkg.version_compare(dsd.parent_source_version,
                                   dsd.source_version) < 0
def neutron_plugin_joined():
    """Publish the contrail neutron plugin configuration on the
    neutron-plugin relation."""
    # Contrail >= 1.20 moved the plugin class to a different module.
    if version_compare(CONTRAIL_VERSION, "1.20~") >= 0:
        plugin = ("neutron_plugin_contrail.plugins.opencontrail"
                  ".contrail_plugin.NeutronPluginContrailCoreV2")
    else:
        plugin = ("neutron_plugin_contrail.plugins.opencontrail"
                  ".contrail_plugin_core.NeutronPluginContrailCoreV2")
    conf = {
        "neutron-api": {
            "/etc/neutron/neutron.conf": {
                "sections": {
                    "DEFAULT": [
                        ("api_extensions_path",
                         "/usr/lib/python2.7/dist-packages/neutron_plugin_contrail/extensions")
                    ]
                }
            }
        }
    }
    settings = {
        "neutron-plugin": "contrail",
        "core-plugin": plugin,
        "neutron-plugin-config":
            "/etc/neutron/plugins/opencontrail/ContrailPlugin.ini",
        "service-plugins": " ",
        "quota-driver":
            "neutron_plugin_contrail.plugins.opencontrail.quota.driver.QuotaDriver",
        "subordinate_configuration": json.dumps(conf)
    }
    relation_set(relation_settings=settings)
def get_anyversion(self, sv_list, suite):
    """Return the highest version found in *suite* or any suite it Enhances.

    @type sv_list: list
    @param sv_list: list of (suite, version) tuples to check

    @type suite: string
    @param suite: suite name

    @rtype: string or None
    @return: highest matching version, or None when no suite matched
    """
    # FIX: dropped the unused local ``Cnf = Config()``.
    anyversion = None
    # The target suite plus every suite it has an "Enhances" relation to.
    anysuite = [suite] + [
        vc.reference.suite_name
        for vc in get_version_checks(suite, "Enhances")
    ]
    for (s, v) in sv_list:
        if s in [x.lower() for x in anysuite]:
            # Keep the highest version seen so far.
            if not anyversion or apt_pkg.version_compare(anyversion, v) <= 0:
                anyversion = v
    return anyversion
def __call__(self, package: str, architecture: str, new_version: str):
    """Warn about (and optionally abort on) suite version-check violations.

    :param package: source or binary package name
    :param architecture: "source" for source packages, else a binary arch
    :param new_version: version being moved into ``self.target_suite``
    """
    if architecture == "source":
        suite_version_list = get_suite_version_by_source(package, self.session)
    else:
        suite_version_list = get_suite_version_by_package(package, architecture, self.session)

    violations = False
    for suite, version in suite_version_list:
        # Renamed from ``cmp`` so the builtin is not shadowed.
        vercmp = apt_pkg.version_compare(new_version, version)
        # for control-suite we allow equal version (for uploads, we don't)
        if suite in self.must_be_newer_than and vercmp < 0:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* newer than %s in %s"
                       % (package, architecture, new_version, self.target_suite, version, suite))
            violations = True
        if suite in self.must_be_older_than and vercmp > 0:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* older than %s in %s"
                       % (package, architecture, new_version, self.target_suite, version, suite))
            violations = True

    if violations:
        if self.force:
            utils.warn("Continuing anyway (forced)...")
        else:
            utils.fubar("Aborting. Version checks violated and not forced.")
def compare_to_version_in_cache(self, use_installed=True):
    """Compare the package to the version available in the cache.

    Checks if the package is already installed or available in the cache
    and if so in what version, returns one of (VERSION_NONE,
    VERSION_OUTDATED, VERSION_SAME, VERSION_NEWER).

    With ``use_installed`` the installed version is compared against,
    otherwise the cache candidate version is used.
    """
    self._dbg(3, "compare_to_version_in_cache")
    pkgname = self._sections["Package"]
    architecture = self._sections["Architecture"]
    # Architecture all gets mapped to the native architecture internally
    if architecture == 'all':
        architecture = apt_pkg.config.find("APT::Architecture")
    # Arch qualify the package name
    pkgname = ":".join([pkgname, architecture])
    debver = self._sections["Version"]
    self._dbg(1, "debver: %s" % debver)
    if pkgname in self._cache:
        if use_installed and self._cache[pkgname].installed:
            cachever = self._cache[pkgname].installed.version
        elif not use_installed and self._cache[pkgname].candidate:
            cachever = self._cache[pkgname].candidate.version
        else:
            return self.VERSION_NONE
        if cachever is not None:
            # Renamed from ``cmp`` to avoid shadowing the builtin.
            rel = apt_pkg.version_compare(cachever, debver)
            self._dbg(1, "CompareVersion(debver,instver): %s" % rel)
            if rel == 0:
                return self.VERSION_SAME
            elif rel < 0:
                # Cache version is older than the .deb: the .deb is newer.
                return self.VERSION_NEWER
            else:
                # Cache version is newer: the .deb is outdated.
                return self.VERSION_OUTDATED
    return self.VERSION_NONE
def aggregate_changelog(self, since_version):
    """See `ISourcePackagePublishingHistory`."""
    if self.changelog is None:
        return None
    apt_pkg.init_system()
    chunks = []
    changelog = self.changelog
    # The python-debian API for parsing changelogs is pretty awful. The
    # only useful way of extracting info is to use the iterator on
    # Changelog and then compare versions.
    try:
        changelog_text = changelog.read().decode("UTF-8", "replace")
        for block in Changelog(changelog_text):
            version = block._raw_version
            # Stop once we reach the entry for since_version (or older).
            if (since_version and
                    apt_pkg.version_compare(version, since_version) <= 0):
                break
            # Poking in private attributes is not nice but again the
            # API is terrible.  We want to ensure that the name/date
            # line is omitted from these composite changelogs.
            block._no_trailer = True
            try:
                # python-debian adds an extra blank line to the chunks
                # so we'll have to sort this out.
                chunks.append(str(block).rstrip())
            except ChangelogCreateError:
                continue
            if not since_version:
                # If a particular version was not requested we just
                # return the most recent changelog entry.
                break
    except ChangelogParseError:
        return None
    output = "\n\n".join(chunks)
    # NOTE(review): calling .decode() on the joined value only works when
    # the chunks are byte strings (Python 2 ``str``) — confirm this code
    # path is Python 2 only.
    return output.decode("utf-8", "replace")
def get_newest_sources_index(session, repo, suite):
    '''
    Create an index of the most recent source packages, using
    the source-UUID of source packages.
    '''
    from apt_pkg import version_compare

    newest = {}
    query = session.query(SourcePackage) \
        .options(undefer(SourcePackage.version)) \
        .options(undefer(SourcePackage.architectures)) \
        .filter(SourcePackage.suites.any(ArchiveSuite.id == suite.id)) \
        .filter(SourcePackage.repo_id == repo.id) \
        .order_by(SourcePackage.version.desc())
    for pkg in query.all():
        existing = newest.get(pkg.uuid)
        # Don't override when the version we already indexed is newer
        # (or the same).
        if existing and version_compare(pkg.version, existing.version) <= 0:
            continue
        newest[pkg.uuid] = pkg
    return newest
def check_versions(domains):
    """Compare each domain's running QEMU version to the hypervisor's.

    Sets domain['status'] (newer/same/older/unknown) and domain['version']
    for each domain, and returns (hypervisor_qemu_version, domains).
    """
    # Version of the QEMU binary installed on the hypervisor.
    result = execute(['/usr/bin/qemu-system-x86_64', '-version'])
    hypervisor_qemu_version = parse_qemu_version(result)
    if hypervisor_qemu_version is None:
        raise HVQEMUVersionException()

    for domain in domains:
        # Query the exact binary each domain runs via /proc/<pid>/exe.
        result = execute(['/proc/{}/exe'.format(domain['pid']), '-version'])
        version = parse_qemu_version(result)
        if version is None:
            # QEMU version couldn't be obtained for this domain.
            domain['status'] = CheckResult.unknown
            continue
        domain['version'] = version
        outcome = apt_pkg.version_compare(domain['version'],
                                          hypervisor_qemu_version)
        if outcome > 0:
            domain['status'] = CheckResult.newer
        elif outcome == 0:
            domain['status'] = CheckResult.same
        else:
            domain['status'] = CheckResult.older

    return hypervisor_qemu_version, domains
def reboot_required():
    """
    Check if reboot required by comparing running kernel package's version
    with the newest installed kernel package's one.

    Returns True/False on a supported distro, or None when the running
    kernel cannot be resolved to a package (or the distro is unsupported).
    """
    # The kernel image path the bootloader actually booted.
    boot_image_path = kernel_cmdline().get('BOOT_IMAGE')
    if boot_image_path is None:
        return None
    if is_debian():
        # For apt-based distros.
        import apt_pkg
        apt_pkg.init()
        kernel_pkg = get_kernel_deb_package(boot_image_path)
        if kernel_pkg is not None:
            match = DEBIAN_KERNEL_PKG_NAME_RE.match(kernel_pkg.name)
            if match:
                name_parts = match.groups(
                )  # E.g. ('linux-image-4.4.0-', '174', '-generic')
                # Newest installed kernel with the same prefix/flavour.
                latest_kernel_pkg = get_latest_same_kernel_deb(
                    name_parts[0], name_parts[2])
                # Reboot needed when a newer kernel than the running one
                # is installed.
                return apt_pkg.version_compare(
                    latest_kernel_pkg.installed.version,
                    kernel_pkg.installed.version) > 0
    elif is_amazon_linux2():
        # For Amazon Linux 2.
        import rpm
        kernel_pkg = get_kernel_rpm_package(boot_image_path)
        if kernel_pkg is not None:
            ts = rpm.ts()
            # Find the newest kernel package.
            package_iterator = ts.dbMatch('name', 'kernel')
            if package_iterator.count() > 0:
                latest_kernel_pkg = sorted(
                    [package_header for package_header in package_iterator],
                    key=cmp_to_key(rpm.versionCompare),
                    reverse=True)[0]
                return rpm.versionCompare(latest_kernel_pkg, kernel_pkg) > 0
    return None
def oudated_packages(self, tracked, packages):
    """Compute the number of missing updates of installed package:
    How outdated packages are

    :param tracked: set of installed package versions
    :return packages: historical data of Debian
    """
    # NOTE(review): the method name has a typo ("oudated"); renaming would
    # break callers, so it is left as-is.
    # Normalise every version column to strings before merging.
    for x in packages:
        packages[x] = packages[x].apply(str)
    # Join tracked installed versions with the historical versions seen at
    # the same (package, first_seen) key; unmatched rows are dropped.
    tracked = (tracked.set_index(['package', 'first_seen']).merge(
        packages.set_index(
            ['package', 'first_seen']).rename(columns={'version': 'version_compare'}),
        left_index=True,
        right_index=True,
        how='left').dropna().reset_index().drop_duplicates())
    # A row counts as a missing update when the installed version is older
    # than the historical one.
    tracked['missing_updates'] = tracked.apply(
        lambda d: apt_pkg.version_compare(d['version'], d['version_compare'
                                                          ]) < 0,
        axis=1)
    # Count the missing updates per (package, version).
    tracked = (tracked.query('missing_updates == True').groupby(
        ['package', 'version']).count()[['missing_updates']].reset_index())
    return tracked
def checkCopy(self, source, series, pocket, person=None, check_permissions=True):
    """Check if the source can be copied to the given location.

    Check possible conflicting publications in the destination archive.
    See `_checkArchiveConflicts()`.

    Also checks if the version of the source being copied is equal
    or higher than any version of the same source present in the
    destination suite (series + pocket).

    If person is not None, check that this person has upload rights to the
    destination (archive, component, pocket).

    :param source: copy candidate, `ISourcePackagePublishingHistory`.
    :param series: destination `IDistroSeries`.
    :param pocket: destination `PackagePublishingPocket`.
    :param person: requester `IPerson`.
    :param check_permissions: boolean indicating whether or not the
        requester's permissions to copy should be checked.

    :raise CannotCopy when a copy is not allowed to be performed
        containing the reason of the error.
    """
    if check_permissions:
        check_copy_permissions(person, self.archive, series, pocket,
                               [source])

    if series not in self.archive.distribution.series:
        raise CannotCopy(
            "No such distro series %s in distribution %s." %
            (series.name, source.distroseries.distribution.name))

    # The target series must accept the source's DSC format.
    format = SourcePackageFormat.getTermByToken(
        source.sourcepackagerelease.dsc_format).value
    if not series.isSourcePackageFormatPermitted(format):
        raise CannotCopy(
            "Source format '%s' not supported by target series %s." %
            (source.sourcepackagerelease.dsc_format, series.name))

    # Deny copies of source publications containing files with an
    # expiration date set.
    for source_file in source.sourcepackagerelease.files:
        if source_file.libraryfile.expires is not None:
            raise CannotCopy('source contains expired files')

    if self.include_binaries and self.strict_binaries:
        built_binaries = source.getBuiltBinaries(want_files=True)
        if len(built_binaries) == 0:
            raise CannotCopy("source has no binaries to be copied")
        # Deny copies of binary publications containing files with
        # expiration date set. We only set such value for immediate
        # expiration of old superseded binaries, so no point in
        # checking its content, the fact it is set is already enough
        # for denying the copy.
        for binary_pub in built_binaries:
            for binary_file in binary_pub.binarypackagerelease.files:
                if binary_file.libraryfile.expires is not None:
                    raise CannotCopy('source has expired binaries')
                if (self.archive.is_main and
                        not self.archive.build_debug_symbols and
                        binary_file.filetype == BinaryPackageFileType.DDEB):
                    raise CannotCopy(
                        "Cannot copy DDEBs to a primary archive")

    # Check if there is already a source with the same name and version
    # published in the destination archive.
    self._checkArchiveConflicts(source, series)

    # Compare against the active publication in the destination suite:
    # copying a version older than the published ancestry is refused.
    ancestry = self.archive.getPublishedSources(
        name=source.source_package_name, exact_match=True,
        distroseries=series, pocket=pocket,
        status=active_publishing_status).first()
    if ancestry is not None:
        ancestry_version = ancestry.sourcepackagerelease.version
        copy_version = source.sourcepackagerelease.version
        apt_pkg.init_system()
        if apt_pkg.version_compare(copy_version, ancestry_version) < 0:
            raise CannotCopy(
                "version older than the %s published in %s" %
                (ancestry.displayname, ancestry.distroseries.name))

    # Restricted files may only move to a public archive when the
    # unembargo option was explicitly requested.
    requires_unembargo = (
        not self.archive.private and has_restricted_files(source))
    if requires_unembargo and not self.unembargo:
        raise CannotCopy(
            "Cannot copy restricted files to a public archive without "
            "explicit unembargo option.")

    # Copy is approved, update the copy inventory.
    self.addCopy(source)
def package_version_compare(version, other_version):
    """Compare two Debian version strings, supporting both the modern
    (``version_compare``) and legacy (``VersionCompare``) apt_pkg APIs."""
    try:
        compare = apt_pkg.version_compare
    except AttributeError:
        # Older python-apt only provides the camel-case spelling.
        compare = apt_pkg.VersionCompare
    return compare(version, other_version)