def metaname2release(metaname):
    """Translate a release metaname into an Ubuntu release codename.

    "latest" maps to the newest known release (including the development
    one); any other metaname is resolved by calling the UbuntuDistroInfo
    method of that name (e.g. "lts", "devel", "stable").
    """
    # Build the distro-info object once; each construction re-parses the
    # distro-info-data CSV, so the original double instantiation was waste.
    di = distro_info.UbuntuDistroInfo()
    if metaname == "latest":
        # 'all' is a list of codenames of all known releases, including
        # the development one, in the release order.
        return di.all[-1]
    return getattr(di, metaname)()
def collect(dryrun=False):
    """Submit data to Push Gateway.

    Gathers ISO size data for the development and LTS series and, unless
    *dryrun* is set, inserts the measurements into InfluxDB.
    """
    # Construct once instead of three times; each construction re-parses
    # the distro-info-data CSV.
    di = distro_info.UbuntuDistroInfo()
    try:
        devel = di.devel()
    except distro_info.DistroDataOutdated:
        # Just after a release there is briefly no development series.
        devel = di.stable()
    devel_results = get_iso_size_data(devel)
    print('%s: %s' % (devel, devel_results))
    # Second argument flags the LTS variant of the data collection.
    lts = di.lts()
    lts_results = get_iso_size_data(lts, True)
    print('%s: %s' % (lts, lts_results))
    if not dryrun:
        print('Pushing data...')
        data = [{
            'measurement': 'iso_size_devel',
            'fields': devel_results,
        }, {
            'measurement': 'iso_size_lts',
            'fields': lts_results,
        }]
        util.influxdb_insert(data)
def main():
    """Produce a Discourse-formatted report of open rls-* tracking bugs
    per release and team, skipping bugs already reported or already tied
    to an unapproved upload."""
    print("\n\n\n\n\n\n")
    launchpad = Launchpad.login_anonymously('read-only connection',
                                            'production', version="devel")
    # NOTE(review): assigned but never used below — available_series builds
    # its own UbuntuDistroInfo(); confirm whether this is vestigial.
    ubuntu_distro_info = distro_info.UbuntuDistroInfo()
    # < artful used rls-L-incoming >= use rls-LL-incoming and those are
    # interesting to us
    available_series = [r.series for r in distro_info.UbuntuDistroInfo().supported(
        result='object') if r.release >= datetime.date(2017, 4, 21)]
    parser = argparse.ArgumentParser(description='find rls bugs')
    parser.add_argument('--release', '-r', action='append', metavar='RELEASE',
                        choices=available_series + ['ALL'],
                        help='release to consider, or ALL (the default) to '
                        'consider all supported releases')
    parser.add_argument('team', metavar='TEAM', type=str, nargs='+',
                        help='team to consider')
    args = parser.parse_args()
    # Default (no --release, or explicit ALL) means every supported series.
    if args.release is None or 'ALL' in args.release:
        args.release = available_series
    for rls in args.release:
        print("# %s" % (rls))
        print("---\n")
        ubuntu_series = launchpad.distributions['ubuntu'].getSeries(
            name_or_version=rls)
        uploads = ubuntu_series.getPackageUploads(status='Unapproved')
        # Bugs referenced by pending uploads are excluded from the report;
        # kept global because other helpers read it.
        global uploads_bug_list
        uploads_bug_list = set(build_uploads_bug_list(uploads))
        for (assignee, bugs) in RlsTrackingBugs(rls, args.team).items():
            for (bugno, bug) in bugs.items():
                if bugno not in RLS_BUGS_LIST and bugno not in uploads_bug_list:
                    if assignee:
                        if assignee == "Unassigned":
                            print("#### :warning: %s :warning:\n" % assignee)
                        else:
                            if assignee in USERNAMES.keys():
                                assignee = assignee + " (@" + USERNAMES[assignee] + ")"
                            print("#### %s\n" % assignee)
                        # Falsify so the heading is printed only once per
                        # assignee group.
                        assignee = False
                    age = (datetime.datetime.now() - bug.date_task_created).days
                    print("[%s](%s)\n" % (bug.title, bug.web_link))
                    print('{} {} Task created {} days ago; {}'.format(
                        bug.status,
                        ':sleeping:' if age > 7 else ':sunglasses:',
                        age, bug.date_task_created))
                    print('\n')  # Without this extra new line the discourse formatting goes to pot and everything is a heading
                    # Remember the bug so later releases don't repeat it.
                    RLS_BUGS_LIST.append(bugno)
        print("---\n")
def latest_ubuntu_release():
    """Determine latest Ubuntu development release.

    When there is not a development release, then return latest stable.

    @return: string of latest release
    """
    # Construct once: the original built a second UbuntuDistroInfo (and
    # re-parsed the data file) on the fallback path.
    di = distro_info.UbuntuDistroInfo()
    try:
        return di.devel()
    except distro_info.DistroDataOutdated:
        # Raised just after release, before distro-info-data is updated.
        return di.stable()
def main(argv):
    """Delete the crm leader for a service/resource, adjusting the NIC
    name for xenial-or-later series."""
    cli_utils.setup_logging()
    parser = argparse.ArgumentParser()
    parser.add_argument("--service")
    parser.add_argument("--resource")
    options = parser.parse_args()
    service = cli_utils.parse_arg(options, 'service')
    resource = cli_utils.parse_arg(options, 'resource')
    # Fetch the release list once instead of constructing UbuntuDistroInfo
    # twice (each construction re-parses the distro-info-data CSV).
    releases = distro_info.UbuntuDistroInfo().all
    xenial = releases.index('xenial')
    series = os.environ.get('MOJO_SERIES')
    # NOTE(review): raises ValueError if MOJO_SERIES is unset or not a
    # known codename — presumably the mojo environment always sets it.
    mojo_env = releases.index(series)
    if mojo_env >= xenial:
        # xenial and later use predictable network interface names.
        resource = resource.replace('eth0', 'ens2')
    mojo_os_utils.delete_crm_leader(service, resource)
def get_current_download_serials(download_root):
    """
    Given a download root, determine the latest current serial.

    This works, specifically, by inspecting
    <download_root>/<suite>/current/unpacked/build-info.txt for supported
    releases.
    """
    serials_by_release = {}
    for codename in distro_info.UbuntuDistroInfo().supported():
        build_info_url = os.path.join(
            download_root, codename, 'current', 'unpacked', 'build-info.txt')
        response = requests.get(build_info_url)
        if not response.ok:
            # A release without published images is simply skipped.
            continue
        found = False
        for info_line in response.text.splitlines():
            if info_line.lower().startswith('serial='):
                serial = _parse_serial_date_int_from_string(
                    info_line.split('=')[1])
                found = True
                break
        if not found:
            # build-info.txt without a serial line is also skipped.
            continue
        serials_by_release[codename] = serial
    return serials_by_release
def resolve_release_codename(name: str, date=None) -> str:
    """Resolve a (possibly distro-qualified) release alias to a codename.

    *name* may be prefixed with "debian/" or "ubuntu/"; without a prefix,
    Debian is consulted first, then Ubuntu.  Returns None when the name
    cannot be resolved.
    """
    def pick_name(series_fn):
        # NOTE(review): max() over 'created' selects the most recently
        # created matching release, despite the original helper being
        # called oldest_name — confirm intended semantics.
        return max(series_fn("object", date), key=lambda rel: rel.created).name

    distro, sep, remainder = name.partition('/')
    if sep:
        name = remainder
    else:
        distro = None

    if distro is None or distro == 'debian':
        debian = distro_info.DebianDistroInfo()
        if name == 'lts':
            return pick_name(debian.lts_supported)
        if name == 'elts':
            return pick_name(debian.elts_supported)
        codename = debian.codename(name)
        if codename:
            return codename
        if debian.valid(name):
            return name

    if distro is None or distro == 'ubuntu':
        ubuntu = distro_info.UbuntuDistroInfo()
        if name == 'esm':
            return pick_name(ubuntu.supported_esm)
        if ubuntu.valid(name):
            return name

    return None
def test_clone_upgrade_synthetic(self):
    """ test clone upgrade with on-the-fly generated chroots """
    supported = distro_info.UbuntuDistroInfo().supported()
    for meta in [
            "ubuntu-standard", "ubuntu-desktop", "kubuntu-desktop",
            "xubuntu-desktop"
    ]:
        logging.info("testing %s" % meta)
        # Fake root on the second-newest supported release (the "old" side
        # of the upgrade).
        old = self._create_fake_upgradable_root(supported[-2], meta=meta)
        self.addCleanup(shutil.rmtree, old)
        # create statefile based on the old data
        with tarfile.open("lala.tar.gz", "w:gz") as state:
            state.add(os.path.join(old, "var", "lib", "apt-clone",
                                   "installed.pkgs"),
                      arcname="./var/lib/apt-clone/installed.pkgs")
        # create new fake environment and try to upgrade
        new = self._create_fake_upgradable_root(supported[-1], meta=meta)
        self.addCleanup(shutil.rmtree, new)
        cache = apt.Cache(rootdir=new)
        clone = AptClone()
        # protect_installed=True: the restore must not remove the meta
        # package already installed in the new root.
        clone._restore_package_selection_in_cache("lala.tar.gz", cache,
                                                  protect_installed=True)
        self.assertFalse(cache[meta].marked_delete,
                         "package %s marked for removal" % meta)
        # The restore must have scheduled at least some changes.
        self.assertTrue(len(cache.get_changes()) > 0)
def test_restore_state_simulate(self):
    """Simulating a state restore on the newest supported release reports
    only the expected unavailable package."""
    releases = distro_info.UbuntuDistroInfo().supported()
    clone = AptClone()
    missing = clone.simulate_restore_state(
        "./data/apt-state.tar.gz", new_distro=releases[-1], exclude_pkgs=[])
    # missing, because clone does not have universe enabled
    self.assertEqual(list(missing), ['accerciser'])
def _check_hwe_support_status(self):
    """Run the hwe-support-status helper and parse its output.

    Only applicable on LTS releases and only when the helper binary is
    installed; otherwise returns None without doing anything.
    """
    codename = get_dist()
    if not distro_info.UbuntuDistroInfo().is_lts(codename):
        return None
    hwe_tool = "/usr/bin/hwe-support-status"
    if not os.path.exists(hwe_tool):
        return None
    self._parse_hwe_support_status([hwe_tool, "--show-replacements"])
def get_devel_series_codename():
    """Return the codename of the current Ubuntu development series,
    falling back to the latest stable one when no devel series exists."""
    import distro_info
    from datetime import datetime

    info = distro_info.UbuntuDistroInfo()
    today = datetime.today().date()
    try:
        return info.devel(today)
    # this can happen on release and before
    # distro-info-data is SRU'ed
    except distro_info.DistroDataOutdated:
        return info.stable()
def test_ubuntu_xenial_security(self):
    """Configuration for the xenial-security suite: hierarchy, components,
    mirrors, apt settings, and (when distro-info is available) the worker
    suites resolved from the current LTS."""
    c = self.__config
    c.vendor = 'ubuntu'
    c.suite = 'xenial-security'
    ubuntu = c.get_vendor('ubuntu')
    sec = c.get_suite(ubuntu, 'xenial-security', True)
    xenial = c.get_suite(ubuntu, 'xenial', True)
    # The security pocket layers on top of the xenial base suite.
    self.assertEqual(list(sec.hierarchy), [sec, xenial])
    self.assertIs(sec.base, xenial)
    self.assertEqual(sec.components, {'main', 'universe'})
    self.assertEqual(sec.extra_components, {'multiverse', 'restricted'})
    self.assertEqual(sec.all_components, {'main', 'universe',
                                          'multiverse', 'restricted'})
    self.assertEqual(sec.archive, 'ubuntu')
    self.assertEqual(c.get_mirrors().lookup_suite(sec),
                     'http://mirror/ubuntu')
    self.assertEqual(sec.apt_key,
                     '/usr/share/keyrings/ubuntu-archive-keyring.gpg')
    self.assertEqual(sec.apt_suite, 'xenial-security')
    # 'archive' is suite/vendor-level only; the top-level config refuses it.
    with self.assertRaises(AttributeError):
        c.archive
    self.assertEqual(c.qemu_image_size, '10G')
    self.assertGreaterEqual(c.parallel, 1)
    # NOTE(review): other tests in this file check 'sbuild_indep_together';
    # confirm 'build_indep_together' here is intentional.
    self.assertIs(c.build_indep_together, False)
    self.assertIs(c.sbuild_source_together, False)
    self.assertEqual(c.sbuild_resolver, [])
    self.assertEqual(c.apt_key,
                     '/usr/share/keyrings/ubuntu-archive-keyring.gpg')
    self.assertIsNone(c.dpkg_source_diff_ignore)
    self.assertEqual(c.dpkg_source_tar_ignore, [])
    self.assertEqual(c.dpkg_source_extend_diff_ignore, [])
    self.assertEqual(c.output_parent, '..')
    self.assertEqual(c.debootstrap_script, 'xenial-security')
    self.assertIs(c.suite, sec)
    # The remaining checks need distro-info; skip silently without it.
    try:
        import distro_info
    except ImportError:
        return
    ubuntu_info = distro_info.UbuntuDistroInfo()
    # NOTE(review): 'backports' is unused afterwards — possibly kept for
    # get_suite()'s side effect of registering the suite; confirm.
    backports = c.get_suite(ubuntu, ubuntu_info.lts() + '-backports')
    lts = c.get_suite(ubuntu, ubuntu_info.lts())
    self.assertIs(c.worker_suite, lts)
    self.assertIs(c.pbuilder_worker_suite, lts)
    self.assertIs(c.sbuild_worker_suite, lts)
    self.assertIs(c.vmdebootstrap_worker_suite, lts)
def _get_from_and_to_version(self):
    """Resolve the numeric versions (e.g. '22.04') of the upgrade's source
    and target series and store them on self."""
    di = distro_info.UbuntuDistroInfo()
    try:
        # .split()[0] drops the 'LTS' suffix from e.g. '22.04 LTS'.
        self._from_version = \
            di.version('%s' % self.controller.fromDist).split()[0]
        self._to_version = \
            di.version('%s' % self.controller.toDist).split()[0]
    # Ubuntu 18.04's python3-distro-info does not have version
    except AttributeError:
        # Fallback: scan all release objects for a matching series name,
        # defaulting to the codename itself when not found.
        self._from_version = next(
            (r.version for r in di.get_all("object")
             if r.series == self.controller.fromDist),
            self.controller.fromDist).split()[0]
        self._to_version = next((r.version for r in di.get_all("object")
                                 if r.series == self.controller.toDist),
                                self.controller.toDist).split()[0]
def test_distro_info(self):
    """Suite aliases (devel/unstable/testing/oldstable/stable and friends)
    resolve to the codenames reported by distro-info, and the derived
    backports/security suites inherit the expected settings."""
    debian = self.__config._get_vendor('debian')
    ubuntu = self.__config._get_vendor('ubuntu')
    # Everything below needs distro-info; skip silently without it.
    try:
        import distro_info
    except ImportError:
        return
    debian_info = distro_info.DebianDistroInfo()
    ubuntu_info = distro_info.UbuntuDistroInfo()
    try:
        ubuntu_devel = ubuntu_info.devel()
    except distro_info.DistroDataOutdated:
        # Just after release there is briefly no devel series.
        ubuntu_devel = ubuntu_info.stable()
    self.assertEqual(str(ubuntu.get_suite('devel')), ubuntu_devel)
    self.assertEqual(str(debian.get_suite('unstable')), 'sid')
    self.assertEqual(str(debian.get_suite('testing')),
                     debian_info.testing())
    self.assertEqual(str(debian.get_suite('oldstable')),
                     debian_info.old())
    self.assertEqual(str(debian.get_suite('rc-buggy')), 'experimental')
    stable = debian.get_suite('stable')
    self.assertEqual(str(stable), debian_info.stable())
    self.assertEqual(stable.sbuild_resolver, [])
    # Backports: aptitude resolver, own mirror, layered on stable.
    backports = debian.get_suite('stable-backports')
    self.assertEqual(str(backports), debian_info.stable() + '-backports')
    self.assertEqual(backports.sbuild_resolver,
                     ['--build-dep-resolver=aptitude'])
    self.assertEqual(backports.apt_suite,
                     debian_info.stable() + '-backports')
    self.assertEqual(backports.mirror,
                     'http://192.168.122.1:3142/debian')
    self.assertEqual(backports.hierarchy[0], backports)
    self.assertEqual(str(backports.hierarchy[1]), str(stable))
    # Security: legacy '<codename>/updates' apt suite on its own mirror.
    security = debian.get_suite('stable-security')
    self.assertEqual(security.apt_suite,
                     '{}/updates'.format(debian_info.stable()))
    self.assertEqual(security.mirror,
                     'http://192.168.122.1:3142/security.debian.org')
    self.assertEqual(security.hierarchy[0], security)
    self.assertEqual(str(security.hierarchy[1]), str(stable))
def test_cross_vendor(self):
    """A steamrt suite based on an Ubuntu suite: hierarchy crosses the
    vendor boundary and worker settings come from Ubuntu."""
    c = self.__config
    c.vendor = 'steamrt'
    c.suite = 'scout'
    steamrt = c.get_vendor('steamrt')
    ubuntu = c.get_vendor('ubuntu')
    scout = c.get_suite(steamrt, 'scout')
    precise = c.get_suite(ubuntu, 'precise')
    # scout layers on Ubuntu precise despite belonging to another vendor.
    self.assertEqual(list(scout.hierarchy), [scout, precise])
    self.assertEqual(c.components, {'main', 'contrib', 'non-free'})
    self.assertEqual(c.vendor, steamrt)
    # TODO: not sure whether it's correct for these to be inherited
    # from Ubuntu due to the cross-vendor base suite?
    self.assertIs(c.worker_vendor, ubuntu)
    self.assertIs(c.sbuild_worker_vendor, ubuntu)
    self.assertIs(c.vmdebootstrap_worker_vendor, ubuntu)
    # TODO: not sure whether it's correct for these to be inherited
    # from Ubuntu due to the cross-vendor base suite?
    self.assertEqual(c.autopkgtest, ['lxc', 'qemu'])
    self.assertEqual(
        c.get_mirrors().lookup_suite(scout),
        'http://192.168.122.1:3142/repo.steamstatic.com/steamrt')
    self.assertEqual(scout.archive, 'repo.steamstatic.com/steamrt')
    # Worker-suite resolution needs distro-info; skip silently without it.
    try:
        import distro_info
    except ImportError:
        return
    ubuntu_info = distro_info.UbuntuDistroInfo()
    self.assertIs(c.worker_suite,
                  c.get_suite(ubuntu, ubuntu_info.lts() + '-backports'))
def get_daily_ubuntu_image_ami(release=None):
    """Given a particular OS, find the latest daily image.

    Defaults to the current LTS when no release is given; exits the
    process when no matching AMI exists.
    """
    if not release:
        release = distro_info.UbuntuDistroInfo().lts()
    print('searching for daily AMI of %s' % (release))
    name_filter = ('ubuntu/images-testing/hvm-ssd/'
                   'ubuntu-%s-daily-amd64-server-*' % (release))
    ec2 = boto3.session.Session().client(service_name='ec2')
    response = ec2.describe_images(Filters=[{
        'Name': 'name',
        'Values': [name_filter]
    }])
    # Oldest first; the newest image ends up last.
    candidates = sorted(response['Images'],
                        key=lambda img: img['CreationDate'])
    if not candidates:
        print('error: cannot find daily image for "%s"' % (release))
        sys.exit(1)
    return candidates[-1]['ImageId']
def get_ubuntu_releases(self):
    """Return a list of all Ubuntu releases in order of release."""
    return distro_info.UbuntuDistroInfo().all
def is_current_distro_supported():
    """Report whether the running release's codename is still in the set
    of supported Ubuntu releases as of today."""
    codename = aptsources.distro.get_distro().codename
    supported = distro_info.UbuntuDistroInfo().supported(
        datetime.now().date())
    return codename in supported
def is_current_distro_lts():
    """Report whether the running release's codename is an Ubuntu LTS."""
    codename = aptsources.distro.get_distro().codename
    return distro_info.UbuntuDistroInfo().is_lts(codename)
def collect(dryrun=False):
    """Submit data to Push Gateway.

    Gathers ISO size data for the development and LTS series and, unless
    *dryrun* is set, pushes one gauge per (series, architecture) to the
    Prometheus push gateway.
    """
    # Construct once instead of three times; each construction re-parses
    # the distro-info-data CSV.
    di = distro_info.UbuntuDistroInfo()
    try:
        devel = di.devel()
    except distro_info.DistroDataOutdated:
        # Just after a release there is briefly no development series.
        devel = di.stable()
    devel_results = get_iso_size_data(devel)
    print('%s: %s' % (devel, devel_results))
    lts = di.lts()
    lts_results = get_iso_size_data(lts, True)
    print('%s: %s' % (lts, lts_results))
    if not dryrun:
        print('Pushing data...')
        registry = CollectorRegistry()
        # The ten copy-pasted Gauge calls collapsed into one loop; metric
        # names and descriptions are byte-identical to the originals
        # (e.g. 'server_iso_devel_amd64_size_total' / 'dev amd64 size').
        for prefix, desc, results in (('devel', 'dev', devel_results),
                                      ('lts', 'lts', lts_results)):
            for arch in ('amd64', 'arm64', 'i386', 'ppc64el', 's390x'):
                Gauge('server_iso_%s_%s_size_total' % (prefix, arch),
                      '%s %s size' % (desc, arch),
                      None, registry=registry).set(results[arch])
        util.push2gateway('server-iso', registry)
def get_ubuntu_releases(self):
    """Return a list of all Ubuntu releases in order of release."""
    releases = distro_info.UbuntuDistroInfo().all
    self.log.debug('Ubuntu release list: {}'.format(releases))
    return releases
def __init__(self, config_layers=(), current_directory=None):
    """Build the layered configuration: hard-coded defaults from
    defaults.yaml (augmented with runtime-detected values), then either
    the explicit *config_layers* or the XDG vectis.yaml files, highest
    priority first in self._raw."""
    super(Config, self).__init__()
    self._suites = WeakValueDictionary()
    self._vendors = {}
    self._overrides = {}
    self._relevant_directory = None
    d = yaml.safe_load(
        open(os.path.join(os.path.dirname(__file__), 'defaults.yaml')))
    # Some things can have better defaults that can't be hard-coded
    d['defaults']['parallel'] = str(os.cpu_count())
    # Best effort: if dpkg is unavailable the architecture default from
    # defaults.yaml stands.
    try:
        d['defaults']['architecture'] = subprocess.check_output(
            ['dpkg', '--print-architecture'],
            universal_newlines=True).strip()
    except subprocess.CalledProcessError:
        pass
    d['vendors']['debian']['default_suite'] = 'sid'
    # distro-info is optional: without it only 'sid' is wired up; with it
    # the alias suites (stable/testing/oldstable/devel) are resolved.
    try:
        import distro_info
    except ImportError:
        d['vendors']['debian']['default_worker_suite'] = 'sid'
    else:
        debian = distro_info.DebianDistroInfo()
        ubuntu = distro_info.UbuntuDistroInfo()
        d['vendors']['debian']['default_worker_suite'] = debian.stable()
        d['vendors']['debian']['suites']['stable'] = {
            'alias_for': debian.stable(),
        }
        d['vendors']['debian']['suites']['testing'] = {
            'alias_for': debian.testing(),
        }
        d['vendors']['debian']['suites']['oldstable'] = {
            'alias_for': debian.old(),
        }
        # According to autopkgtest-buildvm-ubuntu-cloud, just after
        # an Ubuntu release there is briefly no development version
        # at all.
        try:
            ubuntu_devel = ubuntu.devel()
        except distro_info.DistroDataOutdated:
            ubuntu_devel = ubuntu.stable()
        d['vendors']['ubuntu']['default_suite'] = ubuntu_devel
        d['vendors']['ubuntu']['default_worker_suite'] = (
            ubuntu.lts() + '-backports')
        d['vendors']['ubuntu']['suites']['devel'] = {
            'alias_for': ubuntu_devel,
        }
        # Ensure every known codename has at least an empty suite entry.
        for suite in debian.all:
            d['vendors']['debian']['suites'].setdefault(suite, {})
        for suite in ubuntu.all:
            d['vendors']['ubuntu']['suites'].setdefault(suite, {})
    self._raw = []
    self._raw.append(d)
    if config_layers:
        # Explicit layers take priority over the built-in defaults.
        self._raw[:0] = list(config_layers)
    else:
        # XDG lookup: later directories in the resulting list have higher
        # priority, so insert each found file at the front.
        config_dirs = XDG_CONFIG_DIRS.split(':')
        config_dirs = list(reversed(config_dirs))
        config_dirs.append(XDG_CONFIG_HOME)
        for p in config_dirs:
            conffile = os.path.join(p, 'vectis', 'vectis.yaml')
            try:
                reader = open(conffile)
            except FileNotFoundError:
                continue
            with reader:
                raw = yaml.safe_load(reader)
                if not isinstance(raw, dict):
                    raise ConfigError(
                        'Reading {!r} did not yield a dict'.format(
                            conffile))
                self._raw.insert(0, raw)
    if current_directory is None:
        current_directory = os.getcwd()
    # Walk up from current_directory until some layer has a matching
    # 'directories' entry.
    self._relevant_directory = None
    while self._relevant_directory is None:
        for r in self._raw:
            if current_directory in r.get('directories', {}):
                self._relevant_directory = current_directory
                break
        else:
            parent, _ = os.path.split(current_directory)
            # Guard against infinite recursion. If current_directory == '/'
            # we would already have found directories./ in the hard-coded
            # defaults, and broken out of the loop
            assert len(parent) < len(current_directory)
            current_directory = parent
            continue
    assert self._relevant_directory is not None
    self._path_based = Directory(self._relevant_directory, self._raw)
def test_ubuntu_xenial(self):
    """Configuration for a plain xenial suite: standalone hierarchy,
    components, mirrors, apt settings, and (with distro-info) the
    backports-based worker suites."""
    c = self.__config
    c.vendor = 'ubuntu'
    c.suite = 'xenial'
    ubuntu = c.get_vendor('ubuntu')
    xenial = c.get_suite(ubuntu, 'xenial', True)
    # A release pocket stands alone: no base suite underneath.
    self.assertEqual(list(xenial.hierarchy), [xenial])
    self.assertEqual(xenial.components, {'main', 'universe'})
    self.assertEqual(xenial.extra_components, {'multiverse', 'restricted'})
    self.assertEqual(xenial.all_components, {'main', 'universe',
                                             'multiverse', 'restricted'})
    self.assertIs(xenial.base, None)
    self.assertEqual(xenial.archive, 'ubuntu')
    self.assertEqual(c.get_mirrors().lookup_suite(xenial),
                     'http://mirror/ubuntu')
    self.assertEqual(xenial.apt_key,
                     '/usr/share/keyrings/ubuntu-archive-keyring.gpg')
    self.assertEqual(xenial.apt_suite, 'xenial')
    self.assertEqual(c.components, {'main', 'universe'})
    self.assertEqual(c.extra_components, {'multiverse', 'restricted'})
    self.assertEqual(c.all_components, {'main', 'universe',
                                        'multiverse', 'restricted'})
    self.assertIs(c.vendor, ubuntu)
    self.assertIs(c.worker_vendor, ubuntu)
    self.assertIs(c.sbuild_worker_vendor, ubuntu)
    self.assertIs(c.vmdebootstrap_worker_vendor, ubuntu)
    # 'archive' is suite/vendor-level only; the top-level config refuses it.
    with self.assertRaises(AttributeError):
        c.archive
    self.assertEqual(c.qemu_image_size, '10G')
    self.assertGreaterEqual(c.parallel, 1)
    self.assertIs(c.sbuild_indep_together, False)
    self.assertIs(c.sbuild_source_together, False)
    self.assertEqual(c.sbuild_resolver, [])
    self.assertEqual(c.apt_key,
                     '/usr/share/keyrings/ubuntu-archive-keyring.gpg')
    self.assertIsNone(c.dpkg_source_diff_ignore)
    self.assertEqual(c.dpkg_source_tar_ignore, [])
    self.assertEqual(c.dpkg_source_extend_diff_ignore, [])
    self.assertEqual(c.output_parent, '..')
    self.assertEqual(c.debootstrap_script, 'xenial')
    self.assertIs(c.suite, xenial)
    # The remaining checks need distro-info; skip silently without it.
    try:
        import distro_info
    except ImportError:
        return
    ubuntu_info = distro_info.UbuntuDistroInfo()
    backports = c.get_suite(ubuntu, ubuntu_info.lts() + '-backports')
    self.assertIs(c.worker_suite, backports)
    self.assertIs(c.sbuild_worker_suite, backports)
    self.assertIs(c.vmdebootstrap_worker_suite,
                  backports)
    try:
        ubuntu_devel = ubuntu_info.devel()
    except distro_info.DistroDataOutdated:
        ubuntu_devel = ubuntu_info.stable()
    # FIXME: this seems wrong
    self.assertEqual(c.default_suite, ubuntu_devel)
def __init__(self, useDevelopmentRelease=False, useProposed=False,
             forceLTS=False, forceDownload=False, cache=None):
    """Set up the meta-release downloader.

    Determines the meta-release URI from built-in defaults and the two
    config files, honouring the Prompt= setting (never/lts/normal), then
    kicks off the download in a background thread.  Several early returns
    abort setup on config errors or Prompt=never.
    """
    self._debug("MetaRelease.__init__() useDevel=%s useProposed=%s" %
                (useDevelopmentRelease, useProposed))
    # force download instead of sending if-modified-since
    self.forceDownload = forceDownload
    self.useDevelopmentRelease = useDevelopmentRelease
    # information about the available dists
    self.downloaded = threading.Event()
    self.upgradable_to = None
    self.new_dist = None
    if cache is None:
        cache = apt.Cache()
    self.flavor = get_ubuntu_flavor(cache=cache)
    self.flavor_name = get_ubuntu_flavor_name(cache=cache)
    self.current_dist_name = get_dist()
    self.current_dist_version = get_dist_version()
    self.no_longer_supported = None
    self.prompt = None
    # default (if the conf file is missing)
    base_uri = "https://changelogs.ubuntu.com/"
    self.METARELEASE_URI = base_uri + "meta-release"
    self.METARELEASE_URI_LTS = base_uri + "meta-release-lts"
    # NOTE(review): both postfixes are "-development" here — confirm the
    # proposed postfix is intentionally the same as the unstable one.
    self.METARELEASE_URI_UNSTABLE_POSTFIX = "-development"
    self.METARELEASE_URI_PROPOSED_POSTFIX = "-development"
    # check the meta-release config first
    parser = configparser.ConfigParser()
    if os.path.exists(self.CONF_METARELEASE):
        try:
            parser.read(self.CONF_METARELEASE)
        except configparser.Error as e:
            sys.stderr.write("ERROR: failed to read '%s':\n%s" % (
                self.CONF_METARELEASE, e))
            return
        # make changing the metarelease file and the location
        # for the files easy
        if parser.has_section("METARELEASE"):
            sec = "METARELEASE"
            for k in [
                    "URI",
                    "URI_LTS",
                    "URI_UNSTABLE_POSTFIX",
                    "URI_PROPOSED_POSTFIX"
            ]:
                if parser.has_option(sec, k):
                    self._debug("%s: %s " % (self.CONF_METARELEASE,
                                             parser.get(sec, k)))
                    # Overrides e.g. self.METARELEASE_URI from the file.
                    setattr(self, "%s_%s" % (sec, k), parser.get(sec, k))
    # check the config file first to figure if we want lts upgrades only
    parser = configparser.ConfigParser()
    if os.path.exists(self.CONF):
        try:
            parser.read(self.CONF)
        except configparser.Error as e:
            sys.stderr.write("ERROR: failed to read '%s':\n%s" % (
                self.CONF, e))
            return
        # now check which specific url to use
        if parser.has_option("DEFAULT", "Prompt"):
            prompt = parser.get("DEFAULT", "Prompt").lower()
            if (prompt == "never" or prompt == "no"):
                self.prompt = 'never'
                # nothing to do for this object
                # FIXME: what about no longer supported?
                self.downloaded.set()
                return
            elif prompt == "lts":
                self.prompt = 'lts'
                # the Prompt=lts setting only makes sense when running on
                # a LTS, otherwise it would result in users not receiving
                # any distro upgrades
                di = distro_info.UbuntuDistroInfo()
                if di.is_lts(self.current_dist_name):
                    self.METARELEASE_URI = self.METARELEASE_URI_LTS
                else:
                    self._debug("Prompt=lts for non-LTS, ignoring")
            else:
                self.prompt = 'normal'
    # needed for the _tryUpgradeSelf() code in DistUpgradeController
    if forceLTS:
        self.METARELEASE_URI = self.METARELEASE_URI_LTS
    # devel and proposed "just" change the postfix
    if useDevelopmentRelease:
        self.METARELEASE_URI += self.METARELEASE_URI_UNSTABLE_POSTFIX
    elif useProposed:
        self.METARELEASE_URI += self.METARELEASE_URI_PROPOSED_POSTFIX
    self._debug("metarelease-uri: %s" % self.METARELEASE_URI)
    self.metarelease_information = None
    if not self._buildMetaReleaseFile():
        self._debug("_buildMetaReleaseFile failed")
        return
    # we start the download thread here and we have a timeout
    threading.Thread(target=self.download).start()
class BugTasks():
    """
    The BugTasks class

    The class logs into Launchpad and query the tasks
    to find the tagged tasks
    """
    # Codenames of currently supported Ubuntu series (evaluated at import
    # time).
    valid_series = distro_info.UbuntuDistroInfo().supported()
    # Only tasks created on/after this date are searched.
    start_date = '2015-01-01'

    def __init__(self):
        self.tag = None
        # bug id -> {'title', 'series', 'pkg', 'owners', 'verification'}
        self.all_tasks = {}
        self.lp = None

    def login(self):
        """
        Log into the production Launchpad instance
        version='devel' is important, otherwise no task will be returned
        """
        self.lp = lp.login_anonymously('sts_tags', 'production',
                                       version='devel')

    def add_one_task(self, one_task, serie):
        """Merge one Launchpad bug task into self.all_tasks, accumulating
        series, packages and owners per bug id."""
        OneBug = self.all_tasks.setdefault(one_task.bug.id, {})
        OneBug['title'] = one_task.bug.title
        OneBug.setdefault('series', set()).add(serie)
        OneBug.setdefault('pkg',
                          set()).add(one_task.bug_target_name.split()[0])
        assignee = one_task.assignee.display_name \
            if one_task.assignee else 'None'
        OneBug.setdefault('owners', set()).add(assignee)
        # Tags like 'verification-needed'/'verification-done'.
        OneBug['verification'] = [
            vers for vers in one_task.bug.tags
            if vers.startswith('verification')
        ]
        # Progress indicator: first letter of the series name.
        print("%s" % serie[0], end='', flush=True)

    def get_openstack_tasks(self, tag):
        """
        Get all openstack tasks tagged with tag
        Add each task found to the all_task list
        """
        print('Fetching openstack projects.\nThis will take some time...'
              '', end="")
        self.openstack = self.lp.project_groups['openstack']
        self.oprojects = [(proj, proj.series)
                          for proj in self.openstack.projects]
        print('Got all %d openstack projects' % len(self.oprojects))
        print('Scanning through all project'
              'series to find tagged bugs\n\
This is a long process...', end='')
        for (project, series) in self.oprojects:
            for serie in series:
                tasks = serie.searchTasks(tags=tag)
                for task in tasks:
                    self.add_one_task(task, serie.name)
        print('\n')

    def get_uca_tasks(self, tag):
        """
        Get all ubuntu cloud archive tasks tagged with tag.
        Add each task found to the all_task list
        """
        print('Fetching Ubuntu Cloud Archive tasks.', end='')
        self.uca = self.lp.distributions['cloud-archive']
        self.series = [serie for serie in self.uca.series]
        # Oldest-first iteration order.
        self.series.reverse()
        for serie in self.series:
            tasks = serie.searchTasks(tags=tag,
                                      created_since=self.start_date,
                                      order_by='id')
            for task in tasks:
                self.add_one_task(task, serie.name)
        print('\n')

    def get_ubuntu_tasks(self, tag):
        """
        Get all ubuntu tasks tagged with tag.
        Add each task found to the all_task list
        """
        print('Fetching ubuntu tasks.', end='')
        self.ubuntu = self.lp.distributions['ubuntu']
        # Current development series plus every still-supported series.
        self.series = [self.ubuntu.current_series]
        self.series += [
            serie for serie in self.ubuntu.series
            if serie.supported is True
        ]
        self.series.reverse()
        for serie in self.series:
            tasks = serie.searchTasks(tags=tag,
                                      created_since=self.start_date,
                                      order_by='id')
            for task in tasks:
                self.add_one_task(task, serie.name)
        print('\n')

    def display_report(self, long_display, tag):
        """
        Format and display all the collected tasks
        """
        PkgList = {}
        for bug in sorted(self.all_tasks.keys()):
            PkgList.setdefault('pkg', set()).add(
                ' '.join(self.all_tasks[bug]['pkg']))
            print("LP: #%s - (%s) %s" % (bug, ' '.join(
                self.all_tasks[bug]['pkg']), self.all_tasks[bug]['title']))
            if long_display:
                print(" - Series to SRU : %s" %
                      ' '.join(self.all_tasks[bug]['series']))
                if self.all_tasks[bug]['verification']:
                    print(" - Verification : %s" %
                          ' '.join(self.all_tasks[bug]['verification']))
                print(" - Owners : %s" %
                      ' '.join(self.all_tasks[bug]['owners']) + '\n')
        if long_display:
            if tag == 'sts-sru-needed':
                # IRC meeting summary line.
                print("#info SRU pending for : %s" %
                      ', '.join(PkgList['pkg']))
def test_ubuntu(self):
    """Vendor-level defaults for Ubuntu: components, workers, apt
    settings, and (with distro-info) devel/backports suite resolution."""
    c = self.__config
    c.vendor = 'ubuntu'
    ubuntu = c.get_vendor('ubuntu')
    self.assertIs(c.vendor, ubuntu)
    self.assertEqual(str(ubuntu), 'ubuntu')
    # Debian-style alias suites do not exist for Ubuntu.
    self.assertIsNone(c.get_suite(ubuntu, 'unstable', create=False))
    self.assertIsNone(c.get_suite(ubuntu, 'stable', create=False))
    self.assertEqual(c.components, {'main', 'universe'})
    self.assertEqual(c.extra_components, {'restricted', 'multiverse'})
    self.assertEqual(c.all_components, {'main', 'universe',
                                        'restricted', 'multiverse'})
    self.assertIs(c.vendor, ubuntu)
    self.assertIs(c.worker_vendor, ubuntu)
    self.assertIs(c.sbuild_worker_vendor, ubuntu)
    self.assertIs(c.vmdebootstrap_worker_vendor, ubuntu)
    # 'archive' is suite/vendor-level only; the top-level config refuses it.
    with self.assertRaises(AttributeError):
        c.archive
    self.assertEqual(c.autopkgtest, ['lxc', 'qemu'])
    self.assertEqual(c.components, {'main', 'universe'})
    self.assertEqual(c.extra_components, {'restricted', 'multiverse'})
    self.assertEqual(c.all_components, {'main', 'universe',
                                        'restricted', 'multiverse'})
    self.assertIs(c.vendor, ubuntu)
    self.assertEqual(c.qemu_image_size, '10G')
    self.assertGreaterEqual(c.parallel, 1)
    self.assertIs(c.sbuild_indep_together, False)
    self.assertIs(c.sbuild_source_together, False)
    self.assertEqual(c.sbuild_resolver, [])
    self.assertEqual(c.apt_key,
                     '/usr/share/keyrings/ubuntu-archive-keyring.gpg')
    # apt_suite only makes sense once a suite is selected.
    with self.assertRaises(AttributeError):
        c.apt_suite
    self.assertIsNone(c.dpkg_source_diff_ignore)
    self.assertEqual(c.dpkg_source_tar_ignore, [])
    self.assertEqual(c.dpkg_source_extend_diff_ignore, [])
    self.assertEqual(c.output_parent, '..')
    # The remaining checks need distro-info; skip silently without it.
    try:
        import distro_info
    except ImportError:
        return
    ubuntu_info = distro_info.UbuntuDistroInfo()
    try:
        ubuntu_devel = ubuntu_info.devel()
    except distro_info.DistroDataOutdated:
        # Just after release there is briefly no devel series.
        ubuntu_devel = ubuntu_info.stable()
    self.assertEqual(str(c.get_suite(ubuntu, 'devel')), ubuntu_devel)
    self.assertEqual(ubuntu.default_suite, ubuntu_devel)
    self.assertEqual(ubuntu.default_worker_suite,
                     ubuntu_info.lts() + '-backports')
    devel = c.get_suite(ubuntu, 'devel')
    self.assertEqual(devel.archive, 'ubuntu')
    self.assertEqual(c.get_mirrors().lookup_suite(devel),
                     'http://mirror/ubuntu')
    backports = c.get_suite(ubuntu, ubuntu_info.lts() + '-backports')
    self.assertEqual(c.worker_suite, backports)
    self.assertEqual(c.sbuild_worker_suite, backports)
    self.assertEqual(c.vmdebootstrap_worker_suite, backports)
    self.assertEqual(backports.archive, 'ubuntu')
    self.assertEqual(c.get_mirrors().lookup_suite(backports),
                     'http://mirror/ubuntu')
def _is_livepatch_supported(self):
    """Return True when the running release codename is an Ubuntu LTS
    (the gate this code uses for livepatch support)."""
    return distro_info.UbuntuDistroInfo().is_lts(get_dist())