class ToTestBase(object):
    """Base class to store the basic interface"""

    def __init__(self, project, dryrun=False):
        self.project = project
        self.dryrun = dryrun
        self.api = StagingAPI(osc.conf.config['apiurl'], project='openSUSE:%s' % project)
        self.known_failures = self.known_failures_from_dashboard(project)

    def openqa_group(self):
        return self.project

    def iso_prefix(self):
        return self.project

    def jobs_num(self):
        return 70

    def current_version(self):
        return self.release_version()

    def binaries_of_product(self, project, product):
        url = self.api.makeurl(['build', project, 'images', 'local', product])
        try:
            f = self.api.retried_GET(url)
        except urllib2.HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def get_current_snapshot(self):
        """Return the current snapshot in :ToTest"""

        # for now we hardcode all kind of things
        for binary in self.binaries_of_product('openSUSE:%s:ToTest' % self.project,
                                               '_product:openSUSE-cd-mini-%s' % self.arch()):
            result = re.match(r'openSUSE-%s-NET-.*-Snapshot(.*)-Media.iso' % self.iso_prefix(), binary)
            if result:
                return result.group(1)

        return None

    def ftp_build_version(self, tree):
        for binary in self.binaries_of_product('openSUSE:%s' % self.project, tree):
            result = re.match(r'openSUSE.*Build(.*)-Media1.report', binary)
            if result:
                return result.group(1)
        raise Exception("can't find %s version" % self.project)

    def release_version(self):
        url = self.api.makeurl(['build', 'openSUSE:%s' % self.project, 'standard',
                                self.arch(), '_product:openSUSE-release'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise Exception("can't find %s version" % self.project)

    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs
        """

        url = makeurl('https://openqa.opensuse.org', ['api', 'v1', 'jobs'],
                      {'group': self.openqa_group(), 'build': snapshot})
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            if job['clone_id'] or job['result'] == 'obsoleted':
                continue
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs

    def _result2str(self, result):
        if result == QA_INPROGRESS:
            return 'inprogress'
        elif result == QA_FAILED:
            return 'failed'
        else:
            return 'passed'

    def find_failed_module(self, testmodules):
        # print json.dumps(testmodules, sort_keys=True, indent=4)
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                return module['name']
            logger.info('%s %s %s' % (module['name'], module['result'], module['flags']))

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot

        Returns a QAResult
        """

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        number_of_fails = 0
        in_progress = False
        machines = []
        for job in jobs:
            # Record machines we have tests for
            machines.append(job['settings']['MACHINE'])
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped', 'user_cancelled', 'obsoleted'):
                jobname = job['name'] + '@' + job['settings']['MACHINE']
                if jobname in self.known_failures:
                    self.known_failures.remove(jobname)
                    continue
                number_of_fails += 1
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                failedmodule = self.find_failed_module(job['modules'])
                url = 'https://openqa.opensuse.org/tests/%s' % job['id']
                print(jobname, url, failedmodule, job['retry_avbl'])
                # if number_of_fails < 3: continue
            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        if number_of_fails > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        machines = list(set(machines))
        for item in machines:
            for item2 in self.known_failures:
                if item2.split('@')[1] == item:
                    logger.info('now passing %s' % item2)
        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished
        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            # ignore 32bit for now. We're only interested in aarch64 here
            if repo.get('arch') in ('armv6l', 'armv7l'):
                continue
            if repo.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                repo.get('arch'), repo.get('code')))
                ready = False
        return ready

    def maxsize_for_package(self, package):
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'.*-image-livecd-x11.*', package):
            return 681574400  # not a full CD

        if re.match(r'.*-image-livecd.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-dvd9-dvd-.*', package):
            return 8539996159

        if package.startswith('_product:openSUSE-ftp-ftp-'):
            return None

        if package == '_product:openSUSE-Addon-NonOss-ftp-ftp-i586_x86_64':
            return None

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded"""

        query = {'package': package, 'repository': repository, 'arch': arch}
        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for repo in root.findall('result'):
            status = repo.find('status')
            if status.get('code') != 'succeeded':
                logger.info('%s %s %s %s -> %s' % (project, package, repository,
                                                   arch, status.get('code')))
                return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s' % (project, package, repository, arch,
                                                  'too large by %s bytes' % (isosize - maxsize)))
                return False

        return True

    def factory_snapshottable(self):
        """Check various conditions required for factory to be snapshotable"""

        if not self.all_repos_done('openSUSE:%s' % self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok('openSUSE:%s' % self.project, product, 'images', 'local'):
                return False

        if len(self.livecd_products):
            if not self.all_repos_done('openSUSE:%s:Live' % self.project):
                return False

            for arch in ['i586', 'x86_64']:
                for product in self.livecd_products:
                    if not self.package_ok('openSUSE:%s:Live' % self.project, product,
                                           'standard', arch):
                        return False

        return True

    def release_package(self, project, package, set_release=None):
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        # FIXME: make configurable. openSUSE:Factory:ARM currently has multiple
        # repos with release targets, so obs needs to know which one to release
        if project == 'openSUSE:Factory:ARM':
            query['repository'] = 'images'

        baseurl = ['source', project, package]
        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            logger.info("release %s/%s (%s)" % (project, package, set_release))
        else:
            self.api.retried_POST(url)

    def update_totest(self, snapshot):
        logger.info('Updating snapshot %s' % snapshot)
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project,
                                        flag='publish', state='disable')

        for product in self.ftp_products:
            self.release_package('openSUSE:%s' % self.project, product)

        for cd in self.livecd_products:
            self.release_package('openSUSE:%s:Live' % self.project, cd,
                                 set_release='Snapshot%s' % snapshot)

        for cd in self.main_products:
            self.release_package('openSUSE:%s' % self.project, cd,
                                 set_release='Snapshot%s' % snapshot)

    def publish_factory_totest(self):
        logger.info('Publish ToTest')
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project,
                                        flag='publish', state='enable')

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(['source', 'openSUSE:%s:ToTest' % self.project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        if not root.find('publish'):  # default true
            return True

        for flag in root.find('publish'):
            if flag.get('repository', None) or flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        current_snapshot = self.get_current_snapshot()
        new_snapshot = self.current_version()

        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.api.load_file_content(
            "%s:Staging" % self.api.project, "dashboard", "version_totest")

        logger.info('current_snapshot %s: %s' % (current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.factory_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        can_release = (current_result != QA_INPROGRESS and snapshotable)

        # not overwriting
        if new_snapshot == current_snapshot:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done('openSUSE:%s:ToTest' % self.project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them with a new release
            can_release = False

        can_publish = (current_result == QA_PASSED)

        # already published
        if self.totest_is_publishing():
            logger.debug("totest already publishing")
            can_publish = False

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but not of the same version
                # currently in :ToTest. This can happen when 'releasing' the product failed
                raise Exception("Publishing stopped: tested version (%s) does not match :ToTest version (%s)"
                                % (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)

    def release(self):
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def known_failures_from_dashboard(self, project):
        known_failures = []
        if self.project == "Factory:PowerPC":
            project = "Factory"
        else:
            project = self.project

        url = self.api.makeurl(['source', 'openSUSE:%s:Staging' % project,
                                'dashboard', 'known_failures'])
        f = self.api.retried_GET(url)
        for line in f:
            if not line[0] == '#':
                known_failures.append(line.strip())
        return known_failures

    def write_version_to_dashboard(self, target, version):
        if not self.dryrun:
            url = self.api.makeurl(['source', 'openSUSE:%s:Staging' % self.project,
                                    'dashboard', 'version_%s' % target])
            osc.core.http_PUT(url + '?comment=Update+version', data=version)
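
# The class above is only the shared machinery: arch() and the ftp_products /
# main_products / livecd_products lists are expected from subclasses. A minimal
# sketch of what a concrete subclass could look like (the class name and
# product names are illustrative assumptions, not taken from the code above):
class ToTestExampleFactory(ToTestBase):
    main_products = ['_product:openSUSE-dvd5-dvd-i586_x86_64']
    ftp_products = ['_product:openSUSE-ftp-ftp-i586_x86_64']
    livecd_products = ['kiwi-image-livecd-kde']

    def arch(self):
        return 'x86_64'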
class ToTestBase(object):
    """Base class to store the basic interface"""

    product_repo = 'images'
    product_arch = 'local'
    livecd_repo = 'images'
    livecd_archs = ['i586', 'x86_64']

    def __init__(self, project, dryrun=False, api_url=None,
                 openqa_server='https://openqa.opensuse.org', test_subproject=None):
        self.project = project
        self.dryrun = dryrun
        if not api_url:
            api_url = osc.conf.config['apiurl']
        self.api = StagingAPI(api_url, project=project)
        self.openqa_server = openqa_server
        if not test_subproject:
            test_subproject = 'ToTest'
        self.test_project = '%s:%s' % (self.project, test_subproject)
        self.openqa = OpenQA_Client(server=openqa_server)
        self.issues_to_ignore = []
        self.issuefile = "{}_{}".format(self.project, ISSUE_FILE)
        if os.path.isfile(self.issuefile):
            with open(self.issuefile, 'r') as f:
                for line in f.readlines():
                    self.issues_to_ignore.append(line.strip())
        self.project_base = project.split(':')[0]
        self.update_pinned_descr = False

    def openqa_group(self):
        return self.project

    def iso_prefix(self):
        return self.project

    def jobs_num(self):
        return 70

    def current_version(self):
        return self.release_version()

    def binaries_of_product(self, project, product):
        url = self.api.makeurl(['build', project, self.product_repo, self.product_arch, product])
        try:
            f = self.api.retried_GET(url)
        except urllib2.HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def get_current_snapshot(self):
        """Return the current snapshot in the test project"""

        for binary in self.binaries_of_product(self.test_project,
                                               '_product:%s-cd-mini-%s' % (self.project_base, self.arch())):
            result = re.match(r'%s-%s-NET-.*-Snapshot(.*)-Media.iso'
                              % (self.project_base, self.iso_prefix()), binary)
            if result:
                return result.group(1)

        return None

    def ftp_build_version(self, project, tree, base=None):
        if not base:
            base = self.project_base
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'%s.*Build(.*)-Media1.report' % base, binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    def iso_build_version(self, project, tree, base=None):
        if not base:
            base = self.project_base
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'%s.*Build(.*)-Media(.*).iso' % base, binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def release_version(self):
        url = self.api.makeurl(['build', self.project, 'standard', self.arch(),
                                '_product:%s-release' % self.project_base])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise NotFoundException("can't find %s version" % self.project)

    def current_qa_version(self):
        return self.api.dashboard_content_load('version_totest')

    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs
        """

        url = makeurl(self.openqa_server, ['api', 'v1', 'jobs'],
                      {'group': self.openqa_group(), 'build': snapshot, 'latest': 1})
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            if job['clone_id'] or job['result'] == 'obsoleted':
                continue
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs

    def _result2str(self, result):
        if result == QA_INPROGRESS:
            return 'inprogress'
        elif result == QA_FAILED:
            return 'failed'
        else:
            return 'passed'

    def find_failed_module(self, testmodules):
        # print json.dumps(testmodules, sort_keys=True, indent=4)
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                return module['name']
            logger.info('%s %s %s' % (module['name'], module['result'], module['flags']))

    def update_openqa_status_message(self):
        url = makeurl(self.openqa_server, ['api', 'v1', 'job_groups'])
        f = self.api.retried_GET(url)
        job_groups = json.load(f)
        group_id = 0
        for jg in job_groups:
            if jg['name'] == self.openqa_group():
                group_id = jg['id']
                break

        if not group_id:
            logger.debug('No openQA group id found for status comment update, ignoring')
            return

        pinned_ignored_issue = 0
        issues = ' , '.join(self.issues_to_ignore)
        status_flag = 'publishing' if self.status_for_openqa['is_publishing'] else \
            'preparing' if self.status_for_openqa['can_release'] else \
            'testing' if self.status_for_openqa['snapshotable'] else \
            'building'
        status_msg = "tag:{}:{}:{}".format(self.status_for_openqa['new_snapshot'],
                                           status_flag, status_flag)
        msg = "pinned-description: Ignored issues\r\n\r\n{}\r\n\r\n{}".format(issues, status_msg)
        data = {'text': msg}

        url = makeurl(self.openqa_server, ['api', 'v1', 'groups', str(group_id), 'comments'])
        f = self.api.retried_GET(url)
        comments = json.load(f)
        for comment in comments:
            if comment['userName'] == 'ttm' and \
                    comment['text'].startswith('pinned-description: Ignored issues'):
                pinned_ignored_issue = comment['id']

        logger.debug('Writing openQA status message: {}'.format(data))
        if not self.dryrun:
            if pinned_ignored_issue:
                self.openqa.openqa_request(
                    'PUT', 'groups/%s/comments/%d' % (group_id, pinned_ignored_issue), data=data)
            else:
                self.openqa.openqa_request(
                    'POST', 'groups/%s/comments' % group_id, data=data)

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot

        Returns a QAResult
        """

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        number_of_fails = 0
        in_progress = False
        for job in jobs:
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped',
                                 'user_cancelled', 'obsoleted', 'parallel_failed'):
                jobname = job['name']
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                url = makeurl(self.openqa_server,
                              ['api', 'v1', 'jobs', str(job['id']), 'comments'])
                f = self.api.retried_GET(url)
                comments = json.load(f)
                refs = set()
                labeled = 0
                to_ignore = False
                for comment in comments:
                    for ref in comment['bugrefs']:
                        refs.add(str(ref))
                    if comment['userName'] == 'ttm' and comment['text'] == 'label:unknown_failure':
                        labeled = comment['id']
                    if re.search(r'@ttm:? ignore', comment['text']):
                        to_ignore = True
                ignored = len(refs) > 0
                for ref in refs:
                    if ref not in self.issues_to_ignore:
                        if to_ignore:
                            self.issues_to_ignore.append(ref)
                            self.update_pinned_descr = True
                            with open(self.issuefile, 'a') as f:
                                f.write("%s\n" % ref)
                        else:
                            ignored = False

                if not ignored:
                    number_of_fails += 1
                    if not labeled and len(refs) > 0 and not self.dryrun:
                        data = {'text': 'label:unknown_failure'}
                        self.openqa.openqa_request(
                            'POST', 'jobs/%s/comments' % job['id'], data=data)
                elif labeled:
                    # remove flag - unfortunately can't delete comment unless admin
                    data = {'text': 'Ignored issue'}
                    self.openqa.openqa_request(
                        'PUT', 'jobs/%s/comments/%d' % (job['id'], labeled), data=data)

                if ignored:
                    logger.info("job %s failed, but was ignored", jobname)
                else:
                    joburl = '%s/tests/%s' % (self.openqa_server, job['id'])
                    logger.info("job %s failed, see %s", jobname, joburl)
            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        if number_of_fails > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished
        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            # ignore 32bit for now. We're only interested in aarch64 here
            if repo.get('arch') in ('armv6l', 'armv7l'):
                continue
            if repo.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                repo.get('arch'), repo.get('code')))
                ready = False
        return ready

    def maxsize_for_package(self, package):
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'livecd-x11', package):
            return 681574400  # not a full CD

        if re.match(r'livecd-.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-(dvd9-dvd|cd-DVD)-.*', package):
            return 8539996159

        if re.match(r'.*-ftp-(ftp|POOL)-', package):
            return None

        if ':%s-Addon-NonOss-ftp-ftp' % self.project_base in package:
            return None

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded"""

        query = {'package': package, 'repository': repository, 'arch': arch}
        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for repo in root.findall('result'):
            status = repo.find('status')
            if status.get('code') != 'succeeded':
                logger.info('%s %s %s %s -> %s' % (project, package, repository,
                                                   arch, status.get('code')))
                return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s' % (project, package, repository, arch,
                                                  'too large by %s bytes' % (isosize - maxsize)))
                return False

        return True

    def is_snapshottable(self):
        """Check various conditions required for factory to be snapshotable"""

        if not self.all_repos_done(self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok(self.project, product, self.product_repo, self.product_arch):
                return False

        if len(self.livecd_products):
            if not self.all_repos_done('%s:Live' % self.project):
                return False

            for arch in self.livecd_archs:
                for product in self.livecd_products:
                    if not self.package_ok('%s:Live' % self.project, product,
                                           self.livecd_repo, arch):
                        return False

        return True

    def _release_package(self, project, package, set_release=None):
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        # FIXME: make configurable. openSUSE:Factory:ARM currently has multiple
        # repos with release targets, so obs needs to know which one to release
        if project == 'openSUSE:Factory:ARM':
            query['repository'] = 'images'

        baseurl = ['source', project, package]
        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            logger.info("release %s/%s (%s)" % (project, package, set_release))
        else:
            self.api.retried_POST(url)

    def _release(self, set_release=None):
        for product in self.ftp_products:
            self._release_package(self.project, product)

        for cd in self.livecd_products:
            self._release_package('%s:Live' % self.project, cd, set_release=set_release)

        for cd in self.main_products:
            self._release_package(self.project, cd, set_release=set_release)

    def update_totest(self, snapshot=None):
        release = 'Snapshot%s' % snapshot if snapshot else None
        logger.info('Updating snapshot %s' % snapshot)
        if not self.dryrun:
            self.api.switch_flag_in_prj(self.test_project, flag='publish', state='disable')

        self._release(set_release=release)

    def publish_factory_totest(self):
        logger.info('Publish test project content')
        if not self.dryrun:
            self.api.switch_flag_in_prj(
                self.test_project, flag='publish', state='enable')

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(['source', self.test_project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        if not root.find('publish'):  # default true
            return True

        for flag in root.find('publish'):
            if flag.get('repository', None) or flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        try:
            current_snapshot = self.get_current_snapshot()
        except NotFoundException as e:
            # nothing in test project (yet)
            logger.warn(e)
            current_snapshot = None
        new_snapshot = self.current_version()
        self.update_pinned_descr = False

        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.current_qa_version()

        logger.info('current_snapshot %s: %s' % (current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.is_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        can_release = ((current_snapshot is None or current_result != QA_INPROGRESS)
                       and snapshotable)

        # not overwriting
        if new_snapshot == current_snapshot:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done(self.test_project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them
            # with a new release
            can_release = False

        can_publish = (current_result == QA_PASSED)

        # already published
        totest_is_publishing = self.totest_is_publishing()
        if totest_is_publishing:
            logger.debug("totest already publishing")
            can_publish = False

        if self.update_pinned_descr:
            self.status_for_openqa = {
                'current_snapshot': current_snapshot,
                'new_snapshot': new_snapshot,
                'snapshotable': snapshotable,
                'can_release': can_release,
                'is_publishing': totest_is_publishing,
            }
            self.update_openqa_status_message()

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but not of the same version
                # currently in test project. This can happen when 'releasing' the
                # product failed
                raise Exception("Publishing stopped: tested version (%s) does not match version in test project (%s)"
                                % (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)

    def release(self):
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def write_version_to_dashboard(self, target, version):
        if not self.dryrun:
            self.api.dashboard_content_ensure('version_%s' % target, version,
                                              comment='Update version')
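
# Sketch of a driver for the class above, assuming a concrete subclass like
# the ToTestExampleFactory outlined earlier (the subclass name and the CLI
# shape are assumptions for illustration, not part of the code above):
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='Manage the test ("ToTest") project')
    parser.add_argument('project', help='project to manage, e.g. openSUSE:Factory')
    parser.add_argument('--dry', action='store_true', help='only log what would be done')
    args = parser.parse_args()

    manager = ToTestExampleFactory(args.project, dryrun=args.dry)
    manager.totest()  # one pass: maybe release a new snapshot, maybe publish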
class ToTestManager(ToolBase.ToolBase):

    def __init__(self, tool):
        ToolBase.ToolBase.__init__(self)
        # copy attributes
        self.logger = logging.getLogger(__name__)
        self.apiurl = tool.apiurl
        self.debug = tool.debug
        self.caching = tool.caching
        self.dryrun = tool.dryrun

    def setup(self, project):
        self.project = ToTest(project, self.apiurl)
        self.api = StagingAPI(self.apiurl, project=project)

    def version_file(self, target):
        return 'version_%s' % target

    def write_version_to_dashboard(self, target, version):
        if self.dryrun or self.project.do_not_release:
            return
        self.api.pseudometa_file_ensure(self.version_file(target), version,
                                        comment='Update version')

    def current_qa_version(self):
        return self.api.pseudometa_file_load(self.version_file('totest'))

    def iso_build_version(self, project, tree, repo=None, arch=None):
        for binary in self.binaries_of_product(project, tree, repo=repo, arch=arch):
            result = re.match(r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz|\.raw\.xz)',
                              binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def version_from_totest_project(self):
        if len(self.project.main_products):
            return self.iso_build_version(self.project.test_project,
                                          self.project.main_products[0])

        return self.iso_build_version(self.project.test_project,
                                      self.project.image_products[0].package,
                                      arch=self.project.image_products[0].archs[0])

    def binaries_of_product(self, project, product, repo=None, arch=None):
        if repo is None:
            repo = self.project.product_repo
        if arch is None:
            arch = self.project.product_arch

        url = self.api.makeurl(['build', project, repo, arch, product])
        try:
            f = self.api.retried_GET(url)
        except HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def ftp_build_version(self, project, tree):
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'.*-Build(.*)-Media1.report', binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    # make sure to update the attribute as atomic as possible - as such
    # only update the snapshot and don't erase anything else. The snapshots
    # have very different update times within the pipeline, so there is
    # normally no chance that releaser and publisher overwrite states
    def update_status(self, status, snapshot):
        status_dict = self.get_status_dict()
        if self.dryrun:
            self.logger.info('setting {} snapshot to {}'.format(status, snapshot))
            return
        if status_dict.get(status) != snapshot:
            status_dict[status] = snapshot
            text = yaml.safe_dump(status_dict)
            self.api.attribute_value_save('ToTestManagerStatus', text)

    def get_status_dict(self):
        text = self.api.attribute_value_load('ToTestManagerStatus')
        if text:
            return yaml.safe_load(text)
        return dict()

    def get_status(self, status):
        return self.get_status_dict().get(status)

    def release_package(self, project, package, set_release=None, repository=None,
                        target_project=None, target_repository=None):
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        if repository is not None:
            query['repository'] = repository

        if target_project is not None:
            # Both need to be set
            query['target_project'] = target_project
            query['target_repository'] = target_repository

        baseurl = ['source', project, package]
        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun or self.project.do_not_release:
            self.logger.info('release %s/%s (%s)' % (project, package, query))
        else:
            self.api.retried_POST(url)

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished
        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            if repo.get('dirty') == 'true':
                self.logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                     repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                self.logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                     repo.get('arch'), repo.get('code')))
                ready = False
        return ready
class ToTestBase(object):
    """Base class to store the basic interface"""

    def __init__(self, project, dryrun):
        self.project = project
        self.dryrun = dryrun
        self.api = StagingAPI(osc.conf.config['apiurl'], project='openSUSE:%s' % project)
        self.known_failures = self.known_failures_from_dashboard(project)

    def openqa_group(self):
        return self.project

    def iso_prefix(self):
        return self.project

    def jobs_num(self):
        return 90

    def current_version(self):
        return self.release_version()

    def binaries_of_product(self, project, product):
        url = self.api.makeurl(['build', project, 'images', 'local', product])
        try:
            f = self.api.retried_GET(url)
        except urllib2.HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def get_current_snapshot(self):
        """Return the current snapshot in :ToTest"""

        # for now we hardcode all kind of things
        for binary in self.binaries_of_product('openSUSE:%s:ToTest' % self.project,
                                               '_product:openSUSE-cd-mini-%s' % self.arch()):
            result = re.match(r'openSUSE-%s-NET-.*-Snapshot(.*)-Media.iso' % self.iso_prefix(), binary)
            if result:
                return result.group(1)

        return None

    def ftp_build_version(self, tree):
        for binary in self.binaries_of_product('openSUSE:%s' % self.project, tree):
            result = re.match(r'openSUSE.*Build(.*)-Media1.report', binary)
            if result:
                return result.group(1)
        raise Exception("can't find %s version" % self.project)

    def release_version(self):
        url = self.api.makeurl(['build', 'openSUSE:%s' % self.project, 'standard',
                                self.arch(), '_product:openSUSE-release'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise Exception("can't find %s version" % self.project)

    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs
        """

        url = makeurl('https://openqa.opensuse.org', ['api', 'v1', 'jobs'],
                      {'group': self.openqa_group(), 'build': snapshot})
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            if job['clone_id']:
                continue
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs

    def _result2str(self, result):
        if result == QA_INPROGRESS:
            return 'inprogress'
        elif result == QA_FAILED:
            return 'failed'
        else:
            return 'passed'

    def find_failed_module(self, testmodules):
        # print json.dumps(testmodules, sort_keys=True, indent=4)
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                return module['name']
            print module['name'], module['result'], module['flags']

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot

        Returns a QAResult
        """

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        if len(jobs) < self.jobs_num():  # not yet scheduled
            print 'we have only %s jobs' % len(jobs)
            return QA_INPROGRESS

        number_of_fails = 0
        in_progress = False
        machines = []
        for job in jobs:
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped', 'user_cancelled', 'obsoleted'):
                jobname = job['name'] + '@' + job['settings']['MACHINE']
                # Record machines we have tests for
                machines.append(job['settings']['MACHINE'])
                if jobname in self.known_failures:
                    self.known_failures.remove(jobname)
                    continue
                number_of_fails += 1
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                failedmodule = self.find_failed_module(job['modules'])
                url = 'https://openqa.opensuse.org/tests/%s' % job['id']
                print jobname, url, failedmodule, job['retry_avbl']
                # if number_of_fails < 3: continue
            elif job['result'] == 'passed':
                continue
            elif job['result'] == 'none':
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        if number_of_fails > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        machines = list(set(machines))
        for item in machines:
            for item2 in self.known_failures:
                if item2.split('@')[1] == item:
                    print 'now passing', item2
        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished
        """

        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory'):
                continue
            # ignore 32bit for now. We're only interested in aarch64 here
            if repo.get('arch') in ('armv6l', 'armv7l'):
                continue
            if repo.get('dirty', '') == 'true':
                print repo.get('project'), repo.get('repository'), repo.get('arch'), 'dirty'
                ready = False
            if repo.get('code') not in codes:
                print repo.get('project'), repo.get('repository'), repo.get('arch'), repo.get('code')
                ready = False
        return ready

    def maxsize_for_package(self, package):
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'.*-image-livecd-x11.*', package):
            return 681574400  # not a full CD

        if re.match(r'.*-image-livecd.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-dvd9-dvd-.*', package):
            return 8539996159

        if package.startswith('_product:openSUSE-ftp-ftp-'):
            return None

        if package == '_product:openSUSE-Addon-NonOss-ftp-ftp-i586_x86_64':
            return None

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded"""

        query = {'package': package, 'repository': repository, 'arch': arch}
        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for repo in root.findall('result'):
            status = repo.find('status')
            if status.get('code') != 'succeeded':
                print project, package, repository, arch, status.get('code')
                return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                print project, package, repository, arch, 'too large by %s bytes' % (isosize - maxsize)
                return False

        return True

    def factory_snapshottable(self):
        """Check various conditions required for factory to be snapshotable"""

        if not self.all_repos_done('openSUSE:%s' % self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok('openSUSE:%s' % self.project, product, 'images', 'local'):
                return False

        if len(self.livecd_products):
            if not self.all_repos_done('openSUSE:%s:Live' % self.project):
                return False

            for arch in ['i586', 'x86_64']:
                for product in self.livecd_products:
                    if not self.package_ok('openSUSE:%s:Live' % self.project, product,
                                           'standard', arch):
                        return False

        return True

    def release_package(self, project, package, set_release=None):
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        baseurl = ['source', project, package]
        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            print "release %s/%s (%s)" % (project, package, set_release)
        else:
            self.api.retried_POST(url)

    def update_totest(self, snapshot):
        print 'Updating snapshot %s' % snapshot
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project,
                                        flag='publish', state='disable')

        for product in self.ftp_products:
            self.release_package('openSUSE:%s' % self.project, product)

        for cd in self.livecd_products:
            self.release_package('openSUSE:%s:Live' % self.project, cd,
                                 set_release='Snapshot%s' % snapshot)

        for cd in self.main_products:
            self.release_package('openSUSE:%s' % self.project, cd,
                                 set_release='Snapshot%s' % snapshot)

    def publish_factory_totest(self):
        print 'Publish ToTest'
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project,
                                        flag='publish', state='enable')

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(['source', 'openSUSE:%s:ToTest' % self.project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        if not root.find('publish'):  # default true
            return True

        for flag in root.find('publish'):
            if flag.get('repository', None) or flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        current_snapshot = self.get_current_snapshot()
        new_snapshot = self.current_version()

        current_result = self.overall_result(current_snapshot)

        print 'current_snapshot', current_snapshot, self._result2str(current_result)

        can_release = (current_result != QA_INPROGRESS and self.factory_snapshottable())

        # not overwriting
        if new_snapshot == current_snapshot:
            can_release = False
        elif not self.all_repos_done('openSUSE:%s:ToTest' % self.project):
            # the repos have to be done, otherwise we better not touch them with a new release
            can_release = False

        can_publish = (current_result == QA_PASSED)

        # already published
        if self.totest_is_publishing():
            can_publish = False

        if can_publish:
            self.publish_factory_totest()
            can_release = False  # we have to wait

        if can_release:
            self.update_totest(new_snapshot)

    def release(self):
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def known_failures_from_dashboard(self, project):
        known_failures = []
        if self.project in ("Factory:PowerPC", "Factory:ARM"):
            project = "Factory"
        else:
            project = self.project

        url = self.api.makeurl(['source', 'openSUSE:%s:Staging' % project,
                                'dashboard', 'known_failures'])
        f = self.api.retried_GET(url)
        for line in f:
            if not line[0] == '#':
                known_failures.append(line.strip())
        return known_failures
class ToTestManager(ToolBase.ToolBase):

    def __init__(self, tool):
        ToolBase.ToolBase.__init__(self)
        # copy attributes
        self.logger = logging.getLogger(__name__)
        self.apiurl = tool.apiurl
        self.debug = tool.debug
        self.caching = tool.caching
        self.dryrun = tool.dryrun

    def setup(self, project):
        self.project = ToTest(project, self.apiurl)
        self.api = StagingAPI(self.apiurl, project=project)

    def version_file(self, target):
        return 'version_%s' % target

    def write_version_to_dashboard(self, target, version):
        if self.dryrun or self.project.do_not_release:
            return
        self.api.pseudometa_file_ensure(self.version_file(target), version,
                                        comment='Update version')

    def current_qa_version(self):
        return self.api.pseudometa_file_load(self.version_file('totest'))

    def iso_build_version(self, project, tree, repo=None, arch=None):
        for binary in self.binaries_of_product(project, tree, repo=repo, arch=arch):
            result = re.match(r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz|\.raw\.xz|\.appx)',
                              binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def version_from_totest_project(self):
        if len(self.project.main_products):
            return self.iso_build_version(self.project.test_project,
                                          self.project.main_products[0])

        return self.iso_build_version(self.project.test_project,
                                      self.project.image_products[0].package,
                                      arch=self.project.image_products[0].archs[0])

    def binaries_of_product(self, project, product, repo=None, arch=None):
        if repo is None:
            repo = self.project.product_repo
        if arch is None:
            arch = self.project.product_arch

        url = self.api.makeurl(['build', project, repo, arch, product])
        try:
            f = self.api.retried_GET(url)
        except HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def ftp_build_version(self, project, tree):
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'.*-Build(.*)-Media1.report', binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    # make sure to update the attribute as atomic as possible - as such
    # only update the snapshot and don't erase anything else. The snapshots
    # have very different update times within the pipeline, so there is
    # normally no chance that releaser and publisher overwrite states
    def update_status(self, status, snapshot):
        status_dict = self.get_status_dict()
        if self.dryrun:
            self.logger.info('setting {} snapshot to {}'.format(status, snapshot))
            return
        if status_dict.get(status) != snapshot:
            status_dict[status] = snapshot
            text = yaml.safe_dump(status_dict)
            self.api.attribute_value_save('ToTestManagerStatus', text)

    def get_status_dict(self):
        text = self.api.attribute_value_load('ToTestManagerStatus')
        if text:
            return yaml.safe_load(text)
        return dict()

    def get_status(self, status):
        return self.get_status_dict().get(status)

    def release_package(self, project, package, set_release=None, repository=None,
                        target_project=None, target_repository=None):
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        if repository is not None:
            query['repository'] = repository

        if target_project is not None:
            # Both need to be set
            query['target_project'] = target_project
            query['target_repository'] = target_repository

        baseurl = ['source', project, package]
        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun or self.project.do_not_release:
            self.logger.info('release %s/%s (%s)' % (project, package, query))
        else:
            self.api.retried_POST(url)

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished
        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            if repo.get('dirty') == 'true':
                self.logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                     repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                self.logger.info('%s %s %s -> %s' % (repo.get('project'), repo.get('repository'),
                                                     repo.get('arch'), repo.get('code')))
                ready = False
        return ready
class ToTestBase(object): """Base class to store the basic interface""" product_repo = 'images' product_arch = 'local' livecd_repo = 'images' totest_container_repo = 'containers' main_products = [] ftp_products = [] container_products = [] livecd_products = [] image_products = [] def __init__(self, project, dryrun=False, norelease=False, api_url=None, openqa_server='https://openqa.opensuse.org', test_subproject=None): self.project = project self.dryrun = dryrun self.norelease = norelease if not api_url: api_url = osc.conf.config['apiurl'] self.api = StagingAPI(api_url, project=project) self.openqa_server = openqa_server if not test_subproject: test_subproject = 'ToTest' self.test_project = '%s:%s' % (self.project, test_subproject) self.openqa = OpenQA_Client(server=openqa_server) self.load_issues_to_ignore() self.project_base = project.split(':')[0] self.update_pinned_descr = False self.amqp_url = osc.conf.config.get('ttm_amqp_url') def load_issues_to_ignore(self): text = self.api.attribute_value_load('IgnoredIssues') if text: root = yaml.load(text) self.issues_to_ignore = root.get('last_seen') else: self.issues_to_ignore = dict() def save_issues_to_ignore(self): if self.dryrun: return text = yaml.dump({'last_seen': self.issues_to_ignore}, default_flow_style=False) self.api.attribute_value_save('IgnoredIssues', text) def openqa_group(self): return self.project def iso_prefix(self): return self.project def jobs_num(self): return 70 def current_version(self): return self.release_version() def binaries_of_product(self, project, product, repo=None, arch=None): if repo is None: repo = self.product_repo if arch is None: arch = self.product_arch url = self.api.makeurl(['build', project, repo, arch, product]) try: f = self.api.retried_GET(url) except HTTPError: return [] ret = [] root = ET.parse(f).getroot() for binary in root.findall('binary'): ret.append(binary.get('filename')) return ret def get_current_snapshot(self): """Return the current snapshot in the test project""" for binary in self.binaries_of_product( self.test_project, '000product:%s-cd-mini-%s' % (self.project_base, self.arch())): result = re.match( r'%s-%s-NET-.*-Snapshot(.*)-Media.iso' % (self.project_base, self.iso_prefix()), binary) if result: return result.group(1) return None def ftp_build_version(self, project, tree, base=None): if not base: base = self.project_base for binary in self.binaries_of_product(project, tree): result = re.match(r'%s.*Build(.*)-Media1.report' % base, binary) if result: return result.group(1) raise NotFoundException("can't find %s ftp version" % project) def iso_build_version(self, project, tree, base=None, repo=None, arch=None): if not base: base = self.project_base for binary in self.binaries_of_product(project, tree, repo=repo, arch=arch): result = re.match( r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz)', binary) if result: return result.group(1) raise NotFoundException("can't find %s iso version" % project) def release_version(self): url = self.api.makeurl([ 'build', self.project, 'standard', self.arch(), '000release-packages:%s-release' % self.project_base ]) f = self.api.retried_GET(url) root = ET.parse(f).getroot() for binary in root.findall('binary'): binary = binary.get('filename', '') result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary) if result: return result.group(1) raise NotFoundException("can't find %s version" % self.project) def current_qa_version(self): return self.api.pseudometa_file_load('version_totest') def find_openqa_results(self, snapshot): """Return 
the openqa jobs of a given snapshot and filter out the cloned jobs """ url = makeurl(self.openqa_server, ['api', 'v1', 'jobs'], { 'group': self.openqa_group(), 'build': snapshot, 'latest': 1 }) f = self.api.retried_GET(url) jobs = [] for job in json.load(f)['jobs']: if job['clone_id'] or job['result'] == 'obsoleted': continue job['name'] = job['name'].replace(snapshot, '') jobs.append(job) return jobs def _result2str(self, result): if result == QA_INPROGRESS: return 'inprogress' elif result == QA_FAILED: return 'failed' else: return 'passed' def find_failed_module(self, testmodules): # print json.dumps(testmodules, sort_keys=True, indent=4) for module in testmodules: if module['result'] != 'failed': continue flags = module['flags'] if 'fatal' in flags or 'important' in flags: return module['name'] break logger.info('%s %s %s' % (module['name'], module['result'], module['flags'])) def update_openqa_status_message(self): url = makeurl(self.openqa_server, ['api', 'v1', 'job_groups']) f = self.api.retried_GET(url) job_groups = json.load(f) group_id = 0 for jg in job_groups: if jg['name'] == self.openqa_group(): group_id = jg['id'] break if not group_id: logger.debug( 'No openQA group id found for status comment update, ignoring') return pinned_ignored_issue = 0 issues = ' , '.join(self.issues_to_ignore.keys()) status_flag = 'publishing' if self.status_for_openqa['is_publishing'] else \ 'preparing' if self.status_for_openqa['can_release'] else \ 'testing' if self.status_for_openqa['snapshotable'] else \ 'building' status_msg = "tag:{}:{}:{}".format( self.status_for_openqa['new_snapshot'], status_flag, status_flag) msg = "pinned-description: Ignored issues\r\n\r\n{}\r\n\r\n{}".format( issues, status_msg) data = {'text': msg} url = makeurl(self.openqa_server, ['api', 'v1', 'groups', str(group_id), 'comments']) f = self.api.retried_GET(url) comments = json.load(f) for comment in comments: if comment['userName'] == 'ttm' and \ comment['text'].startswith('pinned-description: Ignored issues'): pinned_ignored_issue = comment['id'] logger.debug('Writing openQA status message: {}'.format(data)) if not self.dryrun: if pinned_ignored_issue: self.openqa.openqa_request('PUT', 'groups/%s/comments/%d' % (group_id, pinned_ignored_issue), data=data) else: self.openqa.openqa_request('POST', 'groups/%s/comments' % group_id, data=data) def overall_result(self, snapshot): """Analyze the openQA jobs of a given snapshot Returns a QAResult""" if snapshot is None: return QA_FAILED jobs = self.find_openqa_results(snapshot) self.failed_relevant_jobs = [] self.failed_ignored_jobs = [] if len(jobs) < self.jobs_num(): # not yet scheduled logger.warning('we have only %s jobs' % len(jobs)) return QA_INPROGRESS in_progress = False for job in jobs: # print json.dumps(job, sort_keys=True, indent=4) if job['result'] in ('failed', 'incomplete', 'skipped', 'user_cancelled', 'obsoleted', 'parallel_failed'): # print json.dumps(job, sort_keys=True, indent=4), jobname url = makeurl( self.openqa_server, ['api', 'v1', 'jobs', str(job['id']), 'comments']) f = self.api.retried_GET(url) comments = json.load(f) refs = set() labeled = 0 to_ignore = False for comment in comments: for ref in comment['bugrefs']: refs.add(str(ref)) if comment['userName'] == 'ttm' and comment[ 'text'] == 'label:unknown_failure': labeled = comment['id'] if re.search(r'@ttm:? 
ignore', comment['text']): to_ignore = True # to_ignore can happen with or without refs ignored = True if to_ignore else len(refs) > 0 build_nr = str(job['settings']['BUILD']) for ref in refs: if ref not in self.issues_to_ignore: if to_ignore: self.issues_to_ignore[ref] = build_nr self.update_pinned_descr = True else: ignored = False else: # update reference self.issues_to_ignore[ref] = build_nr if ignored: self.failed_ignored_jobs.append(job['id']) if labeled: text = 'Ignored issue' if len( refs) > 0 else 'Ignored failure' # remove flag - unfortunately can't delete comment unless admin data = {'text': text} if self.dryrun: logger.info("Would label {} with: {}".format( job['id'], text)) else: self.openqa.openqa_request('PUT', 'jobs/%s/comments/%d' % (job['id'], labeled), data=data) logger.info("job %s failed, but was ignored", job['name']) else: self.failed_relevant_jobs.append(job['id']) if not labeled and len(refs) > 0: data = {'text': 'label:unknown_failure'} if self.dryrun: logger.info("Would label {} as unknown".format( job['id'])) else: self.openqa.openqa_request('POST', 'jobs/%s/comments' % job['id'], data=data) joburl = '%s/tests/%s' % (self.openqa_server, job['id']) logger.info("job %s failed, see %s", job['name'], joburl) elif job['result'] == 'passed' or job['result'] == 'softfailed': continue elif job['result'] == 'none': if job['state'] != 'cancelled': in_progress = True else: raise Exception(job['result']) self.save_issues_to_ignore() if len(self.failed_relevant_jobs) > 0: return QA_FAILED if in_progress: return QA_INPROGRESS return QA_PASSED def all_repos_done(self, project, codes=None): """Check the build result of the project and only return True if all repos of that project are either published or unpublished """ # coolo's experience says that 'finished' won't be # sufficient here, so don't try to add it :-) codes = ['published', 'unpublished'] if not codes else codes url = self.api.makeurl(['build', project, '_result'], {'code': 'failed'}) f = self.api.retried_GET(url) root = ET.parse(f).getroot() ready = True for repo in root.findall('result'): # ignore ports. 'factory' is used by arm for repos that are not # meant to use the totest manager. 
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            if repo.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                            repo.get('repository'), repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                            repo.get('repository'), repo.get('arch'), repo.get('code')))
                ready = False
        return ready

    def maxsize_for_package(self, package):
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'livecd-x11', package):
            return 681574400  # not a full CD

        if re.match(r'livecd-.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-(dvd9-dvd|cd-DVD)-.*', package):
            return 8539996159

        if re.match(r'.*-ftp-(ftp|POOL)-', package):
            return None

        # docker container has no size limit
        if re.match(r'opensuse-.*-image.*', package):
            return None

        if '-Addon-NonOss-ftp-ftp' in package:
            return None

        if 'JeOS' in package:
            return 4700372992

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it succeeded

        """
        query = {'package': package, 'repository': repository, 'arch': arch}
        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        # [@code!='succeeded'] is not supported by ET
        failed = [status for status in root.findall('result/status')
                  if status.get('code') != 'succeeded']
        if failed:
            logger.info('%s %s %s %s -> %s' % (project, package, repository,
                        arch, failed[0].get('code')))
            return False

        if not len(root.findall('result/status[@code="succeeded"]')):
            logger.info('No "succeeded" for %s %s %s %s' % (project, package, repository, arch))
            return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s' % (project, package, repository, arch,
                             'too large by %s bytes' % (isosize - maxsize)))
                return False

        return True

    def is_snapshottable(self):
        """Check various conditions required for factory to be snapshottable

        """
        if not self.all_repos_done(self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok(self.project, product, self.product_repo, self.product_arch):
                return False

        for product in self.image_products + self.container_products:
            for arch in product.archs:
                if not self.package_ok(self.project, product.package, self.product_repo, arch):
                    return False

        if len(self.livecd_products):
            if not self.all_repos_done('%s:Live' % self.project):
                return False

            for product in self.livecd_products:
                for arch in product.archs:
                    if not self.package_ok('%s:Live' % self.project, product.package,
                                           self.product_repo, arch):
                        return False

        return True

    def _release_package(self, project, package, set_release=None, repository=None,
                         target_project=None, target_repository=None):
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        if repository is not None:
            query['repository'] = repository

        if target_project is not None:
            # Both need to be set
            query['target_project'] = target_project
            query['target_repository'] = target_repository

        baseurl = ['source', project, package]
        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun or self.norelease:
            logger.info("release %s/%s (%s)" % (project, package, query))
        else:
            self.api.retried_POST(url)

    def _release(self, set_release=None):
        for product in self.ftp_products:
            self._release_package(self.project, product, repository=self.product_repo)

        for cd in self.livecd_products:
            self._release_package('%s:Live' % self.project, cd.package,
                                  set_release=set_release, repository=self.livecd_repo)

        for image in self.image_products:
            self._release_package(self.project, image.package, set_release=set_release,
                                  repository=self.product_repo)

        for cd in self.main_products:
            self._release_package(self.project, cd, set_release=set_release,
                                  repository=self.product_repo)

        for container in self.container_products:
            # Containers are built in the same repo as other image products,
            # but released into a different repo in :ToTest
            self._release_package(self.project, container.package,
                                  repository=self.product_repo,
                                  target_project=self.test_project,
                                  target_repository=self.totest_container_repo)

    def update_totest(self, snapshot=None):
        # omitting snapshot (i.e. set_release=None) releases the binaries
        # without renaming them
        release = 'Snapshot%s' % snapshot if snapshot else None
        logger.info('Updating snapshot %s' % snapshot)
        if not (self.dryrun or self.norelease):
            self.api.switch_flag_in_prj(self.test_project, flag='publish',
                                        state='disable', repository=self.product_repo)

        self._release(set_release=release)

    def publish_factory_totest(self):
        logger.info('Publish test project content')
        if not (self.dryrun or self.norelease):
            self.api.switch_flag_in_prj(self.test_project, flag='publish',
                                        state='enable', repository=self.product_repo)
        if self.container_products:
            logger.info('Releasing container products from ToTest')
            for container in self.container_products:
                self._release_package(self.test_project, container.package,
                                      repository=self.totest_container_repo)

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(['source', self.test_project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        publish = root.find('publish')
        # an absent or empty <publish> element means the default, which is 'enabled'
        if publish is None or len(publish) == 0:
            return True
        for flag in publish:
            if flag.get('repository', None) not in [None, self.product_repo]:
                continue
            if flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        try:
            current_snapshot = self.get_current_snapshot()
        except NotFoundException as e:
            # nothing in test project (yet)
            logger.warning(e)
            current_snapshot = None

        new_snapshot = self.current_version()
        self.update_pinned_descr = False
        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.current_qa_version()

        logger.info('current_snapshot %s: %s' %
                    (current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.is_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        can_release = ((current_snapshot is None or current_result != QA_INPROGRESS)
                       and snapshotable)

        # not overwriting
        if new_snapshot == current_qa_version:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done(self.test_project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them
            # with a new release
            can_release = False

        self.send_amqp_event(current_snapshot, current_result)

        can_publish = (current_result == QA_PASSED)

        # already published
        totest_is_publishing = self.totest_is_publishing()
        if totest_is_publishing:
            logger.debug("totest already publishing")
            can_publish = False

        if self.update_pinned_descr:
            self.status_for_openqa = {
                'current_snapshot': current_snapshot,
                'new_snapshot': new_snapshot,
                'snapshotable': snapshotable,
                'can_release': can_release,
                'is_publishing': totest_is_publishing,
            }
            self.update_openqa_status_message()

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but
                # not of the same version currently in the test project. This
                # can happen when 'releasing' the product failed
                raise Exception(
                    "Publishing stopped: tested version (%s) does not match "
                    "version in test project (%s)" %
                    (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)

    def send_amqp_event(self, current_snapshot, current_result):
        if not self.amqp_url:
            logger.debug('No ttm_amqp_url configured in oscrc - skipping amqp event emission')
            return

        logger.debug('Sending AMQP message')
        # strip the trailing 'ed', so e.g. 'passed' becomes the topic suffix 'pass'
        inf = re.sub(r'ed$', '', self._result2str(current_result))
        msg_topic = '%s.ttm.build.%s' % (self.project_base.lower(), inf)
        msg_body = json.dumps({
            'build': current_snapshot,
            'project': self.project,
            'failed_jobs': {
                'relevant': self.failed_relevant_jobs,
                'ignored': self.failed_ignored_jobs,
            }
        })

        # send amqp event
        tries = 7  # arbitrary
        for t in range(tries):
            try:
                notify_connection = pika.BlockingConnection(pika.URLParameters(self.amqp_url))
                notify_channel = notify_connection.channel()
                notify_channel.exchange_declare(exchange='pubsub',
                                                exchange_type='topic',
                                                passive=True, durable=True)
                notify_channel.basic_publish(exchange='pubsub',
                                             routing_key=msg_topic,
                                             body=msg_body)
                notify_connection.close()
                break
            except pika.exceptions.ConnectionClosed as e:
                logger.warning('Sending AMQP event did not work: %s. Retrying try %s out of %s'
                               % (e, t + 1, tries))
        else:
            # for/else: reached only when the loop never hit 'break',
            # i.e. every connection attempt failed
            logger.error('Could not send out AMQP event for %s tries, aborting.' % tries)

    def release(self):
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def write_version_to_dashboard(self, target, version):
        if not (self.dryrun or self.norelease):
            self.api.pseudometa_file_ensure('version_%s' % target, version,
                                            comment='Update version')
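
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the original tool): ToTestBase
# is a base class, and a concrete subclass is expected to provide arch(), the
# product lists (ftp_products, main_products, image_products, livecd_products,
# container_products) and the repository/project attributes (product_repo,
# product_arch, livecd_repo, test_project, totest_container_repo, amqp_url,
# norelease, ...).  The subclass name 'ToTestFactory' below is an assumption
# for illustration, as is the logging setup.
#
#     import logging
#     logging.basicConfig(level=logging.INFO)
#
#     manager = ToTestFactory('Factory', dryrun=True)  # hypothetical subclass
#     manager.totest()    # run one release/publish decision cycle
#     # or force a release of the current version without openQA gating:
#     # manager.release()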