def test_regular(self):
    """A dependency with no category ends up in the regular-deps list."""
    bugs = {
        1: BugInfo(BugCategory.STABLEREQ, '', depends=[2]),
        2: BugInfo(None, '', blocks=[1]),
    }
    self.assertEqual(split_dependent_bugs(bugs, 1), ([], [2]))
def test_streq_mixed(self):
    """A KEYWORDREQ dependency of a STABLEREQ is a regular dep, not merged."""
    bugs = {
        1: BugInfo(BugCategory.STABLEREQ, '', depends=[2]),
        2: BugInfo(BugCategory.KEYWORDREQ, '', depends=[3], blocks=[1]),
        3: BugInfo(BugCategory.STABLEREQ, '', blocks=[2]),
    }
    self.assertEqual(split_dependent_bugs(bugs, 1), ([], [2]))
def test_kwreq(self):
    """Same-category (KEYWORDREQ) deps are collected transitively for merging."""
    bugs = {
        1: BugInfo(BugCategory.KEYWORDREQ, '', depends=[2]),
        2: BugInfo(BugCategory.KEYWORDREQ, '', depends=[3], blocks=[1]),
        3: BugInfo(BugCategory.KEYWORDREQ, '', blocks=[2]),
    }
    self.assertEqual(split_dependent_bugs(bugs, 1), ([2, 3], []))
def test_common_dep(self):
    """A dependency shared by two branches of the graph is listed only once."""
    bugs = {
        1: BugInfo(BugCategory.STABLEREQ, '', depends=[2, 3]),
        2: BugInfo(BugCategory.STABLEREQ, '', depends=[4], blocks=[1]),
        3: BugInfo(BugCategory.STABLEREQ, '', depends=[4], blocks=[1]),
        4: BugInfo(BugCategory.STABLEREQ, '', blocks=[2, 3]),
    }
    self.assertEqual(split_dependent_bugs(bugs, 1), ([2, 3, 4], []))
def sanity_check(self) -> int:
    """Run sanity checks on all matched bugs and report the results.

    Fetches keywording/stabilization bugs via Bugzilla, verifies each
    bug's package list against the repository (masks, keyword validity,
    dependency check via ``check_dependencies``) and -- when
    ``--update-bugs`` is passed -- writes the sanity-check flag, a
    comment and optional CC/ALLARCHES/package-list changes back to the
    bug.  Per-bug results are memoized through the cache returned by
    ``self.get_cache()`` so unchanged bugs are skipped.

    Returns:
        0, used as the process exit status.
    """
    repo, git_repo = self.get_git_repository()

    if not self.args.update_bugs:
        log.warning('Running in pretend mode.')
        log.warning('(pass --update-bugs to enable bug updates)')

    cache = self.get_cache()
    cache.setdefault('bugs', {})

    start_time = datetime.datetime.utcnow()
    log.info(f'NATTkA starting at {start_time}')
    # optional deadline derived from --time-limit
    end_time = None
    if self.args.time_limit is not None:
        end_time = (start_time
                    + datetime.timedelta(seconds=self.args.time_limit))
        log.info(f'... will process until {end_time}')

    bz = self.get_bugzilla(require_api_key=self.args.update_bugs)
    bugnos, bugs = self.find_bugs()
    log.info(f'Found {len(bugnos)} bugs')
    bugs_done = 0

    profiles = load_profiles(repo)

    try:
        for bno in bugnos:
            # honor the --bug-limit / --time-limit stop conditions
            if self.args.bug_limit and bugs_done >= self.args.bug_limit:
                log.info(f'Reached limit of {self.args.bug_limit} bugs')
                break
            if (end_time is not None
                    and datetime.datetime.utcnow() > end_time):
                log.info('Reached time limit')
                break

            b = bugs[bno]
            # Bugzilla is prone to race conditions between fetching bug
            # data and updating bugs, so ignore bugs that have been
            # updated recently.
            if (start_time - b.last_change_time).total_seconds() < 60:
                log.info(f'Bug {bno}: skipping due to recent change')
                continue
            if b.category is None:
                log.info(f'Bug {bno}: neither stablereq nor keywordreq')
                continue

            kw_deps, reg_deps = split_dependent_bugs(bugs, bno)
            # processing bug without its dependencies may result
            # in issuing false positives
            if any(dep not in bugs for dep in reg_deps):
                log.warning(f'Bug {bno}: dependencies not fetched, '
                            f'skipping')
                continue

            log.info(f'Bug {bno} ({b.category.name})')
            # per-bug state; the exception handlers and the update code
            # below rely on these being preset before the try block
            plist: PackageKeywordsDict = {}
            comment: typing.Optional[str] = None
            check_res: typing.Optional[bool] = None
            cache_entry: typing.Optional[dict] = None
            cc_arches: typing.List[str] = []
            allarches_chg = False
            expanded_plist: typing.Optional[str] = None
            try:
                arches_cced = bool(
                    arches_from_cc(b.cc, repo.known_arches))
                try:
                    # build the package -> keywords mapping, rejecting
                    # masked packages outright
                    for p, kw in match_package_list(
                            repo, b, only_new=True):
                        masked, mask_kws = is_masked(
                            repo, p, kw, profiles)
                        if masked == MaskReason.REPOSITORY_MASK:
                            raise PackageMasked(
                                f'package masked: {p.cpvstr}')
                        elif masked == MaskReason.PROFILE_MASK:
                            raise PackageMasked(
                                f'package masked: {p.cpvstr}, '
                                f'in all profiles for arch: '
                                f'{" ".join(mask_kws)}')
                        elif masked == MaskReason.KEYWORD_MASK:
                            raise PackageMasked(
                                f'package masked: {p.cpvstr}, '
                                f'by keywords: {" ".join(mask_kws)}')
                        plist[p] = kw
                except KeywordNotSpecified:
                    assert not arches_cced
                    assert plist
                    # this is raised after iterating all entries,
                    # so plist is usable already
                    if 'CC-ARCHES' not in b.keywords:
                        raise
                    all_keywords = set()
                    for p, kw in plist.items():
                        fkw = frozenset(kw)
                        if not fkw:
                            fkw = get_suggested_keywords(
                                repo, p,
                                b.category == BugCategory.STABLEREQ)
                        all_keywords.add(fkw)
                        # we can CC arches iff all packages have
                        # consistent (potential) keywords
                        if len(all_keywords) > 1 or not fkw:
                            raise
                        plist[p] = list(fkw)

                # keep this bug's own list separate for the dependency
                # check; plist is extended with keywording deps below
                check_packages = dict(plist)
                for kw_dep in kw_deps:
                    try:
                        merge_package_list(
                            plist,
                            match_package_list(
                                repo, bugs[kw_dep], only_new=True))
                    except KeywordNotSpecified:
                        raise DependentBugError(
                            f'dependent bug #{kw_dep} is missing keywords')
                    except PackageListEmpty:
                        # ignore the dependent bug
                        continue
                    except PackageMatchException:
                        raise DependentBugError(
                            f'dependent bug #{kw_dep} has errors')

                # check if we have arches to CC
                if ('CC-ARCHES' in b.keywords
                        and not arches_cced
                        and b.assigned_to != '*****@*****.**'):
                    cc_arches = sorted([
                        f'{x}@gentoo.org' for x in set(
                            filter_prefix_keywords(
                                itertools.chain.from_iterable(
                                    check_packages.values())))
                    ])

                # check if we have ALLARCHES to toggle
                allarches = (
                    b.category == BugCategory.STABLEREQ
                    and all(is_allarches(x) for x in plist))
                allarches_chg = (
                    allarches != ('ALLARCHES' in b.keywords))

                # check if keywords need expanding
                if (('*' in b.atoms or '^' in b.atoms)
                        and (arches_cced or cc_arches)):
                    try:
                        expanded_plist = expand_package_list(
                            repo, b, cc_arches or b.cc)
                    except ExpandImpossible:
                        pass

                plist_json = package_list_to_json(plist.items())
                cache_entry = cache['bugs'].get(str(bno), {})
                assert cache_entry is not None
                last_check = cache_entry.get('last-check')
                if last_check is not None:
                    # decide whether the cached result can be reused;
                    # any mismatch falls through to a fresh check
                    if cache_entry.get('package-list', '') != plist_json:
                        log.info('Package list changed, will recheck.')
                    elif (cache_entry.get('check-res', None)
                            is not b.sanity_check):
                        log.info('Sanity-check flag changed, '
                                 'will recheck.')
                    elif (datetime.datetime.utcnow()
                            - datetime.datetime.strptime(
                                last_check, '%Y-%m-%dT%H:%M:%S')
                            > datetime.timedelta(
                                seconds=self.args.cache_max_age)):
                        log.info('Cache entry is old, will recheck.')
                    elif (not cache_entry.get('updated')
                            and self.args.update_bugs):
                        log.info('Cache entry from no-update mode, '
                                 'will recheck.')
                    else:
                        log.info('Cache entry is up-to-date.')
                        # skip the expensive check entirely; handled
                        # by the NoChanges handler below
                        raise NoChanges()

                with git_repo:
                    add_keywords(
                        plist.items(),
                        b.category == BugCategory.STABLEREQ)
                    check_res, issues = check_dependencies(
                        repo, check_packages.items())

                    bugs_done += 1
                    if bugs_done > 0 and bugs_done % 10 == 0:
                        log.info(f'Tested {bugs_done} bugs so far')

                    # record the fresh result in the cache
                    cache_entry = cache['bugs'][str(bno)] = {
                        'last-check':
                            datetime.datetime.utcnow().isoformat(
                                timespec='seconds'),
                        'package-list': plist_json,
                        'check-res': check_res,
                    }

                    if check_res:
                        # if nothing changed, do nothing
                        if b.sanity_check is True:
                            cache_entry['updated'] = True
                            log.info('Still good')
                            raise NoChanges()

                        # otherwise, update the bug status
                        log.info('All good')
                        # if it was bad before, leave a comment
                        if b.sanity_check is False:
                            comment = ('All sanity-check issues '
                                       'have been resolved')
                    else:
                        issues = list(format_results(issues))
                        comment = ('Sanity check failed:\n\n'
                                   + '\n'.join(issues))
                        log.info('Sanity check failed')
            except KeywordNoneLeft:
                # do not update bug status, it's probably done
                log.info('Skipping, no CC and probably no work to do')
                continue
            except KeywordNotSpecified:
                log.info('Skipping because of incomplete keywords')
                comment = ('Resetting sanity check; keywords are '
                           'not fully specified and arches are not '
                           'CC-ed.')
                assert check_res is None
            except PackageListDoneAlready:
                # do not update bug status if done already
                log.info('Skipping, work done already')
                continue
            except PackageListEmpty:
                log.info('Skipping because of empty package list')
                comment = ('Resetting sanity check; package list '
                           'is empty or all packages are done.')
                assert check_res is None
            except (PackageMatchException, DependentBugError) as e:
                log.error(e)
                check_res = False
                comment = f'Unable to check for sanity:\n\n> {e}'
            except NoChanges:
                # if it's not positive, don't do extra work
                if b.sanity_check is not True:
                    continue
                # check if there's anything related to do
                if not cc_arches and expanded_plist is None:
                    continue
                check_res = True
            except GitDirtyWorkTree:
                log.critical(
                    f'{git_repo.path}: working tree is dirty')
                raise SystemExit(1)

            # if we can not check it, and it's not been marked
            # as checked, just skip it; otherwise, reset the flag
            if check_res is None and b.sanity_check is None:
                continue

            # truncate comment if necessary
            if (comment is not None
                    and len(comment) >= BUGZILLA_MAX_COMMENT_LEN):
                comment = (comment[:BUGZILLA_MAX_COMMENT_LEN - 4]
                           + '...\n')

            # for negative results, we verify whether the comment
            # needs to change
            if check_res is False and b.sanity_check is False:
                assert comment is not None
                old_comment = bz.get_latest_comment(bno)
                # do not add a second identical comment
                if (old_comment is not None
                        and comment.strip() == old_comment.strip()):
                    if cache_entry is not None:
                        cache_entry['updated'] = True
                    log.info('Failure reported already')
                    continue

            if check_res is not True:
                # CC arches and change ALLARCHES only after
                # successful check
                cc_arches = []
                allarches_chg = False
                expanded_plist = None
            elif b.sanity_check is True:
                # change ALLARCHES only on state changes
                allarches_chg = False

            if cc_arches:
                log.info(f'CC arches: {" ".join(cc_arches)}')
            if allarches_chg:
                log.info(f'{"Adding" if allarches else "Removing"} '
                         f'ALLARCHES')
            if expanded_plist:
                log.info('Expanding package list')
                if not self.args.update_bugs:
                    log.info(f'New package list: {expanded_plist}')

            if self.args.update_bugs:
                # assemble the optional extra changes for the update
                kwargs = {}
                if cc_arches:
                    kwargs['cc_add'] = cc_arches
                if allarches_chg:
                    if allarches:
                        kwargs['keywords_add'] = ['ALLARCHES']
                    else:
                        kwargs['keywords_remove'] = ['ALLARCHES']
                if expanded_plist:
                    kwargs['new_package_list'] = [expanded_plist]
                bz.update_status(bno, check_res, comment, **kwargs)
                if cache_entry is not None:
                    cache_entry['updated'] = True
                log.info('Bug status updated')
            else:
                log.info(f'New comment: {comment}')
    finally:
        # always persist the cache, even on interrupt or error
        self.write_cache(cache)
        end_time = datetime.datetime.utcnow()
        log.info(f'NATTkA exiting at {end_time}')
        log.info(f'Total time elapsed: {end_time - start_time}')

    return 0
def test_empty(self):
    """A bug with no dependencies yields two empty lists."""
    bugs = {1: BugInfo(BugCategory.STABLEREQ, '')}
    self.assertEqual(split_dependent_bugs(bugs, 1), ([], []))