def getCommitByBugId(dotreleases):
    """Get the revisions from the hg.m.o urls in the bug comments"""
    nightly_pats = Bugzilla.get_landing_patterns(channels=['nightly'])

    def comment_handler(bug, bugid, data):
        r = Bugzilla.get_landing_comments(bug['comments'], [], nightly_pats)
        data[bugid]['revs'] = [i['revision'] for i in r]

    def bug_handler(bug, data):
        if 'id' in bug:
            data[str(bug['id'])]['title'] = bug['summary']

    bugids = []
    for r, bug in dotreleases.items():
        bugids += bug

    revisions = {bugid: {'revs': [], 'title': ''} for bugid in map(str, bugids)}

    Bugzilla(bugids=bugids,
             include_fields=['id', 'summary'],
             bughandler=bug_handler,
             bugdata=revisions,
             commenthandler=comment_handler,
             commentdata=revisions,
             comment_include_fields=['text']).get_data().wait()

    return revisions
def list_bugs(query):
    '''
    List all the bugs from a Bugzilla query
    '''
    def _bughandler(bug, data):
        bugid = bug['id']
        data[bugid] = bug

    def _attachmenthandler(attachments, bugid, data):
        data[int(bugid)] = attachments

    bugs, attachments = {}, {}

    bz = Bugzilla(query,
                  bughandler=_bughandler,
                  attachmenthandler=_attachmenthandler,
                  bugdata=bugs,
                  attachmentdata=attachments)
    bz.get_data().wait()

    # Map attachments on bugs
    for bugid, _attachments in attachments.items():
        if bugid not in bugs:
            continue
        bugs[bugid]['attachments'] = _attachments

    return bugs
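# Hedged usage sketch for list_bugs(): the query dict below uses standard
# Bugzilla search parameters, but the exact values are illustrative only.
example_query = {
    'product': 'Firefox',
    'resolution': '---',
    'limit': 10,
}
for bugid, bug in list_bugs(example_query).items():
    print(bugid, bug.get('summary'), len(bug.get('attachments', [])))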
def update_status_flags(info, update=False, verbose=False):
    status_flags_by_channel = info['status_flags']
    base_versions = info['base_versions']
    start_date_by_channel = info['start_dates']
    end_date = info['end_date']

    for c, d in start_date_by_channel.items():
        start_date_by_channel[c] = utils.get_date_str(d)

    bugs_to_update = {}
    for sgn, i in info['signatures'].items():
        data = generate_bug_report(sgn, i, status_flags_by_channel,
                                   base_versions, start_date_by_channel,
                                   end_date)
        if data:
            bugid = i['bugid']
            bugs_to_update[bugid] = data

    for bugid, data in bugs_to_update.items():
        __warn('Bug %d: %s' % (bugid, str(data)), verbose)
        __warn(data['comment']['body'], verbose)
        if update:
            Bugzilla([str(bugid)]).put(data)

    if update:
        links = '\n'.join(Bugzilla.get_links(list(bugs_to_update.keys())))
        __warn('Bug links: %s' % links, verbose)
def get_bugs(data):
    signatures = set()
    for p, i1 in data.items():
        for c, i2 in i1.items():
            signatures = signatures.union(set(i2.keys()))

    bugs_by_signature = socorro.Bugs.get_bugs(list(signatures))
    bugs = set()
    for b in bugs_by_signature.values():
        bugs = bugs.union(set(b))
    bugs = list(sorted(bugs))

    def handler(bug, data):
        data[bug['id']] = bug['status']

    data = {}
    Bugzilla(bugids=bugs,
             include_fields=['id', 'status'],
             bughandler=handler,
             bugdata=data).wait()

    for s, bugs in bugs_by_signature.items():
        resolved = []
        unresolved = []
        for b in bugs:
            b = int(b)
            status = data[b]
            if status == 'RESOLVED':
                resolved.append(b)
            else:
                unresolved.append(b)

        if resolved:
            last_resolved = max(resolved)
            last_resolved = (str(last_resolved), Bugzilla.get_links(last_resolved))
        else:
            last_resolved = None

        if unresolved:
            last_unresolved = max(unresolved)
            last_unresolved = (str(last_unresolved), Bugzilla.get_links(last_unresolved))
        else:
            last_unresolved = None

        unresolved = sorted(unresolved)
        bugs_by_signature[s] = {'resolved': last_resolved,
                                'unresolved': last_unresolved}

    return bugs_by_signature
def get_bugs(signatures):
    N = len(signatures)
    logger.info('Get bugs for {} signatures: started.'.format(N))

    bugs_by_signature = socorro.Bugs.get_bugs(list(signatures))
    bugs = set()
    for b in bugs_by_signature.values():
        bugs = bugs.union(set(b))
    bugs = list(sorted(bugs))

    def handler(bug, data):
        data[bug['id']] = bug['status']

    data = {}
    Bugzilla(bugids=bugs,
             include_fields=['id', 'status'],
             bughandler=handler,
             bugdata=data).wait()

    for s, bugs in bugs_by_signature.items():
        resolved = []
        unresolved = []
        for b in bugs:
            b = int(b)
            status = data.get(b, None)
            if status in ['RESOLVED', 'VERIFIED', 'CLOSED']:
                resolved.append(b)
            elif status is not None:
                unresolved.append(b)

        if resolved:
            last_resolved = max(resolved)
            last_resolved = (str(last_resolved), Bugzilla.get_links(last_resolved))
        else:
            last_resolved = None

        if unresolved:
            last_unresolved = max(unresolved)
            last_unresolved = (str(last_unresolved), Bugzilla.get_links(last_unresolved))
        else:
            last_unresolved = None

        unresolved = sorted(unresolved)
        bugs_by_signature[s] = {'resolved': last_resolved,
                                'unresolved': last_unresolved}

    logger.info('Get bugs: finished.')
    return bugs_by_signature
def get(bug_ids: Iterable[int]) -> Dict[int, dict]:
    """Function to retrieve Bug Information including history, comments using
    Bugzilla REST API and attachment using Bugzilla package.

    :param bug_ids: find bug information for these `bug_ids`
    :type bug_ids: list of integers or str or int
    :return: dict with key as `id`(int) of a bug, and values as bug_information
    :rtype: dict
    """

    def attachmenthandler(bug, bug_id):
        bug_id = int(bug_id)
        if bug_id not in new_bugs:
            new_bugs[bug_id] = dict()
        new_bugs[bug_id]["attachments"] = bug

    bug_ids = sorted(set(bug_ids))
    new_bugs = dict()
    batch_size = Bugzilla.BUGZILLA_CHUNK_SIZE

    # Getting _default, history and comments information using REST API
    # Attachment meta data is retrieved using Bugzilla module
    for i in range(0, len(bug_ids), batch_size):
        batch = bug_ids[i : i + batch_size]
        batch_of_ids = ",".join(map(str, batch))

        # "include_fields": "_default,history,comments,attachments",
        # Attachments data size is heavy, so handling them separately
        params_for_custom_fields = {
            "id": batch_of_ids,
            "include_fields": "_default,history,comments",
        }

        response = utils.get_session("bugzilla").get(
            "https://bugzilla.mozilla.org/rest/bug", params=params_for_custom_fields
        )
        response.raise_for_status()

        batch_of_bugs_info = response.json()
        batch_of_bugs_info = {
            int(a_bug["id"]): a_bug for a_bug in batch_of_bugs_info["bugs"]
        }
        new_bugs.update(batch_of_bugs_info)

        Bugzilla(
            batch,
            bughandler=None,
            commenthandler=None,
            comment_include_fields=None,
            attachmenthandler=attachmenthandler,
            attachment_include_fields=ATTACHMENT_INCLUDE_FIELDS,
            historyhandler=None,
        ).get_data().wait()

    return new_bugs
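# Hedged usage sketch for get(): the bug ids are placeholders, and the shape of
# each entry depends on what the Bugzilla REST API returns for "_default" plus
# the history/comments/attachments added above.
bug_info = get([1234567, 1234568])
for bug_id, bug in bug_info.items():
    print(bug_id, bug.get("summary"), len(bug.get("attachments", [])))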
def set_autofix(self, bugs):
    def history_handler(bug, data):
        bugid = str(bug['id'])
        deps = data[bugid]['deps']
        stats = {}
        for h in bug['history']:
            for change in h['changes']:
                if (
                    change['field_name'] in {'blocks', 'depends_on'}
                    and change['added'] in deps
                ):
                    who = h['who']
                    stats[who] = stats.get(who, 0) + 1

        data[bugid]['winner'] = (
            max(stats.items(), key=lambda p: p[1])[0] if stats else None
        )

    no_assignee = [bugid for bugid, info in bugs.items() if not info['assignee']]
    Bugzilla(
        bugids=no_assignee, historyhandler=history_handler, historydata=bugs
    ).get_data().wait()

    no_nick = {}
    for bugid, info in bugs.items():
        if info['assignee']:
            winner = {
                'mail': info['assignee']['email'],
                'nickname': info['assignee']['nick'],
            }
            self.add_auto_ni(bugid, winner)
        elif info['winner']:
            winner = info['winner']
            if winner not in no_nick:
                no_nick[winner] = []
            no_nick[winner].append(bugid)
        else:
            winner = {
                'mail': info['creator']['email'],
                'nickname': info['creator']['nick'],
            }
            self.add_auto_ni(bugid, winner)

    if no_nick:

        def user_handler(user, data):
            data[user['name']] = user['nick']

        data = {}
        BugzillaUser(
            user_names=list(no_nick.keys()),
            include_fields=['name', 'nick'],
            user_handler=user_handler,
            user_data=data,
        ).wait()

        for bzmail, bugids in no_nick.items():
            nick = data[bzmail]
            for bugid in bugids:
                self.add_auto_ni(bugid, {'mail': bzmail, 'nickname': nick})
def __init__(self, channel):
    # since it's used in name() we must set it before calling the parent ctor
    self.channel = channel
    super(Unlanded, self).__init__()
    self.bug_ids = []
    self.versions = utils.get_checked_versions()
    self.channel_pat = Bugzilla.get_landing_patterns(channels=[channel])
def get_dups(self, bugs):
    def handler(bug, data):
        if bug['product'] in self.get_config('products'):
            self.handle_bug(bug, data)

    bugids = [info['dupe'] for info in bugs.values()]
    data = {}

    Bugzilla(
        bugids=bugids,
        include_fields=[
            'cf_crash_signature',
            'dupe_of',
            'product',
            'component',
            'id',
            'summary',
            'groups',
            'version',
        ],
        bughandler=handler,
        bugdata=data,
    ).get_data().wait()

    return data
def retrieve_history(self, bugs):
    """Retrieve bug history"""

    def history_handler(bug):
        bugs[int(bug['id'])]['history'] = bug['history']

    Bugzilla(
        bugids=[bug_id for bug_id in bugs.keys()],
        historyhandler=history_handler,
    ).get_data().wait()
def reduce_set_of_bugs(bugs_by_signature):
    # we remove dup bugs
    # for example if we've {1,2,3,4,5} and if 2 is a dup of 5 then the set will be reduced to {1,3,4,5}
    bugs = set()
    for v in bugs_by_signature.values():
        bugs = bugs.union(v)

    dups = Bugzilla.follow_dup(bugs, only_final=False)
    bugs_count = 0
    bugs.clear()

    for s, bugids in bugs_by_signature.items():
        _bugids = set(bugids)
        toremove = set()
        for bugid in bugids:
            chain = dups[str(bugid)]
            if chain:
                elems = []
                for e in chain:
                    e = int(e)
                    if e in _bugids:
                        elems.append(e)
                if elems:
                    elems[-1] = bugid  # we remove the final and put the initial
                    toremove = toremove.union(elems)
        diff = _bugids - toremove
        bugs_by_signature[s] = list(diff)
        bugs_count += len(diff)
        bugs = bugs.union(diff)

    return bugs, bugs_count
def filter_bugs(self, bugs):
    # We must remove bugs which have open dependencies (except meta bugs)
    # because devs may wait for those bugs to be fixed before their patch
    # can land.
    all_deps = set(dep for info in bugs.values() for dep in info['deps'])

    def bug_handler(bug, data):
        if (
            bug['status'] in {'RESOLVED', 'VERIFIED', 'CLOSED'}
            or 'meta' in bug['keywords']
        ):
            data.add(bug['id'])

    useless = set()
    Bugzilla(
        bugids=list(all_deps),
        include_fields=['id', 'keywords', 'status'],
        bughandler=bug_handler,
        bugdata=useless,
    ).get_data().wait()

    for bugid, info in bugs.items():
        # finally deps will contain open bugs which are not meta
        info['deps'] -= useless

    # keep bugs with no deps
    bugs = {bugid: info for bugid, info in bugs.items() if not info['deps']}

    return bugs
def filter_bugs(self, bugs):
    invalids = set()

    def comment_handler(bug, bugid, data):
        r = Bugzilla.get_landing_comments(bug['comments'], [], NIGHTLY_PAT)
        if not r:
            invalids.add(bugid)
            return

        data[bugid]['land'] = {
            i['revision']: {'date': None, 'backedout': False, 'bugid': bugid}
            for i in r
        }

    bugids = list(bugs.keys())
    Bugzilla(
        bugids=bugids,
        commenthandler=comment_handler,
        commentdata=bugs,
        comment_include_fields=['text'],
    ).get_data().wait()

    for bugid in invalids:
        del bugs[bugid]
def autofix(self, bugs, dryrun):
    """Autofix the bugs according to what is returned by get_autofix_change"""
    ni_changes = self.set_needinfo(dryrun)
    change = self.get_autofix_change()

    if not ni_changes and not change:
        return bugs

    self.has_autofix = True
    new_changes = {}
    if not self.has_individual_autofix(change):
        bugids = self.get_list_bugs(bugs)
        for bugid in bugids:
            new_changes[bugid] = utils.merge_bz_changes(
                change, ni_changes.get(bugid, {}))
    else:
        change = {str(k): v for k, v in change.items()}
        bugids = set(change.keys()) | set(ni_changes.keys())
        for bugid in bugids:
            mrg = utils.merge_bz_changes(change.get(bugid, {}),
                                         ni_changes.get(bugid, {}))
            if mrg:
                new_changes[bugid] = mrg

    if dryrun or self.test_mode:
        for bugid, ch in new_changes.items():
            logger.info(
                'The bugs: {}\n will be autofixed with:\n{}'.format(bugid, ch))
    else:
        for bugid, ch in new_changes.items():
            Bugzilla([str(bugid)]).put(ch)

    return bugs
def get_not_landed(self, bugs):
    not_landed = set()

    def comment_handler(bug, bugid, data):
        r = Bugzilla.get_landing_comments(bug['comments'], [], self.channel_pat)
        if not r:
            not_landed.add(bugid)
            return

        data[bugid]['land'] = {
            i['revision']: {'ok': False, 'bugid': bugid} for i in r
        }

    bugids = list(bugs.keys())
    Bugzilla(
        bugids=bugids,
        commenthandler=comment_handler,
        commentdata=bugs,
        comment_include_fields=['text'],
    ).get_data().wait()

    not_landed |= self.get_hg(bugs)

    for bugid, info in bugs.items():
        if 'land' in info:
            del info['land']
        info['landed'] = 'No' if bugid in not_landed else 'Yes'

    return bugs
def get_bug_info(self, bugids):
    logger.info('History: get bugs info: start...')

    def history_handler(bug, data):
        bugid = str(bug['id'])
        for h in bug['history']:
            if h['who'] == History.BOT:
                del h['who']
                data[bugid].append(h)

    def comment_handler(bug, bugid, data):
        bugid = str(bugid)
        for comment in bug['comments']:
            if comment['author'] == History.BOT:
                text = comment['text']
                data[bugid].append({
                    'comment': text,
                    'date': comment['creation_time'],
                })

    data = {str(bugid): [] for bugid in bugids}

    Bugzilla(
        list(data.keys()),
        historyhandler=history_handler,
        historydata=data,
        commenthandler=comment_handler,
        commentdata=data,
        timeout=960,
    ).get_data().wait()

    logger.info('History: get bugs info: end.')

    return data
def get_bug_info(self, bugids): logger.info("History: get bugs info: start...") def history_handler(bug, data): bugid = str(bug["id"]) for h in bug["history"]: if h["who"] == History.BOT: del h["who"] data[bugid].append(h) def comment_handler(bug, bugid, data): bugid = str(bugid) for comment in bug["comments"]: if comment["author"] == History.BOT: text = comment["text"] data[bugid].append({ "comment": text, "date": comment["creation_time"] }) data = {str(bugid): [] for bugid in bugids} Bugzilla( list(data.keys()), historyhandler=history_handler, historydata=data, commenthandler=comment_handler, commentdata=data, timeout=960, ).get_data().wait() logger.info("History: get bugs info: end.") return data
def filter_bugs(self, bugs):
    # Exclude meta bugs.
    bugs = {bug["id"]: bug for bug in bugs.values() if not bug["is_meta"]}

    # Exclude bugs assigned to nobody.
    bugs = {
        bug["id"]: bug
        for bug in bugs.values()
        if not utils.is_no_assignee(bug["assigned_to_email"])
    }

    # Exclude bugs that do not have a range found by BugMon.
    def comment_handler(bug, bug_id):
        if not any(
            "BugMon: Reduced build range" in comment["text"]
            or "The bug appears to have been introduced in the following build range"
            in comment["text"]
            for comment in bug["comments"]
        ):
            del bugs[bug_id]

    Bugzilla(
        bugids=self.get_list_bugs(bugs),
        commenthandler=comment_handler,
        comment_include_fields=["text"],
    ).get_data().wait()

    return bugs
def get_not_landed(self, bugs):
    not_landed = set()

    def comment_handler(bug, bugid, data):
        r = Bugzilla.get_landing_comments(bug["comments"], [], self.channel_pat)
        if not r:
            not_landed.add(bugid)
            return

        data[bugid]["land"] = {
            i["revision"]: {"ok": False, "bugid": bugid} for i in r
        }

    bugids = list(bugs.keys())
    Bugzilla(
        bugids=bugids,
        commenthandler=comment_handler,
        commentdata=bugs,
        comment_include_fields=["text"],
    ).get_data().wait()

    not_landed |= self.get_hg(bugs)

    for bugid, info in bugs.items():
        if "land" in info:
            del info["land"]
        info["landed"] = "No" if bugid in not_landed else "Yes"

    return bugs
def get_recursive_blocking(self, bugs, got_bugs, depth=0):
    meta_bugs = (bug for bug in bugs.values() if bug["is_meta"])
    blocked_ids = list({
        bug_id
        for bug in meta_bugs
        for bug_id in bug["depends_on"]
        if bug_id not in got_bugs
    })

    if len(blocked_ids) == 0:
        return

    got_bugs.update(blocked_ids)

    if depth == MAX_DEPTH:
        return

    chunks = (
        blocked_ids[i:(i + Bugzilla.BUGZILLA_CHUNK_SIZE)]
        for i in range(0, len(blocked_ids), Bugzilla.BUGZILLA_CHUNK_SIZE)
    )
    for chunk in chunks:
        params = self._get_bz_params(chunk)
        self.amend_bzparams(params, None)
        Bugzilla(
            params,
            bughandler=self.bughandler,
            bugdata=bugs,
        ).get_data().wait()

    self.get_recursive_blocking(bugs, got_bugs, depth + 1)
def add_bug_info(signatures, bugs, status_flags, product, verbose):
    include_fields = ['status', 'id', 'cf_crash_signature'] + list(
        status_flags.values())

    bug_info = defaultdict(lambda: {'bug': {}, 'jsbugmon': set()})

    def bug_handler(bug, data):
        data[str(bug['id'])]['bug'].update(bug)

    def comment_handler(bug, bugid, data):
        for comment in bug['comments']:
            if comment['author'] == '*****@*****.**':
                _, major = get_jsbugmon_regression(comment['raw_text'],
                                                   product=product)
                if major != -1:
                    data[str(bugid)]['jsbugmon'].add(major)

    Bugzilla(bugs,
             include_fields=include_fields,
             bughandler=bug_handler,
             bugdata=bug_info,
             commenthandler=comment_handler,
             commentdata=bug_info).get_data().wait()

    __warn('Collected bug info: Ok', verbose)

    for info in signatures.values():
        bug = info['selected_bug']
        if bug:
            if bug in bug_info:
                info['selected_bug'] = bug_info[bug]['bug']
                jsbugmon = bug_info[bug]['jsbugmon']
                if jsbugmon:
                    info['jsbugmon'] = min(jsbugmon)
            else:
                info['selected_bug'] = 'private'
def get_bugs(start_date, end_date):
    logger.info('Get bugs from {} to {}: started.'.format(start_date, end_date))

    # the search query can take a long time to evaluate
    TIMEOUT = 240

    def bug_handler(bug, data):
        sgns = bug['cf_crash_signature']
        sgns = utils.get_signatures([sgns])
        data[str(bug['id'])] = sgns

    bugs = {}
    Bugzilla(get_bz_params(start_date, end_date),
             bughandler=bug_handler,
             bugdata=bugs,
             timeout=TIMEOUT).get_data().wait()

    res = {}
    for bugid, sgns in bugs.items():
        for sgn in sgns:
            if sgn not in res:
                res[sgn] = []
            res[sgn].append(bugid)

    logger.info('{} bugs and {} signatures collected.'.format(len(bugs), len(res)))

    return res, list(bugs.keys())
def reduce_set_of_bugs(bugs_by_signature):
    # we remove dup bugs
    # for example if we've {1,2,3,4,5} and if 2 is a dup of 5 then the set will be reduced to {1,3,4,5}
    bugs = set()
    for v in bugs_by_signature.values():
        bugs = bugs.union(v)

    dups = Bugzilla.follow_dup(bugs, only_final=False)
    bugs_count = 0
    bugs.clear()

    for s, bugids in bugs_by_signature.items():
        _bugids = set(bugids)
        toremove = set()
        for bugid in bugids:
            chain = dups[str(bugid)]
            if chain:
                elems = []
                for e in chain:
                    e = int(e)
                    if e in _bugids:
                        elems.append(e)
                if elems:
                    elems[-1] = bugid  # we remove the final and put the initial
                    toremove = toremove.union(elems)
        diff = _bugids - toremove
        bugs_by_signature[s] = list(diff)
        bugs_count += len(diff)
        bugs = bugs.union(diff)

    return bugs, bugs_count
def get_bugs(self, date="today", bug_ids=[], chunk_size=None):
    """Get the bugs"""
    bugs = self.get_data()
    params = self.get_bz_params(date)
    self.amend_bzparams(params, bug_ids)
    self.query_url = utils.get_bz_search_url(params)

    if isinstance(self, Nag):
        self.query_params = params

    old_CHUNK_SIZE = Bugzilla.BUGZILLA_CHUNK_SIZE
    try:
        if chunk_size:
            Bugzilla.BUGZILLA_CHUNK_SIZE = chunk_size

        Bugzilla(
            params,
            bughandler=self.bughandler,
            bugdata=bugs,
            timeout=self.get_config("bz_query_timeout"),
        ).get_data().wait()
    finally:
        Bugzilla.BUGZILLA_CHUNK_SIZE = old_CHUNK_SIZE

    self.get_comments(bugs)

    return bugs
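# Hedged usage sketch for get_bugs(): SomeCleaner stands in for a concrete
# BzCleaner subclass (hypothetical name); passing chunk_size temporarily lowers
# Bugzilla.BUGZILLA_CHUNK_SIZE for this one query and restores it afterwards.
tool = SomeCleaner()
bugs = tool.get_bugs(date="today", chunk_size=100)
print(len(bugs), "bugs retrieved")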
def get_dups(self, bugs):
    def handler(bug, data):
        if bug["product"] in self.get_config("products"):
            self.handle_bug(bug, data)

    bugids = [info["dupe"] for info in bugs.values()]
    data = {}

    Bugzilla(
        bugids=bugids,
        include_fields=[
            "cf_crash_signature",
            "dupe_of",
            "product",
            "component",
            "id",
            "summary",
            "groups",
            "version",
        ],
        bughandler=handler,
        bugdata=data,
    ).get_data().wait()

    return data
def set_autofix(self, bugs):
    def history_handler(bug, data):
        bugid = str(bug["id"])
        deps = data[bugid]["deps"]
        stats = {}
        for h in bug["history"]:
            for change in h["changes"]:
                if (change["field_name"] in {"blocks", "depends_on"}
                        and change["added"] in deps
                        ) or change["field_name"] == "cf_has_regression_range":
                    who = h["who"]
                    stats[who] = stats.get(who, 0) + 1

        data[bugid]["winner"] = (max(stats.items(), key=lambda p: p[1])[0]
                                 if stats else None)

    no_assignee = [
        bugid for bugid, info in bugs.items() if not info["assignee"]
    ]
    Bugzilla(bugids=no_assignee,
             historyhandler=history_handler,
             historydata=bugs).get_data().wait()

    no_nick = {}
    for bugid, info in bugs.items():
        if info["assignee"]:
            winner = {
                "mail": info["assignee"]["email"],
                "nickname": info["assignee"]["nick"],
            }
            self.add_auto_ni(bugid, winner)
        elif info["winner"]:
            winner = info["winner"]
            if winner not in no_nick:
                no_nick[winner] = []
            no_nick[winner].append(bugid)
        else:
            winner = {
                "mail": info["creator"]["email"],
                "nickname": info["creator"]["nick"],
            }
            self.add_auto_ni(bugid, winner)

    if no_nick:

        def user_handler(user, data):
            data[user["name"]] = user["nick"]

        data = {}
        BugzillaUser(
            user_names=list(no_nick.keys()),
            include_fields=["name", "nick"],
            user_handler=user_handler,
            user_data=data,
        ).wait()

        for bzmail, bugids in no_nick.items():
            nick = data[bzmail]
            for bugid in bugids:
                self.add_auto_ni(bugid, {"mail": bzmail, "nickname": nick})
def filter_bugs(self, bugs):
    invalids = set()

    def comment_handler(bug, bugid, data):
        r = Bugzilla.get_landing_comments(bug["comments"], [], NIGHTLY_PAT)
        if not r:
            invalids.add(bugid)
            return

        data[bugid]["land"] = {
            i["revision"]: {"date": None, "backedout": False, "bugid": bugid}
            for i in r
        }

    bugids = list(bugs.keys())
    Bugzilla(
        bugids=bugids,
        commenthandler=comment_handler,
        commentdata=bugs,
        comment_include_fields=["text"],
    ).get_data().wait()

    for bugid in invalids:
        del bugs[bugid]
def filter_bugs(self, bugs):
    all_deps = set()
    deps_to_max_allowed_date = {}
    for bugid, info in bugs.items():
        bugid = int(bugid)
        # we only keep dependencies created before bugid
        # since the others cannot be a regressor!
        info["deps"] = deps = set(x for x in info["deps"] if x < bugid)
        if deps:
            all_deps |= deps
            for dep in deps:
                deps_to_max_allowed_date[dep] = info["creation"]

    def bug_handler(bug, data):
        if "meta" in bug["keywords"] or not bug["cf_last_resolved"]:
            data.add(bug["id"])

    def history_handler(bug, data):
        bugid = bug["id"]
        treated.add(bugid)
        resolved_before = False
        for h in bug["history"]:
            if resolved_before:
                break
            for change in h["changes"]:
                if change["field_name"] == "cf_last_resolved" and change["added"]:
                    date = dateutil.parser.parse(change["added"])
                    if date < deps_to_max_allowed_date[bugid]:
                        resolved_before = True
                        break

        if not resolved_before:
            data.add(bugid)

    invalids = set()
    treated = set()
    Bugzilla(
        bugids=list(all_deps),
        include_fields=["id", "keywords", "cf_last_resolved"],
        bughandler=bug_handler,
        bugdata=invalids,
        historyhandler=history_handler,
        historydata=invalids,
    ).get_data().wait()

    # Some bugs aren't accessible so they won't appear in treated (all_deps - treated)
    # Since we don't have any info about them, we consider them invalid
    invalids |= all_deps - treated

    for bugid, info in bugs.items():
        info["deps"] -= invalids

    bugs = {
        bugid: info
        for bugid, info in bugs.items()
        if info["deps"] or info["has_regression_range"]
    }

    return bugs
def filter_by_regr(self, bugs):
    # Keep only bugs that have no regressions, or whose regressions are all closed
    def bug_handler(bug, data):
        if bug["status"] in {"RESOLVED", "VERIFIED", "CLOSED"}:
            data.add(bug["id"])

    bugids = {r for info in bugs.values() for r in info["regressions"]}
    if not bugids:
        return bugs

    fixed_bugs = set()
    Bugzilla(
        bugids=list(bugids),
        include_fields=["id", "status"],
        bughandler=bug_handler,
        bugdata=fixed_bugs,
    ).get_data().wait()

    bugs_without_regr = {}
    for bugid, info in bugs.items():
        regs = set(info["regressions"])
        regs = regs - fixed_bugs
        if not regs:
            bugs_without_regr[bugid] = info

    return bugs_without_regr
def get_comments(self, bugs):
    """Get the bugs comments"""
    if self.has_last_comment_time():
        bugids = self.get_list_bugs(bugs)
        Bugzilla(
            bugids=bugids,
            commenthandler=self._commenthandler,
            commentdata=bugs,
        ).get_data().wait()

    return bugs
def get_bugs(data):
    signatures = set()
    for p, i1 in data.items():
        for c, i2 in i1.items():
            signatures = signatures.union(set(i2.keys()))

    bugs_by_signature = socorro.Bugs.get_bugs(list(signatures))
    bugs = set()
    for b in bugs_by_signature.values():
        bugs = bugs.union(set(b))
    bugs = list(sorted(bugs))

    def handler(bug, data):
        data[bug['id']] = bug['status']

    data = {}
    Bugzilla(bugids=bugs,
             include_fields=['id', 'status'],
             bughandler=handler,
             bugdata=data).wait()

    for s, bugs in bugs_by_signature.items():
        resolved = []
        unresolved = []
        for b in bugs:
            b = int(b)
            status = data[b]
            if status == 'RESOLVED':
                resolved.append(b)
            else:
                unresolved.append(b)

        if resolved:
            last_resolved = max(resolved)
            last_resolved = (str(last_resolved), Bugzilla.get_links(last_resolved))
        else:
            last_resolved = None

        if unresolved:
            last_unresolved = max(unresolved)
            last_unresolved = (str(last_unresolved), Bugzilla.get_links(last_unresolved))
        else:
            last_unresolved = None

        unresolved = sorted(unresolved)
        bugs_by_signature[s] = {'resolved': last_resolved,
                                'unresolved': last_unresolved}

    return bugs_by_signature
def comment_handler(bug, bugid, data): r = Bugzilla.get_landing_comments(bug["comments"], [], self.channel_pat) if not r: not_landed.add(bugid) return data[bugid]["land"] = { i["revision"]: {"ok": False, "bugid": bugid} for i in r }
def comment_handler(bug, bugid, data):
    r = Bugzilla.get_landing_comments(bug['comments'], [], self.channel_pat)
    if not r:
        not_landed.add(bugid)
        return

    data[bugid]['land'] = {
        i['revision']: {'ok': False, 'bugid': bugid} for i in r
    }
def comment_handler(bug, bugid, data):
    r = Bugzilla.get_landing_comments(bug['comments'], [], NIGHTLY_PAT)
    if not r:
        invalids.add(bugid)
        return

    data[bugid]['land'] = {
        i['revision']: {'date': None, 'backedout': False, 'bugid': bugid}
        for i in r
    }
def comment_handler(bug, bugid, data):
    r = Bugzilla.get_landing_comments(bug['comments'], [], nightly_pats)
    landed = bool(r)
    if not landed:
        for comment in bug['comments']:
            comment = comment['text'].lower()
            if 'backed out' in comment or 'backout' in comment:
                landed = True
                break

    data[bugid]['landed'] = landed
def comment_handler(bug, bugid, data):
    commenters = data[bugid]['commenters']
    for comment in bug['comments']:
        commenter = comment['author']
        if commenter in commenters:
            commenters[commenter] += 1
        else:
            commenters[commenter] = 1

    r = Bugzilla.get_landing_comments(bug['comments'], [], nightly_pats)
    data[bugid]['revisions'] = [i['revision'] for i in r]
def get_bugs(data):
    signatures = set()
    for p, i1 in data.items():
        for c, i2 in i1.items():
            signatures = signatures.union(set(i2.keys()))

    bugs_by_signature = socorro.Bugs.get_bugs(list(signatures))
    statusflags.reduce_set_of_bugs(bugs_by_signature)

    for s, bugs in bugs_by_signature.items():
        bugs_by_signature[s] = [(str(bug), Bugzilla.get_links(bug))
                                for bug in sorted(bugs, key=lambda k: int(k))]

    return bugs_by_signature
def test_get_bugs_info(self):
    status_flags = Bugzilla.get_status_flags()
    bugs = ['701227', '701232']
    info = statusflags.get_bugs_info(bugs, status_flags)
    self.assertEqual(set(info.keys()), set(bugs))

    info1 = info[bugs[0]]
    self.assertEqual(info1['assigned'], True)
    self.assertEqual(info1['fixed'], True)
    self.assertEqual(info1['incomplete'], False)
    self.assertEqual(info1['no_change'], set())
    self.assertEqual(info1['patched'], True)
    self.assertEqual(info1['resolved'], False)
    self.assertEqual(info1['fixed_dates'][0], utils.get_date_ymd('2011-11-11 10:26:06'))
    self.assertEqual(info1['last_change'], utils.get_date_ymd('2013-01-14 16:31:50'))

    info2 = info[bugs[1]]
    self.assertEqual(info2['assigned'], True)
    self.assertEqual(info2['fixed'], True)
    self.assertEqual(info2['incomplete'], False)
    self.assertEqual(info2['no_change'], set())
    self.assertEqual(info2['patched'], False)
    self.assertEqual(info2['resolved'], True)
    self.assertEqual(info2['fixed_dates'][0], utils.get_date_ymd('2011-11-10 03:13:31'))
    self.assertEqual(info2['last_change'], utils.get_date_ymd('2015-03-12 15:17:16'))

    bugs = ['844479']
    fl = 'cf_status_firefox'
    info = statusflags.get_bugs_info(bugs, {'nightly': fl + '23',
                                            'aurora': fl + '22',
                                            'beta': fl + '21',
                                            'release': fl + '20'})
    self.assertEqual(set(info.keys()), set(bugs))

    info3 = info[bugs[0]]
    self.assertEqual(info3['assigned'], True)
    self.assertEqual(info3['fixed'], True)
    self.assertEqual(info3['incomplete'], False)
    self.assertEqual(info3['no_change'], {'aurora', 'beta'})
    self.assertEqual(info3['patched'], True)
    self.assertEqual(info3['resolved'], True)
    self.assertEqual(info3['fixed_dates'][0], utils.get_date_ymd('2013-08-06 21:12:03'))
    self.assertEqual(info3['last_change'], utils.get_date_ymd('2013-08-20 19:26:07'))
def test_get_partial(self):
    channel = ['release', 'beta', 'aurora', 'nightly', 'esr']
    signature = 'js::GCMarker::processMarkStackTop'
    bugids = socorro.Bugs.get_bugs([signature])
    base_versions = {'nightly': 51, 'aurora': 50, 'beta': 49, 'release': 48, 'esr': 45}
    self.assertEqual(set(bugids[signature]),
                     {792226, 789892, 719114, 730283, 1257309, 941491, 745334, 772441, 952381})

    start_date, min_date, versions_by_channel, start_date_by_channel, base_versions = statusflags.get_versions_info('Firefox', base_versions=base_versions)
    search_date = statusflags.get_search_date('', start_date, '2016-09-14')
    sgninfo = statusflags.get_signatures(100, 'Firefox', versions_by_channel, channel, search_date, [signature], [], False)
    status_flags = Bugzilla.get_status_flags(base_versions=base_versions)
    bugs_history_info = statusflags.get_bugs_info(bugids[signature], status_flags)
    last_bugs_info, _ = statusflags.get_last_bugs_info(bugids[signature], signature, sgninfo, [], bugs_history_info, min_date)

    self.assertEqual(last_bugs_info['resolved-fixed-patched'], ['', utils.get_guttenberg_death()])
    self.assertEqual(last_bugs_info['resolved-fixed-unpatched'], ['', utils.get_guttenberg_death()])
    self.assertEqual(last_bugs_info['resolved-unfixed'], ['952381', utils.get_date_ymd('2014-05-13 12:12:41')])
    self.assertEqual(last_bugs_info['unresolved-assigned'], ['', utils.get_guttenberg_death()])
    self.assertEqual(last_bugs_info['unresolved-unassigned'], ['719114', utils.get_date_ymd('2016-08-30 23:15:17')])

    last_bug = statusflags.get_last_bug(bugids[signature], signature, sgninfo, [], bugs_history_info, min_date)
    self.assertEqual(last_bug, '719114')
def get_revisions(self, bugs):
    """Get the revisions from the hg.m.o urls in the bug comments"""
    nightly_pats = Bugzilla.get_landing_patterns(channels=['nightly'])

    def comment_handler(bug, bugid, data):
        commenters = data[bugid]['commenters']
        for comment in bug['comments']:
            commenter = comment['author']
            if commenter in commenters:
                commenters[commenter] += 1
            else:
                commenters[commenter] = 1

        r = Bugzilla.get_landing_comments(bug['comments'], [], nightly_pats)
        data[bugid]['revisions'] = [i['revision'] for i in r]

    def attachment_handler(attachments, bugid, data):
        for attachment in attachments:
            if self.is_patch(attachment):
                data[bugid]['creators'].add(attachment['creator'])

    bugids = list(bugs.keys())
    revisions = {
        bugid: {'revisions': [], 'creators': set(), 'commenters': {}}
        for bugid in bugids
    }

    Bugzilla(
        bugids=bugids,
        commenthandler=comment_handler,
        commentdata=revisions,
        comment_include_fields=['text', 'author'],
        attachmenthandler=attachment_handler,
        attachmentdata=revisions,
    ).get_data().wait()

    return revisions
# You can obtain one at http://mozilla.org/MPL/2.0/.

import datetime
from dateutil.relativedelta import relativedelta
from libmozdata.bugzilla import Bugzilla
from libmozdata.connection import Query
from libmozdata.release_calendar import get_calendar
from libmozdata import utils as lmdutils, hgmozilla
import re
import whatthepatch
from auto_nag.bzcleaner import BzCleaner
from auto_nag.people import People
from auto_nag import utils

NIGHTLY_PAT = Bugzilla.get_landing_patterns(channels=['nightly'])
BUG_PAT = re.compile('[\t ]*bug[s]?[\t ]*([0-9]+)', re.I)
BACKOUT_PAT = re.compile('^back(ed)?[ \t]*out', re.I)


class CodeFreezeWeek(BzCleaner):
    def __init__(self):
        super(CodeFreezeWeek, self).__init__()
        self.versions = utils.get_checked_versions()
        if not self.versions:
            return
        self.people = People()
        self.nightly = self.versions['central']
        self.beta = self.versions['beta']
        self.release = self.versions['release']
def __init__(self, channel):
    super(Unlanded, self).__init__()
    self.channel = channel
    self.bug_ids = []
    self.versions = utils.get_checked_versions()
    self.channel_pat = Bugzilla.get_landing_patterns(channels=[channel])
def get_patch_data(self, bugs):
    """Get patch information in bugs"""
    nightly_pats = Bugzilla.get_landing_patterns(channels=['nightly'])

    def comment_handler(bug, bugid, data):
        r = Bugzilla.get_landing_comments(bug['comments'], [], nightly_pats)
        landed = bool(r)
        if not landed:
            for comment in bug['comments']:
                comment = comment['text'].lower()
                if 'backed out' in comment or 'backout' in comment:
                    landed = True
                    break

        data[bugid]['landed'] = landed

    def attachment_handler(attachments, bugid, data):
        res = {}
        for attachment in attachments:
            self.handle_attachment(attachment, res)

        if 'phab' in res:
            if res['phab']:
                data[bugid]['patch'] = 'phab'
                data[bugid]['author'] = res['author']
                data[bugid]['count'] = res['count']
        elif 'splinter' in res and res['splinter']:
            data[bugid]['patch'] = 'splinter'
            data[bugid]['author'] = res['author']
            data[bugid]['count'] = res['count']

    bugids = list(bugs.keys())
    data = {
        bugid: {'landed': False, 'patch': None, 'author': None, 'count': 0}
        for bugid in bugids
    }

    Bugzilla(
        bugids=bugids,
        attachmenthandler=attachment_handler,
        attachmentdata=data,
        attachment_include_fields=[
            'bug_id',
            'creator',
            'data',
            'is_obsolete',
            'is_patch',
            'content_type',
            'flags',
        ],
    ).get_data().wait()

    data = {bugid: v for bugid, v in data.items() if v['patch'] is not None}

    splinter_bugs = [bugid for bugid, v in data.items() if v['patch'] == 'splinter']

    Bugzilla(
        bugids=splinter_bugs,
        commenthandler=comment_handler,
        commentdata=data,
        comment_include_fields=['text'],
    ).get_data().wait()

    data = {
        bugid: {'authors': v['author'], 'patch_count': v['count']}
        for bugid, v in data.items()
        if v['patch'] == 'phab' or not v['landed']
    }

    return data
def get(product='Firefox', limit=1000, verbose=False, search_start_date='',
        end_date=None, signatures=[], bug_ids=[], max_bugs=-1,
        base_versions=None, check_for_fx=True, check_bz_version=True,
        check_noisy=True):
    """Get crashes info

    Args:
        product (Optional[str]): the product
        limit (Optional[int]): the number of crashes to get from tcbs

    Returns:
        dict: contains all the info about how to update flags
    """
    p = product.lower()
    if p == 'firefox':
        product = 'Firefox'
    elif p == 'fennecandroid':
        product = 'FennecAndroid'

    channel = ['release', 'beta', 'aurora', 'nightly']
    if product == 'Firefox':
        channel.append('esr')

    start_date, min_date, versions_by_channel, start_date_by_channel, base_versions = get_versions_info(product, date=end_date, base_versions=base_versions)

    nv = Bugzilla.get_nightly_version()
    if check_bz_version and nv != base_versions['nightly']:
        __warn('Mismatch between nightly version from Bugzilla (%d) and Socorro (%d)' % (nv, base_versions['nightly']), verbose)
        return None

    if check_bz_version and (base_versions['aurora'] != nv - 1 or base_versions['beta'] != nv - 2 or base_versions['release'] != nv - 3):
        __warn('All versions are not up to date (Bugzilla nightly version is %d): %s' % (nv, base_versions), verbose)
        return None

    __warn('Versions: %s' % versions_by_channel, verbose)
    __warn('Start dates: %s' % start_date_by_channel, verbose)

    if not end_date:
        end_date = utils.get_date('today')
    search_date = get_search_date(search_start_date, start_date, end_date)

    signatures = get_signatures(limit, product, versions_by_channel, channel, search_date, signatures, bug_ids, verbose)

    # signatures == { 'foo::bar': {'affected_channels': [('release', 1234), ...],
    #                              'bugs': None,
    #                              'platforms': ['Windows'],
    #                              'selected_bug': None}, ... }

    __warn('Collected signatures: %d' % len(signatures), verbose)

    # get the bugs for each signature
    bugs_by_signature = socorro.Bugs.get_bugs(list(signatures.keys()))

    # if we've some bugs in bug_ids then we must remove the other ones for a given signature
    if bug_ids:
        bids = set(bug_ids)
        for s, bugids in bugs_by_signature.items():
            inter = bids.intersection(bugids)
            if inter:
                bugs_by_signature[s] = inter

    __warn('Collected bugs in Socorro: Ok', verbose)

    bugs, bugs_count = reduce_set_of_bugs(bugs_by_signature)

    __warn('Remove duplicates: Ok', verbose)
    __warn('Bugs to analyze: %d' % bugs_count, verbose)

    # we filter the bugs to remove meaningless ones
    if not bug_ids:
        bugs = filter_bugs(bugs, product)

    status_flags = Bugzilla.get_status_flags(base_versions=base_versions)

    # we get the "better" bug where to update the info
    bugs_history_info = get_bugs_info(bugs, status_flags)

    patched_bugs = []
    for bugid, hinfo in bugs_history_info.items():
        if hinfo['patched']:
            patched_bugs.append(bugid)

    if patched_bugs:
        patch_info = dataanalysis.analyze_bugs(patched_bugs, min_date=min_date, base_versions=base_versions)
    else:
        patch_info = {}

    crashes_to_reopen = []
    bugs.clear()
    for s, v in bugs_by_signature.items():
        info = signatures[s]
        no_change = set()
        if v:
            bug_to_touch = get_last_bug(v, s, signatures[s], patch_info, bugs_history_info, min_date)
            if bug_to_touch:
                no_change = bugs_history_info[bug_to_touch]['no_change']
            else:
                crashes_to_reopen.append(s)
        else:
            bug_to_touch = None

        info['selected_bug'] = bug_to_touch
        info['bugs'] = v
        info['no_change'] = no_change
        if bug_to_touch:
            bugs.add(bug_to_touch)

    __warn('Collected last bugs: %d' % len(bugs), verbose)

    # add bug info in signatures
    add_bug_info(signatures, list(bugs), status_flags, product, verbose)

    # analyze the signatures
    analysis = analyze(signatures, status_flags, base_versions)

    if max_bugs > 0:
        __analysis = {}
        count = 0
        for signature, info in analysis.items():
            if not check_for_fx or info['firefox']:
                __analysis[signature] = info
                count += 1
                if count == max_bugs:
                    analysis = __analysis
                    break

    __warn('Analysis: Ok', verbose)

    positions_result, positions = get_crash_positions(-1, product, versions_by_channel, channel, search_date=search_date, verbose=verbose)

    # Now get the number of crashes for each signature
    trends = get_stats_for_past_weeks(product, channel, start_date_by_channel, versions_by_channel, analysis, search_start_date, end_date, check_for_fx=check_for_fx)

    if check_noisy:
        noisy = get_noisy(trends, analysis)
        __warn('Noisy signatures: %s' % [analysis[s] for s in noisy], verbose)
    else:
        noisy = set()

    __warn('Collected trends: Ok\n', verbose)

    positions_result.wait()

    # replace dictionary containing trends by a list
    empty_ranks = {'browser': -1, 'content': -1, 'plugin': -1, 'gpu': -1}
    for signature, i in trends.items():
        if signature in noisy:
            del analysis[signature]
        else:
            signature_info = analysis[signature]
            ranks = signature_info['rank']
            for chan, trend in i.items():
                i[chan] = [trend[week] for week in sorted(trend.keys(), reverse=False)]
                ranks[chan] = positions[chan].get(signature, empty_ranks)
            signature_info['trend'] = i

    __prettywarn(analysis, verbose)

    return {'status_flags': status_flags,
            'base_versions': base_versions,
            'start_dates': start_date_by_channel,
            'signatures': analysis,
            'end_date': end_date}
def get_signatures(limit, product, versions, channel, search_date, signatures, bug_ids, verbose):
    if limit <= 0:
        count = []
        socorro.SuperSearch(params={'product': product,
                                    'version': versions,
                                    'date': search_date,
                                    'release_channel': channel,
                                    '_facets_size': 1,
                                    '_results_number': 0},
                            handler=lambda json: count.append(json['total'])).wait()
        limit = count[0]

    __warn('Maximum signatures to collect: %d' % limit, verbose)

    __signatures = {}

    known_platforms = {'Windows NT', 'Windows', 'Mac OS X', 'Linux'}
    known_wtf_platforms = {'0x00000000', ''}

    ignored_signatures = get_ignored_signatures()

    def handler_ss(json, data):
        n = 0
        for bucket in json['facets']['signature']:
            signature = bucket['term']
            if signature in ignored_signatures:
                continue
            n += 1
            if n > limit:
                break

            l1 = []
            l2 = []
            data[signature] = {'affected_channels': l1,
                               'platforms': l2,
                               'selected_bug': None,
                               'jsbugmon': 0,
                               'bugs': None}
            facets = bucket['facets']
            for c in facets['release_channel']:
                l1.append((c['term'], c['count']))
            for p in facets['platform']:
                os = p['term']
                if os and os in known_platforms:
                    if os == 'Windows NT':
                        os = 'Windows'
                    l2.append(os)
                elif os not in known_wtf_platforms:
                    __warn('Unknown os: %s' % os)

    all_versions = []
    for c in channel:
        all_versions += versions[c]

    if signatures or bug_ids:
        if bug_ids:
            _sgns = Bugzilla.get_signatures(bug_ids)
            set_sgns = set(signatures)
            for ss in _sgns.values():
                if ss:
                    set_sgns = set_sgns.union(set(ss))
            signatures = list(set_sgns)

        queries = []
        for sgns in Connection.chunks(signatures, 10):
            queries.append(Query(socorro.SuperSearch.URL,
                                 {'signature': ['=' + s for s in sgns],
                                  'product': product,
                                  'version': all_versions,
                                  'release_channel': channel,
                                  'date': search_date,
                                  '_aggs.signature': ['release_channel', 'platform'],
                                  '_facets_size': max(limit, 100),
                                  '_results_number': 0},
                                 handler=handler_ss,
                                 handlerdata=__signatures))
        socorro.SuperSearch(queries=queries).wait()
    else:
        socorro.SuperSearch(params={'product': product,
                                    'version': all_versions,
                                    'release_channel': channel,
                                    'date': search_date,
                                    '_aggs.signature': ['release_channel', 'platform'],
                                    '_facets_size': max(limit, 100),
                                    '_results_number': 0},
                            handler=handler_ss,
                            handlerdata=__signatures,
                            timeout=300).wait()

    return __signatures