def autofix(self, bugs, dryrun):
    """Autofix the bugs according to what is returned by get_autofix_change"""
    needinfo = self.set_needinfo(dryrun)
    fix = self.get_autofix_change()
    if not (needinfo or fix):
        # nothing to do for any bug
        return bugs

    self.has_autofix = True
    to_apply = {}
    if self.has_individual_autofix(fix):
        # per-bug changes: merge each bug's own change with its needinfo
        fix = {str(k): v for k, v in fix.items()}
        for bugid in set(fix) | set(needinfo):
            merged = utils.merge_bz_changes(fix.get(bugid, {}), needinfo.get(bugid, {}))
            if merged:
                to_apply[bugid] = merged
    else:
        # one common change applied to every bug in the result set
        to_apply = {
            bugid: utils.merge_bz_changes(fix, needinfo.get(bugid, {}))
            for bugid in self.get_list_bugs(bugs)
        }

    if dryrun or self.test_mode:
        for bugid, ch in to_apply.items():
            logger.info(
                'The bugs: {}\n will be autofixed with:\n{}'.format(bugid, ch)
            )
    else:
        for bugid, ch in to_apply.items():
            Bugzilla([str(bugid)]).put(ch)
    return bugs
def get_bug_info(self, bugids):
    """Return, per bug id, the history entries and comments authored by the bot."""
    logger.info("History: get bugs info: start...")

    def history_handler(bug, data):
        # keep only the bot's changes; the author field is then redundant
        key = str(bug["id"])
        for entry in bug["history"]:
            if entry["who"] == History.BOT:
                del entry["who"]
                data[key].append(entry)

    def comment_handler(bug, bugid, data):
        key = str(bugid)
        for comment in bug["comments"]:
            if comment["author"] == History.BOT:
                data[key].append({
                    "comment": comment["text"],
                    "date": comment["creation_time"],
                })

    results = {str(bugid): [] for bugid in bugids}
    Bugzilla(
        list(results.keys()),
        historyhandler=history_handler,
        historydata=results,
        commenthandler=comment_handler,
        commentdata=results,
        timeout=960,
    ).get_data().wait()

    logger.info("History: get bugs info: end.")
    return results
def get_bug_info(self, bugids):
    """Collect bot-authored history and comments, keyed by stringified bug id."""
    logger.info('History: get bugs info: start...')

    def history_handler(bug, data):
        key = str(bug['id'])
        for change in bug['history']:
            # only changes made by the bot are of interest
            if change['who'] == History.BOT:
                del change['who']
                data[key].append(change)

    def comment_handler(bug, bugid, data):
        key = str(bugid)
        for comment in bug['comments']:
            if comment['author'] == History.BOT:
                data[key].append(
                    {'comment': comment['text'], 'date': comment['creation_time']}
                )

    collected = {str(bugid): [] for bugid in bugids}
    query = Bugzilla(
        list(collected.keys()),
        historyhandler=history_handler,
        historydata=collected,
        commenthandler=comment_handler,
        commentdata=collected,
        timeout=960,
    )
    query.get_data().wait()

    logger.info('History: get bugs info: end.')
    return collected
def get_bug_info(self, bugids):
    """Fetch each bug's bot-made history entries and bot-authored comments."""
    logger.info('History: get bugs info: start...')

    def history_handler(bug, data):
        ident = str(bug['id'])
        for item in bug['history']:
            if item['who'] == History.BOT:
                # drop the author key once we know it's the bot
                del item['who']
                data[ident].append(item)

    def comment_handler(bug, bugid, data):
        ident = str(bugid)
        for comment in bug['comments']:
            if comment['author'] != History.BOT:
                continue
            data[ident].append({
                'comment': comment['text'],
                'date': comment['creation_time'],
            })

    info = {str(bugid): [] for bugid in bugids}
    Bugzilla(
        list(info.keys()),
        historyhandler=history_handler,
        historydata=info,
        commenthandler=comment_handler,
        commentdata=info,
        timeout=960,
    ).get_data().wait()

    logger.info('History: get bugs info: end.')
    return info
def get(self, person, blacklist, **kwargs):
    """Resolve the supervisor email for *person* according to ``self.who``.

    Falls back to the direct manager when the resolved address is empty or
    blacklisted, and finally to the person themself.
    """
    nplus = NPLUS_PAT.match(self.who)
    if nplus:
        sup = self.people.get_nth_manager_mail(person, int(nplus.group(1)))
    elif self.who == 'self':
        sup = self.people.get_moz_mail(person)
    elif self.who == 'director':
        sup = self.people.get_director_mail(person)
    elif self.who == 'vp':
        sup = self.people.get_vp_mail(person)
    else:
        # NOTE(review): assert is stripped under -O; confirm callers always
        # provide this keyword in optimized runs.
        assert self.who in kwargs, '{} required as keyword argument in add'.format(
            self.who
        )
        sup = self.people.get_moz_mail(kwargs[self.who])

    if not sup or sup in blacklist:
        # unusable address: fall back to the direct (n+1) manager
        sup = self.people.get_nth_manager_mail(person, 1)
    if not sup:
        # we don't have any supervisor so nag self
        logger.info('No supervisor for {}: {}'.format(self, person))
        sup = self.people.get_moz_mail(person)
    return sup
def autofix(self, bugs, dryrun):
    """Assign each bug to the email resolved in hgdata (dry-run only logs)."""
    for bugid, email in self.hgdata.items():
        if not email:
            # no assignee could be determined for this bug
            continue
        self.has_autofix = True
        if dryrun or self.test_mode:
            logger.info('Auto assign {}: {}'.format(bugid, email))
        else:
            Bugzilla([bugid]).put({'assigned_to': email})
    return bugs
def get_bugs(self):
    """Return the ids of every bug the bot has ever changed or commented on."""
    logger.info('History: get bugs: start...')

    bugids = set()

    def bug_handler(bug, data):
        data.add(bug['id'])

    # Search criteria: fields whose change history (changedby) or exact
    # value (equals) identifies bot activity.
    criteria = {
        'changedby': [
            'keywords',
            'product',
            'component',
            'assigned_to',
            'cf_crash_signature',
            'everconfirmed',
            'cf_has_regression_range',
            'cf_has_str',
            'priority',
            'bug_severity',
            'resolution',
            'bug_status',
            'bug_type',
            'cf_status_firefox68',
            'cf_status_firefox67',
            'cf_status_firefox66',
            'cf_status_firefox65',
            'cf_status_firefox64',
            'cf_status_firefox63',
            'cf_status_firefox62',
        ],
        'equals': ['commenter', 'setters.login_name'],
    }

    # Build every query up front, then wait for each one to finish.
    queries = [
        Bugzilla(
            {'include_fields': 'id', 'f1': field, 'o1': op, 'v1': History.BOT},
            bughandler=bug_handler,
            bugdata=bugids,
            timeout=20,
        )
        for op, fs in criteria.items()
        for field in fs
    ]
    for query in queries:
        query.get_data().wait()

    logger.info('History: get bugs: end.')
    return bugids
def get_bugs(self):
    """Gather ids of all bugs touched by the bot, via one query per field."""
    logger.info("History: get bugs: start...")

    def bug_handler(bug, data):
        data.add(bug["id"])

    search_fields = {
        "changedby": [
            "keywords",
            "product",
            "component",
            "assigned_to",
            "cf_crash_signature",
            "everconfirmed",
            "cf_has_regression_range",
            "cf_has_str",
            "priority",
            "bug_severity",
            "resolution",
            "bug_status",
            "bug_type",
            "cf_status_firefox68",
            "cf_status_firefox67",
            "cf_status_firefox66",
            "cf_status_firefox65",
            "cf_status_firefox64",
            "cf_status_firefox63",
            "cf_status_firefox62",
        ],
        "equals": ["commenter", "setters.login_name"],
    }

    bugids = set()
    # flatten (operator, field) pairs, then queue one Bugzilla query per pair
    pairs = [(op, f) for op, fs in search_fields.items() for f in fs]
    queries = []
    for op, f in pairs:
        params = {
            "include_fields": "id",
            "f1": f,
            "o1": op,
            "v1": History.BOT,
        }
        queries.append(
            Bugzilla(params, bughandler=bug_handler, bugdata=bugids, timeout=20)
        )
    for q in queries:
        q.get_data().wait()

    logger.info("History: get bugs: end.")
    return bugids
def __init__(self):
    """Initialize common BzCleaner state: flags, cache and test-mode config."""
    super(BzCleaner, self).__init__()
    self._set_tool_name()

    # bookkeeping flags used by the autofix/nag machinery
    self.has_autofix = False
    self.has_flags = False
    self.no_manager = set()
    self.auto_needinfo = {}
    self.versions = None

    # cache keyed by tool name, so _set_tool_name() must run first
    self.cache = Cache(self.name(), self.max_days_in_cache())
    self.test_mode = utils.get_config("common", "test", False)

    logger.info("Run tool {}".format(self.get_tool_path()))
def autofix(self, bugs):
    """Autofix the bugs according to what is returned by get_autofix_change"""
    needinfo = self.set_needinfo()
    fix = self.get_autofix_change()
    if not (needinfo or fix):
        return bugs

    self.has_autofix = True
    planned = {}
    if self.has_individual_autofix(fix):
        # per-bug changes keyed by stringified bug id
        fix = {str(k): v for k, v in fix.items()}
        for bugid in set(fix) | set(needinfo):
            merged = utils.merge_bz_changes(fix.get(bugid, {}), needinfo.get(bugid, {}))
            if merged:
                planned[bugid] = merged
    else:
        # one shared change, merged with each bug's needinfo
        planned = {
            bugid: utils.merge_bz_changes(fix, needinfo.get(bugid, {}))
            for bugid in self.get_list_bugs(bugs)
        }

    if self.dryrun or self.test_mode:
        for bugid, ch in planned.items():
            logger.info(
                "The bugs: {}\n will be autofixed with:\n{}".format(bugid, ch)
            )
        return bugs

    extra = self.get_db_extra()
    max_retries = utils.get_config("common", "bugzilla_max_retries", 3)
    for bugid, ch in planned.items():
        # for/else: the else clause runs only when no attempt succeeded
        for _ in range(max_retries):
            if not Bugzilla([str(bugid)]).put(ch):
                db.BugChange.add(self.name(), bugid, extra=extra.get(bugid, ""))
                break
            # put() reported failures: back off briefly before retrying
            time.sleep(1)
        else:
            self.failure_callback(bugid)
            logger.error(
                "{}: Cannot put data for bug {} (change => {}).".format(
                    self.name(), bugid, ch
                )
            )
    return bugs
def run(self):
    """Run the tool"""
    args = self.get_args_parser().parse_args()
    self.parse_custom_arguments(args)
    self.dryrun = args.dryrun
    self.cache.set_dry_run(self.dryrun)
    # some tools ignore the date argument entirely
    date = "" if self.ignore_date() else args.date
    try:
        self.send_email(date=date)
        self.terminate()
        logger.info("Tool {} has finished.".format(self.get_tool_path()))
    except Exception:
        logger.exception("Tool {}".format(self.name()))
def get_bugs(self):
    """Find all bug ids with bot activity (field changes or comments)."""
    logger.info('History: get bugs: start...')

    def bug_handler(bug, data):
        data.add(bug['id'])

    bot_touched = {
        'changedby': [
            'keywords',
            'product',
            'component',
            'assigned_to',
            'cf_crash_signature',
            'everconfirmed',
            'cf_has_regression_range',
            'cf_has_str',
            'priority',
            'bug_severity',
            'resolution',
            'bug_status',
            'bug_type',
            'cf_status_firefox68',
            'cf_status_firefox67',
            'cf_status_firefox66',
            'cf_status_firefox65',
            'cf_status_firefox64',
            'cf_status_firefox63',
            'cf_status_firefox62',
        ],
        'equals': ['commenter', 'setters.login_name'],
    }

    bugids = set()
    pending = []
    for operator, field_names in bot_touched.items():
        for field_name in field_names:
            search = {
                'include_fields': 'id',
                'f1': field_name,
                'o1': operator,
                'v1': History.BOT,
            }
            pending.append(
                Bugzilla(search, bughandler=bug_handler, bugdata=bugids, timeout=20)
            )
    # queue everything first, then wait on each query
    for query in pending:
        query.get_data().wait()

    logger.info('History: get bugs: end.')
    return bugids
def autofix(self, bugs):
    """Autofix the bugs according to what is returned by get_autofix_change"""
    needinfo = self.set_needinfo()
    fix = self.get_autofix_change()
    if not (needinfo or fix):
        return bugs

    self.has_autofix = True
    planned = {}
    if self.has_individual_autofix(fix):
        fix = {str(k): v for k, v in fix.items()}
        for bugid in set(fix) | set(needinfo):
            merged = utils.merge_bz_changes(fix.get(bugid, {}), needinfo.get(bugid, {}))
            if merged:
                planned[bugid] = merged
    else:
        # same change for all bugs, merged with any per-bug needinfo
        planned = {
            bugid: utils.merge_bz_changes(fix, needinfo.get(bugid, {}))
            for bugid in self.get_list_bugs(bugs)
        }

    if self.dryrun or self.test_mode:
        for bugid, ch in planned.items():
            logger.info(
                'The bugs: {}\n will be autofixed with:\n{}'.format(bugid, ch)
            )
        return bugs

    extra = self.get_db_extra()
    for bugid, ch in planned.items():
        Bugzilla([str(bugid)]).put(ch)
        # record the change so the tool doesn't reprocess the bug later
        db.BugChange.add(self.name(), bugid, extra=extra.get(bugid, ''))
    return bugs
def get(self, person, skiplist, **kwargs):
    """Return the supervisor email for *person*, driven by ``self.who``.

    Unusable (empty or skiplisted) results fall back to the direct manager,
    then to the person's own address.
    """
    nplus = NPLUS_PAT.match(self.who)
    if nplus:
        sup = self.people.get_nth_manager_mail(person, int(nplus.group(1)))
    elif self.who == "self":
        sup = self.people.get_moz_mail(person)
    elif self.who == "director":
        sup = self.people.get_director_mail(person)
    elif self.who == "vp":
        sup = self.people.get_vp_mail(person)
    else:
        # NOTE(review): assert is stripped under -O; confirm the keyword is
        # always supplied by callers in optimized runs.
        assert self.who in kwargs, "{} required as keyword argument in add".format(
            self.who
        )
        sup = self.people.get_moz_mail(kwargs[self.who])

    if not sup or sup in skiplist:
        # fall back to the direct (n+1) manager
        sup = self.people.get_nth_manager_mail(person, 1)
    if not sup:
        # we don't have any supervisor so nag self
        logger.info("No supervisor for {}: {}".format(self, person))
        sup = self.people.get_moz_mail(person)
    return sup
def send_email(self, date="today"): """Send the email""" if date: date = lmdutils.get_date(date) d = lmdutils.get_date_ymd(date) if isinstance(self, Nag): self.nag_date = d if not self.must_run(d): return if not self.has_enough_data(): logger.info("The tool {} hasn't enough data to run".format( self.name())) return login_info = utils.get_login_info() title, body = self.get_email(date) if title: receivers = self.get_receivers() status = "Success" try: mail.send( login_info["ldap_username"], receivers, title, body, html=True, login=login_info, dryrun=self.dryrun, ) except Exception: logger.exception("Tool {}".format(self.name())) status = "Failure" db.Email.add(self.name(), receivers, "global", status) if isinstance(self, Nag): self.send_mails(title, dryrun=self.dryrun) else: name = self.name().upper() if date: logger.info("{}: No data for {}".format(name, date)) else: logger.info("{}: No data".format(name)) logger.info("Query: {}".format(self.query_url))
def send_email(self, date='today'):
    """Send the email

    Runs the must_run/has_enough_data gates first, sends the global email,
    records its status, and for Nag tools sends the individual mails too.
    """
    if date:
        date = lmdutils.get_date(date)
        d = lmdutils.get_date_ymd(date)
        if isinstance(self, Nag):
            self.nag_date = d
        if not self.must_run(d):
            return

    if not self.has_enough_data():
        logger.info('The tool {} hasn\'t enough data to run'.format(
            self.name()))
        return

    login_info = utils.get_login_info()
    title, body = self.get_email(date)
    if title:
        receivers = self.get_receivers()
        status = 'Success'
        try:
            mail.send(
                login_info['ldap_username'],
                receivers,
                title,
                body,
                html=True,
                login=login_info,
                dryrun=self.dryrun,
            )
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; only genuine errors should mark a failed send.
        except Exception:
            logger.exception('Tool {}'.format(self.name()))
            status = 'Failure'

        db.Email.add(self.name(), receivers, 'global', status)

        if isinstance(self, Nag):
            self.send_mails(title, dryrun=self.dryrun)
    else:
        name = self.name().upper()
        if date:
            logger.info('{}: No data for {}'.format(name, date))
        else:
            logger.info('{}: No data'.format(name))
        logger.info('Query: {}'.format(self.query_url))
def __init__(self):
    """Ensure a usable mozilla-central clone and commits DB, then load the model.

    Side effects: clones/pulls the hg repository pointed at by the login
    info's repo_dir, deletes the pushlog database so hg regenerates it, and
    downloads the commits DB when missing or outdated.
    """
    self.model_class = RegressorModel
    self.repo_dir = get_login_info()["repo_dir"]
    if not os.path.exists(self.repo_dir):
        # First run: clone mozilla-central with robustcheckout (retries
        # network failures, shares storage via the "-shared" sharebase).
        cmd = hglib.util.cmdbuilder(
            "robustcheckout",
            "https://hg.mozilla.org/mozilla-central",
            self.repo_dir,
            purge=True,
            sharebase=self.repo_dir + "-shared",
            networkattempts=7,
            branch=b"tip",
        )
        cmd.insert(0, hglib.HGPATH)
        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)
        logger.info("mozilla-central cloned")

    # Remove pushlog DB to make sure it's regenerated.
    try:
        os.remove(os.path.join(self.repo_dir, ".hg", "pushlog2.db"))
    except FileNotFoundError:
        # Nothing to delete on a fresh clone — not an error.
        logger.info("pushlog database doesn't exist")

    logger.info("Pulling and updating mozilla-central")
    with hglib.open(self.repo_dir) as hg:
        hg.pull(update=True)
    logger.info("mozilla-central pulled and updated")

    # Refresh the commits DB if the local copy is missing or stale.
    db.download_version(repository.COMMITS_DB)
    if db.is_old_version(repository.COMMITS_DB) or not os.path.exists(
        repository.COMMITS_DB
    ):
        db.download(repository.COMMITS_DB, force=True, support_files_too=True)

    super().__init__()
    # Load the trained model last, once the data dependencies are in place.
    self.model = self.model_class.load(self.retrieve_model())
def send_email(self, date='today'):
    """Send the email

    Runs the must_run/has_enough_data gates first, sends the global email,
    records its status, and for Nag tools sends the individual mails too.
    """
    if date:
        date = lmdutils.get_date(date)
        d = lmdutils.get_date_ymd(date)
        if isinstance(self, Nag):
            self.nag_date = d
        if not self.must_run(d):
            return

    if not self.has_enough_data():
        logger.info('The tool {} hasn\'t enough data to run'.format(self.name()))
        return

    login_info = utils.get_login_info()
    title, body = self.get_email(date)
    if title:
        receivers = self.get_receivers()
        status = 'Success'
        try:
            mail.send(
                login_info['ldap_username'],
                receivers,
                title,
                body,
                html=True,
                login=login_info,
                dryrun=self.dryrun,
            )
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; only genuine errors should mark a failed send.
        except Exception:
            logger.exception('Tool {}'.format(self.name()))
            status = 'Failure'

        db.Email.add(self.name(), receivers, 'global', status)

        if isinstance(self, Nag):
            self.send_mails(title, dryrun=self.dryrun)
    else:
        name = self.name().upper()
        if date:
            logger.info('{}: No data for {}'.format(name, date))
        else:
            logger.info('{}: No data'.format(name))
        logger.info('Query: {}'.format(self.query_url))
def send_email(self, date='today', dryrun=False):
    """Send the email"""
    if date:
        date = lmdutils.get_date(date)
        d = lmdutils.get_date_ymd(date)
        if isinstance(self, Nag):
            self.nag_date = d
        if not self.must_run(d):
            return

    if not self.has_enough_data():
        logger.info('The tool {} hasn\'t enough data to run'.format(
            self.name()))
        return

    login_info = utils.get_login_info()
    title, body = self.get_email(login_info['bz_api_key'], date, dryrun)

    if not title:
        # nothing to send for this run
        name = self.name().upper()
        if date:
            logger.info('{}: No data for {}'.format(name, date))
        else:
            logger.info('{}: No data'.format(name))
        return

    mail.send(
        login_info['ldap_username'],
        utils.get_config(self.name(), 'receivers'),
        title,
        body,
        html=True,
        login=login_info,
        dryrun=dryrun,
    )
    if isinstance(self, Nag):
        self.send_mails(title, dryrun=dryrun)
def init():
    """Import the full bot history into the database."""
    # fetch first, so the start/end log lines bracket only the DB import
    data = History().get()
    logger.info("Put history in db: start...")
    BugChange.import_from_dict(data)
    logger.info("Put history in db: end.")
def init():
    """Seed the database with the bot's historical bug changes."""
    changes = History().get()
    logger.info('Put history in db: start...')
    BugChange.import_from_dict(changes)
    logger.info('Put history in db: end.')