def filter_from_hg(self, bzdata, user_info):
    """Get the bugs where an associated revision contains the bug id in the description"""

    def on_revision(bugid, json, data):
        # Only keep revisions that mention the bug id and are not backouts.
        if bugid not in json["desc"] or utils.is_backout(json):
            return
        # Register the bug even if the author line doesn't parse below.
        data.setdefault(bugid, set())
        match = HG_MAIL.match(json["user"])
        if match:
            author = (match.group(1).strip(), match.group(2).strip())
            data[bugid].add(author)

    url = hgmozilla.Revision.get_url("nightly")
    queries = [
        Query(url, {"node": rev}, functools.partial(on_revision, bugid), self.hgdata)
        for bugid, info in bzdata.items()
        for rev in info["revisions"]
    ]

    if queries:
        hgmozilla.Revision(queries=queries).wait()

    self.set_autofixable(bzdata, user_info)
    return self.hgdata
def get_hg_patches(self, bugs):
    """Fetch the raw patch of every landed (non-backed-out) revision and
    aggregate the per-patch analysis into each bug's info dict."""
    url = hgmozilla.RawRevision.get_url('nightly')

    def accumulate(patch, data):
        stats = self.patch_analysis(patch)
        # The first patch seeds the counters; later ones are summed in.
        if 'addlines' in data:
            for key, value in stats.items():
                data[key] += value
        else:
            data.update(stats)

    queries = [
        Query(url, {'node': rev}, accumulate, info)
        for info in bugs.values()
        for rev, landing in info['land'].items()
        if not landing['backedout']
    ]

    if queries:
        hgmozilla.Revision(queries=queries).wait()

    to_remove = []
    for bug, info in bugs.items():
        landed = sum(not v['backedout'] for v in info['land'].values())
        if landed:
            info['landed_patches'] = landed
        else:
            # bug with only backouts
            to_remove.append(bug)

    # Remove bugs that we don't want to show
    for bug in to_remove:
        del bugs[bug]
def test_revisions(self):
    """Fetch two raw revisions through one query and one through another,
    and check each handler received the expected patch text."""
    pair = {
        'first': None,
        'second': None,
    }
    single = {'first': None}

    def on_pair(response):
        # Route each raw patch to its slot by the short node id it contains.
        if '1584ba8c1b86' in response:
            pair['first'] = response
        elif 'f5578fdc50ef' in response:
            pair['second'] = response

    def on_single(response):
        single['first'] = response

    url = hgmozilla.RawRevision.get_url('nightly')
    hgmozilla.Revision(queries=[
        Query(url, [{'node': 'f5578fdc50ef'}, {'node': '1584ba8c1b86'}], on_pair),
        Query(url, {'node': '1584ba8c1b86'}, on_single),
    ]).wait()

    self.assertIn('# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c', pair['first'])
    self.assertIn('# Node ID f5578fdc50ef11b7f12451c88297f327abb0e9da', pair['second'])
    self.assertIn('# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c', single['first'])
def get_pushdates(chan_rev):
    """Get the pushdates of the given channel/revision. """
    connections = []
    data = {}
    for channel, revisions in chan_rev.items():
        # Every esr* channel shares a single result bucket under 'esr'.
        if channel.startswith('esr'):
            bucket = data.setdefault('esr', [])
        else:
            bucket = data[channel] = []
        connections.extend(
            hgmozilla.Revision(
                channel=channel,
                params={'node': rev},
                handler=get_pushdate,
                handlerdata=bucket,
            )
            for rev in revisions
        )
    return connections, data
def get_pushdates(chan_rev):
    """Get the pushdates of the given channel/revision. """
    data = {}
    queries = []
    for channel, revisions in chan_rev.items():
        # Every esr* channel shares a single result bucket under 'esr'.
        if channel.startswith('esr'):
            bucket = data.setdefault('esr', [])
        else:
            bucket = data[channel] = []
        url = hgmozilla.Revision.get_url(channel)
        queries.extend(
            Query(url, {'node': rev}, get_pushdate, bucket) for rev in revisions
        )
    if not queries:
        return None, data
    return hgmozilla.Revision(queries=queries), data
def test_revisions(self):
    """Fetch revision metadata for 'default' and a fixed node through a
    multi-param query and a single query, then check the JSON payloads."""
    pair = {
        'first': {},
        'second': {},
    }
    single = {}

    def on_pair(json, data):
        # The 'tip' tag distinguishes the default head from the fixed node.
        slot = 'first' if 'tip' in json['tags'] else 'second'
        data[slot].update(json)

    def on_single(json, data):
        data.update(json)

    url = hgmozilla.Revision.get_url('nightly')
    hgmozilla.Revision(queries=[
        Query(url, [{'node': 'default'}, {'node': '1584ba8c1b86'}], on_pair, pair),
        Query(url, {'node': 'default'}, on_single, single),
    ]).wait()

    expected_keys = (
        'pushid', 'pushuser', 'pushdate', 'user',
        'branch', 'date', 'desc', 'node',
    )
    for rev in (pair['first'], pair['second'], single):
        for key in expected_keys:
            self.assertIn(key, rev)

    self.assertEqual(pair['second']['node'],
                     '1584ba8c1b86f9c4de5ccda5241cef36e80f042c')
    self.assertNotEqual(pair['first']['node'], pair['second']['node'])
    self.assertEqual(pair['first']['node'], single['node'])
def get_hg(self, bugs):
    """Check on mercurial whether the landed revisions of each bug really
    belong to it, and return the set of bug ids with no valid landing.

    For every revision recorded under a bug's ``land`` dict, fetch the
    revision metadata and mark the entry ``ok`` when the revision's bug id
    matches and it was not backed out. A bug whose revisions all failed the
    check (or never got a response) is reported as not landed.
    """
    url = hgmozilla.Revision.get_url(self.channel)
    queries = []
    not_landed = set()

    def handler_rev(json, data):
        info = utils.get_info_from_hg(json)
        if info["bugid"] == data["bugid"] and not info["backedout"]:
            data["ok"] = True

    for info in bugs.values():
        for rev, i in info.get("land", {}).items():
            queries.append(Query(url, {"node": rev}, handler_rev, i))

    if queries:
        hgmozilla.Revision(queries=queries).wait()

    for bugid, info in bugs.items():
        # Use .get(): handler_rev only ever *sets* "ok", so a revision whose
        # query failed (or whose entry was never pre-seeded with "ok") would
        # otherwise raise KeyError here; a missing flag means "not ok".
        if all(not i.get("ok") for i in info.get("land", {}).values()):
            not_landed.add(bugid)

    return not_landed
def get_hg(self, bugs):
    """Fetch landing metadata for every recorded revision, then drop the
    revisions (and bugs) that don't belong to the bug or landed outside
    the [self.date, self.tomorrow) window, and analyze the survivors."""
    url = hgmozilla.Revision.get_url('nightly')

    def handler_rev(json, data):
        # pushdate is a (timestamp, tz) pair; normalize to a UTC date string.
        pushed = datetime.datetime.utcfromtimestamp(json['pushdate'][0])
        data['date'] = lmdutils.get_date_str(lmdutils.as_utc(pushed))
        data['backedout'] = utils.is_backout(json)
        m = BUG_PAT.search(json['desc'])
        # Blank out the bug id when the description references another bug.
        if not m or m.group(1) != data['bugid']:
            data['bugid'] = ''

    queries = [
        Query(url, {'node': rev}, handler_rev, i)
        for info in bugs.values()
        for rev, i in info['land'].items()
    ]

    if queries:
        hgmozilla.Revision(queries=queries).wait()

    # clean
    empty_bugs = []
    for bug, info in bugs.items():
        land = info['land']
        stale = [
            rev
            for rev, i in land.items()
            if not i['bugid']
            or not (self.date <= lmdutils.get_date_ymd(i['date']) < self.tomorrow)
        ]
        for rev in stale:
            del land[rev]
        if not land:
            empty_bugs.append(bug)
    for bug in empty_bugs:
        del bugs[bug]

    self.get_hg_patches(bugs)