示例#1
0
def walk_on_the_bt(channel, ts, max_days, info, sgn=None, verbose=False):
    """Gather, for each file seen in the backtraces, its node, the line hit
    in the bt and the most recent patches touching it.

    Args:
        channel (str): release channel to query.
        ts: UTC timestamp used as the reference time by FileStats.
        max_days (int): how far back to look for patches.
        info (dict): bt -> {'count', 'uuids', 'files', 'lines', 'processed'}.
        sgn (str): optional signature, only used for verbose logging.
        verbose (bool): forwarded to __warn.

    Returns:
        dict: file -> {'filename', 'node', 'line', 'patches'}.
    """
    files_info = {}
    treated = set()
    if sgn:
        __warn('Walk on the bt for signature %s' % sgn, verbose)

    # info is: bt->{'count', 'uuids', 'files', 'processed'}
    for i in info.values():
        if i['processed']:
            continue
        lines = i['lines']
        # enumerate replaces the hand-maintained `count` counter
        for count, f in enumerate(i['files']):
            if f and f not in treated:
                treated.add(f)
                filename, node = get_path_node(f)
                files_info[f] = {'filename': filename, 'node': node, 'line': lines[count], 'patches': []}
                if node and is_allowed(filename):
                    if sgn:
                        __warn('file %s' % filename, verbose)
                    fs = FileStats(path=filename, channel=channel, node=node, utc_ts=ts, max_days=max_days)
                    res = fs.get_last_patches()
                    if res:
                        # most recent push first; avoid the ambiguous name `l`
                        pairs = sorted(((r['node'], r['pushdate'][0]) for r in res),
                                       key=lambda p: p[1], reverse=True)
                        files_info[f]['patches'] = [
                            {'node': node_, 'pushdate': str(utils.get_date_from_timestamp(pushdate))}
                            for node_, pushdate in pairs
                        ]

    if sgn:
        __warn('Walk on the bt for signature %s finished.' % sgn, verbose)

    return files_info
示例#2
0
 def test_date_from_timestamp(self):
     """Round-trip a date through get_timestamp / get_date_from_timestamp."""
     date_str = '1975-03-16'
     expected_dt = datetime.datetime(1975, 3, 16, tzinfo=tzutc())
     dt = utils.get_date_ymd(date_str)
     ts = utils.get_timestamp(dt)
     self.assertEqual(dt, expected_dt)
     self.assertEqual(ts, 164160000)
     self.assertEqual(utils.get_date_from_timestamp(ts), dt)
示例#3
0
    def check_phab(self, attachment):
        """Check if the patch in Phabricator has been r+

        Args:
            attachment (dict): a bug attachment whose base64 'data' field
                holds a Phabricator revision URL.

        Returns:
            None if the attachment is obsolete; True if the revision is
            accepted by all its reviewers and not yet published; False
            otherwise (recently modified, no reviewers, wip, or published).
        """
        if attachment['is_obsolete'] == 1:
            return None

        # the attachment data is a base64-encoded Phabricator URL
        phab_url = base64.b64decode(attachment['data']).decode('utf-8')

        # extract the revision
        rev = PHAB_URL_PAT.search(phab_url).group(1)
        r = requests.post(
            PHAB_API,
            data={
                'api.token': self.phab_token,
                'queryKey': 'all',
                'constraints[ids][0]': rev,
                'attachments[reviewers]': 1,
            },
        )
        r.raise_for_status()
        data = r.json()['result']['data'][0]

        # this is a timestamp
        last_modified = data['fields']['dateModified']
        last_modified = lmdutils.get_date_from_timestamp(last_modified)
        if (self.date - last_modified).days <= self.nweeks * 7:
            # Do not do anything if recent changes in the bug
            return False

        reviewers = data['attachments']['reviewers']['reviewers']
        if not reviewers:
            return False

        # every reviewer must have accepted the revision
        for reviewer in reviewers:
            if reviewer['status'] != 'accepted':
                return False

        value = data['fields']['status'].get('value', '')
        if value == 'changes-planned':
            # even if the patch is r+ and not published, some changes may be required
            # so with the value 'changes-planned', the dev can say it's still a wip
            return False

        if value != 'published':
            return True

        return False
示例#4
0
    def check_phab(self, attachment):
        """Check if the patch in Phabricator has been r+

        Returns None for an obsolete attachment, True when the revision is
        accepted by every reviewer and still unpublished, False otherwise.
        """
        if attachment['is_obsolete'] == 1:
            return None

        # the attachment payload is a base64-encoded Phabricator URL
        phab_url = base64.b64decode(attachment['data']).decode('utf-8')

        # extract the revision id from the URL
        revision = PHAB_URL_PAT.search(phab_url).group(1)
        payload = {
            'api.token': self.phab_token,
            'queryKey': 'all',
            'constraints[ids][0]': revision,
            'attachments[reviewers]': 1,
        }
        response = requests.post(PHAB_API, data=payload)
        response.raise_for_status()
        data = response.json()['result']['data'][0]

        # dateModified is a timestamp
        last_modified = lmdutils.get_date_from_timestamp(data['fields']['dateModified'])
        if (self.date - last_modified).days <= self.nweeks * 7:
            # Do not do anything if recent changes in the bug
            return False

        reviewers = data['attachments']['reviewers']['reviewers']
        if not reviewers:
            return False

        # every reviewer must have accepted the revision
        if any(reviewer['status'] != 'accepted' for reviewer in reviewers):
            return False

        value = data['fields']['status'].get('value', '')
        if value == 'changes-planned':
            # even if the patch is r+ and not published, some changes may be required
            # so with the value 'changes-planned', the dev can say it's still a wip
            return False

        return value != 'published'
示例#5
0
    def check_phab(self, attachment):
        """Check if the patch in Phabricator has been r+

        Args:
            attachment (dict): a bug attachment whose base64 "data" field
                holds a Phabricator revision URL.

        Returns:
            None if the attachment is obsolete; True if the revision is
            accepted by all its reviewers and not yet published; False
            otherwise (recently modified, no reviewers, wip, or published).
        """
        if attachment["is_obsolete"] == 1:
            return None

        # the attachment data is a base64-encoded Phabricator URL
        phab_url = base64.b64decode(attachment["data"]).decode("utf-8")

        # extract the revision
        rev = PHAB_URL_PAT.search(phab_url).group(1)
        r = requests.post(
            PHAB_API,
            data={
                "api.token": self.phab_token,
                "queryKey": "all",
                "constraints[ids][0]": rev,
                "attachments[reviewers]": 1,
            },
        )
        r.raise_for_status()
        data = r.json()["result"]["data"][0]

        # this is a timestamp
        last_modified = data["fields"]["dateModified"]
        last_modified = lmdutils.get_date_from_timestamp(last_modified)
        if (self.date - last_modified).days <= self.nweeks * 7:
            # Do not do anything if recent changes in the bug
            return False

        reviewers = data["attachments"]["reviewers"]["reviewers"]
        if not reviewers:
            return False

        # every reviewer must have accepted the revision
        for reviewer in reviewers:
            if reviewer["status"] != "accepted":
                return False

        value = data["fields"]["status"].get("value", "")
        if value == "changes-planned":
            # even if the patch is r+ and not published, some changes may be required
            # so with the value 'changes-planned', the dev can say it's still a wip
            return False

        if value != "published":
            return True

        return False
def walk_on_the_bt(channel, ts, max_days, info, sgn=None, verbose=False):
    """Gather, for each file seen in the backtraces, its node, the line hit
    in the bt and the most recent patches touching it.

    Args:
        channel (str): release channel to query.
        ts: UTC timestamp used as the reference time by FileStats.
        max_days (int): how far back to look for patches.
        info (dict): bt -> {'count', 'uuids', 'files', 'lines', 'processed'}.
        sgn (str): optional signature, only used for verbose logging.
        verbose (bool): forwarded to __warn.

    Returns:
        dict: file -> {'filename', 'node', 'line', 'patches'}.
    """
    files_info = {}
    treated = set()
    if sgn:
        __warn('Walk on the bt for signature %s' % sgn, verbose)

    # info is: bt->{'count', 'uuids', 'files', 'processed'}
    for i in info.values():
        if i['processed']:
            continue
        lines = i['lines']
        # enumerate replaces the hand-maintained `count` counter
        for count, f in enumerate(i['files']):
            if f and f not in treated:
                treated.add(f)
                filename, node = get_path_node(f)
                files_info[f] = {
                    'filename': filename,
                    'node': node,
                    'line': lines[count],
                    'patches': []
                }
                if node and is_allowed(filename):
                    if sgn:
                        __warn('file %s' % filename, verbose)
                    fs = FileStats(path=filename,
                                   channel=channel,
                                   node=node,
                                   utc_ts=ts,
                                   max_days=max_days)
                    res = fs.get_last_patches()
                    if res:
                        # most recent push first; avoid the ambiguous name `l`
                        pairs = sorted(((r['node'], r['pushdate'][0]) for r in res),
                                       key=lambda p: p[1], reverse=True)
                        files_info[f]['patches'] = [{
                            'node': node_,
                            'pushdate': str(utils.get_date_from_timestamp(pushdate))
                        } for node_, pushdate in pairs]

    if sgn:
        __warn('Walk on the bt for signature %s finished.' % sgn, verbose)

    return files_info
示例#7
0
def collect(data, file_filter):
    """Collect the data we need in the pushlog got from hg.mozilla.org"""
    collected = []
    for push in data["pushes"].values():
        pushdate = lmdutils.get_date_from_timestamp(push["date"])
        for chgset in push["changesets"]:
            desc = chgset["desc"]
            entry = {
                "date": pushdate,
                "node": utils.short_rev(chgset["node"]),
                "backedout": is_backed_out(desc),
                "files": [f for f in chgset["files"] if file_filter(f)],
                "merge": len(chgset["parents"]) > 1,
                "bug": get_bug(desc),
                "author": hgauthors.analyze_author(chgset["author"]),
            }
            collected.append(entry)
    return collected
示例#8
0
    def check_phab(self, attachment, reviewers_phid):
        """Check if the patch in Phabricator has been r+"""
        if attachment["is_obsolete"] == 1:
            return None

        # attachment data is a base64-encoded Phabricator revision URL
        url = base64.b64decode(attachment["data"]).decode("utf-8")

        # extract the revision id from the URL
        rev = PHAB_URL_PAT.search(url).group(1)
        try:
            data = self.phab.load_revision(
                rev_id=int(rev), queryKey="all", attachments={"reviewers": 1}
            )
        except PhabricatorRevisionNotFoundException:
            return None

        # dateModified is a timestamp
        modified = lmdutils.get_date_from_timestamp(data["fields"]["dateModified"])
        if (self.date - modified).days <= self.nweeks * 7:
            # Do not do anything if recent changes in the bug
            return False

        reviewers = data["attachments"]["reviewers"]["reviewers"]

        if not reviewers:
            return False

        # phids of reviewers seen before a non-accepted one are still added
        # to reviewers_phid before the early return
        for rv in reviewers:
            if rv["status"] != "accepted":
                return False
            reviewers_phid.add(rv["reviewerPHID"])

        status_value = data["fields"]["status"].get("value", "")
        if status_value == "changes-planned":
            # even if the patch is r+ and not published, some changes may be required
            # so with the value 'changes-planned', the dev can say it's still a wip
            return False

        return status_value != "published"
示例#9
0
def collect(data, file_filter):
    """Collect the data we need in the pushlog got from hg.mozilla.org"""
    res = []
    for push in data['pushes'].values():
        pushdate = lmdutils.get_date_from_timestamp(push['date'])
        for chgset in push['changesets']:
            desc = chgset['desc']
            kept_files = [fname for fname in chgset['files'] if file_filter(fname)]
            res.append({
                'date': pushdate,
                'node': utils.short_rev(chgset['node']),
                'backedout': is_backed_out(desc),
                'files': kept_files,
                'merge': len(chgset['parents']) > 1,
                'bug': get_bug(desc),
                'author': hgauthors.analyze_author(chgset['author'])
            })
    return res
示例#10
0
def get_jsbugmon_regression(comment, product='Firefox'):
    """Extract from a jsbugmon comment the channel and the major version
    containing the regressing changeset. Returns ('', -1) when nothing
    can be resolved."""
    major = -1
    channel = ''
    match = jsbugmon_pattern.search(comment)
    if match:
        # the date in the jsbugmon comment is the author date and not the pushdate...
        # so we need to query mercurial the get the pushdate to get the related version
        rev_match = hg_rev_pattern.search(match.group(1))
        if rev_match:
            repo = rev_match.group(1)
            rev = rev_match.group(2)
            channel = 'nightly' if repo == 'central' else repo
            revinfo = Revision.get_revision(channel=channel, node=rev)
            pushdate = utils.get_date_from_timestamp(revinfo['pushdate'][0])
            versions = get_all_versions(product=product)[channel]
            for version_major, v in versions.items():
                start, end = v['dates'][0], v['dates'][1]
                if start <= pushdate and (end is None or pushdate <= end):
                    major = version_major
                    break
            # if major is 51 then it means that the regression found by jsbugmon is in 51
    return channel, major
def get_jsbugmon_regression(comment, product='Firefox'):
    """Extract from a jsbugmon comment the channel and the major version
    containing the regressing changeset.

    Args:
        comment (str): bug comment text to search for a jsbugmon changeset URL.
        product (str): product whose versions are looked up (default 'Firefox').

    Returns:
        tuple: (channel, major); ('', -1) when nothing could be resolved.
    """
    m = jsbugmon_pattern.search(comment)
    major = -1
    channel = ''
    if m:
        # the date in the jsbugmon comment is the author date and not the pushdate...
        # so we need to query mercurial the get the pushdate to get the related version
        changeset_url = m.group(1)
        m = hg_rev_pattern.search(changeset_url)
        if m:
            repo = m.group(1)
            rev = m.group(2)
            # the 'central' repo corresponds to the nightly channel
            channel = 'nightly' if repo == 'central' else repo
            revinfo = Revision.get_revision(channel=channel, node=rev)
            pushdate = utils.get_date_from_timestamp(revinfo['pushdate'][0])
            versions = get_all_versions(product=product)[channel]
            # find the version whose [start, end] date range contains the pushdate
            for __major, v in versions.items():
                if v['dates'][0] <= pushdate and (v['dates'][1] is None or
                                                  pushdate <= v['dates'][1]):
                    major = __major
                    break
            # if major is 51 then it means that the regression found by jsbugmon is in 51
    return channel, major
示例#12
0
def pushlog_for_rev_url(revision, channel, product):
    """Get the pushlog url for the build containing revision"""
    revinfo = Revision.get_revision(channel=channel, node=revision)
    push_date = lmdutils.get_date_from_timestamp(revinfo["pushdate"][0])
    return pushlog_for_pushdate_url(push_date, channel, product)
示例#13
0
 def get_date(self):
     """Return self.date converted from a timestamp to a date/datetime.

     Assumes self.date holds a numeric timestamp — TODO confirm with callers.
     """
     return lmdutils.get_date_from_timestamp(self.date)