def test_backout_multiple_changesets(self):
        self.assertEqual(parse_backouts(
            'Backed out changesets 4b6aa5c0a1bf and fdf38a41d92b '
            '(bug 1150549) for Mulet crashes.'),
            (['4b6aa5c0a1bf', 'fdf38a41d92b'], [1150549]))

        self.assertEqual(parse_backouts(
            'Back out changesets ed293fc9596c and f18cb4c41578 '
            '(bug 1174700) for fatal assertions in all Windows debug '
            'reftest runs.'),
            (['ed293fc9596c', 'f18cb4c41578'], [1174700]))
    def test_backout_partial(self):
        # bug without node
        self.assertIsNone(parse_backouts('Bug 1 - More stuff; r=romulus'))

        # node without bug
        self.assertEqual(
            parse_backouts(
                'Backout f484160e0a08 for causing slow heat death of the universe'
            ), (['f484160e0a08'], []))

        # backout not on first line
        self.assertIsNone(
            parse_backouts('Bug 123 - Blah blah; r=gps\n'
                           'Backout ffffffffffff'))
    def test_backout_single(self):
        self.assertEqual(
            parse_backouts('Backed out changeset 6435d5aab611 (bug 858680)'),
            (['6435d5aab611'], [858680]))
        self.assertEqual(parse_backouts(
            'Backed out changeset 2f9d54c153ed on CLOSED TREE (bug 1067325)'),
            (['2f9d54c153ed'], [1067325]))
        self.assertEqual(
            parse_backouts('Backout b8601df335c1 (Bug 1174857) for bustage'),
            (['b8601df335c1'], [1174857]))

        self.assertEqual(
            parse_backouts('Back out b8601df335c1 (Bug 1174857) for bustage'),
            (['b8601df335c1'], [1174857]))
    def test_backout_multiple_changesets(self):
        # 'and' separated
        self.assertEqual(parse_backouts(
            'Backed out changesets 4b6aa5c0a1bf and fdf38a41d92b (bug 1150549) for Mulet crashes.'),
            (['4b6aa5c0a1bf', 'fdf38a41d92b'], [1150549]))

        # more than two
        self.assertEqual(parse_backouts(
            'Backed out changesets a8abdd77a92c, dda84d1fb12b and 21fdf73bbb17 (bug 1302907) for Windows build bustage'),
            (['a8abdd77a92c', 'dda84d1fb12b', '21fdf73bbb17'], [1302907]))

        # oxford comma
        self.assertEqual(parse_backouts(
            'Backed out changesets a8abdd77a92c, dda84d1fb12b, and 21fdf73bbb17 (bug 1302907) for Windows build bustage'),
            (['a8abdd77a92c', 'dda84d1fb12b', '21fdf73bbb17'], [1302907]))
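Taken together, these tests pin down the contract of parse_backouts: on success it returns a (nodes, bugs) tuple, where nodes is a list of changeset node strings and bugs is a list of bug numbers, and it returns None when the first line of the message is not recognized as a backout. A minimal standalone sketch, assuming the parser is importable as mozautomation.commitparser (the module these tests exercise):

from mozautomation.commitparser import parse_backouts

# A recognized backout message yields the backed-out nodes and bug numbers.
nodes, bugs = parse_backouts('Backed out changeset 6435d5aab611 (bug 858680)')
assert nodes == ['6435d5aab611']
assert bugs == [858680]

# Anything else yields None rather than an empty tuple.
assert parse_backouts('Bug 1 - More stuff; r=romulus') is None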
Example #7
def template_backsoutnodes(repo, ctx, **args):
    description = encoding.fromlocal(ctx.description())
    backouts = parse_backouts(description)
    # return just the nodes, not the bug numbers
    if backouts and backouts[0]:
        # TRACKING hg47
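        # templateutil may be unavailable (None) on older Mercurial releases;
        # fall back to returning a plain list of nodes there.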
        if templateutil:
            return templateutil.hybridlist(backouts[0], 'backouts')
        return backouts[0]
def backoutsgen(_context):
    '''Generator for backouts list'''
    backouts = commitparser.parse_backouts(description)
    if backouts:
        for node in backouts[0]:
            try:
                bctx = scmutil.revsymbol(repo, node)
                yield {b'node': bctx.hex()}
            except error.RepoLookupError:
                pass
def _find_backout_commits(hg_repo, revset):
    commits = []
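    # hg log yields newest-first; only non-merge changesets touching servo/
    # are considered.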
    for commit in hg_repo.log('%s - merge()' % revset,
                              files=['path:servo/'], removed=True):
        if commitparser.parse_backouts(commit.desc, strict=True):
            commits.append(LightWeightCommit(commit.node,
                                             commit.desc.decode('utf-8'),
                                             commit.author.decode('utf-8')))
    # Return oldest first, so they will be applied in the correct order.
    commits.reverse()
    return commits
    def test_backout_single(self):
        # 'backed out'
        self.assertEqual(parse_backouts(
            'Backed out changeset 6435d5aab611 (bug 858680)'),
            (['6435d5aab611'], [858680]))

        # 'backout of'
        self.assertEqual(parse_backouts(
            'backout of f9abb9c83452 (bug 1319111) for crashes, r=bz'),
            (['f9abb9c83452'], [1319111]))

        # 'backout revision'
        self.assertEqual(parse_backouts(
            'Backout revision 20a9d741cdf4 (bug 1354641) a=me'),
            (['20a9d741cdf4'], [1354641]))

        # 'backout'
        self.assertEqual(parse_backouts(
            'Backout b8601df335c1 (Bug 1174857) for bustage'),
            (['b8601df335c1'], [1174857]))
Example #11
    def commits_backed_out(self):
        commits = []
        bugs = []
        if self.is_backout:
            nodes_bugs = commitparser.parse_backouts(self.msg)
            if nodes_bugs is None:
                # We think this is a backout, but have no idea what it backs
                # out. It's not clear how to handle that case, so for now we
                # pretend it isn't a backout.
                return commits, set(bugs)

            nodes, bugs = nodes_bugs
            # Assuming that all commits are listed.
            for node in nodes:
                commits.append(node.decode("ascii"))

        return commits, set(bugs)
Example #12
    def commits_backed_out(self):
        commits = []
        bugs = []
        if self.is_backout:
            nodes_bugs = commitparser.parse_backouts(self.commit.message)
            if nodes_bugs is None:
                # We think this is a backout, but have no idea what it backs
                # out. It's not clear how to handle that case, so for now we
                # pretend it isn't a backout.
                return commits, bugs

            nodes, bugs = nodes_bugs
            # Assuming that all commits are listed.
            for node in nodes:
                git_sha = self.repo.cinnabar.hg2git(node)
                commits.append(GeckoCommit(self.repo, git_sha))

        return commits, set(bugs)
Example #13
    def commits_backed_out(self):
        # type: () -> Tuple[List[GeckoCommit], Set[int]]
        # TODO: should bugs be int here
        commits = []  # type: List[GeckoCommit]
        bugs = []  # type: List[int]
        if self.is_backout:
            nodes_bugs = commitparser.parse_backouts(self.msg)
            if nodes_bugs is None:
                # We think this is a backout, but have no idea what it backs
                # out. It's not clear how to handle that case, so for now we
                # pretend it isn't a backout.
                return commits, set(bugs)

            nodes, bugs = nodes_bugs
            # Assuming that all commits are listed.
            for node in nodes:
                git_sha = cinnabar(self.repo).hg2git(node.decode("ascii"))
                commits.append(GeckoCommit(self.repo, git_sha))

        return commits, set(bugs)
Example #14
def get_backoutbynode(ext_name, repo, ctx):
    """Look for changesets that back out this one."""
    # We limit the distance we search for backouts because an exhaustive
    # search could be very intensive. e.g. you load up the root commit
    # on a repository with 200,000 changesets and that commit is never
    # backed out. This finds most backouts because backouts typically happen
    # shortly after a bad commit is introduced.
    thisshort = short(ctx.node())
    count = 0
    searchlimit = repo.ui.configint(ext_name, b'backoutsearchlimit', 100)
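    # The revset '%ld::' selects ctx and all of its descendants, i.e. the
    # changesets that could possibly back it out.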
    for bctx in repo.set(b'%ld::', [ctx.rev()]):
        count += 1
        if count >= searchlimit:
            break

        backouts = commitparser.parse_backouts(
            encoding.fromlocal(bctx.description()))
        if backouts and thisshort in backouts[0]:
            return bctx.hex()
    return None
def _find_backed_out_urls(hg_repo, github_url, commit_desc, commit_node):
    backed_out_urls = []
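    # Note: parse_backouts returns None for messages it does not recognize as
    # backouts, so this unpacking assumes commit_desc is a known backout message.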
    revs, bugs = commitparser.parse_backouts(commit_desc, strict=True)

    for backed_out_rev in revs:
        backed_out_desc = hg_repo.log(backed_out_rev)[0].desc
        backed_out_desc = backed_out_desc.splitlines()[0]

        m = SERVO_MERGE_RE.match(backed_out_desc)
        if m:
            backed_out_url = '%s/pull/%s' % (github_url, m.group(1))
            backed_out_urls.append(backed_out_url)
            logger.info('%s backing out %s: %s'
                        % (commit_node[:12], backed_out_rev[:12],
                           backed_out_url))

        else:
            logger.warning('failed to find merge id in #%s: %s'
                           % (backed_out_rev, backed_out_desc))

    return backed_out_urls
    def test_backout_n_changesets(self):
        # all nodes returned
        self.assertEqual(
            parse_backouts(
                'Backed out 3 changesets (bug 1310885) for heap write hazard failures\n'
                'Backed out changeset 77352010d8e8 (bug 1310885)\n'
                'Backed out changeset 9245a2fbb974 (bug 1310885)\n'
                'Backed out changeset 7c2db290c4b6 (bug 1310885)'),
            (['77352010d8e8', '9245a2fbb974', '7c2db290c4b6'], [1310885]))

        # nodes must be provided on following lines in strict mode
        self.assertIsNone(
            parse_backouts(
                'Backed out 2 changesets (bug 1335751) for mochitest devtools failures',
                strict=True))

        # .. but is ok without strict mode
        self.assertEqual(
            parse_backouts(
                'Backed out 2 changesets (bug 1335751) for mochitest devtools failures',
                strict=False), ([], [1335751]))

        # .. default should be with strict disabled
        self.assertEqual(
            parse_backouts(
                'Backed out 2 changesets (bug 1335751) for mochitest devtools failures'
            ), ([], [1335751]))

        # the correct number of nodes must be provided in strict mode
        self.assertIsNone(
            parse_backouts(
                'Backed out 2 changesets (bug 1360992) for a 70% failure rate in test_fileReader.html on ASan e10s\n'
                'Backed out changeset ab9fdee3a6a4 (bug 1360992)',
                strict=True))

        # .. but is ok without strict mode
        self.assertEqual(
            parse_backouts(
                'Backed out 2 changesets (bug 1360992) for a 70% failure rate in test_fileReader.html on ASan e10s\n'
                'Backed out changeset ab9fdee3a6a4 (bug 1360992)'),
            (['ab9fdee3a6a4'], [1360992]))
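Because parse_backouts returns None rather than a tuple when a message does not parse (and strict mode rejects even more messages), callers that unpack the result usually guard for that case first. A minimal defensive sketch, assuming the same parse_backouts import as in the tests above and a hypothetical commit_message string:

result = parse_backouts(commit_message, strict=True)
if result is None:
    nodes, bugs = [], []
else:
    nodes, bugs = result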
    def test_backout_n_changesets(self):
        self.assertEqual(
            parse_backouts(
                'Backed out 6 changesets (bug 1164777, bug 1163207, bug 1156914, '
                'bug 1164778) for SM(cgc) caused by something in the push.'),
            ([], [1164777, 1163207, 1156914, 1164778]))
Example #19
def addmetadata(repo, ctx, d, onlycheap=False):
    """Add changeset metadata for hgweb templates."""
    description = encoding.fromlocal(ctx.description())

    d['bugs'] = []
    for bug in commitparser.parse_bugs(description):
        d['bugs'].append({
            'no': str(bug),
            'url': 'https://bugzilla.mozilla.org/show_bug.cgi?id=%s' % bug,
        })

    d['reviewers'] = []
    for reviewer in commitparser.parse_reviewers(description):
        d['reviewers'].append({
            'name': reviewer,
            'revset': 'reviewer(%s)' % reviewer,
        })

    d['backsoutnodes'] = []
    backouts = commitparser.parse_backouts(description)
    if backouts:
        for node in backouts[0]:
            try:
                bctx = repo[node]
                d['backsoutnodes'].append({'node': bctx.hex()})
            except error.RepoLookupError:
                pass

    # Repositories can define which TreeHerder repository they are associated
    # with.
    treeherder = repo.ui.config('mozilla', 'treeherder_repo')
    if treeherder:
        d['treeherderrepourl'] = 'https://treeherder.mozilla.org/#/jobs?repo=%s' % treeherder
        d['treeherderrepo'] = treeherder

        push = repo.pushlog.pushfromchangeset(ctx)
        # Don't print Perfherder link on non-publishing repos (like Try)
        # because the previous push likely has nothing to do with this
        # push.
        if push and push.nodes and repo.ui.configbool('phases', 'publish',
                                                      True):
            lastpushhead = repo[push.nodes[0]].hex()
            d['perfherderurl'] = (
                'https://treeherder.mozilla.org/perf.html#/compare?'
                'originalProject=%s&'
                'originalRevision=%s&'
                'newProject=%s&'
                'newRevision=%s') % (treeherder, push.nodes[-1], treeherder,
                                     lastpushhead)

    # If this changeset was converted from another one and we know which repo
    # it came from, add that metadata.
    convertrevision = ctx.extra().get('convert_revision')
    if convertrevision:
        sourcerepo = repo.ui.config('hgmo', 'convertsource')
        if sourcerepo:
            d['convertsourcepath'] = sourcerepo
            d['convertsourcenode'] = convertrevision

    if onlycheap:
        return

    # Obtain the Gecko/app version/milestone.
    #
    # We could probably only do this if the repo is a known app repo (by
    # looking at the initial changeset). But path-based lookup is relatively
    # fast, so just do it. However, we keep this below the "onlycheap"
    # early return because resolving manifests is relatively slow, and
    # resolving several on changelist pages may add seconds to page load times.
    try:
        fctx = repo.filectx('config/milestone.txt', changeid=ctx.node())
        lines = fctx.data().splitlines()
        lines = [l for l in lines if not l.startswith('#') and l.strip()]

        if lines:
            d['milestone'] = lines[0].strip()
    except error.LookupError:
        pass

    # Look for changesets that back out this one.
    #
    # We limit the distance we search for backouts because an exhaustive
    # search could be very intensive. e.g. you load up the root commit
    # on a repository with 200,000 changesets and that commit is never
    # backed out. This finds most backouts because backouts typically happen
    # shortly after a bad commit is introduced.
    thisshort = short(ctx.node())
    count = 0
    searchlimit = repo.ui.configint('hgmo', 'backoutsearchlimit', 100)
    for bctx in repo.set('%ld::', [ctx.rev()]):
        count += 1
        if count >= searchlimit:
            break

        backouts = commitparser.parse_backouts(
            encoding.fromlocal(bctx.description()))
        if backouts and thisshort in backouts[0]:
            d['backedoutbynode'] = bctx.hex()
            break
    def test_backout_missing(self):
        self.assertIsNone(parse_backouts('Bug 1 - More stuff; r=romulus'))
def is_good_message(ui, c):
    def message(fmt):
        formatted_fmt = fmt % {b'rev': c.hex()[:12]}
        ui.write(
            b'\n\n'
            b'************************** ERROR ****************************\n'
            b'%s\n%s\n%s\n'
            b'*************************************************************\n'
            b'\n\n' % (formatted_fmt, c.user(), c.description()))

    desc = c.description()
    firstline = desc.splitlines()[0]

    # Ensure backout commit descriptions are well formed.
    if commitparser.is_backout(desc):
        try:
            if not commitparser.parse_backouts(desc, strict=True):
                raise ValueError('Rev %(rev)s has malformed backout message.')
            nodes, bugs = commitparser.parse_backouts(desc, strict=True)
            if not nodes:
                raise ValueError(
                    'Rev %(rev)s is missing backed out revisions.')
        except ValueError as e:
            # Reject invalid backout messages on vendored paths, warn otherwise.
            if is_vendor_ctx(c):
                message(pycompat.bytestr(e))
                return False
            ui.write(b'Warning: %s\n' % (pycompat.bytestr(e) % {
                b'rev': c.hex()[:12]
            }))

    # Vendored merges must reference source revisions.
    if b'Source-Revision: ' in desc and is_vendor_ctx(c):
        ui.write(b'(%s looks like a vendoring change; ignoring commit message '
                 b'hook)\n' % short(c.node()))
        return True

    if c.user() in [b"ffxbld", b"seabld", b"tbirdbld", b"cltbld"]:
        return True

    if trySyntax.search(desc):
        message(b"Rev %(rev)s uses try syntax. (Did you mean to push to Try "
                b"instead?)")
        return False

    # Match against [PATCH] and [PATCH n/m]
    if b"[PATCH" in desc:
        message(b'Rev %(rev)s contains git-format-patch "[PATCH]" cruft. Use '
                b'git-format-patch -k to avoid this.')
        return False

    if INVALID_REVIEW_FLAG_RE.search(firstline):
        message(b"Rev %(rev)s contains 'r?' in the commit message. Please use "
                b"'r=' instead.")
        return False

    for r in goodMessage:
        if r.search(firstline):
            return True

    desc_lower = desc.lower()
    if desc_lower.startswith((b'merge', b'merging', b'automated merge')):
        if len(c.parents()) == 2:
            return True
        else:
            message(b"Rev %(rev)s claims to be a merge, but it has only one "
                    b"parent.")
            return False

    if desc_lower.startswith((b'back', b'revert')):
        # Purposely ambiguous: it's ok to say "backed out rev N" or
        # "reverted to rev N-1"
        message(b"Backout rev %(rev)s needs a bug number or a rev id.")
    else:
        message(
            b'Rev %(rev)s needs "Bug N" or "No bug" in the commit message.')

    return False
def addmetadata(repo, ctx, d, onlycheap=False):
    """Add changeset metadata for hgweb templates."""
    bugs = list(set(commitparser.parse_bugs(ctx.description())))
    d['bugs'] = []
    for bug in commitparser.parse_bugs(ctx.description()):
        d['bugs'].append({
            'no': str(bug),
            'url': 'https://bugzilla.mozilla.org/show_bug.cgi?id=%s' % bug,
        })

    d['reviewers'] = []
    for reviewer in commitparser.parse_reviewers(ctx.description()):
        d['reviewers'].append({
            'name': reviewer,
            'revset': 'reviewer(%s)' % reviewer,
        })

    d['backsoutnodes'] = []
    backouts = commitparser.parse_backouts(ctx.description())
    if backouts:
        for node in backouts[0]:
            try:
                bctx = repo[node]
                d['backsoutnodes'].append({'node': bctx.hex()})
            except error.LookupError:
                pass

    # Repositories can define which TreeHerder repository they are associated
    # with.
    treeherder = repo.ui.config('mozilla', 'treeherder_repo')
    if treeherder:
        d['treeherderrepourl'] = 'https://treeherder.mozilla.org/#/jobs?repo=%s' % treeherder

    if onlycheap:
        return

    # Obtain the Gecko/app version/milestone.
    #
    # We could probably only do this if the repo is a known app repo (by
    # looking at the initial changeset). But path-based lookup is relatively
    # fast, so just do it. However, we keep this below the "onlycheap"
    # early return because resolving manifests is relatively slow, and
    # resolving several on changelist pages may add seconds to page load times.
    try:
        fctx = repo.filectx('config/milestone.txt', changeid=ctx.node())
        lines = fctx.data().splitlines()
        lines = [l for l in lines if not l.startswith('#') and l.strip()]

        if lines:
            d['milestone'] = lines[0].strip()
    except error.LookupError:
        pass

    # Look for changesets that back out this one.
    #
    # We limit the distance we search for backouts because an exhaustive
    # search could be very intensive. e.g. you load up the root commit
    # on a repository with 200,000 changesets and that commit is never
    # backed out. This finds most backouts because backouts typically happen
    # shortly after a bad commit is introduced.
    thisshort = short(ctx.node())
    count = 0
    searchlimit = repo.ui.configint('hgmo', 'backoutsearchlimit', 100)
    for bctx in repo.set('%ld::', [ctx.rev()]):
        count += 1
        if count >= searchlimit:
            break

        backouts = commitparser.parse_backouts(bctx.description())
        if backouts and thisshort in backouts[0]:
            d['backedoutbynode'] = bctx.hex()
            break