def test_hgmo_cache():
    """HgRev.create() memoizes instances; direct construction never does."""
    # Two create() calls with the same (rev, branch) pair hit the cache
    # and hand back the very same object.
    first = HgRev.create("abcdef", "autoland")
    second = HgRev.create("abcdef", "autoland")
    assert first == second

    # Calling the constructor directly bypasses the cache, so every call
    # yields a distinct instance.
    first = HgRev("abcdef", "autoland")
    second = HgRev("abcdef", "autoland")
    assert first != second
def create_push(self, push_id):
    """Build a `Push` for *push_id* on this instance's branch.

    The push id and date are pre-populated from the json-pushes payload
    so the returned `Push` does not need another hgmo query for them.
    """
    payload = HgRev.load_json_push(self.branch, push_id)
    new_push = Push(payload["changesets"][::-1], branch=self.branch)

    # avoids the need to query hgmo to find this info
    new_push._id = push_id
    new_push._date = payload["date"]

    return new_push
def run_push_revisions(self, from_date, to_date, branch):
    """Return a list of push records for *branch* between the two dates.

    Each record is a dict with keys ``pushid`` (int), ``date`` and
    ``revs`` (changesets in reversed json-pushes order).
    """
    pushes = HgRev.load_json_pushes_between_dates(branch, from_date, to_date)

    records = []
    for push_id, push_data in pushes.items():
        records.append(
            {
                "pushid": int(push_id),
                "date": push_data["date"],
                "revs": push_data["changesets"][::-1],
            }
        )
    return records
def _iterate_parents(self, max_depth=None):
    """Yield this push and then successive parent pushes.

    Args:
        max_depth (int, optional): Stop after yielding this many
            additional pushes beyond the current one. When None, iterate
            until a parent can no longer be found.

    Yields:
        Push: This push first, then each parent in turn.
    """
    other = self
    for i in itertools.count():
        yield other

        # Optimization to load parent json-pushes data in a single query (up
        # to max_depth at a time).
        prev_id = other.id - 1
        if prev_id not in HgRev.JSON_PUSHES_CACHE:
            depth = max_depth or MAX_DEPTH
            # Prefetch a window of pushes ending at prev_id, clamped at 0
            # for very low push ids.
            # NOTE(review): the window start `prev_id - 1 - depth + i` is
            # slightly asymmetric with _iterate_children's `next_id + depth - i`
            # (extra -1) — confirm the off-by-one is intentional.
            HgRev.load_json_pushes_between_ids(
                self.branch, max(prev_id - 1 - depth + i, 0), prev_id)

        try:
            other = other.parent
        except ParentPushNotFound:
            break

        if max_depth is not None and i == max_depth:
            break
def _iterate_children(self, max_depth=None):
    """Yield this push and then successive child pushes.

    Args:
        max_depth (int, optional): Stop after yielding this many
            additional pushes beyond the current one. When None, iterate
            until a child can no longer be found.

    Yields:
        Push: This push first, then each child in turn.
    """
    other = self
    for i in itertools.count():
        yield other

        # Optimization to load child json-pushes data in a single query (up
        # to MAX_DEPTH at a time).
        next_id = other.id + 1
        if next_id not in HgRev.JSON_PUSHES_CACHE:
            depth = max_depth or MAX_DEPTH
            # Prefetch a window of pushes starting at the current push; the
            # window shrinks as i grows since earlier iterations already
            # warmed part of the range.
            HgRev.load_json_pushes_between_ids(self.branch, other.id, next_id + depth - i)

        try:
            other = other.child
        except ChildPushNotFound:
            break

        if max_depth is not None and i == max_depth:
            break
def test_hgmo_backedoutby(responses):
    """`backedoutby` is the backing-out node, or None when never backed out."""
    # "abcdef" backs out "123456" and is itself not backed out.
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/integration/autoland/json-automationrelevance/abcdef",
        json={
            "changesets": [
                {
                    "node": "abcdef",
                    "backsoutnodes": [{"node": "123456"}],
                    "pushhead": "abcdef",
                }
            ]
        },
        status=200,
    )
    # "123456" records that it was backed out by "abcdef".
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/integration/autoland/json-automationrelevance/123456",
        json={
            "changesets": [
                {
                    "node": "123456",
                    "backedoutby": "abcdef",
                    "backsoutnodes": [],
                    "pushhead": "123456",
                },
            ]
        },
        status=200,
    )

    backout_rev = HgRev("abcdef")
    assert backout_rev.backedoutby is None

    backed_out_rev = HgRev("123456")
    assert backed_out_rev.backedoutby == "abcdef"
def __init__(self, revs, branch="autoland"):
    """Create a Push from one revision (str) or a list of revisions.

    Args:
        revs: A single revision hash or a list of them; the first entry
            identifies the push.
        branch: The repository branch the push belongs to.
    """
    if isinstance(revs, str):
        self._revs = None
        revs = [revs]
    else:
        self._revs = revs

    self.branch = branch
    self._hgmo = HgRev.create(revs[0], branch=self.branch)

    # Lazily-populated push metadata.
    self._id = None
    self._date = None

    # Need to use full hash in the index.
    self.rev = revs[0] if len(revs[0]) == 40 else self._hgmo.node

    self.index = BASE_INDEX.format(branch=self.branch, rev=self.rev)

    # Unique identifier for a Push across branches
    self.push_uuid = f"{self.branch}/{self.rev}"
def _is_classified_as_cause(self, first_appareance_push, classifications):
    """Checks a 'fixed by commit' classification to figure out what push it references.

    Args:
        first_appareance_push: The push where the failure first appeared.
        classifications: Iterable of (classification, note) pairs.

    Returns:
        bool or None: True, if the classification references this push.
        False, if the classification references another push.
        None, if it is not clear what the classification references.
    """
    # Short (12-char) prefixes of the revisions mentioned in
    # 'fixed by commit' notes; bail out if there are none.
    fixed_by_commit_classification_notes = set(
        n[:12]
        for c, n in classifications
        if c == "fixed by commit"
        if n is not None
    )
    if len(fixed_by_commit_classification_notes) == 0:
        return None

    # If the failure was classified as fixed by commit, and the fixing commit
    # is a backout of the current push, it is definitely a regression of the
    # current push.
    # If the failure was classified as fixed by commit, and the fixing commit
    # is a backout of another push, it is definitely not a regression of the
    # current push.
    # Unless some condition holds which makes us doubt about the correctness of the
    # classification.
    # - the backout commit also backs out one of the commits of this push;
    # - the other push backed-out by the commit which is mentioned in the classification
    #   is landed after the push where the failure occurs (so, it can't have caused it);
    # - the backout push also contains a commit backing out one of the commits of this push.
    for classification_note in fixed_by_commit_classification_notes:
        try:
            fix_hgmo = HgRev.create(classification_note, branch=self.branch)
            # Not a backout push at all: this note can't tell us anything.
            if len(fix_hgmo.backouts) == 0:
                continue
        except PushNotFound:
            logger.warning(
                f"Classification note ({classification_note}) references a revision which does not exist on push {first_appareance_push.rev}"
            )
            return None

        # self_fix: evidence the fixing push reverts THIS push.
        # other_fixes: evidence it reverts some OTHER push.
        self_fix = None
        other_fixes = set()

        # If the backout commit also backs out one of the commits of this push, then
        # we can consider it as a regression of this push.
        # NOTE: this should never happen in practice because of current development
        # practices.
        for backout, backedouts in fix_hgmo.backouts.items():
            if backout[:12] != classification_note[:12]:
                continue

            if any(
                rev[:12] in {backedout[:12] for backedout in backedouts}
                for rev in self.revs
            ):
                self_fix = backout[:12]
                break

        # Otherwise, if the backout push also contains the backout commit of this push,
        # we can consider it as a regression of this push.
        if self.backedout:
            for backout in fix_hgmo.backouts:
                if backout[:12] == self.backedoutby[:12]:
                    self_fix = backout[:12]
                    break

        # If one of the commits in the backout push is a bustage fix, then we could
        # consider it as a regression of this push.
        if self_fix is None:
            for bug in self.bugs:
                if bug in fix_hgmo.bugs_without_backouts:
                    self_fix = fix_hgmo.bugs_without_backouts[bug][:12]
                    break

        # Otherwise, as long as the commit which was backed-out was landed **before**
        # the appearance of this failure, we can be sure it was its cause and so
        # the current push is not at fault.
        # TODO: We should actually check if the failure was already happening in the parents
        # and compare the backout push ID with the ID of first parent where it failed.
        for backout, backedouts in fix_hgmo.backouts.items():
            if self.backedout and backout[:12] == self.backedoutby[:12]:
                continue

            if any(
                HgRev.create(backedout, branch=self.branch).pushid <= first_appareance_push.id
                for backedout in backedouts
            ):
                other_fixes.add(backout[:12])

        # If the backout push contains a bustage fix of another push, then we could
        # consider it as a regression of another push.
        if len(fix_hgmo.bugs_without_backouts) > 0:
            other_parent = first_appareance_push
            for other_parent in other_parent._iterate_parents(MAX_DEPTH):
                if other_parent != self:
                    for bug in other_parent.bugs:
                        if bug in fix_hgmo.bugs_without_backouts:
                            other_fixes.add(
                                fix_hgmo.bugs_without_backouts[bug][:12])

        # Conflicting evidence: decide by whether the note points at a
        # specific mid-push commit rather than the push head.
        if self_fix and other_fixes:
            # If the classification points to a commit in the middle of the backout push and not the backout push head,
            # we can consider the classification to be correct.
            if (self_fix != fix_hgmo.pushhead[:12]
                    and classification_note[:12] == self_fix
                    and classification_note[:12] not in other_fixes):
                return True
            elif any(other_fix != fix_hgmo.pushhead[:12]
                     and classification_note[:12] == other_fix
                     and classification_note[:12] != self_fix
                     for other_fix in other_fixes):
                return False

            # Evidence both ways and the note doesn't disambiguate.
            return None

        if self_fix:
            return True

        if other_fixes:
            return False

    # No note yielded a conclusion.
    return None
def parent(self): """Returns the parent push of this push. Returns: Push: A `Push` instance representing the parent push. Raises: :class:`~mozci.errors.ParentPushNotFound`: When no suitable parent push can be detected. """ # Mozilla-unified and try allow multiple heads, so we can't rely on # `self.id - 1` to be the parent. if self.branch not in ("mozilla-unified", "try"): return self.create_push(self.id - 1) changesets = [ c for c in self._hgmo.changesets if c.get("phase") == "draft" ] if not changesets: # Supports mozilla-unified as well as older automationrelevance # files that don't contain the phase. changesets = self._hgmo.changesets parents = changesets[0]["parents"] if len(parents) > 1: raise ParentPushNotFound("merge commits are unsupported", rev=self.rev, branch=self.branch) # Search for this revision in the following repositories. We search # autoland last as it would run the fewest tasks, so a push from one of # the other repositories would be preferable. branches = ("mozilla-central", "mozilla-beta", "mozilla-release", "autoland") found_on = [] parent_rev = parents[0] for branch in branches: try: hgmo = HgRev.create(parent_rev, branch=branch) head = hgmo.pushhead except PushNotFound: continue found_on.append(branch) # Revision exists in repo but is not the 'pushhead', so keep searching. if head != parent_rev: continue return Push(parent_rev, branch=branch) if found_on: branches = found_on reason = "was not a push head" else: # This should be extremely rare (if not impossible). reason = "was not found" branches = [ b[len("mozilla-"):] if b.startswith("mozilla-") else b for b in branches ] msg = f"parent revision '{parent_rev[:12]}' {reason} on any of {', '.join(branches)}" raise ParentPushNotFound(msg, rev=self.rev, branch=self.branch)
def test_hgmo_backouts(responses):
    # Verifies that HgRev.backouts maps each backout commit in a push to the
    # list of nodes it backs out, and that raw changesets stay accessible.
    #
    # NOTE(review): this relies on the `responses` library replaying
    # registrations for the same URL in registration order, one per request,
    # so each HgRev("abcdef") below receives the next payload in turn.

    # 1st payload: no backouts at all.
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/integration/autoland/json-automationrelevance/abcdef",
        json={"changesets": [{"node": "789", "backsoutnodes": [], "pushhead": "789"}]},
        status=200,
    )
    # 2nd payload: a single backout commit ("789" backs out "123456").
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/integration/autoland/json-automationrelevance/abcdef",
        json={
            "changesets": [
                {
                    "node": "789",
                    "backsoutnodes": [{"node": "123456"}],
                    "pushhead": "789",
                }
            ]
        },
        status=200,
    )
    # 3rd payload: two backout commits in the same push.
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/integration/autoland/json-automationrelevance/abcdef",
        json={
            "changesets": [
                {
                    "node": "789",
                    "backsoutnodes": [{"node": "123456"}],
                    "pushhead": "789",
                },
                {"node": "jkl", "backsoutnodes": [{"node": "asd"}, {"node": "fgh"}]},
            ]
        },
        status=200,
    )
    # 4th payload: pushhead "ghi" differs from the changeset node, which
    # triggers a follow-up fetch of "ghi" (next registration below).
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/integration/autoland/json-automationrelevance/abcdef",
        json={
            "changesets": [
                {
                    "node": "789",
                    "backsoutnodes": [{"node": "123456"}],
                    "pushhead": "ghi",
                },
            ]
        },
        status=200,
    )
    # Payload for the pushhead "ghi": contains an extra backout ("ghi"
    # backs out "789").
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/integration/autoland/json-automationrelevance/ghi",
        json={
            "changesets": [
                {"node": "ghi", "backsoutnodes": [{"node": "789"}], "pushhead": "ghi"},
                {
                    "node": "789",
                    "backsoutnodes": [{"node": "123456"}],
                    "pushhead": "ghi",
                },
            ]
        },
        status=200,
    )

    # No backouts.
    h = HgRev("abcdef")
    assert h.backouts == {}
    assert h.changesets[0]["backsoutnodes"] == []

    # Single backout commit.
    h = HgRev("abcdef")
    assert h.backouts == {"789": ["123456"]}
    assert h.changesets[0]["backsoutnodes"] == [{"node": "123456"}]

    # Multiple backout commits in one push.
    h = HgRev("abcdef")
    assert h.backouts == {"789": ["123456"], "jkl": ["asd", "fgh"]}
    assert h.changesets[0]["backsoutnodes"] == [{"node": "123456"}]
    assert h.changesets[1]["backsoutnodes"] == [{"node": "asd"}, {"node": "fgh"}]

    # Backouts are merged from the pushhead's data as well.
    h = HgRev("abcdef")
    assert h.backouts == {"789": ["123456"], "ghi": ["789"]}
    assert h.changesets[0]["backsoutnodes"] == [{"node": "123456"}]