def test_push_nothing_found(mocker):
    """push() must raise MozRegressionError when the pushlog is empty."""
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(return_value={}))

    jpushes = JsonPushes()
    with pytest.raises(MozRegressionError):
        jpushes.push('invalid_changeset')
def test_push_404_error(mocker):
    """A 404 pushlog response must raise MozRegressionError."""
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(status_code=404)

    jpushes = JsonPushes()
    with pytest.raises(MozRegressionError):
        jpushes.push('invalid_changeset')
def test_push_with_date_raise_appropriate_error():
    """The EmptyPushlogError message must mention the date and the branch."""
    jpushes = JsonPushes(branch='inbound')
    jpushes.pushes_within_changes = Mock(side_effect=EmptyPushlogError)

    with pytest.raises(EmptyPushlogError) as ctx:
        jpushes.push(date(2015, 1, 1))

    assert str(ctx.value) == \
        'No pushes available for the date 2015-01-01 on inbound.'
def test_push_404_error(mocker):
    """A 404 pushlog response carrying an error payload raises MozRegressionError."""
    retry_get = mocker.patch("mozregression.json_pushes.retry_get")
    retry_get.return_value = Mock(
        status_code=404,
        json=Mock(return_value={"error": "unknown revision"}),
    )

    jpushes = JsonPushes()
    with pytest.raises(MozRegressionError):
        jpushes.push("invalid_changeset")
def check_for_move(self, repo, changeset):
    """
    Checks if the warning has moved lines but still exists.
    """
    if self.ignore_lines:
        return False

    logs = retrieve_test_logs(
        repo, changeset[:12], self.platform, warning_re=self.warning_re)

    counts = Counter()
    for log in logs:
        if log:
            counts.update(log.warnings)

    # Strip the trailing ", line N" so the warning matches any line number.
    prefix = re.match(r'^(.*), line [0-9]+$', self.warning).group(1)

    found = False
    for warning, count in counts.items():
        if warning.startswith(prefix) and count > self.warning_limit:
            print("Possible line move:\n %d - %s" % (count, warning))
            found = True

    if found:
        push = JsonPushes(repo).push(changeset)
        print("Try this date: %s" % push.utc_date)

    return found
def check_for_move(self, repo, changeset):
    """
    Checks if the warning has moved lines but still exists.

    Fixed: the original used the Python 2-only ``print`` statement and
    ``dict.iteritems()``; both are replaced with their Python 3 forms
    (consistent with the other ``check_for_move`` variant in this file).
    """
    if self.ignore_lines:
        return False

    files = retrieve_test_logs(
        repo, changeset[:12],
        self.platform, warning_re=self.warning_re)

    combined_warnings = Counter()
    for log in files:
        if log:
            combined_warnings.update(log.warnings)

    possible_move_found = False
    # Drop the trailing ", line N" so a moved warning still matches.
    normalized = re.match(r'^(.*), line [0-9]+$', self.warning).group(1)
    for (k, v) in combined_warnings.items():
        if k.startswith(normalized) and v > self.warning_limit:
            print("Possible line move:\n %d - %s" % (v, k))
            possible_move_found = True

    if possible_move_found:
        jp = JsonPushes(repo)
        push = jp.push(changeset)
        print("Try this date: %s" % push.utc_date)

    return possible_move_found
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if so, prepare
    to continue the bisection from the merged branch.

    Returns a (branch, good, bad) tuple, or None when this is not a merge.
    """
    result = None
    LOG.debug("Starting merge handling...")
    # only the commit of the most recent push needs to be inspected
    newest_push = self.build_range[1]
    pushes = JsonPushes(newest_push.repo_name)
    push = pushes.push(newest_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg)
    if not (branch and len(push.changesets) >= 2):
        return
    try:
        # so, this is a merge. We can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push.changesets[0]['node']
        # exclude the merge commit
        youngest = push.changesets[-2]['node']
        LOG.debug("This is a merge from %s" % branch)
        # we can't use directly the youngest changeset because we
        # don't know yet if it is good.
        #
        # PUSH1          PUSH2
        # [1 2] [3 4 5 6 7]
        # G              MERGE B
        #
        # so first, grab it. This needs to be done on the right branch.
        merged_branch = JsonPushes(branch)
        push_ids = [int(p.push_id)
                    for p in merged_branch.pushes_within_changes(oldest,
                                                                 youngest)]
        data = merged_branch.pushes(
            startID=str(min(push_ids) - 2),
            endID=str(max(push_ids)),
        )
        oldest = data[0].changesets[0]
        youngest = data[-1].changesets[-1]
        # we are ready to bisect further
        LOG.info("************* Switching to %s" % branch)
        gr, br = self._reverse_if_find_fix(oldest, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                newest_push.repo_name,
                newest_push.short_changeset,
                msg
            )
        )
    LOG.debug('End merge handling')
    return result
def find_bugids_in_push(branch, changeset):
    """Return the bug ids mentioned in the commit messages of a push."""
    push = JsonPushes(branch).push(changeset, full='1')
    bug_ids = set()
    for chset in push.changesets:
        match = RE_BUG_ID.search(chset['desc'])
        if match:
            bug_ids.add(match.group(1))
    return list(bug_ids)
def _choose_integration_branch(self, changeset):
    """
    Tries to determine which integration branch the given changeset
    originated from by checking the date the changeset first showed up
    in each repo. The repo with the earliest date is chosen.
    """
    landings = {}
    for branch in ("autoland", "mozilla-inbound"):
        jpushes = JsonPushes(branch)
        try:
            landings[branch] = jpushes.push(changeset, full='1').timestamp
        except EmptyPushlogError:
            LOG.debug("Didn't find %s in %s" % (changeset, branch))
    # earliest landing wins
    return min(landings, key=landings.get)
def test_push(mocker):
    """push() parses the pushlog JSON into a Push and queries the right URL."""
    pushlog = {"1": {"changesets": ["a", "b", "c"], "date": 123456}}
    retry_get = mocker.patch("mozregression.json_pushes.retry_get")
    retry_get.return_value = Mock(json=Mock(return_value=pushlog))

    push = JsonPushes().push("validchangeset")

    assert isinstance(push, Push)
    assert push.push_id == "1"
    assert push.changeset == "c"
    assert push.changesets[0] == "a"
    assert push.timestamp == 123456
    assert push.utc_date == datetime(1970, 1, 2, 10, 17, 36)
    assert str(push) == "c"
    retry_get.assert_called_once_with(
        "https://hg.mozilla.org/mozilla-central/json-pushes"
        "?changeset=validchangeset")
def test_push(mocker):
    """push() parses the pushlog JSON into a Push against the inbound repo."""
    pushlog = {'1': {'changesets': ['a', 'b', 'c'], 'date': 123456}}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(return_value=pushlog))

    push = JsonPushes().push('validchangeset')

    assert isinstance(push, Push)
    assert push.push_id == '1'
    assert push.changeset == 'c'
    assert push.changesets[0] == 'a'
    assert push.timestamp == 123456
    assert push.utc_date == datetime(1970, 1, 2, 10, 17, 36)
    assert str(push) == 'c'
    retry_get.assert_called_once_with(
        'https://hg.mozilla.org/integration/mozilla-inbound/json-pushes'
        '?changeset=validchangeset'
    )
def test_push(mocker):
    """push() parses the pushlog JSON into a Push against mozilla-central."""
    pushlog = {'1': {'changesets': ['a', 'b', 'c'], 'date': 123456}}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(return_value=pushlog))

    push = JsonPushes().push('validchangeset')

    assert isinstance(push, Push)
    assert push.push_id == '1'
    assert push.changeset == 'c'
    assert push.changesets[0] == 'a'
    assert push.timestamp == 123456
    assert push.utc_date == datetime(1970, 1, 2, 10, 17, 36)
    assert str(push) == 'c'
    retry_get.assert_called_once_with(
        'https://hg.mozilla.org/mozilla-central/json-pushes'
        '?changeset=validchangeset'
    )
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if so, switch the
    bisection to the branch it came from.

    Returns a (branch, good, bad) tuple, or None when there is nothing to do.

    Fixed: the original used the Python 2-only ``except E, exc`` syntax;
    replaced with ``except E as exc``. The unused ``result`` local was removed.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    LOG.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    push = jp.push(most_recent_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
    if not (branch and len(push.changesets) >= 2):
        # We did not find a branch, lets check the integration branches
        # if we are bisecting m-c
        LOG.debug(
            "Did not find a branch, checking all integration branches")
        if get_name(most_recent_push.repo_name) == 'mozilla-central' and \
                len(push.changesets) >= 2:
            branch = self._choose_integration_branch(
                most_recent_push.changeset)
            jp2 = JsonPushes(branch)
            try:
                data = jp2.pushes_within_changes(
                    push.changesets[0]['node'],
                    push.changesets[-1]['node'])
            except MozRegressionError as exc:
                LOG.error(
                    "Failed to find changes in branch '%s' (error: %s)"
                    % (branch, exc))
                raise
            LOG.info("************* Switching to %s by"
                     " process of elimination (no branch detected in"
                     " commit message)" % branch)
            gr, br = self._reverse_if_find_fix(data[0].changeset,
                                               data[-1].changeset)
            return (branch, gr, br)
        else:
            return
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds via the taskcluster index.

    Fixed: the original used the Python 2-only ``except E, exc`` syntax;
    replaced with ``except E as exc``.
    """

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.client.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or a
        date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`InboundBuildInfo` instance.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        tk_route = self.fetch_config.tk_inbound_route(push)
        LOG.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3') \
                    and push.timestamp < TIMESTAMP_GECKO_V2:
                err = False
                try:
                    old_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(old_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)

        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id, a['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds via the taskcluster index.

    Fixed: the original used the Python 2-only ``except E, exc`` syntax;
    replaced with ``except E as exc``.
    """

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.client.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or a
        date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`InboundBuildInfo` instance.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        tk_route = self.fetch_config.tk_inbound_route(push)
        LOG.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3') \
                    and push.timestamp < TIMESTAMP_GECKO_V2:
                err = False
                try:
                    old_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(old_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound("Unable to find completed runs for task %s"
                                    % task_id)

        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth(
                    'getArtifact',
                    task_id,
                    run_id,
                    a['name']
                )
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if so, continue
    the bisection from the merged branch.

    Returns a (branch, good, bad) tuple, or None when there is nothing
    more to do.
    """
    result = None
    LOG.debug("Starting merge handling...")
    # only the commit of the most recent push needs to be inspected
    most_recent_push = self.build_range[1]
    jpushes = JsonPushes(most_recent_push.repo_name)
    push = jpushes.push(most_recent_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)

    if not (branch and len(push.changesets) >= 2):
        # We did not find a branch, lets check the integration branches
        # if we are bisecting m-c
        LOG.debug("Did not find a branch, checking all integration branches")
        if get_name(most_recent_push.repo_name) != 'mozilla-central' or \
                len(push.changesets) < 2:
            return
        branch = self._choose_integration_branch(most_recent_push.changeset)
        oldest = push.changesets[0]['node']
        youngest = push.changesets[-1]['node']
        LOG.info("************* Switching to %s by"
                 " process of elimination (no branch detected in"
                 " commit message)" % branch)
    else:
        # so, this is a merge. see how many changesets are in it, if it
        # is just one, we have our answer
        if len(push.changesets) == 2:
            LOG.info("Merge commit has only two revisions (one of which "
                     "is the merge): we are done")
            return
        # Otherwise, we can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push.changesets[0]['node']
        # exclude the merge commit
        youngest = push.changesets[-2]['node']
        LOG.info("************* Switching to %s" % branch)

    # we can't use directly the oldest changeset because we
    # don't know yet if it is good.
    #
    # PUSH1          PUSH2
    # [1 2] [3 4 5 6 7]
    # G              MERGE B
    #
    # so first grab the previous push to get the last known good
    # changeset. This needs to be done on the right branch.
    try:
        merged_branch = JsonPushes(branch)
        push_ids = [int(p.push_id)
                    for p in merged_branch.pushes_within_changes(oldest,
                                                                 youngest)]
        data = merged_branch.pushes(
            startID=str(min(push_ids) - 2),
            endID=str(max(push_ids)),
        )
        older = data[0].changeset
        youngest = data[-1].changeset
        # we are ready to bisect further
        gr, br = self._reverse_if_find_fix(older, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg
            )
        )
    LOG.debug('End merge handling')
    return result
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds, trying several taskcluster
    routes in order.

    Fixed: the original mixed Python 3 ``except E as exc`` with the Python
    2-only ``except E, exc`` form; the latter is replaced with ``as``.
    """

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or a
        date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`InboundBuildInfo` instance.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        tk_routes = self.fetch_config.tk_inbound_routes(push)
        try:
            task_id = None
            stored_failure = None
            for tk_route in tk_routes:
                LOG.debug('using taskcluster route %r' % tk_route)
                try:
                    task_id = self.index.findTask(tk_route)['taskId']
                except TaskclusterFailure as ex:
                    LOG.debug('nothing found via route %r' % tk_route)
                    stored_failure = ex
                    continue
                if task_id:
                    status = self.queue.status(task_id)['status']
                    break
            if not task_id:
                raise stored_failure
        except TaskclusterFailure:
            raise BuildInfoNotFound("Unable to find build info using the"
                                    " taskcluster route %r" %
                                    self.fetch_config.tk_inbound_route(push))

        # find a completed run for that task
        run_id, build_date = None, None
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound("Unable to find completed runs for task %s"
                                    % task_id)

        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id, a['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds, trying each taskcluster
    route in turn until one resolves to a task."""

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or a
        date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`InboundBuildInfo` instance.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        try:
            task_id = None
            stored_failure = None
            for route in self.fetch_config.tk_inbound_routes(push):
                LOG.debug('using taskcluster route %r' % route)
                try:
                    task_id = self.index.findTask(route)['taskId']
                except TaskclusterFailure as ex:
                    LOG.debug('nothing found via route %r' % route)
                    stored_failure = ex
                    continue
                if task_id:
                    status = self.queue.status(task_id)['status']
                    break
            if not task_id:
                # every route failed; surface the last taskcluster error
                raise stored_failure
        except TaskclusterFailure:
            raise BuildInfoNotFound("Unable to find build info using the"
                                    " taskcluster route %r" %
                                    self.fetch_config.tk_inbound_route(push))

        # pick the most recent completed run of that task
        run_id, build_date = None, None
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)

        # scan the run's artifacts for one matching the build regex
        build_url = None
        for artifact in self.queue.listArtifacts(task_id, run_id)['artifacts']:
            if self.build_regex.search(os.path.basename(artifact['name'])):
                meth = self.queue.buildUrl
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id,
                                 artifact['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
class IntegrationInfoFetcher(InfoFetcher):
    """Fetch build information for integration-branch builds, checking both
    possible taskcluster root urls."""

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        self.jpushes = JsonPushes(branch=fetch_config.integration_branch)

    def find_build_info(self, push):
        """
        Find build info for an integration build, given a Push, a changeset
        or a date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`IntegrationBuildInfo` instance.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        try:
            # taskcluster builds have two possible root urls: we switched
            # from taskcluster.net -> firefox-ci-tc.services.mozilla.com
            # around November 9. to make things faster, we'll iterate through
            # them based on the one that most likely applies to this push
            root_urls = [TC_ROOT_URL, OLD_TC_ROOT_URL]
            if push.utc_date < TC_ROOT_URL_MIGRATION_FLAG_DATE:
                root_urls.reverse()
            task_id = None
            status = None
            for tc_root_url in root_urls:
                LOG.debug("using taskcluster root url %s" % tc_root_url)
                options = self.fetch_config.tk_options(tc_root_url)
                tc_index = taskcluster.Index(options)
                tc_queue = taskcluster.Queue(options)
                stored_failure = None
                for route in self.fetch_config.tk_routes(push):
                    LOG.debug("using taskcluster route %r" % route)
                    try:
                        task_id = tc_index.findTask(route)["taskId"]
                    except TaskclusterFailure as ex:
                        LOG.debug("nothing found via route %r" % route)
                        stored_failure = ex
                        continue
                    if task_id:
                        status = tc_queue.status(task_id)["status"]
                        break
                if status:
                    break
            if not task_id:
                # every route on every root url failed
                raise stored_failure
        except TaskclusterFailure:
            raise BuildInfoNotFound("Unable to find build info using the"
                                    " taskcluster route %r"
                                    % self.fetch_config.tk_route(push))

        # pick the most recent completed run of that task
        run_id, build_date = None, None
        for run in reversed(status["runs"]):
            if run["state"] == "completed":
                run_id = run["runId"]
                build_date = datetime.strptime(run["resolved"],
                                               "%Y-%m-%dT%H:%M:%S.%fZ")
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)

        # scan the run's artifacts for one matching the build regex
        build_url = None
        for artifact in tc_queue.listArtifacts(task_id, run_id)["artifacts"]:
            if self.build_regex.search(os.path.basename(artifact["name"])):
                meth = tc_queue.buildUrl
                if self.fetch_config.tk_needs_auth():
                    meth = tc_queue.buildSignedUrl
                build_url = meth("getArtifact", task_id, run_id,
                                 artifact["name"])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return IntegrationBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if so, continue
    the bisection from the merged branch.

    Returns a (branch, good, bad) tuple, or None when there is nothing
    more to do.
    """
    result = None
    LOG.debug("Starting merge handling...")
    # only the commit of the most recent push needs to be inspected
    most_recent_push = self.build_range[1]
    jpushes = JsonPushes(most_recent_push.repo_name)
    push = jpushes.push(most_recent_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)

    if not (branch and len(push.changesets) >= 2):
        # We did not find a branch, lets check the integration branches
        # if we are bisecting m-c
        LOG.debug("Did not find a branch, checking all integration branches")
        if get_name(most_recent_push.repo_name) != 'mozilla-central' or \
                len(push.changesets) < 2:
            return
        branch = self._choose_integration_branch(most_recent_push.changeset)
        oldest = push.changesets[0]['node']
        youngest = push.changesets[-1]['node']
        LOG.info("************* Switching to %s by"
                 " process of elimination (no branch detected in"
                 " commit message)" % branch)
    else:
        # so, this is a merge. see how many changesets are in it, if it
        # is just one, we have our answer
        if len(push.changesets) == 2:
            LOG.info("Merge commit has only two revisions (one of which "
                     "is the merge): we are done")
            return
        # Otherwise, we can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push.changesets[0]['node']
        # exclude the merge commit
        youngest = push.changesets[-2]['node']
        LOG.info("************* Switching to %s" % branch)

    # we can't use directly the oldest changeset because we
    # don't know yet if it is good.
    #
    # PUSH1          PUSH2
    # [1 2] [3 4 5 6 7]
    # G              MERGE B
    #
    # so first grab the previous push to get the last known good
    # changeset. This needs to be done on the right branch.
    try:
        merged_branch = JsonPushes(branch)
        push_ids = [int(p.push_id)
                    for p in merged_branch.pushes_within_changes(oldest,
                                                                 youngest)]
        data = merged_branch.pushes(
            startID=str(min(push_ids) - 2),
            endID=str(max(push_ids)),
        )
        older = data[0].changeset
        youngest = data[-1].changeset
        # we are ready to bisect further
        gr, br = self._reverse_if_find_fix(older, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg))
    LOG.debug('End merge handling')
    return result