def test_pushlog_for_change_nothing_found(mocker):
    """pushlog_for_change must raise when the response contains no push."""
    mocked = mocker.patch('mozregression.json_pushes.retry_get')
    mocked.return_value = Mock(json=Mock(return_value={}))
    with pytest.raises(MozRegressionError):
        JsonPushes().pushlog_for_change('invalid_changeset')
def test_pushlog_for_change_404_error(mocker):
    """A 404 status from the pushlog server must raise MozRegressionError."""
    mocked = mocker.patch('mozregression.json_pushes.retry_get')
    mocked.return_value = Mock(status_code=404)
    with pytest.raises(MozRegressionError):
        JsonPushes().pushlog_for_change('invalid_changeset')
def test_pushlog_for_change_404_error(mocker):
    # An HTTP 404 answer means the changeset is unknown: expect an error.
    not_found = Mock(status_code=404)
    mocker.patch('mozregression.json_pushes.retry_get',
                 return_value=not_found)
    jpushes = JsonPushes()
    with pytest.raises(MozRegressionError):
        jpushes.pushlog_for_change('invalid_changeset')
def test_pushlog_for_change_nothing_found(mocker):
    # An empty json payload means no push was found for the changeset.
    empty_response = Mock(json=Mock(return_value={}))
    mocker.patch('mozregression.json_pushes.retry_get',
                 return_value=empty_response)
    jpushes = JsonPushes()
    with pytest.raises(MozRegressionError):
        jpushes.pushlog_for_change('invalid_changeset')
def test_pushlog_for_change(mocker):
    """A valid changeset returns the single pushlog entry as-is."""
    expected = {'a': 'b'}
    mocked = mocker.patch('mozregression.json_pushes.retry_get')
    mocked.return_value = Mock(json=Mock(return_value={'1': expected}))
    assert JsonPushes().pushlog_for_change('validchangeset') == expected
def test_pushlog_for_change(mocker):
    # The pushlog dict stored under the single push id is returned as-is.
    expected = {"a": "b"}
    response = Mock(json=Mock(return_value={"1": expected}))
    mocker.patch("mozregression.json_pushes.retry_get", return_value=response)
    jpushes = JsonPushes()
    assert jpushes.pushlog_for_change("validchangeset") == expected
def test_pushlog_for_change(mocker):
    """pushlog_for_change returns the pushlog found for a valid changeset."""
    pushlog = {'a': 'b'}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(return_value={'1': pushlog}))
    jpushes = JsonPushes()
    result = jpushes.pushlog_for_change('validchangeset')
    assert result == pushlog
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if it is,
    return a tuple (branch, good_rev, bad_rev) so that bisection can
    continue on the merged branch.

    Returns None when the push is not a merge. Raises MozRegressionError
    when the merge commit can not be exploited.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    result = None
    self._logger.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    # full='1' asks the pushlog server for the complete changeset data,
    # including commit descriptions.
    push = jp.pushlog_for_change(most_recent_push.changeset, full='1')
    msg = push['changesets'][-1]['desc']
    self._logger.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg)
    # a real merge needs both a recognizable origin branch in the commit
    # message and at least two changesets (the merge commit plus one more)
    if not (branch and len(push['changesets']) >= 2):
        return
    try:
        # so, this is a merge. We can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push['changesets'][0]['node']
        # exclude the merge commit
        youngest = push['changesets'][-2]['node']
        self._logger.debug("This is a merge from %s" % branch)
        # we can't use directly the youngest changeset because we
        # don't know yet if it is good.
        #
        # PUSH1         PUSH2
        # [1 2]    [3 4 5 6 7]
        #    G     MERGE     B
        #
        # so first, grab it. This needs to be done on the right branch.
        jp2 = JsonPushes(branch)
        raw = [int(i) for i in
               jp2.pushlog_within_changes(oldest, youngest, raw=True)]
        # widen the push range by 2 below the minimum so we are sure to
        # include a known-good boundary on the origin branch
        data = jp2._request(jp2.json_pushes_url(
            startID=str(min(raw) - 2),
            endID=str(max(raw)),
        ))
        datakeys = [int(i) for i in data]
        oldest = data[str(min(datakeys))]["changesets"][0]
        youngest = data[str(max(datakeys))]["changesets"][-1]
        # we are ready to bisect further
        self._logger.info("************* Switching to %s" % branch)
        # honor --find-fix: swap good/bad if we are looking for a fix
        gr, br = self._reverse_if_find_fix(oldest, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        self._logger.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg
            )
        )
    self._logger.debug('End merge handling')
    return result
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if it is,
    return a tuple (branch, good_rev, bad_rev) so that bisection can
    continue on the merged branch.

    Returns None when the push is not a merge. Raises MozRegressionError
    when the merge commit can not be exploited.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    result = None
    LOG.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    # full='1' asks the pushlog server for the complete changeset data,
    # including commit descriptions.
    push = jp.pushlog_for_change(most_recent_push.changeset, full='1')
    msg = push['changesets'][-1]['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg)
    # a real merge needs both a recognizable origin branch in the commit
    # message and at least two changesets (the merge commit plus one more)
    if not (branch and len(push['changesets']) >= 2):
        return
    try:
        # so, this is a merge. We can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push['changesets'][0]['node']
        # exclude the merge commit
        youngest = push['changesets'][-2]['node']
        LOG.debug("This is a merge from %s" % branch)
        # we can't use directly the youngest changeset because we
        # don't know yet if it is good.
        #
        # PUSH1         PUSH2
        # [1 2]    [3 4 5 6 7]
        #    G     MERGE     B
        #
        # so first, grab it. This needs to be done on the right branch.
        jp2 = JsonPushes(branch)
        raw = [
            int(i) for i in
            jp2.pushlog_within_changes(oldest, youngest, raw=True)
        ]
        # widen the push range by 2 below the minimum so we are sure to
        # include a known-good boundary on the origin branch
        data = jp2._request(
            jp2.json_pushes_url(
                startID=str(min(raw) - 2),
                endID=str(max(raw)),
            ))
        datakeys = [int(i) for i in data]
        oldest = data[str(min(datakeys))]["changesets"][0]
        youngest = data[str(max(datakeys))]["changesets"][-1]
        # we are ready to bisect further
        LOG.info("************* Switching to %s" % branch)
        # honor --find-fix: swap good/bad if we are looking for a fix
        gr, br = self._reverse_if_find_fix(oldest, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg))
    LOG.debug('End merge handling')
    return result
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds through taskcluster,
    resolving changesets with the hg.mozilla.org json-pushes API."""

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        self.index = taskcluster.client.Index()
        self.queue = taskcluster.Queue()
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch,
                                  path=fetch_config.branch_path)

    def _check_changeset(self, changeset):
        # return the full changeset
        return self.jpushes.pushlog_for_change(changeset)['changesets'][-1]

    def find_build_info(self, changeset, fetch_txt_info=True,
                        check_changeset=False):
        """
        Find build info for an inbound build, given a changeset.

        if `check_changeset` is True, the given changeset might be partial
        (< 40 chars) because it will be verified and updated using json
        pushes. Return a :class:`InboundBuildInfo` instance.
        """
        # find a task id
        if check_changeset:
            try:
                changeset = self._check_changeset(changeset)
            # FIX: `except MozRegressionError, exc` is Python 2-only
            # syntax and a SyntaxError on Python 3; use `as exc`.
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        tk_route = self.fetch_config.tk_inbound_route(changeset)
        self._logger.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3'):
                err = False
                try:
                    # retry with the short (12 chars) changeset in the route
                    tk_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(tk_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run for the build archive
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                build_url = self.queue.buildUrl(
                    'getLatestArtifact',
                    task_id,
                    a['name']
                )
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url(),
            task_id=task_id,
        )
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds through taskcluster,
    resolving changesets or dates with the json-pushes API."""

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.client.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def _check_changeset(self, changeset):
        # return the full changeset
        return self.jpushes.pushlog_for_change(changeset)['changesets'][-1]

    def find_build_info(self, changeset, fetch_txt_info=True,
                        check_changeset=False):
        """
        Find build info for an inbound build, given a changeset or a date.

        if `check_changeset` is True, the given changeset might be partial
        (< 40 chars) because it will be verified and updated using json
        pushes. Return a :class:`InboundBuildInfo` instance.
        """
        # a date/datetime is first resolved to the changeset pushed on it
        if is_date_or_datetime(changeset):
            changeset = self.jpushes.revision_for_date(changeset)
            check_changeset = False

        # find a task id
        if check_changeset:
            try:
                changeset = self._check_changeset(changeset)
            # FIX: `except MozRegressionError, exc` is Python 2-only
            # syntax and a SyntaxError on Python 3; use `as exc`.
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        tk_route = self.fetch_config.tk_inbound_route(changeset)
        LOG.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3'):
                err = False
                try:
                    # retry with the short (12 chars) changeset in the route
                    tk_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(tk_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run for the build archive
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                # private artifacts require a signed url
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id, a['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url(),
            task_id=task_id,
        )
def _check_changeset(self, changeset):
    """Expand a possibly-partial changeset into the full one via json-pushes."""
    from mozregression.json_pushes import JsonPushes
    pushes = JsonPushes(branch=self.fetch_config.inbound_branch)
    # the last changeset of the push is the full form of `changeset`
    return pushes.pushlog_for_change(changeset)['changesets'][-1]