def test_pushes_within_changes(mocker):
    """pushes_within_changes() must fetch the starting push plus the
    from/to range and return them merged, in push order."""
    first = {'1': {'changesets': ['a']}}
    rest = {'2': {'changesets': ['b']}, '3': {'changesets': ['c']}}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(side_effect=[first, rest]))

    pushes = JsonPushes().pushes_within_changes('fromchset', "tochset")

    expected = [('1', 'a'), ('2', 'b'), ('3', 'c')]
    assert [(p.push_id, p.changeset) for p in pushes] == expected
    retry_get.assert_has_calls([
        call('https://hg.mozilla.org/integration/mozilla-inbound/json-pushes'
             '?changeset=fromchset'),
        call().raise_for_status(),
        call().json(),
        call('https://hg.mozilla.org/integration/mozilla-inbound/json-pushes'
             '?fromchange=fromchset&tochange=tochset'),
        call().raise_for_status(),
        call().json()
    ])
def test_pushes_within_changes(mocker):
    """Verify the two-request flow of pushes_within_changes(): one query
    for the starting changeset, one for the fromchange/tochange range."""
    push_first = {'1': {'changesets': ['a']}}
    other_pushes = {'2': {'changesets': ['b']}, '3': {'changesets': ['c']}}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(
        json=Mock(side_effect=[push_first, other_pushes]))

    jpushes = JsonPushes()
    pushes = jpushes.pushes_within_changes('fromchset', "tochset")

    assert pushes[0].push_id == '1'
    assert pushes[0].changeset == 'a'
    assert pushes[1].push_id == '2'
    assert pushes[1].changeset == 'b'
    assert pushes[2].push_id == '3'
    assert pushes[2].changeset == 'c'
    retry_get.assert_has_calls([
        call('https://hg.mozilla.org/integration/mozilla-inbound/json-pushes'
             '?changeset=fromchset'),
        call().raise_for_status(),
        call().json(),
        call('https://hg.mozilla.org/integration/mozilla-inbound/json-pushes'
             '?fromchange=fromchset&tochange=tochset'),
        call().raise_for_status(),
        call().json()
    ])
def check_for_move(self, repo, changeset):
    """ Checks if the warning has moved lines but still exists. """
    # Line numbers are ignored entirely, so a "move" is meaningless.
    if self.ignore_lines:
        return False

    logs = retrieve_test_logs(repo, changeset[:12],
                              self.platform, warning_re=self.warning_re)

    # Aggregate warning counts across every retrieved log.
    combined_warnings = Counter()
    for log in logs:
        if log:
            combined_warnings.update(log.warnings)

    # Strip the trailing ", line N" so the warning text can match at any
    # line number.  NOTE(review): .group(1) raises AttributeError if
    # self.warning lacks that suffix -- assumed guaranteed by the caller.
    normalized = re.match(r'^(.*), line [0-9]+$', self.warning).group(1)

    moved = False
    for warning, count in combined_warnings.items():
        if warning.startswith(normalized) and count > self.warning_limit:
            print("Possible line move:\n %d - %s" % (count, warning))
            moved = True

    if moved:
        push = JsonPushes(repo).push(changeset)
        print("Try this date: %s" % push.utc_date)

    return moved
def _bisect_integration(self, good_rev, bad_rev, ensure_good_and_bad=False,
                        expand=0):
    """Bisect integration-branch pushes between two revisions.

    Recurses onto the merged branch when the final range is a merge.
    Returns 0 on success, 1 when no build artifacts could be found.
    """
    LOG.info("Getting %s builds between %s and %s"
             % (self.fetch_config.integration_branch, good_rev, bad_rev))
    handler = IntegrationHandler(find_fix=self.options.find_fix,
                                 ensure_good_and_bad=ensure_good_and_bad)
    result = self._do_bisect(handler, good_rev, bad_rev, expand=expand)

    if result == Bisection.FINISHED:
        LOG.info("No more integration revisions, bisection finished.")
        handler.print_range()
        if handler.good_revision == handler.bad_revision:
            LOG.warning("It seems that you used two changesets that are in"
                        " the same push. Check the pushlog url.")
        elif len(handler.build_range) == 2:
            # range reduced to 2 pushes (at least ones with builds):
            # one good, one bad.
            merge = handler.handle_merge()
            if merge:
                branch, good_rev, bad_rev = merge
                self.fetch_config.set_repo(branch)
                # continue bisecting on the branch the merge came from
                return self._bisect_integration(good_rev, bad_rev,
                                                expand=DEFAULT_EXPAND)
            else:
                # This code is broken, it prints out the message even when
                # there are multiple bug numbers or commits in the range.
                # Somebody should fix it before re-enabling it.
                return 0
                # print a bug if:
                # (1) there really is only one bad push (and we're not
                # just missing the builds for some intermediate builds)
                # (2) there is only one bug number in that push
                jp = JsonPushes(handler.build_range[1].repo_name)
                num_pushes = len(jp.pushes_within_changes(
                    handler.build_range[0].changeset,
                    handler.build_range[1].changeset))
                if num_pushes == 2:
                    bugids = find_bugids_in_push(
                        handler.build_range[1].repo_name,
                        handler.build_range[1].changeset)
                    if len(bugids) == 1:
                        word = 'fix' if handler.find_fix else 'regression'
                        LOG.info("Looks like the following bug has the "
                                 " changes which introduced the"
                                 " {}:\n{}".format(word, bug_url(bugids[0])))
    elif result == Bisection.USER_EXIT:
        self._print_resume_info(handler)
    else:
        # NO_DATA. With integration branches, this can not happen if
        # changesets are incorrect - so builds are probably too old
        LOG.info(
            'There are no build artifacts for these changesets'
            ' (they are probably too old).')
        return 1
    return 0
def check_for_move(self, repo, changeset):
    """ Checks if the warning has moved lines but still exists. """
    # Line numbers are ignored entirely, so a "move" is meaningless.
    if self.ignore_lines:
        return False

    # hg changesets are queried by their 12-char short hash here.
    files = retrieve_test_logs(
        repo, changeset[:12],
        self.platform, warning_re=self.warning_re)

    # Aggregate warning counts across every available log.
    combined_warnings = Counter()
    for log in files:
        if log:
            combined_warnings.update(log.warnings)

    possible_move_found = False
    # Strip the trailing ", line N" so the same warning text matches at a
    # new line number.  NOTE(review): .group(1) raises AttributeError if
    # self.warning lacks that suffix -- assumed guaranteed upstream.
    normalized = re.match(r'^(.*), line [0-9]+$', self.warning).group(1)
    for (k, v) in combined_warnings.iteritems():
        if k.startswith(normalized) and v > self.warning_limit:
            print "Possible line move:\n %d - %s" % (v, k)
            possible_move_found = True

    if possible_move_found:
        # Suggest a bisection date based on the push containing the move.
        jp = JsonPushes(repo)
        push = jp.push(changeset)
        print "Try this date: %s" % push.utc_date

    return possible_move_found
def test_pushlog_for_change(mocker):
    """pushlog_for_change() unwraps the single pushlog entry."""
    expected = {"a": "b"}
    retry_get = mocker.patch("mozregression.json_pushes.retry_get")
    retry_get.return_value = Mock(json=Mock(return_value={"1": expected}))
    assert JsonPushes().pushlog_for_change("validchangeset") == expected
def test_pushlog_for_change(mocker):
    """Single-entry pushlog responses are returned unwrapped."""
    pushlog = {'a': 'b'}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    response = Mock(json=Mock(return_value={'1': pushlog}))
    retry_get.return_value = response
    jpushes = JsonPushes()
    result = jpushes.pushlog_for_change('validchangeset')
    assert result == pushlog
def test_pushlog_for_change_404_error(mocker):
    """A 404 response must surface as MozRegressionError."""
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(status_code=404)
    with pytest.raises(MozRegressionError):
        JsonPushes().pushlog_for_change('invalid_changeset')
def test_pushlog_for_change_nothing_found(mocker):
    """An empty pushlog dict must surface as MozRegressionError."""
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(return_value={}))
    with pytest.raises(MozRegressionError):
        JsonPushes().pushlog_for_change('invalid_changeset')
def test_push_404_error(mocker):
    """A 404 with an 'unknown revision' payload raises MozRegressionError."""
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(
        status_code=404,
        json=Mock(return_value={"error": "unknown revision"}))
    with pytest.raises(MozRegressionError):
        JsonPushes().push('invalid_changeset')
def _bisect_inbounds(self, good_rev, bad_rev, ensure_good_and_bad=False,
                     expand=0):
    """Bisect inbound builds between two revisions.

    Recurses onto the merged branch when the final range is a merge.
    Returns 0 on success, 1 when no build artifacts were found.
    """
    LOG.info("Getting %s builds between %s and %s"
             % (self.fetch_config.inbound_branch, good_rev, bad_rev))
    handler = InboundHandler(find_fix=self.options.find_fix,
                             ensure_good_and_bad=ensure_good_and_bad)
    result = self._do_bisect(handler, good_rev, bad_rev, expand=expand)

    if result == Bisection.FINISHED:
        LOG.info("No more inbound revisions, bisection finished.")
        handler.print_range()
        if handler.good_revision == handler.bad_revision:
            LOG.warning("It seems that you used two changesets that are in"
                        " the same push. Check the pushlog url.")
        elif len(handler.build_range) == 2:
            # range reduced to 2 pushes (at least ones with builds):
            # one good, one bad.
            merge = handler.handle_merge()
            if merge:
                branch, good_rev, bad_rev = merge
                self.fetch_config.set_repo(branch)
                # continue bisecting on the branch the merge came from
                return self._bisect_inbounds(good_rev, bad_rev,
                                             expand=DEFAULT_EXPAND)
            else:
                # This code is broken, it prints out the message even when
                # there are multiple bug numbers or commits in the range.
                # Somebody should fix it before re-enabling it.
                return 0
                # print a bug if:
                # (1) there really is only one bad push (and we're not
                # just missing the builds for some intermediate builds)
                # (2) there is only one bug number in that push
                jp = JsonPushes(handler.build_range[1].repo_name)
                num_pushes = len(jp.pushes_within_changes(
                    handler.build_range[0].changeset,
                    handler.build_range[1].changeset))
                if num_pushes == 2:
                    bugids = find_bugids_in_push(
                        handler.build_range[1].repo_name,
                        handler.build_range[1].changeset)
                    if len(bugids) == 1:
                        word = 'fix' if handler.find_fix else 'regression'
                        LOG.info("Looks like the following bug has the "
                                 " changes which introduced the"
                                 " {}:\n{}".format(word, bug_url(bugids[0])))
    elif result == Bisection.USER_EXIT:
        self._print_resume_info(handler)
    else:
        # NO_DATA. With inbounds, this can not happen if changesets
        # are incorrect - so builds are probably too old
        LOG.info(
            'There are no build artifacts on inbound for these'
            ' changesets (they are probably too old).')
        return 1
    return 0
def handle_merge(self):
    """If the most recent push of the range is a merge, return
    (branch, good_rev, bad_rev) so bisection can continue on the merged
    branch; return None otherwise.

    Raises MozRegressionError when the merge cannot be exploited.
    """
    result = None
    LOG.debug("Starting merge handling...")
    # only the most recent push of the range can be the merge commit
    top_push = self.build_range[1]
    jp = JsonPushes(top_push.repo_name)
    push = jp.push(top_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg)
    # a merge push must name a source branch and hold >= 2 changesets
    if not (branch and len(push.changesets) >= 2):
        return
    try:
        # so, this is a merge. We can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push.changesets[0]['node']
        # exclude the merge commit
        youngest = push.changesets[-2]['node']
        LOG.debug("This is a merge from %s" % branch)

        # we can't use directly the youngest changeset because we
        # don't know yet if it is good.
        #
        # PUSH1      PUSH2
        # [1 2] [3 4 5 6 7]
        #    G    MERGE  B
        #
        # so first, grab it. This needs to be done on the right branch.
        jp2 = JsonPushes(branch)
        push_ids = [int(p.push_id)
                    for p in jp2.pushes_within_changes(oldest, youngest)]
        # widen the range two pushes back for a known-good anchor
        data = jp2.pushes(
            startID=str(min(push_ids) - 2),
            endID=str(max(push_ids)),
        )
        oldest = data[0].changesets[0]
        youngest = data[-1].changesets[-1]

        # we are ready to bisect further
        LOG.info("************* Switching to %s" % branch)
        gr, br = self._reverse_if_find_fix(oldest, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                top_push.repo_name, top_push.short_changeset, msg))
    LOG.debug('End merge handling')
    return result
def test_push_with_date_raise_appropriate_error():
    """push() with a date wraps EmptyPushlogError in a date-specific
    message."""
    jpushes = JsonPushes(branch="inbound")
    jpushes.pushes_within_changes = Mock(side_effect=EmptyPushlogError)
    with pytest.raises(EmptyPushlogError) as ctx:
        jpushes.push(date(2015, 1, 1))
    expected = "No pushes available for the date 2015-01-01 on inbound."
    assert str(ctx.value) == expected
def test_revision_for_date_raise_appropriate_error():
    """revision_for_date() wraps EmptyPushlogError in a date-specific
    message."""
    jpushes = JsonPushes(branch='inbound')
    jpushes.pushlog_within_changes = Mock(side_effect=EmptyPushlogError)
    with pytest.raises(EmptyPushlogError) as ctx:
        jpushes.revision_for_date(date(2015, 1, 1))
    expected = 'No pushes available for the date 2015-01-01 on inbound.'
    assert str(ctx.value) == expected
def find_bugids_in_push(branch, changeset):
    """Return the distinct bug numbers mentioned in the commit messages
    of the push containing `changeset` on `branch`."""
    push = JsonPushes(branch).push(changeset, full='1')
    # a set to deduplicate bug ids repeated across changesets
    bugids = set()
    for chset in push.changesets:
        match = RE_BUG_ID.search(chset['desc'])
        if match:
            bugids.add(match.group(1))
    return list(bugids)
def test_pushlog_within_changes(mocker):
    """pushlog_within_changes() merges the starting push with the range,
    keeping push order."""
    first = {"1": {"date": 1}}
    rest = {"2": {"date": 2}, "3": {"date": 3}}
    retry_get = mocker.patch("mozregression.json_pushes.retry_get")
    retry_get.return_value = Mock(json=Mock(side_effect=[first, rest]))
    result = JsonPushes().pushlog_within_changes("fromchset", "tochset")
    assert result == [{"date": 1}, {"date": 2}, {"date": 3}]
def range_for_inbounds(fetch_config, start_rev, end_rev):
    """ Creates a BuildRange for inbounds builds. """
    info_fetcher = InboundInfoFetcher(fetch_config)
    jpushes = JsonPushes(branch=fetch_config.inbound_branch)
    # one future build per push, keyed on the push's tip changeset
    futures_builds = [
        FutureBuildInfo(info_fetcher, pushlog['changesets'][-1])
        for pushlog in jpushes.pushlog_within_changes(start_rev, end_rev)
    ]
    return BuildRange(info_fetcher, futures_builds)
def test_pushlog_within_changes_using_dates():
    """Date arguments become startdate/enddate query parameters (the
    enddate is one day past the requested end)."""
    p1 = {'changesets': ['abc'], 'date': 12345}
    p2 = {'changesets': ['def'], 'date': 67891}
    jpushes = JsonPushes(branch='m-i')
    jpushes._request = Mock(return_value={'1': p1, '2': p2})
    result = jpushes.pushlog_within_changes(date(2015, 1, 1),
                                            date(2015, 2, 2))
    assert result == [p1, p2]
    jpushes._request.assert_called_once_with(
        'https://hg.mozilla.org/integration/mozilla-inbound/json-pushes?'
        'startdate=2015-01-01&enddate=2015-02-03')
def handle_merge(self):
    """If the most recent push is a merge, return (branch, good_rev,
    bad_rev) to continue bisection on the merged branch, else None.

    Raises MozRegressionError when the merge cannot be exploited.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    result = None
    LOG.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    push = jp.pushlog_for_change(most_recent_push.changeset, full='1')
    msg = push['changesets'][-1]['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg)
    # a merge push must name a branch and contain at least two changesets
    if not (branch and len(push['changesets']) >= 2):
        return
    try:
        # so, this is a merge. We can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push['changesets'][0]['node']
        # exclude the merge commit
        youngest = push['changesets'][-2]['node']
        LOG.debug("This is a merge from %s" % branch)
        # we can't use directly the youngest changeset because we
        # don't know yet if it is good.
        #
        # PUSH1      PUSH2
        # [1 2] [3 4 5 6 7]
        #    G    MERGE  B
        #
        # so first, grab it. This needs to be done on the right branch.
        jp2 = JsonPushes(branch)
        # raw=True yields the push-id keyed dict, so iterating gives ids
        raw = [
            int(i) for i in
            jp2.pushlog_within_changes(oldest, youngest, raw=True)
        ]
        # widen the range two pushes back for a known-good anchor
        data = jp2._request(
            jp2.json_pushes_url(
                startID=str(min(raw) - 2),
                endID=str(max(raw)),
            ))
        datakeys = [int(i) for i in data]
        oldest = data[str(min(datakeys))]["changesets"][0]
        youngest = data[str(max(datakeys))]["changesets"][-1]
        # we are ready to bisect further
        LOG.info("************* Switching to %s" % branch)
        gr, br = self._reverse_if_find_fix(oldest, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg))
    LOG.debug('End merge handling')
    return result
def __init__(self, fetch_config):
    """Set up the taskcluster index/queue clients and the json-pushes
    helper for the configured inbound branch."""
    InfoFetcher.__init__(self, fetch_config)
    tk_options = fetch_config.tk_options()
    self.index = taskcluster.client.Index(tk_options)
    self.queue = taskcluster.Queue(tk_options)
    self.jpushes = JsonPushes(branch=fetch_config.inbound_branch,
                              path=fetch_config.branch_path)
def test_pushlog_within_changes_using_dates():
    """Same contract as the sibling test: dates map onto
    startdate/enddate with an exclusive enddate (+1 day)."""
    first = {'changesets': ['abc'], 'date': 12345}
    second = {'changesets': ['def'], 'date': 67891}
    jpushes = JsonPushes(branch='m-i')
    jpushes._request = Mock(return_value={'1': first, '2': second})

    got = jpushes.pushlog_within_changes(date(2015, 1, 1), date(2015, 2, 2))

    assert got == [first, second]
    jpushes._request.assert_called_once_with(
        'https://hg.mozilla.org/integration/mozilla-inbound/json-pushes?'
        'startdate=2015-01-01&enddate=2015-02-03')
def test_pushes_within_changes_using_dates(mocker):
    """pushes_within_changes() with dates queries by enddate/startdate
    and returns Push objects in push order."""
    first = {"changesets": ["abc"], "date": 12345}
    second = {"changesets": ["def"], "date": 67891}
    retry_get = mocker.patch("mozregression.json_pushes.retry_get")
    retry_get.return_value = Mock(
        json=Mock(return_value={"1": first, "2": second}))

    result = JsonPushes(branch="m-i").pushes_within_changes(
        date(2015, 1, 1), date(2015, 2, 2))

    assert [p.push_id for p in result] == ["1", "2"]
    retry_get.assert_called_once_with(
        "https://hg.mozilla.org/integration/mozilla-inbound/json-pushes?"
        "enddate=2015-02-03&startdate=2015-01-01")
def test_pushes_within_changes_using_dates(mocker):
    """Date bounds are converted to an enddate/startdate query; each
    returned item is a Push keyed by its push id."""
    p1 = {'changesets': ['abc'], 'date': 12345}
    p2 = {'changesets': ['def'], 'date': 67891}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(
        json=Mock(return_value={'1': p1, '2': p2}))

    jpushes = JsonPushes(branch='m-i')
    pushes = jpushes.pushes_within_changes(date(2015, 1, 1),
                                           date(2015, 2, 2))

    assert pushes[0].push_id == '1'
    assert pushes[1].push_id == '2'
    retry_get.assert_called_once_with(
        'https://hg.mozilla.org/integration/mozilla-inbound/json-pushes?'
        'enddate=2015-02-03&startdate=2015-01-01')
def _choose_integration_branch(self, changeset):
    """ Tries to determine which integration branch the given changeset
    originated from by checking the date the changeset first showed up
    in each repo. The repo with the earliest date is chosen. """
    landings = {}
    for branch in ("autoland", "mozilla-inbound"):
        try:
            push = JsonPushes(branch).push(changeset, full='1')
        except EmptyPushlogError:
            LOG.debug("Didn't find %s in %s" % (changeset, branch))
        else:
            landings[branch] = push.timestamp
    # NOTE(review): min() raises ValueError if the changeset was found on
    # neither repo -- assumed not to happen for valid m-c merges.
    return min(landings, key=landings.get)
def test_pushes_within_changes_using_dates(mocker):
    """Variant where the query string orders startdate before enddate."""
    entry_one = {'changesets': ['abc'], 'date': 12345}
    entry_two = {'changesets': ['def'], 'date': 67891}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(
        json=Mock(return_value={'1': entry_one, '2': entry_two}))

    pushes = JsonPushes(branch='m-i').pushes_within_changes(
        date(2015, 1, 1), date(2015, 2, 2))

    assert [p.push_id for p in pushes] == ['1', '2']
    retry_get.assert_called_once_with(
        'https://hg.mozilla.org/integration/mozilla-inbound/json-pushes?'
        'startdate=2015-01-01&enddate=2015-02-03')
def test_push(mocker):
    """push() wraps the single pushlog entry in a Push whose `changeset`
    is the tip (last) changeset."""
    pushlog = {"1": {"changesets": ["a", "b", "c"], "date": 123456}}
    retry_get = mocker.patch("mozregression.json_pushes.retry_get")
    retry_get.return_value = Mock(json=Mock(return_value=pushlog))

    push = JsonPushes().push("validchangeset")

    assert isinstance(push, Push)
    assert push.push_id == "1"
    assert push.changeset == "c"
    assert push.changesets[0] == "a"
    assert push.timestamp == 123456
    assert push.utc_date == datetime(1970, 1, 2, 10, 17, 36)
    assert str(push) == "c"
    retry_get.assert_called_once_with(
        "https://hg.mozilla.org/mozilla-central/json-pushes"
        "?changeset=validchangeset")
def check_taskcluster(options, fetch_config, logger):
    """Validate taskcluster-related command-line options and resolve the
    good/bad bounds, converting a nightly-style date range into revisions
    via json-pushes when no revisions were given.

    Mutates `options` and `fetch_config` in place; raises
    MozRegressionError on invalid option combinations.
    """
    if not options.taskcluster or not options.artifact_name:
        raise MozRegressionError('--taskcluster and --artifact-name are'
                                 ' required for taskcluster regression'
                                 ' finding')
    if options.repo and options.inbound_branch:
        raise MozRegressionError('unable to define both --repo and'
                                 ' --inbound-branch for b2g-device')
    if options.repo:
        # if repo is defined, use that to bisect using taskcluster,
        # ie the "inbound way" for now. Just remove the "integration"
        # branch path.
        fetch_config.set_branch_path(None)
        fetch_config.set_inbound_branch(options.repo)
    if options.last_good_revision and options.first_bad_revision:
        # If we have revisions, use those
        check_inbounds(options, fetch_config, logger)
        print "Using revs: %s - %s" % (
            options.last_good_revision,
            options.first_bad_revision,
        )
    else:
        # If we don't have revisions, use the nightly-style date range and
        # convert it into a good/bad rev.
        check_nightlies(options, fetch_config, logger)

        from mozregression.json_pushes import JsonPushes
        jpushes = JsonPushes(branch=fetch_config.inbound_branch,
                             path=fetch_config.branch_path)
        options.last_good_revision = jpushes.revision_for_date(
            options.good_date
        )
        # last=True picks the last push of the bad date
        options.first_bad_revision = jpushes.revision_for_date(
            options.bad_date, last=True
        )
        print "Using good rev %s for date %s" % (
            options.last_good_revision,
            options.good_date
        )
        print "Using bad rev %s for date %s" % (
            options.first_bad_revision,
            options.bad_date
        )
def test_push(mocker):
    """push() exposes id, tip changeset, timestamp and UTC date of the
    single pushlog entry (mozilla-central URL)."""
    pushlog = {'1': {'changesets': ['a', 'b', 'c'], 'date': 123456}}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(return_value=pushlog))

    jpushes = JsonPushes()
    push = jpushes.push('validchangeset')

    assert isinstance(push, Push)
    assert (push.push_id, push.changeset) == ('1', 'c')
    assert push.changesets[0] == 'a'
    assert push.timestamp == 123456
    assert push.utc_date == datetime(1970, 1, 2, 10, 17, 36)
    assert str(push) == 'c'
    retry_get.assert_called_once_with(
        'https://hg.mozilla.org/mozilla-central/json-pushes'
        '?changeset=validchangeset')
def test_push(mocker):
    """push() on the default (mozilla-inbound) branch wraps the single
    pushlog entry in a Push object."""
    pushlog = {'1': {'changesets': ['a', 'b', 'c'], 'date': 123456}}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    retry_get.return_value = Mock(json=Mock(return_value=pushlog))

    push = JsonPushes().push('validchangeset')

    assert isinstance(push, Push)
    assert (push.push_id, push.changeset) == ('1', 'c')
    assert push.changesets[0] == 'a'
    assert push.timestamp == 123456
    assert push.utc_date == datetime(1970, 1, 2, 10, 17, 36)
    assert str(push) == 'c'
    retry_get.assert_called_once_with(
        'https://hg.mozilla.org/integration/mozilla-inbound/json-pushes'
        '?changeset=validchangeset')
def test_pushlog_within_changes(mocker):
    """pushlog_within_changes() merges the starting push with the range;
    with raw=True the push-id keyed dict is returned instead.

    NOTE(review): dict(a.items() + b.items()) is Python-2-only.
    """
    push_first = {'1': {'date': 1}}
    other_pushes = {
        '2': {'date': 2},
        '3': {'date': 3}
    }
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    response = Mock(json=Mock(side_effect=[push_first, other_pushes]))
    retry_get.return_value = response

    jpushes = JsonPushes()
    assert jpushes.pushlog_within_changes('fromchset', "tochset") == [
        {'date': 1}, {'date': 2}, {'date': 3}
    ]

    # raw should include push ids in the result
    response = Mock(json=Mock(side_effect=[push_first, other_pushes]))
    retry_get.return_value = response
    assert jpushes.pushlog_within_changes(
        'fromchset', "tochset", raw=True
    ) == dict(push_first.items() + other_pushes.items())
def test_pushlog_within_changes(mocker):
    """Same contract as the sibling test, different formatting: merged
    list by default, push-id keyed dict with raw=True.

    NOTE(review): dict(a.items() + b.items()) is Python-2-only.
    """
    push_first = {'1': {'date': 1}}
    other_pushes = {'2': {'date': 2}, '3': {'date': 3}}
    retry_get = mocker.patch('mozregression.json_pushes.retry_get')
    response = Mock(json=Mock(side_effect=[push_first, other_pushes]))
    retry_get.return_value = response

    jpushes = JsonPushes()
    assert jpushes.pushlog_within_changes('fromchset', "tochset") == [{
        'date': 1
    }, {
        'date': 2
    }, {
        'date': 3
    }]

    # raw should include push ids in the result
    response = Mock(json=Mock(side_effect=[push_first, other_pushes]))
    retry_get.return_value = response
    assert jpushes.pushlog_within_changes(
        'fromchset', "tochset",
        raw=True) == dict(push_first.items() + other_pushes.items())
def test_pushes_within_changes(mocker):
    """mozilla-central variant: both URLs are queried and the pushes are
    returned merged, in push order."""
    first = {"1": {"changesets": ["a"]}}
    rest = {"2": {"changesets": ["b"]}, "3": {"changesets": ["c"]}}
    retry_get = mocker.patch("mozregression.json_pushes.retry_get")
    retry_get.return_value = Mock(json=Mock(side_effect=[first, rest]))

    pushes = JsonPushes().pushes_within_changes("fromchset", "tochset")

    expected = [("1", "a"), ("2", "b"), ("3", "c")]
    assert [(p.push_id, p.changeset) for p in pushes] == expected
    retry_get.assert_has_calls([
        call("https://hg.mozilla.org/mozilla-central/json-pushes"
             "?changeset=fromchset"),
        call("https://hg.mozilla.org/mozilla-central/json-pushes"
             "?fromchange=fromchset&tochange=tochset"),
    ])
def handle_merge(self):
    """If the most recent push is a merge (or, on mozilla-central, an
    unlabelled multi-changeset push), return (branch, good_rev, bad_rev)
    for continuing the bisection on the integration branch; else None.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    result = None
    LOG.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    push = jp.push(most_recent_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
    if not (branch and len(push.changesets) >= 2):
        # We did not find a branch, lets check the integration branches
        # if we are bisecting m-c
        LOG.debug(
            "Did not find a branch, checking all integration branches")
        if get_name(most_recent_push.repo_name) == 'mozilla-central' and \
                len(push.changesets) >= 2:
            # pick whichever integration branch landed the changeset first
            branch = self._choose_integration_branch(
                most_recent_push.changeset)
            jp2 = JsonPushes(branch)
            try:
                data = jp2.pushes_within_changes(
                    push.changesets[0]['node'],
                    push.changesets[-1]['node'])
            except MozRegressionError, exc:
                LOG.error(
                    "Failed to find changes in branch '%s' (error: %s)"
                    % (branch, exc))
                raise
            LOG.info("************* Switching to %s by"
                     " process of elimination (no branch detected in"
                     " commit message)" % branch)
            gr, br = self._reverse_if_find_fix(data[0].changeset,
                                               data[-1].changeset)
            return (branch, gr, br)
        else:
            return
class InboundInfoFetcher(InfoFetcher):
    # Resolves inbound build artifacts via the taskcluster index/queue,
    # using json-pushes to map changesets/dates to pushes.

    def __init__(self, fetch_config):
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.client.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or
        a date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime,
        or string representing the changeset), a query to json pushes will
        be done.

        Return a :class:`InboundBuildInfo` instance.

        Raises BuildInfoNotFound when no task, completed run, or matching
        artifact can be located.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError, exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        tk_route = self.fetch_config.tk_inbound_route(push)
        LOG.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3') \
                    and push.timestamp < TIMESTAMP_GECKO_V2:
                err = False
                try:
                    # old tasks were indexed by the 12-char short hash
                    old_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(old_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                if self.fetch_config.tk_needs_auth():
                    # signed URLs are required for private builds
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id, a['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
def handle_merge(self):
    """Detect a merge push and return (branch, good_rev, bad_rev) for
    continuing the bisection on the merged branch, or None when there is
    nothing further to bisect.

    Raises MozRegressionError when the merge cannot be exploited.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    result = None
    LOG.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    push = jp.push(most_recent_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
    if not (branch and len(push.changesets) >= 2):
        # We did not find a branch, lets check the integration branches
        # if we are bisecting m-c
        LOG.debug("Did not find a branch, checking all integration branches")
        if get_name(most_recent_push.repo_name) == 'mozilla-central' and \
                len(push.changesets) >= 2:
            # pick whichever integration branch landed the changeset first
            branch = self._choose_integration_branch(
                most_recent_push.changeset)
            oldest = push.changesets[0]['node']
            youngest = push.changesets[-1]['node']
            LOG.info("************* Switching to %s by"
                     " process of elimination (no branch detected in"
                     " commit message)" % branch)
        else:
            return
    else:
        # so, this is a merge. see how many changesets are in it, if it
        # is just one, we have our answer
        if len(push.changesets) == 2:
            LOG.info("Merge commit has only two revisions (one of which "
                     "is the merge): we are done")
            return
        # Otherwise, we can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push.changesets[0]['node']
        # exclude the merge commit
        youngest = push.changesets[-2]['node']
        LOG.info("************* Switching to %s" % branch)
    # we can't use directly the oldest changeset because we
    # don't know yet if it is good.
    #
    # PUSH1      PUSH2
    # [1 2] [3 4 5 6 7]
    #    G    MERGE  B
    #
    # so first grab the previous push to get the last known good
    # changeset. This needs to be done on the right branch.
    try:
        jp2 = JsonPushes(branch)
        raw = [int(p.push_id)
               for p in jp2.pushes_within_changes(oldest, youngest)]
        # widen the range two pushes back for a known-good anchor
        data = jp2.pushes(
            startID=str(min(raw) - 2),
            endID=str(max(raw)),
        )
        older = data[0].changeset
        youngest = data[-1].changeset
        # we are ready to bisect further
        gr, br = self._reverse_if_find_fix(older, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg))
    LOG.debug('End merge handling')
    return result
def test_json_pushes_url(branch, chsetskwargs, result_url):
    """Parametrized check: json_pushes_url() builds the expected URL from
    the branch and keyword arguments."""
    url = JsonPushes(branch=branch).json_pushes_url(**chsetskwargs)
    assert url == result_url
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds via taskcluster."""

    def __init__(self, fetch_config):
        """Set up the taskcluster index/queue clients and a json-pushes
        helper bound to the configured inbound branch."""
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or a
        date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`InboundBuildInfo` instance.

        Raises BuildInfoNotFound when no taskcluster task, completed run or
        matching artifact can be located for the push.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            # FIX: was the Python 2-only `except MozRegressionError, exc:`
            # form; `as` is required for Python 3 and matches the
            # `except TaskclusterFailure as ex` usage below.
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        tk_routes = self.fetch_config.tk_inbound_routes(push)
        try:
            task_id = None
            stored_failure = None
            # try each candidate index route in order, remembering the last
            # failure so it can be re-raised if none matches
            for tk_route in tk_routes:
                LOG.debug('using taskcluster route %r' % tk_route)
                try:
                    task_id = self.index.findTask(tk_route)['taskId']
                except TaskclusterFailure as ex:
                    LOG.debug('nothing found via route %r' % tk_route)
                    stored_failure = ex
                    continue
                if task_id:
                    status = self.queue.status(task_id)['status']
                    break
            if not task_id:
                raise stored_failure
        except TaskclusterFailure:
            raise BuildInfoNotFound("Unable to find build info using the"
                                    " taskcluster route %r" %
                                    self.fetch_config.tk_inbound_route(push))

        # find a completed run for that task (newest run first)
        run_id, build_date = None, None
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound("Unable to find completed runs for task %s"
                                    % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                # signed URLs are needed when the config requires auth
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id, a['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
class InboundHandler(BisectorHandler):
    """Bisector handler for inbound (integration) build ranges."""

    create_range = staticmethod(range_for_inbounds)

    def _print_progress(self, new_data):
        """Log how the bisection window narrowed on this step."""
        LOG.info("Narrowed inbound regression window from [%s, %s]"
                 " (%d builds) to [%s, %s] (%d builds)"
                 " (~%d steps left)"
                 % (self.build_range[0].short_changeset,
                    self.build_range[-1].short_changeset,
                    len(self.build_range),
                    new_data[0].short_changeset,
                    new_data[-1].short_changeset,
                    len(new_data),
                    compute_steps_left(len(new_data))))

    def user_exit(self, mid):
        """Log the current good/bad revisions when the user exits early."""
        words = self._reverse_if_find_fix('Newest', 'Oldest')
        LOG.info('%s known good inbound revision: %s'
                 % (words[0], self.good_revision))
        LOG.info('%s known bad inbound revision: %s'
                 % (words[1], self.bad_revision))

    def _choose_integration_branch(self, changeset):
        """
        Tries to determine which integration branch the given changeset
        originated from by checking the date the changeset first showed up
        in each repo. The repo with the earliest date is chosen.
        """
        landings = {}
        for k in ("autoland", "mozilla-inbound"):
            jp = JsonPushes(k)
            try:
                push = jp.push(changeset, full='1')
                landings[k] = push.timestamp
            except EmptyPushlogError:
                LOG.debug("Didn't find %s in %s" % (changeset, k))
        repo = min(landings, key=landings.get)
        LOG.debug("Repo '%s' seems to have the earliest push" % repo)
        return repo

    def handle_merge(self):
        """Detect whether the most recent push is a merge and, if so,
        return a ``(branch, good_rev, bad_rev)`` tuple so the bisection can
        continue on the merged-from branch; return ``None`` otherwise.

        Raises MozRegressionError when the merge commit cannot be
        exploited.
        """
        # let's check if we are facing a merge, and in that case,
        # continue the bisection from the merged branch.
        result = None
        LOG.debug("Starting merge handling...")
        # we have to check the commit of the most recent push
        most_recent_push = self.build_range[1]
        jp = JsonPushes(most_recent_push.repo_name)
        push = jp.push(most_recent_push.changeset, full='1')
        msg = push.changeset['desc']
        LOG.debug("Found commit message:\n%s\n" % msg)
        branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
        if not (branch and len(push.changesets) >= 2):
            # We did not find a branch, lets check the integration branches
            # if we are bisecting m-c
            LOG.debug(
                "Did not find a branch, checking all integration branches")
            if get_name(most_recent_push.repo_name) == 'mozilla-central' and \
                    len(push.changesets) >= 2:
                branch = self._choose_integration_branch(
                    most_recent_push.changeset)
                jp2 = JsonPushes(branch)
                try:
                    data = jp2.pushes_within_changes(
                        push.changesets[0]['node'],
                        push.changesets[-1]['node'])
                # FIX: was the Python 2-only `except MozRegressionError, exc:`
                # form; `as` works on both Python 2.6+ and 3 and matches
                # the style used elsewhere in this file.
                except MozRegressionError as exc:
                    LOG.error(
                        "Failed to find changes in branch '%s' (error: %s)" %
                        (branch, exc))
                    raise
                LOG.info("************* Switching to %s by"
                         " process of elimination (no branch detected in"
                         " commit message)" % branch)
                gr, br = self._reverse_if_find_fix(data[0].changeset,
                                                   data[-1].changeset)
                return (branch, gr, br)
            else:
                return
        try:
            # so, this is a merge. see how many changesets are in it, if it
            # is just one, we have our answer
            if len(push.changesets) == 2:
                LOG.info("Merge commit has only two revisions (one of which "
                         "is the merge): we are done")
                return
            # Otherwise, we can find the oldest and youngest
            # changesets, and the branch where the merge comes from.
            oldest = push.changesets[0]['node']
            # exclude the merge commit
            youngest = push.changesets[-2]['node']
            LOG.debug("This is a merge from %s" % branch)

            # we can't use directly the youngest changeset because we
            # don't know yet if it is good.
            #
            # PUSH1      PUSH2
            # [1 2]  [3 4 5 6 7]
            #    G    MERGE    B
            #
            # so first, grab it. This needs to be done on the right branch.
            jp2 = JsonPushes(branch)
            raw = [
                int(p.push_id) for p in
                jp2.pushes_within_changes(oldest, youngest)
            ]
            # widen the range by one extra push before the merged range
            data = jp2.pushes(
                startID=str(min(raw) - 2),
                endID=str(max(raw)),
            )
            oldest = data[0].changesets[0]
            youngest = data[-1].changesets[-1]

            # we are ready to bisect further
            LOG.info("************* Switching to %s" % branch)
            gr, br = self._reverse_if_find_fix(oldest, youngest)
            result = (branch, gr, br)
        except MozRegressionError:
            LOG.debug("Got exception", exc_info=True)
            raise MozRegressionError(
                "Unable to exploit the merge commit. Origin branch is {}, and"
                " the commit message for {} was:\n{}".format(
                    most_recent_push.repo_name,
                    most_recent_push.short_changeset,
                    msg))
        LOG.debug('End merge handling')
        return result
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if so, return
    information so the bisection can continue on the merged-from branch.

    Returns a ``(branch, first_changeset, last_changeset)`` or
    ``(branch, good_rev, bad_rev)`` tuple, or ``None`` when there is
    nothing to do. Raises MozRegressionError when the merge commit cannot
    be exploited.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    result = None
    LOG.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    push = jp.push(most_recent_push.changeset, full='1')
    msg = push.changeset['desc']
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg)
    if not (branch and len(push.changesets) >= 2):
        # We did not find a branch, lets check the integration branches
        # if we are bisecting m-c
        if get_name(most_recent_push.repo_name) == 'mozilla-central' and \
                len(push.changesets) >= 2:
            branch = self._choose_integration_branch(
                most_recent_push.changeset)
            jp2 = JsonPushes(branch)
            try:
                data = jp2.pushes_within_changes(
                    push.changesets[0]['node'],
                    push.changesets[-1]['node'])
            except MozRegressionError:
                # best-effort: if the changes can't be found on the guessed
                # branch, give up on merge handling instead of failing
                return
            LOG.info("************* Switching to %s by"
                     " process of elimination (no branch detected in"
                     " commit message)" % branch)
            return (branch, data[0].changeset, data[-1].changeset)
        else:
            return
    try:
        # so, this is a merge. We can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push.changesets[0]['node']
        # exclude the merge commit
        youngest = push.changesets[-2]['node']
        LOG.debug("This is a merge from %s" % branch)

        # we can't use directly the youngest changeset because we
        # don't know yet if it is good.
        #
        # PUSH1      PUSH2
        # [1 2]  [3 4 5 6 7]
        #    G    MERGE    B
        #
        # so first, grab it. This needs to be done on the right branch.
        jp2 = JsonPushes(branch)
        raw = [
            int(p.push_id) for p in
            jp2.pushes_within_changes(oldest, youngest)
        ]
        # min(raw) - 2 widens the range to one push before the merged
        # range (NOTE(review): presumably to include a last known good
        # changeset — confirm against JsonPushes.pushes semantics)
        data = jp2.pushes(
            startID=str(min(raw) - 2),
            endID=str(max(raw)),
        )
        oldest = data[0].changesets[0]
        youngest = data[-1].changesets[-1]

        # we are ready to bisect further
        LOG.info("************* Switching to %s" % branch)
        gr, br = self._reverse_if_find_fix(oldest, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg))
    LOG.debug('End merge handling')
    return result
def handle_merge(self):
    """Detect whether the most recent push is a merge and, if so, prepare
    to continue the bisection on the merged-from branch.

    Returns a ``(branch, good_rev, bad_rev)`` tuple when the bisection
    should switch branches, or ``None`` when there is nothing to do.
    Raises MozRegressionError when the merge commit cannot be exploited.
    """
    # let's check if we are facing a merge, and in that case,
    # continue the bisection from the merged branch.
    result = None
    LOG.debug("Starting merge handling...")
    # we have to check the commit of the most recent push
    most_recent_push = self.build_range[1]
    jp = JsonPushes(most_recent_push.repo_name)
    push = jp.push(most_recent_push.changeset, full="1")
    msg = push.changeset["desc"]
    LOG.debug("Found commit message:\n%s\n" % msg)
    branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
    if not (branch and len(push.changesets) >= 2):
        # We did not find a branch, lets check the integration branches
        # if we are bisecting m-c
        LOG.debug(
            "Did not find a branch, checking all integration branches")
        if (get_name(most_recent_push.repo_name) == "mozilla-central"
                and len(push.changesets) >= 2):
            branch = self._choose_integration_branch(
                most_recent_push.changeset)
            oldest = push.changesets[0]["node"]
            youngest = push.changesets[-1]["node"]
            LOG.info("************* Switching to %s by"
                     " process of elimination (no branch detected in"
                     " commit message)" % branch)
        else:
            return
    else:
        # so, this is a merge. see how many changesets are in it, if it
        # is just one, we have our answer
        if len(push.changesets) == 2:
            LOG.info("Merge commit has only two revisions (one of which "
                     "is the merge): we are done")
            return
        # Otherwise, we can find the oldest and youngest
        # changesets, and the branch where the merge comes from.
        oldest = push.changesets[0]["node"]
        # exclude the merge commit
        youngest = push.changesets[-2]["node"]
        LOG.info("************* Switching to %s" % branch)
    # we can't use directly the oldest changeset because we
    # don't know yet if it is good.
    #
    # PUSH1      PUSH2
    # [1 2]  [3 4 5 6 7]
    #    G    MERGE    B
    #
    # so first grab the previous push to get the last known good
    # changeset. This needs to be done on the right branch.
    try:
        jp2 = JsonPushes(branch)
        raw = [
            int(p.push_id) for p in
            jp2.pushes_within_changes(oldest, youngest)
        ]
        # min(raw) - 2: include the push just before the merged range so
        # the last known good changeset is part of the new range
        data = jp2.pushes(
            startID=str(min(raw) - 2),
            endID=str(max(raw)),
        )
        older = data[0].changeset
        youngest = data[-1].changeset
        # we are ready to bisect further
        gr, br = self._reverse_if_find_fix(older, youngest)
        result = (branch, gr, br)
    except MozRegressionError:
        LOG.debug("Got exception", exc_info=True)
        raise MozRegressionError(
            "Unable to exploit the merge commit. Origin branch is {}, and"
            " the commit message for {} was:\n{}".format(
                most_recent_push.repo_name,
                most_recent_push.short_changeset,
                msg))
    LOG.debug("End merge handling")
    return result
def __init__(self, fetch_config): InfoFetcher.__init__(self, fetch_config) options = fetch_config.tk_options() self.index = taskcluster.client.Index(options) self.queue = taskcluster.Queue(options) self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds via taskcluster."""

    def __init__(self, fetch_config):
        """Set up the taskcluster index/queue clients and a json-pushes
        helper bound to the configured inbound branch."""
        InfoFetcher.__init__(self, fetch_config)
        self.index = taskcluster.client.Index()
        self.queue = taskcluster.Queue()
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch,
                                  path=fetch_config.branch_path)

    def _check_changeset(self, changeset):
        # return the full changeset
        return self.jpushes.pushlog_for_change(changeset)['changesets'][-1]

    def find_build_info(self, changeset, fetch_txt_info=True,
                        check_changeset=False):
        """
        Find build info for an inbound build, given a changeset.

        if `check_changeset` is True, the given changeset might be partial
        (< 40 chars) because it will be verified and updated using json
        pushes.

        Return a :class:`InboundBuildInfo` instance.

        Raises BuildInfoNotFound when no task, completed run or matching
        artifact can be located.
        """
        # find a task id
        if check_changeset:
            try:
                changeset = self._check_changeset(changeset)
            # FIX: was the Python 2-only `except MozRegressionError, exc:`
            # form; `as` is required for Python 3 compatibility.
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        tk_route = self.fetch_config.tk_inbound_route(changeset)
        self._logger.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3'):
                err = False
                try:
                    # retry with the short (12-char) changeset form
                    tk_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(tk_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task (newest run first)
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound("Unable to find completed runs for"
                                    " task %s" % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                build_url = self.queue.buildUrl(
                    'getLatestArtifact',
                    task_id,
                    a['name']
                )
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url(),
            task_id=task_id,
        )
class IntegrationInfoFetcher(InfoFetcher):
    """Fetch build information for integration-branch builds via
    taskcluster, trying both the old and new taskcluster root urls."""

    def __init__(self, fetch_config):
        # json-pushes helper bound to the configured integration branch
        InfoFetcher.__init__(self, fetch_config)
        self.jpushes = JsonPushes(branch=fetch_config.integration_branch)

    def find_build_info(self, push):
        """
        Find build info for an integration build, given a Push, a changeset
        or a date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime,
        or string representing the changeset), a query to json pushes will
        be done.

        Return a :class:`IntegrationBuildInfo` instance.

        Raises BuildInfoNotFound when no task, completed run or matching
        artifact can be located for the push.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset
        try:
            # taskcluster builds have two possible root urls: we switched
            # from taskcluster.net -> firefox-ci-tc.services.mozilla.com
            # around November 9. to make things faster, we'll iterate through
            # them based on the one that most likely applies to this push
            possible_tc_root_urls = [TC_ROOT_URL, OLD_TC_ROOT_URL]
            if push.utc_date < TC_ROOT_URL_MIGRATION_FLAG_DATE:
                possible_tc_root_urls.reverse()

            task_id = None
            status = None
            for tc_root_url in possible_tc_root_urls:
                LOG.debug("using taskcluster root url %s" % tc_root_url)
                options = self.fetch_config.tk_options(tc_root_url)
                tc_index = taskcluster.Index(options)
                tc_queue = taskcluster.Queue(options)
                tk_routes = self.fetch_config.tk_routes(push)
                stored_failure = None
                # try each candidate index route in order, remembering the
                # last failure so it can be re-raised if none matches
                for tk_route in tk_routes:
                    LOG.debug("using taskcluster route %r" % tk_route)
                    try:
                        task_id = tc_index.findTask(tk_route)["taskId"]
                    except TaskclusterFailure as ex:
                        LOG.debug("nothing found via route %r" % tk_route)
                        stored_failure = ex
                        continue
                    if task_id:
                        status = tc_queue.status(task_id)["status"]
                        break
                if status:
                    break
            if not task_id:
                raise stored_failure
        except TaskclusterFailure:
            raise BuildInfoNotFound("Unable to find build info using the"
                                    " taskcluster route %r" %
                                    self.fetch_config.tk_route(push))

        # find a completed run for that task (newest run first)
        run_id, build_date = None, None
        for run in reversed(status["runs"]):
            if run["state"] == "completed":
                run_id = run["runId"]
                build_date = datetime.strptime(run["resolved"],
                                               "%Y-%m-%dT%H:%M:%S.%fZ")
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)
        # NOTE: tc_queue here is whichever queue client matched above
        artifacts = tc_queue.listArtifacts(task_id, run_id)["artifacts"]

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a["name"])
            if self.build_regex.search(name):
                meth = tc_queue.buildUrl
                # signed URLs are needed when the config requires auth
                if self.fetch_config.tk_needs_auth():
                    meth = tc_queue.buildSignedUrl
                build_url = meth("getArtifact", task_id, run_id, a["name"])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return IntegrationBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
def __init__(self, fetch_config): InfoFetcher.__init__(self, fetch_config) self.jpushes = JsonPushes(branch=fetch_config.integration_branch)
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds via taskcluster."""

    def __init__(self, fetch_config):
        # taskcluster index/queue clients plus a json-pushes helper bound
        # to the configured inbound branch
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or a
        date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`InboundBuildInfo` instance.

        Raises BuildInfoNotFound when no task, completed run or matching
        artifact can be located for the push.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        tk_routes = self.fetch_config.tk_inbound_routes(push)
        try:
            task_id = None
            stored_failure = None
            # try each candidate index route in order, remembering the last
            # failure so it can be re-raised if none matches
            for tk_route in tk_routes:
                LOG.debug('using taskcluster route %r' % tk_route)
                try:
                    task_id = self.index.findTask(tk_route)['taskId']
                except TaskclusterFailure as ex:
                    LOG.debug('nothing found via route %r' % tk_route)
                    stored_failure = ex
                    continue
                if task_id:
                    status = self.queue.status(task_id)['status']
                    break
            if not task_id:
                raise stored_failure
        except TaskclusterFailure:
            raise BuildInfoNotFound("Unable to find build info using the"
                                    " taskcluster route %r" %
                                    self.fetch_config.tk_inbound_route(push))

        # find a completed run for that task (newest run first)
        run_id, build_date = None, None
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                # signed URLs are needed when the config requires auth
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id, a['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )
def _check_changeset(self, changeset): from mozregression.json_pushes import JsonPushes jpushes = JsonPushes(branch=self.fetch_config.inbound_branch) # return the full changeset return jpushes.pushlog_for_change(changeset)['changesets'][-1]
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds via taskcluster."""

    def __init__(self, fetch_config):
        """Set up the taskcluster index/queue clients and a json-pushes
        helper bound to the configured inbound branch."""
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.client.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def _check_changeset(self, changeset):
        # return the full changeset
        return self.jpushes.pushlog_for_change(changeset)['changesets'][-1]

    def find_build_info(self, changeset, fetch_txt_info=True,
                        check_changeset=False):
        """
        Find build info for an inbound build, given a changeset or a date.

        if `check_changeset` is True, the given changeset might be partial
        (< 40 chars) because it will be verified and updated using json
        pushes.

        Return a :class:`InboundBuildInfo` instance.

        Raises BuildInfoNotFound when no task, completed run or matching
        artifact can be located.
        """
        if is_date_or_datetime(changeset):
            # resolve a date/datetime to the corresponding revision;
            # the result is already a full changeset, no check needed
            changeset = self.jpushes.revision_for_date(changeset)
            check_changeset = False

        # find a task id
        if check_changeset:
            try:
                changeset = self._check_changeset(changeset)
            # FIX: was the Python 2-only `except MozRegressionError, exc:`
            # form; `as` is required for Python 3 compatibility.
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        tk_route = self.fetch_config.tk_inbound_route(changeset)
        LOG.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3'):
                err = False
                try:
                    # retry with the short (12-char) changeset form
                    tk_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(tk_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task (newest run first)
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound(
                "Unable to find completed runs for task %s" % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                # signed URLs are needed when the config requires auth
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth('getArtifact', task_id, run_id, a['name'])
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url(),
            task_id=task_id,
        )
class InboundInfoFetcher(InfoFetcher):
    """Fetch build information for inbound builds via taskcluster."""

    def __init__(self, fetch_config):
        """Set up the taskcluster index/queue clients and a json-pushes
        helper bound to the configured inbound branch."""
        InfoFetcher.__init__(self, fetch_config)
        options = fetch_config.tk_options()
        self.index = taskcluster.client.Index(options)
        self.queue = taskcluster.Queue(options)
        self.jpushes = JsonPushes(branch=fetch_config.inbound_branch)

    def find_build_info(self, push):
        """
        Find build info for an inbound build, given a Push, a changeset or a
        date/datetime.

        if `push` is not an instance of Push (e.g. it is a date, datetime, or
        string representing the changeset), a query to json pushes will be
        done.

        Return a :class:`InboundBuildInfo` instance.

        Raises BuildInfoNotFound when no task, completed run or matching
        artifact can be located for the push.
        """
        if not isinstance(push, Push):
            try:
                push = self.jpushes.push(push)
            # FIX: was the Python 2-only `except MozRegressionError, exc:`
            # form; `as` is required for Python 3 and matches the style of
            # sibling fetchers in this file.
            except MozRegressionError as exc:
                raise BuildInfoNotFound(str(exc))
        changeset = push.changeset

        tk_route = self.fetch_config.tk_inbound_route(push)
        LOG.debug('using taskcluster route %r' % tk_route)
        try:
            task_id = self.index.findTask(tk_route)['taskId']
        except TaskclusterFailure:
            # HACK because of
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1199618
            # and https://bugzilla.mozilla.org/show_bug.cgi?id=1211251
            # TODO remove the if statement once these tasks no longer exists
            # (just raise BuildInfoNotFound)
            err = True
            if self.fetch_config.app_name in ('firefox',
                                              'fennec',
                                              'fennec-2.3') \
                    and push.timestamp < TIMESTAMP_GECKO_V2:
                err = False
                try:
                    # retry with the short (12-char) changeset form
                    old_route = tk_route.replace(changeset, changeset[:12])
                    task_id = self.index.findTask(old_route)['taskId']
                except TaskclusterFailure:
                    err = True
            if err:
                raise BuildInfoNotFound("Unable to find build info using the"
                                        " taskcluster route %r" % tk_route)

        # find a completed run for that task (newest run first)
        run_id, build_date = None, None
        status = self.queue.status(task_id)['status']
        for run in reversed(status['runs']):
            if run['state'] == 'completed':
                run_id = run['runId']
                build_date = datetime.strptime(run["resolved"],
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
                break
        if run_id is None:
            raise BuildInfoNotFound("Unable to find completed runs for task %s"
                                    % task_id)
        artifacts = self.queue.listArtifacts(task_id, run_id)['artifacts']

        # look over the artifacts of that run
        build_url = None
        for a in artifacts:
            name = os.path.basename(a['name'])
            if self.build_regex.search(name):
                meth = self.queue.buildUrl
                # signed URLs are needed when the config requires auth
                if self.fetch_config.tk_needs_auth():
                    meth = self.queue.buildSignedUrl
                build_url = meth(
                    'getArtifact',
                    task_id,
                    run_id,
                    a['name']
                )
                break
        if build_url is None:
            raise BuildInfoNotFound("unable to find a build url for the"
                                    " changeset %r" % changeset)
        return InboundBuildInfo(
            self.fetch_config,
            build_url=build_url,
            build_date=build_date,
            changeset=changeset,
            repo_url=self.jpushes.repo_url,
            task_id=task_id,
        )