class TestBuildApiGetAllJobs(unittest.TestCase):
    """Tests for BuildApi._get_all_jobs and its module-level caches."""

    def setUp(self):
        self.query_api = BuildApi()
        # Reset both caches so each test starts cold and does not
        # depend on the order in which tests run.
        buildapi.JOBS_CACHE = {}
        query_jobs.JOBS_CACHE = {}

    @patch('requests.get', return_value=mock_response(JOBS_SCHEDULE, 200))
    @patch('mozci.sources.pushlog.valid_revision', return_value=True)
    @patch('mozci.sources.buildapi.get_credentials', return_value=None)
    @patch('mozci.sources.buildapi.query_repo_url', return_value=None)
    def test_call_first_time(self, query_repo_url, get_credentials,
                             valid_revision, get):
        """_get_all_jobs should return the right value after calling requests.get."""
        self.assertEqual(
            self.query_api._get_all_jobs("try", "146071751b1e"),
            json.loads(JOBS_SCHEDULE))
        assert get.call_count == 1
        # Test that this fills our caches
        self.assertEqual(
            query_jobs.JOBS_CACHE[("try", "146071751b1e")],
            json.loads(JOBS_SCHEDULE))

    @patch('requests.get', return_value=mock_response(JOBS_SCHEDULE, 200))
    @patch('mozci.sources.pushlog.valid_revision', return_value=True)
    @patch('mozci.sources.buildapi.get_credentials', return_value=None)
    @patch('mozci.sources.buildapi.query_repo_url', return_value=None)
    def test_call_second_time(self, query_repo_url, get_credentials,
                              valid_revision, get):
        """Calling the function again should return us the results directly from cache."""
        # Making sure the cache is filled so we don't depend on the order of the tests.
        query_jobs.JOBS_CACHE[("try", "146071751b1e")] = json.loads(JOBS_SCHEDULE)
        self.assertEqual(
            self.query_api._get_all_jobs("try", "146071751b1e"),
            json.loads(JOBS_SCHEDULE))
        # _get_all_jobs should return its value directly from
        # cache without calling get
        assert get.call_count == 0

    @patch('requests.get', return_value=mock_response(JOBS_SCHEDULE, 400))
    @patch('mozci.sources.pushlog.valid_revision', return_value=True)
    @patch('mozci.sources.buildapi.get_credentials', return_value=None)
    @patch('mozci.sources.buildapi.query_repo_url', return_value=None)
    def test_bad_request(self, query_repo_url, get_credentials,
                         valid_revision, get):
        """If a bad return value is found in requests we should return an empty list."""
        self.assertEqual(
            self.query_api._get_all_jobs("try", "146071751b1e"),
            [])

    @patch('mozci.sources.pushlog.valid_revision', return_value=False)
    @patch('mozci.sources.buildapi.query_repo_url', return_value=None)
    def test_bad_revision(self, query_repo_url, valid_revision):
        """If an invalid revision is passed, _get_all_jobs should raise an Exception."""
        # The leftover debug `print "****", ...` statement was removed:
        # tests should not write ad-hoc output to stdout.
        with self.assertRaises(Exception):
            self.query_api._get_all_jobs("try", "146071751b1e")
class TestBuildApiGetAllJobs(unittest.TestCase):
    """Tests for BuildApi._get_all_jobs and the query_jobs.JOBS_CACHE."""

    def setUp(self):
        self.query_api = BuildApi()
        # Reset the cache so each test starts cold and does not depend
        # on the order in which tests run.
        query_jobs.JOBS_CACHE = {}

    @patch('requests.get', return_value=mock_response(JOBS_SCHEDULE, 200))
    @patch('mozci.query_jobs.get_credentials', return_value=None)
    @patch('mozci.repositories.query_repo_url', return_value=None)
    def test_call_first_time(self, query_repo_url, get_credentials, get):
        """_get_all_jobs should return the right value after calling requests.get."""
        self.assertEqual(
            self.query_api._get_all_jobs("try", "146071751b1e"),
            json.loads(JOBS_SCHEDULE))
        assert get.call_count == 1
        # Test that this fills our caches
        self.assertEqual(
            query_jobs.JOBS_CACHE[("try", "146071751b1e")],
            json.loads(JOBS_SCHEDULE))

    @patch('requests.get', return_value=mock_response(JOBS_SCHEDULE, 200))
    @patch('mozci.query_jobs.get_credentials', return_value=None)
    @patch('mozci.repositories.query_repo_url', return_value=None)
    def test_call_second_time(self, query_repo_url, get_credentials, get):
        """Calling the function again should return us the results directly from cache."""
        # Making sure the cache is filled so we don't depend on the order of the tests.
        query_jobs.JOBS_CACHE[("try", "146071751b1e")] = json.loads(JOBS_SCHEDULE)
        self.assertEqual(
            self.query_api._get_all_jobs("try", "146071751b1e"),
            json.loads(JOBS_SCHEDULE))
        # _get_all_jobs should return its value directly from
        # cache without calling get
        assert get.call_count == 0

    @patch('requests.get', return_value=mock_response(JOBS_SCHEDULE, 400))
    @patch('mozci.query_jobs.get_credentials', return_value=None)
    @patch('mozci.repositories.query_repo_url', return_value=None)
    def test_bad_request(self, query_repo_url, get_credentials, get):
        """If a bad return value is found in requests we should return an empty list."""
        self.assertEqual(
            self.query_api._get_all_jobs("try", "146071751b1e"),
            [])

    @patch('mozci.repositories.query_repo_url', return_value=None)
    @patch('mozci.query_jobs.get_credentials', return_value=None)
    def test_bad_revision(self, get_credentials, query_repo_url):
        """If an invalid revision is passed, _get_all_jobs should return an empty list."""
        # NOTE: mock.patch decorators feed mocks bottom-up, so the first
        # parameter is the *bottom* decorator (get_credentials); the
        # original parameter names were swapped. Both mocks are inert
        # (return_value=None), so only the names changed.
        # The leftover debug `print "****", ...` statement was removed:
        # tests should not write ad-hoc output to stdout.
        self.assertEqual(
            self.query_api._get_all_jobs("try", "146071751b1e"),
            [])
def query_jobs_buildername(buildername, revision):
    """Return **status** information for a buildername on a given revision."""
    # NOTE: It's unfortunate that there is scheduling and status data.
    # I think we might need to remove this distinction for the user's
    # sake.
    repo_name = query_repo_name_from_buildername(buildername)
    scheduled_jobs = BuildApi().get_matching_jobs(repo_name, revision, buildername)
    # The user wants the status data rather than the scheduling data.
    return [_status_info(job) for job in scheduled_jobs]
class TestBuildApiGetMatchingJobs(unittest.TestCase):
    """Tests for BuildApi.get_matching_jobs."""

    def setUp(self):
        self.query_api = BuildApi()

    def test_matching_jobs_existing(self):
        """_matching_jobs should return the whole dictionary for a buildername in alljobs."""
        expected = json.loads(JOBS_SCHEDULE)
        found = self.query_api.get_matching_jobs("try", "146071751b1e",
                                                 'Linux x86-64 try build')
        self.assertEquals(found, expected)

    def test_matching_jobs_invalid(self):
        """_matching_jobs should return an empty list if it receives an invalid buildername."""
        found = self.query_api.get_matching_jobs("try", "146071751b1e",
                                                 'Invalid buildername')
        self.assertEquals(found, [])
class TestBuildApiGetMatchingJobs(unittest.TestCase):
    """Tests for BuildApi.get_matching_jobs."""

    def setUp(self):
        self.query_api = BuildApi()

    def test_matching_jobs_existing(self):
        """_matching_jobs should return the whole dictionary for a buildername in alljobs."""
        result = self.query_api.get_matching_jobs(
            "try", "146071751b1e", 'Linux x86-64 try build')
        self.assertEquals(result, json.loads(JOBS_SCHEDULE))

    def test_matching_jobs_invalid(self):
        """_matching_jobs should return an empty list if it receives an invalid buildername."""
        result = self.query_api.get_matching_jobs(
            "try", "146071751b1e", 'Invalid buildername')
        self.assertEquals(result, [])
class TestBuildApiGetJobStatus(unittest.TestCase):
    """Test query_job_status with different types of jobs."""

    def setUp(self):
        self.query_api = BuildApi()

    @staticmethod
    def _make_job(status, end_time, build_number, request_time):
        """Build one job dict by filling BASE_JSON's four placeholders."""
        return json.loads(BASE_JSON % (status, end_time,
                                       build_number, request_time))[0]

    def test_pending_job(self):
        """Test get_job_status with a pending job."""
        job = self._make_job('null', 'null', 0, 1433166609)
        del job["status"]
        self.assertEquals(self.query_api.get_job_status(job), PENDING)

    def test_running_job(self):
        """Test get_job_status with a running job."""
        job = self._make_job('null', 'null', 0, 'null')
        self.assertEquals(self.query_api.get_job_status(job), RUNNING)

    def test_unknown_job(self):
        """Test get_job_status with an unknown job."""
        job = self._make_job('null', 'null', 0, 1433166609)
        self.assertEquals(self.query_api.get_job_status(job), UNKNOWN)

    @patch('mozci.query_jobs.BuildApi._is_coalesced', return_value=SUCCESS)
    def test_successful_job(self, _is_coalesced):
        """Test get_job_status with a successful job. We will mock _is_coalesced for that."""
        job = self._make_job(SUCCESS, 1433166610, 1, 1433166609)
        self.assertEquals(self.query_api.get_job_status(job), SUCCESS)

    @patch('mozci.query_jobs.BuildApi._is_coalesced', return_value=COALESCED)
    def test_coalesced_job(self, _is_coalesced):
        """Test get_job_status with a coalesced job. We will mock _is_coalesced for that."""
        job = self._make_job(SUCCESS, 1433166610, 1, 1433166609)
        self.assertEquals(self.query_api.get_job_status(job), COALESCED)

    def test_failed_job(self):
        """Test get_job_status with a failed job."""
        job = self._make_job(FAILURE, 1433166610, 1, 1433166609)
        self.assertEquals(self.query_api.get_job_status(job), FAILURE)

    def test_weird_job(self):
        """get_job_status should raise an Exception when it encounters an unexpected status."""
        job = self._make_job(20, 1433166610, 1, 1433166609)
        with self.assertRaises(Exception):
            self.query_api.get_job_status(job)
def trigger_missing_jobs_for_revision(self, repo_name, revision, dry_run=False,
                                      trigger_build_if_missing=True):
    """
    Trigger missing jobs for a given revision.

    Jobs containing 'b2g' or 'pgo' in their buildername will not be triggered.
    """
    # NOTE(review): `dry_run` and `trigger_build_if_missing` are accepted
    # but never forwarded below — confirm whether schedule_graph is
    # expected to receive them.
    builders_for_repo = BuildApi().determine_missing_jobs(
        repo_name=repo_name,
        revision=revision)
    # buildbot_graph_builder presumably returns (graph, ...); only the
    # graph itself is needed here — TODO confirm against its definition.
    buildbot_graph = buildbot_graph_builder(builders_for_repo, revision)[0]
    self.schedule_graph(repo_name=repo_name,
                        revision=revision,
                        builders_graph=buildbot_graph)
def determine_trigger_objective(revision, buildername, trigger_build_if_missing=True,
                                will_use_buildapi=False):
    """
    Determine if we need to trigger any jobs and which job.

    Returns a three-tuple:

    * The name of the builder we need to trigger (or None)
    * The package URL needed to trigger such builder, if any (else None)
    * The tests URL needed to trigger such builder, if any (else None)
    """
    builder_to_trigger = None
    files = None
    repo_name = query_repo_name_from_buildername(buildername)
    build_buildername = determine_upstream_builder(buildername)

    if VALIDATE and not valid_builder(build_buildername):
        raise MozciError("Our platforms mapping system has failed.")

    if build_buildername == buildername:
        # For a build job we know that we don't need files to
        # trigger it and it's the build job we want to trigger
        return build_buildername, None, None

    # Let's figure out which jobs are associated to such revision
    query_api = BuildApi()
    # Let's only look at jobs that match such build_buildername
    build_jobs = query_api.get_matching_jobs(repo_name, revision, build_buildername)

    # We need to determine if we need to trigger a build job
    # or the test job
    working_job = None
    running_job = None
    failed_job = None

    LOG.debug("List of matching jobs:")
    for job in build_jobs:
        try:
            status = query_api.get_job_status(job)
        except buildjson.BuildjsonException:
            LOG.debug("We have hit bug 1159279 and have to work around it. We will "
                      "pretend that we could not reach the files for it.")
            continue

        # Sometimes running jobs have status unknown in buildapi
        if status in (RUNNING, PENDING, UNKNOWN):
            LOG.debug("We found a running/pending build job. We don't search anymore.")
            running_job = job
            # We cannot call _find_files for a running job
            continue

        # Having a coalesced build is the same as not having a build available
        if status == COALESCED:
            # Typo fix: "non-existant" -> "non-existent"
            LOG.debug("The build we found was a coalesced one; this is the same as "
                      "non-existent.")
            continue

        # Successful or failed jobs may have the files we need
        files = _find_files(job)
        if files != [] and _all_urls_reachable(files.values()):
            working_job = job
            break
        else:
            LOG.debug("We can't determine the files for this build or "
                      "can't reach them.")
            files = None
            # Grammar fix: "did not produced" -> "did not produce"
            LOG.info("We found a job that finished but it did not "
                     "produce files. status: %d" % status)
            failed_job = job
    # End of for loop

    if working_job:
        # We found a build job with the necessary files. It could be a
        # successful job, a running job that already emitted files or a
        # testfailed job
        LOG.debug(str(working_job))
        LOG.info("We have the necessary files to trigger the downstream job.")
        # We have the files needed to trigger the test job
        builder_to_trigger = buildername
    elif running_job:
        LOG.info("We found a running/pending build job. We will not trigger another one.")
        LOG.info("You have to run the script again after the build job is finished to "
                 "trigger %s." % buildername)
        builder_to_trigger = None
    elif failed_job:
        LOG.info("The build job %s failed on revision %s without generating the "
                 "necessary files. We will not trigger anything."
                 % (build_buildername, revision))
        builder_to_trigger = None
    else:
        # We were trying to build a test job, however, we determined
        # that we need an upstream builder instead
        if not trigger_build_if_missing or \
           not _unique_build_request(build_buildername, revision):
            # This is a safeguard to prevent triggering a build
            # job multiple times if it is not intentional
            builder_to_trigger = None
            if not trigger_build_if_missing:
                # Grammar fix: "would have to triggered" -> "would have to trigger"
                LOG.info("We would have to trigger build '%s' in order to trigger "
                         "job '%s'. On this mode we will not trigger either."
                         % (build_buildername, buildername))
        else:
            if will_use_buildapi:
                LOG.info("We will trigger 1) '%s'" % build_buildername)
                LOG.info("instead of 2) '%s'" % buildername)
                LOG.info("We need to trigger the build job once (1) "
                         "in order to be able to run the test job (2).")
                if repo_name == 'try':
                    LOG.info("You'll need to run the script again after (1) is done to "
                             "trigger (2).")
                else:
                    # Typo fix: "coalesccing" -> "coalescing"
                    LOG.info("After (1) is done and if no coalescing happens the test "
                             "jobs associated with it will be triggered.")
            builder_to_trigger = build_buildername

    if files:
        return builder_to_trigger, files['packageUrl'], files['testsUrl']
    else:
        return builder_to_trigger, None, None
def main():
    """CLI entry point: parse options and dispatch to one of five trigger modes."""
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    validate_options(options)
    if not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        # Resolve the symbolic 'tip' to a concrete changeset node.
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager()
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, never implicitly trigger missing build jobs.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name,
                                                        revision,
                                                        COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        filters_in = options.includes.split(',') + [repo_name]
        filters_out = []
        if options.exclude:
            filters_out = options.exclude.split(',')

        job_names = filter_buildernames(
            buildernames=query_builders(repo_name=repo_name),
            include=filters_in,
            exclude=filters_out
        )
        if len(job_names) == 0:
            LOG.info("0 jobs match these filters. please try again.")
            return

        if options.existing_only:
            # We query all successful jobs for a given revision and filter
            # them by include/exclude filters.
            trigger_build_if_missing = False
            successful_jobs = TreeherderApi().find_all_jobs_by_status(
                repo_name=repo_name,
                revision=revision,
                status=SUCCESS)
            # We will filter out all the existing job from those successful job we have.
            job_names = [buildername for buildername in successful_jobs
                         if buildername in job_names]
            cont = raw_input("The ones which have existing builds out of %i jobs will be triggered,\
 do you wish to continue? y/n/d (d=show details) " % len(job_names))
        else:
            cont = raw_input("%i jobs will be triggered, do you wish to continue? \
y/n/d (d=show details) " % len(job_names))

        if cont.lower() == 'd':
            LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
            cont = raw_input("Do you wish to continue? y/n ")

        if cont.lower() != 'y':
            exit(1)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        # Python 2 except syntax; log and abort on any trigger failure.
        except Exception, e:
            LOG.exception(e)
            exit(1)
def main():
    """CLI entry point: parse options and dispatch to one of three trigger modes."""
    options = parse_args()
    validate_options(options)
    repo_url = query_repo_url(options.repo_name)

    if not valid_credentials():
        sys.exit(-1)

    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        # A buildername implies its repo; recompute the URL from it.
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        options.repo_name = query_repo_name_from_buildername(
            options.buildernames[0])

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url)
        LOG.info("The tip of %s is %s", options.repo_name, revision)
    else:
        revision = query_full_revision_info(repo_url, options.rev)

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(
            options.repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=options.repo_name,
                                   request_id=request_id,
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision
    if options.fill_revision:
        trigger_missing_jobs_for_revision(repo_name=options.repo_name,
                                          revision=revision,
                                          dry_run=options.dry_run)
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    for buildername in options.buildernames:
        revlist = determine_revlist(repo_url=repo_url,
                                    buildername=buildername,
                                    rev=revision,
                                    back_revisions=options.back_revisions,
                                    delta=options.delta,
                                    from_rev=options.from_rev,
                                    backfill=options.backfill,
                                    skips=options.skips,
                                    max_revisions=options.max_revisions)

        try:
            trigger_range(
                buildername=buildername,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=options.trigger_build_if_missing)
        # Python 2 except syntax; log and abort on any trigger failure.
        except Exception, e:
            LOG.exception(e)
            exit(1)

        if revlist:
            # Print a Treeherder link covering the triggered revision range.
            LOG.info('https://treeherder.mozilla.org/#/jobs?%s' %
                     urllib.urlencode({
                         'repo': options.repo_name,
                         'fromchange': revlist[-1],
                         'tochange': revlist[0],
                         'filter-searchStr': buildername
                     }))
def main():
    """CLI entry point: parse options and dispatch to one of the trigger modes."""
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    if options.action == 'trigger-all-talos':
        trigger_all_talos_jobs(options.repo_name, options.rev, options.times,
                               dry_run=options.dry_run)
        sys.exit(0)

    validate_options(options)
    # Credentials are only required for real (non-dry-run) triggering.
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager(web_auth=True)
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, never implicitly trigger missing build jobs.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision, options.buildernames[0],
                        dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(
            repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only)
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs or
            options.trigger_talos_for_build):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        # NOTE(review): this branch never assigns job_names — if
        # _includes_excludes does not exit, the loop below would raise
        # NameError. Confirm _includes_excludes' behavior.
        _includes_excludes(options)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)
    elif options.trigger_talos_for_build:
        trigger_talos_jobs_for_build(
            buildername=options.buildernames[0],
            revision=revision,
            times=2,
            dry_run=options.dry_run,
        )
        exit(0)

    for buildername in job_names:
        revlist = determine_revlist(repo_url=repo_url,
                                    buildername=buildername,
                                    rev=revision,
                                    back_revisions=options.back_revisions,
                                    delta=options.delta,
                                    from_rev=options.from_rev,
                                    backfill=options.backfill,
                                    skips=options.skips,
                                    max_revisions=options.max_revisions)

        _print_treeherder_link(revlist=revlist,
                               repo_name=repo_name,
                               buildername=buildername,
                               revision=revision,
                               log=LOG,
                               includes=options.includes,
                               exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing)
        except Exception as e:
            # Log and abort on any trigger failure.
            LOG.exception(e)
            exit(1)
def main():
    """CLI entry point: parse options and dispatch to one of six trigger modes."""
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    validate_options(options)
    # Credentials are only required for real (non-dry-run) triggering.
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager()
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, never implicitly trigger missing build jobs.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision, options.buildernames[0],
                        dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        filters_in = options.includes.split(',') + [repo_name]
        filters_out = []
        if options.exclude:
            filters_out = options.exclude.split(',')

        job_names = filter_buildernames(
            buildernames=query_builders(repo_name=repo_name),
            include=filters_in,
            exclude=filters_out
        )
        if len(job_names) == 0:
            LOG.info("0 jobs match these filters. please try again.")
            return

        if options.existing_only:
            # We query all successful jobs for a given revision and filter
            # them by include/exclude filters.
            trigger_build_if_missing = False
            successful_jobs = TreeherderApi().find_all_jobs_by_status(
                repo_name=repo_name,
                revision=revision,
                status=SUCCESS)
            # We will filter out all the existing job from those successful job we have.
            job_names = [buildername for buildername in successful_jobs
                         if buildername in job_names]
            cont = raw_input("The ones which have existing builds out of %i jobs will be triggered,\
 do you wish to continue? y/n/d (d=show details) " % len(job_names))
        else:
            cont = raw_input("%i jobs will be triggered, do you wish to continue? \
y/n/d (d=show details) " % len(job_names))

        if cont.lower() == 'd':
            LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
            cont = raw_input("Do you wish to continue? y/n ")

        if cont.lower() != 'y':
            exit(1)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        # Python 2 except syntax; log and abort on any trigger failure.
        except Exception, e:
            LOG.exception(e)
            exit(1)
def setUp(self):
    # Give every test a fresh BuildApi instance.
    self.query_api = BuildApi()
def main():
    """CLI entry point: retrigger coalesced jobs or trigger a revision range."""
    options = parse_args()
    validate_options(options)
    # NOTE(review): the result of valid_credentials() is ignored here —
    # confirm whether a failure should abort like in the other tools.
    valid_credentials()

    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        options.repo_name = query_repo_name_from_buildername(options.buildernames[0])

    if options.rev == 'tip':
        # Resolve the symbolic 'tip' to a concrete revision.
        repo_url = query_repo_url(options.repo_name)
        options.rev = query_repo_tip(repo_url)
        LOG.info("The tip of %s is %s", options.repo_name, options.rev)

    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(options.repo_name,
                                                        options.rev,
                                                        COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=options.repo_name,
                                   request_id=request_id,
                                   dry_run=options.dry_run)
        return

    for buildername in options.buildernames:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=options.rev,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        try:
            trigger_range(
                buildername=buildername,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=options.trigger_build_if_missing
            )
        # Python 2 except syntax; log and abort on any trigger failure.
        except Exception, e:
            LOG.exception(e)
            exit(1)

        if revlist:
            # Print a Treeherder link covering the triggered revision range.
            LOG.info('https://treeherder.mozilla.org/#/jobs?%s' %
                     urllib.urlencode({'repo': options.repo_name,
                                       'fromchange': revlist[-1],
                                       'tochange': revlist[0],
                                       'filter-searchStr': buildername}))
def main():
    """CLI entry point: parse options and dispatch to one of the trigger modes."""
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    if options.action == 'trigger-all-talos':
        trigger_all_talos_jobs(options.repo_name, options.rev, options.times,
                               dry_run=options.dry_run)
        sys.exit(0)

    validate_options(options)
    # Credentials are only required for real (non-dry-run) triggering.
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager(web_auth=True)
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, never implicitly trigger missing build jobs.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision, options.buildernames[0],
                        dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs or
            options.trigger_talos_for_build):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        # NOTE(review): this branch never assigns job_names — if
        # _includes_excludes does not exit, the loop below would raise
        # NameError. Confirm _includes_excludes' behavior.
        _includes_excludes(options)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)
    elif options.trigger_talos_for_build:
        trigger_talos_jobs_for_build(
            buildername=options.buildernames[0],
            revision=revision,
            times=2,
            dry_run=options.dry_run,
        )
        exit(0)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        except Exception as e:
            # Log and abort on any trigger failure.
            LOG.exception(e)
            exit(1)
is_downstream, list_builders, ) from mozci.sources import buildjson, pushlog from mozci.query_jobs import (PENDING, RUNNING, SUCCESS, WARNING, UNKNOWN, COALESCED, FAILURE, EXCEPTION, RETRY, BuildApi, TreeherderApi) from mozci.utils.authentication import get_credentials from mozci.utils.misc import _all_urls_reachable from mozci.utils.transfer import path_to_file, clean_directory LOG = logging.getLogger('mozci') SCHEDULING_MANAGER = {} # Default value of QUERY_SOURCE QUERY_SOURCE = BuildApi() # Set this value to False in your tool to prevent any sort of validation VALIDATE = True def disable_validations(): global VALIDATE if VALIDATE: LOG.debug("Disable validations.") VALIDATE = False def set_query_source(query_source="buildapi"): """ Function to set the global QUERY_SOURCE """ global QUERY_SOURCE
def setUp(self):
    """Give every test a clean slate: empty jobs cache, fresh BuildApi."""
    query_jobs.JOBS_CACHE = {}
    self.query_api = BuildApi()
def determine_trigger_objective(revision, buildername, trigger_build_if_missing=True):
    """
    Determine if we need to trigger any jobs and which job.

    Parameters:

    * revision - push revision we want jobs for.
    * buildername - the builder the caller ultimately wants to run
      (possibly a test/downstream builder).
    * trigger_build_if_missing - when False we never schedule the
      upstream build job; we only report that it would be needed.

    Returns a 3-tuple:

    * The name of the builder we need to trigger (or None)
    * The package URL needed to trigger it (or None)
    * The test-packages URL needed to trigger it (or None)
    """
    builder_to_trigger = None
    files = None
    repo_name = query_repo_name_from_buildername(buildername)

    build_buildername = determine_upstream_builder(buildername)

    if VALIDATE and not valid_builder(build_buildername):
        raise MozciError("Our platforms mapping system has failed.")

    if build_buildername == buildername:
        # For a build job we know that we don't need files to
        # trigger it and it's the build job we want to trigger.
        # FIX: return a 3-tuple like every other exit path of this
        # function, so callers unpacking three values don't crash here.
        return build_buildername, None, None

    # Let's figure out which jobs are associated to such revision
    query_api = BuildApi()
    # Let's only look at jobs that match such build_buildername
    build_jobs = query_api.get_matching_jobs(repo_name, revision, build_buildername)

    # We need to determine if we need to trigger a build job
    # or the test job
    working_job = None
    running_job = None
    failed_job = None

    LOG.debug("List of matching jobs:")
    for job in build_jobs:
        try:
            status = query_api.get_job_status(job)
        except buildjson.BuildjsonException:
            # Bug 1159279: the buildjson data for this job is unreachable;
            # treat the job as if we could not inspect it at all.
            LOG.debug(
                "We have hit bug 1159279 and have to work around it. We will "
                "pretend that we could not reach the files for it.")
            continue

        # Sometimes running jobs have status unknown in buildapi
        if status in (RUNNING, PENDING, UNKNOWN):
            LOG.debug(
                "We found a running/pending build job. We don't search anymore.")
            running_job = job
            # We cannot call _find_files for a running job
            continue

        # Having a coalesced build is the same as not having a build available
        if status == COALESCED:
            LOG.debug(
                "The build we found was a coalesced one; this is the same as "
                "non-existant.")
            continue

        # Successful or failed jobs may have the files we need
        files = _find_files(job)

        # FIX: `files` may come back as an empty list sentinel or an empty
        # dict; the old `files != []` test let an empty dict through (a dict
        # never equals a list) and would later raise KeyError on
        # files['packageUrl']. Truthiness rejects both empty shapes.
        if files and _all_urls_reachable(files.values()):
            working_job = job
            break
        else:
            LOG.debug("We can't determine the files for this build or "
                      "can't reach them.")
            files = None

        LOG.info("We found a job that finished but it did not "
                 "produced files. status: %d" % status)
        failed_job = job
    # End of for loop

    if working_job:
        # We found a build job with the necessary files. It could be a
        # successful job, a running job that already emitted files or a
        # testfailed job
        LOG.debug(str(working_job))
        LOG.info("We have the necessary files to trigger the downstream job.")
        # We have the files needed to trigger the test job
        builder_to_trigger = buildername

    elif running_job:
        LOG.info(
            "We found a running/pending build job. We will not trigger another one.")
        LOG.info(
            "You have to run the script again after the build job is finished to "
            "trigger %s." % buildername)
        builder_to_trigger = None

    elif failed_job:
        LOG.info(
            "The build job %s failed on revision %s without generating the "
            "necessary files. We will not trigger anything."
            % (build_buildername, revision))
        builder_to_trigger = None

    else:
        # We were trying to build a test job, however, we determined
        # that we need an upstream builder instead
        if not trigger_build_if_missing or not _unique_build_request(
                build_buildername, revision):
            # This is a safeguard to prevent triggering a build
            # job multiple times if it is not intentional
            builder_to_trigger = None
            if not trigger_build_if_missing:
                LOG.info(
                    "We would have to triggered build '%s' in order to trigger "
                    "job '%s'. On this mode we will not trigger either."
                    % (build_buildername, buildername))
        else:
            LOG.info("We will trigger 1) "
                     "'%s' instead of 2) '%s'" % (build_buildername, buildername))
            LOG.info("We need to trigger the build job once (1) "
                     "in order to be able to run the test job (2).")
            if repo_name == 'try':
                LOG.info(
                    "You'll need to run the script again after (1) is done to "
                    "trigger (2).")
            else:
                LOG.info(
                    "After (1) is done and if no coalesccing happens the test "
                    "jobs associated with it will be triggered.")
            builder_to_trigger = build_buildername

    if files:
        return builder_to_trigger, files['packageUrl'], files['testPackagesUrl']
    else:
        return builder_to_trigger, None, None
def setUp(self):
    """Reset both module-level job caches and hand the test a fresh BuildApi."""
    buildapi.JOBS_CACHE = {}
    query_jobs.JOBS_CACHE = {}
    self.query_api = BuildApi()