class TestTreeherderApiGetAllJobsByStatus(unittest.TestCase):
    """Test TreeherderApi fetch all jobs(buildernames for now) by job's status."""

    def setUp(self):
        self.query_api = TreeherderApi()
        self.repo_name = 'repo_mock'
        self.revision = 'revision_mock'

    # NOTE: stacked @patch decorators inject mocks bottom-up — the first mock
    # parameter corresponds to the decorator *closest* to the function
    # (get_job_status), the second to get_all_jobs. The parameter names were
    # previously reversed, which was misleading even though neither is used.
    @patch('mozci.query_jobs.TreeherderApi.get_all_jobs',
           return_value=json.loads(MOCK_JOBS % {
               'result': "success",
               'state': "completed"
           }))
    @patch('mozci.query_jobs.TreeherderApi.get_job_status',
           return_value=SUCCESS)
    def test_successful_job(self, get_job_status, get_all_jobs):
        """Test TreeherderApi find_all_jobs_by_status with a successful job."""
        self.assertEqual(
            self.query_api.find_all_jobs_by_status(self.repo_name, self.revision, SUCCESS),
            [
                "Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1",
                "[TC] - Linux64 web-platform-tests-e10s-6"
            ])

    @patch('mozci.query_jobs.TreeherderApi.get_all_jobs',
           return_value=json.loads(MOCK_JOBS % {
               'result': "testfailed",
               'state': "completed"
           }))
    @patch('mozci.query_jobs.TreeherderApi.get_job_status',
           return_value=FAILURE)
    def test_failed_job(self, get_job_status, get_all_jobs):
        """Test TreeherderApi find_all_jobs_by_status with a failed job."""
        self.assertEqual(
            self.query_api.find_all_jobs_by_status(self.repo_name, self.revision, FAILURE),
            [
                "Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1",
                "[TC] - Linux64 web-platform-tests-e10s-6"
            ])

    @patch('mozci.query_jobs.TreeherderApi.get_all_jobs',
           return_value=json.loads(MOCK_JOBS % {
               'result': "unknown",
               'state': "pending"
           }))
    @patch('mozci.query_jobs.TreeherderApi.get_job_status',
           return_value=PENDING)
    def test_pending_job(self, get_job_status, get_all_jobs):
        """Test TreeherderApi find_all_jobs_by_status with a pending job."""
        self.assertEqual(
            self.query_api.find_all_jobs_by_status(self.repo_name, self.revision, PENDING),
            [
                "Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1",
                "[TC] - Linux64 web-platform-tests-e10s-6"
            ])
def getSuccessfulJobs(revision, buildername):
    """Return the number of successful jobs for `buildername` at `revision`.

    Queries Treeherder for every job matching the buildername on the given
    revision, then resolves each job's status via
    https://github.com/armenzg/mozilla_ci_tools/blob/master/mozci/query_jobs.py#L187
    and counts those that completed with SUCCESS.

    NOTE: the previous docstring ("number of data points for an alert")
    described the caller's intent, not what this function computes.
    """
    treeherder_api = TreeherderApi()
    repo_name = query_repo_name_from_buildername(buildername)
    matching_jobs = treeherder_api.get_matching_jobs(repo_name, revision, buildername)
    # Count jobs whose resolved status is SUCCESS.
    return sum(1 for job in matching_jobs
               if treeherder_api.get_job_status(job) == SUCCESS)
def _determine_repo_revision(data, treeherder_server_url):
    ''' Return repo_name and revision based on Pulse message data.

    `data` is a Pulse message payload: either a Treeherder-style message
    (carrying 'project' plus a 'job_id' or 'resultset_id') or a normalized
    build message (exchange 'exchange/build/normalized').
    Exits the process with status 1 when the message cannot be interpreted.
    '''
    query = TreeherderApi(server_url=treeherder_server_url)

    if 'project' in data:
        repo_name = data['project']
        if 'job_id' in data:
            revision = query.query_revision_for_job(repo_name=repo_name,
                                                    job_id=data['job_id'])
        elif 'resultset_id' in data:
            revision = query.query_revision_for_resultset(
                repo_name=repo_name,
                resultset_id=data['resultset_id'])
        else:
            LOG.error(
                'We should have been able to determine the repo and revision')
            sys.exit(1)
    elif data['_meta']['exchange'] == 'exchange/build/normalized':
        repo_name = data['payload']['tree']
        revision = data['payload']['revision']
    else:
        # Previously this case fell through to the return statement with
        # repo_name/revision unbound, raising NameError. Fail explicitly,
        # mirroring the error handling above.
        LOG.error(
            'We should have been able to determine the repo and revision')
        sys.exit(1)

    return repo_name, revision
class TestTreeherderApiGetJobStatus(unittest.TestCase):
    """Test query_job_status with different types of jobs"""

    def setUp(self):
        self.query_api = TreeherderApi()

    # The docstrings below previously all said "successful job" (copy-paste);
    # each now names the status it actually exercises. assertEquals is a
    # deprecated alias of assertEqual and has been replaced throughout.
    def test_pending_job(self):
        """Test TreeherderApi get_job_status with a pending job."""
        pending_job = json.loads(TREEHERDER_JOB % ("unknown", "pending"))
        self.assertEqual(self.query_api.get_job_status(pending_job), PENDING)

    def test_running_job(self):
        """Test TreeherderApi get_job_status with a running job."""
        running_job = json.loads(TREEHERDER_JOB % ("unknown", "running"))
        self.assertEqual(self.query_api.get_job_status(running_job), RUNNING)

    def test_successful_job(self):
        """Test TreeherderApi get_job_status with a successful job."""
        successful_job = json.loads(TREEHERDER_JOB % ("success", "completed"))
        self.assertEqual(self.query_api.get_job_status(successful_job), SUCCESS)

    def test_failed_job(self):
        """Test TreeherderApi get_job_status with a failed job."""
        failed_job = json.loads(TREEHERDER_JOB % ("testfailed", "completed"))
        self.assertEqual(self.query_api.get_job_status(failed_job), FAILURE)

    def test_weird_job(self):
        """get_job_status should raise an Exception when it encounters an
        unexpected status."""
        weird_job = json.loads(TREEHERDER_JOB % ("weird", "null"))
        with self.assertRaises(TreeherderException):
            self.query_api.get_job_status(weird_job)
class TestTreeherderApiGetJobStatus(unittest.TestCase):
    """Test query_job_status with different types of jobs"""

    def setUp(self):
        self.query_api = TreeherderApi()

    # The docstrings below previously all said "successful job" (copy-paste);
    # each now names the status it actually exercises. assertEquals is a
    # deprecated alias of assertEqual and has been replaced throughout.
    def test_pending_job(self):
        """Test TreeherderApi get_job_status with a pending job."""
        pending_job = json.loads(TREEHERDER_JOB % ("unknown", "pending"))
        self.assertEqual(self.query_api.get_job_status(pending_job), PENDING)

    def test_running_job(self):
        """Test TreeherderApi get_job_status with a running job."""
        running_job = json.loads(TREEHERDER_JOB % ("unknown", "running"))
        self.assertEqual(self.query_api.get_job_status(running_job), RUNNING)

    def test_successful_job(self):
        """Test TreeherderApi get_job_status with a successful job."""
        successful_job = json.loads(TREEHERDER_JOB % ("success", "completed"))
        self.assertEqual(self.query_api.get_job_status(successful_job), SUCCESS)

    def test_failed_job(self):
        """Test TreeherderApi get_job_status with a failed job."""
        failed_job = json.loads(TREEHERDER_JOB % ("testfailed", "completed"))
        self.assertEqual(self.query_api.get_job_status(failed_job), FAILURE)

    def test_weird_job(self):
        """get_job_status should raise an Exception when it encounters an
        unexpected status."""
        weird_job = json.loads(TREEHERDER_JOB % ("weird", "null"))
        with self.assertRaises(TreeherderError):
            self.query_api.get_job_status(weird_job)
class TestTreeherderApiGetAllJobsByStatus(unittest.TestCase):
    """Test TreeherderApi fetch all jobs(buildernames for now) by job's status."""

    def setUp(self):
        self.query_api = TreeherderApi()
        self.repo_name = 'repo_mock'
        self.revision = 'revision_mock'

    # NOTE: stacked @patch decorators inject mocks bottom-up — the first mock
    # parameter corresponds to the decorator *closest* to the function
    # (get_job_status), the second to get_all_jobs. The parameter names were
    # previously reversed, which was misleading even though neither is used.
    @patch('mozci.query_jobs.TreeherderApi.get_all_jobs',
           return_value=json.loads(MOCK_JOBS % {'result': "success",
                                                'state': "completed"}))
    @patch('mozci.query_jobs.TreeherderApi.get_job_status',
           return_value=SUCCESS)
    def test_successful_job(self, get_job_status, get_all_jobs):
        """Test TreeherderApi find_all_jobs_by_status with a successful job."""
        self.assertEqual(self.query_api.find_all_jobs_by_status(
            self.repo_name, self.revision, SUCCESS),
            ["Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1",
             "[TC] - Linux64 web-platform-tests-e10s-6"])

    @patch('mozci.query_jobs.TreeherderApi.get_all_jobs',
           return_value=json.loads(MOCK_JOBS % {'result': "testfailed",
                                                'state': "completed"}))
    @patch('mozci.query_jobs.TreeherderApi.get_job_status',
           return_value=FAILURE)
    def test_failed_job(self, get_job_status, get_all_jobs):
        """Test TreeherderApi find_all_jobs_by_status with a failed job."""
        self.assertEqual(self.query_api.find_all_jobs_by_status(
            self.repo_name, self.revision, FAILURE),
            ["Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1",
             "[TC] - Linux64 web-platform-tests-e10s-6"])

    @patch('mozci.query_jobs.TreeherderApi.get_all_jobs',
           return_value=json.loads(MOCK_JOBS % {'result': "unknown",
                                                'state': "pending"}))
    @patch('mozci.query_jobs.TreeherderApi.get_job_status',
           return_value=PENDING)
    def test_pending_job(self, get_job_status, get_all_jobs):
        """Test TreeherderApi find_all_jobs_by_status with a pending job."""
        self.assertEqual(self.query_api.find_all_jobs_by_status(
            self.repo_name, self.revision, PENDING),
            ["Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1",
             "[TC] - Linux64 web-platform-tests-e10s-6"])
def _includes_excludes(options, repo_name):
    # Interactively select and confirm the builders matching the user's
    # --includes/--exclude filters for `repo_name`.
    #
    # NOTE(review): this function reads `revision` (in the existing_only
    # branch) but `revision` is neither a parameter nor a local — presumably
    # a module-level global set by the caller, otherwise this is a NameError.
    # TODO confirm.
    # NOTE(review): `job_names` is computed but never returned; callers
    # presumably need it. Verify against call sites.

    # The repo name is appended so only builders for this repository match.
    filters_in = options.includes.split(',') + [repo_name]
    filters_out = []

    if options.exclude:
        filters_out = options.exclude.split(',')

    job_names = filter_buildernames(
        buildernames=query_builders(repo_name=repo_name),
        include=filters_in,
        exclude=filters_out)
    if len(job_names) == 0:
        LOG.info("0 jobs match these filters. please try again.")
        return

    if options.existing_only:
        # We query all successful jobs for a given revision and filter
        # them by include/exclude filters.
        trigger_build_if_missing = False
        successful_jobs = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=SUCCESS)
        # We will filter out all the existing job from those successful job we have.
        job_names = [
            buildername for buildername in successful_jobs
            if buildername in job_names
        ]
        cont = raw_input(
            "The ones which have existing builds out of %i jobs will be triggered,\
 do you wish to continue? y/n/d (d=show details) " % len(job_names))
    else:
        cont = raw_input("%i jobs will be triggered, do you wish to continue? \
 y/n/d (d=show details) " % len(job_names))

    # 'd' shows the full builder list, then asks for a final yes/no.
    if cont.lower() == 'd':
        LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
        cont = raw_input("Do you wish to continue? y/n ")

    # Anything other than an explicit 'y' aborts the whole process.
    if cont.lower() != 'y':
        exit(1)
def main():
    """Entry point: parse CLI options and dispatch to one of several
    trigger modes (backfill, coalesced retrigger, fill-revision,
    include/exclude filtering, failed-job retrigger, or explicit
    buildername triggering).
    """
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    validate_options(options)
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager()
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try we never auto-trigger missing builds.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision, options.buildernames[0], dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        filters_in = options.includes.split(',') + [repo_name]
        filters_out = []
        if options.exclude:
            filters_out = options.exclude.split(',')

        job_names = filter_buildernames(
            buildernames=query_builders(repo_name=repo_name),
            include=filters_in,
            exclude=filters_out
        )
        if len(job_names) == 0:
            LOG.info("0 jobs match these filters. please try again.")
            return

        if options.existing_only:
            # We query all successful jobs for a given revision and filter
            # them by include/exclude filters.
            trigger_build_if_missing = False
            successful_jobs = TreeherderApi().find_all_jobs_by_status(
                repo_name=repo_name,
                revision=revision,
                status=SUCCESS)
            # We will filter out all the existing job from those successful job we have.
            job_names = [buildername for buildername in successful_jobs
                         if buildername in job_names]
            cont = raw_input(
                "The ones which have existing builds out of %i jobs will be triggered,"
                " do you wish to continue? y/n/d (d=show details) " % len(job_names))
        else:
            cont = raw_input("%i jobs will be triggered, do you wish to continue? "
                             "y/n/d (d=show details) " % len(job_names))

        if cont.lower() == 'd':
            LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
            cont = raw_input("Do you wish to continue? y/n ")

        if cont.lower() != 'y':
            exit(1)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        # Fixed Python-2-only `except Exception, e` syntax; the `as` form
        # is valid on both Python 2.6+ and 3, and matches the other main()
        # variant in this file.
        except Exception as e:
            LOG.exception(e)
            exit(1)
def setUp(self):
    """Create a fresh TreeherderApi instance for each test."""
    self.query_api = TreeherderApi()
def setUp(self):
    """Create a fresh TreeherderApi instance plus placeholder repo/revision
    values used by the mocked find_all_jobs_by_status calls."""
    self.query_api = TreeherderApi()
    self.repo_name = 'repo_mock'
    self.revision = 'revision_mock'
def main():
    # Entry point: parse CLI options and dispatch to one of several trigger
    # modes (talos, backfill, coalesced retrigger, fill-revision,
    # include/exclude filtering, failed-job retrigger, talos-for-build,
    # or explicit buildername triggering).
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    # Talos-everything mode short-circuits before credential validation.
    if options.action == 'trigger-all-talos':
        trigger_all_talos_jobs(options.repo_name, options.rev, options.times,
                               dry_run=options.dry_run)
        sys.exit(0)

    validate_options(options)
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager(web_auth=True)
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try we never auto-trigger missing builds.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision, options.buildernames[0], dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(
            repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only)
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs or
            options.trigger_talos_for_build):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        # NOTE(review): elsewhere in this codebase _includes_excludes takes
        # (options, repo_name) and does not return the filtered job list, so
        # this one-argument call looks like an arity mismatch and leaves
        # `job_names` unbound for the loop below — TODO confirm against the
        # helper's actual definition.
        _includes_excludes(options)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    elif options.trigger_talos_for_build:
        trigger_talos_jobs_for_build(
            buildername=options.buildernames[0],
            revision=revision,
            times=2,
            dry_run=options.dry_run,
        )
        exit(0)

    for buildername in job_names:
        revlist = determine_revlist(repo_url=repo_url,
                                    buildername=buildername,
                                    rev=revision,
                                    back_revisions=options.back_revisions,
                                    delta=options.delta,
                                    from_rev=options.from_rev,
                                    backfill=options.backfill,
                                    skips=options.skips,
                                    max_revisions=options.max_revisions)

        _print_treeherder_link(revlist=revlist,
                               repo_name=repo_name,
                               buildername=buildername,
                               revision=revision,
                               log=LOG,
                               includes=options.includes,
                               exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing)
        except Exception as e:
            LOG.exception(e)
            exit(1)
def determine_trigger_objective(revision, buildername, trigger_build_if_missing=True,
                                will_use_buildapi=False):
    """
    Determine if we need to trigger any jobs and which job.

    Returns:

    * The name of the builder we need to trigger
    * Files, if needed, to trigger such builder

    (The actual return is a 3-tuple: builder_to_trigger, packageUrl, testsUrl —
    the latter two are None when no files are available.)
    """
    builder_to_trigger = None
    files = None
    repo_name = query_repo_name_from_buildername(buildername)
    build_buildername = determine_upstream_builder(buildername)

    if VALIDATE and not valid_builder(build_buildername):
        raise MozciError("Our platforms mapping system has failed.")

    if build_buildername == buildername:
        # For a build job we know that we don't need files to
        # trigger it and it's the build job we want to trigger
        return build_buildername, None, None

    # Let's figure out which jobs are associated to such revision
    query_api = TreeherderApi()
    # Let's only look at jobs that match such build_buildername
    build_jobs = query_api.get_matching_jobs(repo_name, revision, build_buildername)

    # We need to determine if we need to trigger a build job
    # or the test job
    working_job = None
    running_job = None
    failed_job = None

    LOG.debug("List of matching jobs:")
    for job in build_jobs:
        try:
            status = query_api.get_job_status(job)
        except buildjson.BuildjsonException:
            LOG.debug("We have hit bug 1159279 and have to work around it. We will "
                      "pretend that we could not reach the files for it.")
            continue

        # Sometimes running jobs have status unknown in buildapi
        if status in (RUNNING, PENDING, UNKNOWN):
            LOG.debug("We found a running/pending build job. We don't search anymore.")
            running_job = job
            # We cannot call _find_files for a running job
            continue

        # Having a coalesced build is the same as not having a build available
        if status == COALESCED:
            LOG.debug("The build we found was a coalesced one; this is the same as "
                      "non-existant.")
            continue

        # Successful or failed jobs may have the files we need
        # NOTE(review): _find_files appears to return a mapping (files.values()
        # and files['packageUrl'] below), so the `files != []` check presumably
        # guards against an empty result — confirm _find_files' return type.
        taskId = tc.get_taskid(revision=revision,
                               repo_name=repo_name,
                               platform=job['build_platform'])
        files = _find_files(taskId)

        if files != [] and _all_urls_reachable(files.values()):
            working_job = job
            break
        else:
            LOG.debug("We can't determine the files for this build or "
                      "can't reach them.")
            files = None

            LOG.info("We found a job that finished but it did not "
                     "produced files. status: %d" % status)
            failed_job = job
    # End of for loop

    if working_job:
        # We found a build job with the necessary files. It could be a
        # successful job, a running job that already emitted files or a
        # testfailed job
        LOG.debug(str(working_job))
        LOG.info("We have the necessary files to trigger the downstream job.")
        # We have the files needed to trigger the test job
        builder_to_trigger = buildername

    elif running_job:
        LOG.info("We found a running/pending build job. We will not trigger another one.")
        LOG.info("You have to run the script again after the build job is finished to "
                 "trigger %s." % buildername)
        builder_to_trigger = None

    elif failed_job:
        LOG.info("The build job %s failed on revision %s without generating the "
                 "necessary files. We will not trigger anything." %
                 (build_buildername, revision))
        builder_to_trigger = None

    else:
        # We were trying to build a test job, however, we determined
        # that we need an upstream builder instead
        if not trigger_build_if_missing or not _unique_build_request(build_buildername,
                                                                     revision):
            # This is a safeguard to prevent triggering a build
            # job multiple times if it is not intentional
            builder_to_trigger = None
            if not trigger_build_if_missing:
                LOG.info("We would have to triggered build '%s' in order to trigger "
                         "job '%s'. On this mode we will not trigger either." %
                         (build_buildername, buildername))
        else:
            if will_use_buildapi:
                LOG.info("We will trigger 1) '%s'" % build_buildername)
                LOG.info("instead of 2) '%s'" % buildername)
                LOG.info("We need to trigger the build job once (1) "
                         "in order to be able to run the test job (2).")
                if repo_name == 'try':
                    LOG.info("You'll need to run the script again after (1) is done to "
                             "trigger (2).")
                else:
                    LOG.info("After (1) is done and if no coalesccing happens the test "
                             "jobs associated with it will be triggered.")
            builder_to_trigger = build_buildername

    if files:
        return builder_to_trigger, files['packageUrl'], files['testsUrl']
    else:
        return builder_to_trigger, None, None