def testGetLastAttemptedTryJobDetails(self):
    """Last-attempted try job details should expose the job's url and status."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 123
    step_name = 's'
    test_name = 't'
    revision = 'r1'
    try_job_id = '12345'
    try_job_url = 'url'
    status = analysis_status.RUNNING

    # Analysis pointing at the revision of the most recent try job attempt.
    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.last_attempted_revision = revision
    analysis.put()

    # A running try job for that revision, with associated try-job data.
    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, revision)
    try_job.try_job_ids = [try_job_id]
    try_job.status = status
    try_job.put()

    try_job_data = FlakeTryJobData.Create(try_job_id)
    try_job_data.try_job_key = try_job.key
    try_job_data.try_job_url = try_job_url
    try_job_data.put()

    expected_details = {
        'url': try_job_url,
        'status': analysis_status.STATUS_TO_DESCRIPTION.get(status)
    }
    self.assertEqual(
        expected_details,
        check_flake._GetLastAttemptedTryJobDetails(analysis))
def testRecursiveFlakeTryJobPipelineAbortedNoUpdateCompletedTryJob(
    self, _):
    """An unexpected abort must not overwrite an already-completed try job."""
    master_name = 'm'
    builder_name = 'b'
    master_build_number = 100
    step_name = 's'
    test_name = 't'
    revision = 'rev'
    commit_position = 1
    lower_boundary_commit_position = 0

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          master_build_number, step_name,
                                          test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.Save()

    # Try job already finished before the abort happens.
    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)
    try_job.status = analysis_status.COMPLETED
    try_job.put()

    rftp = RecursiveFlakeTryJobPipeline(analysis.key.urlsafe(),
                                        commit_position, revision,
                                        lower_boundary_commit_position,
                                        _DEFAULT_CACHE_NAME, None)
    rftp._LogUnexpectedAbort()

    # Status stays COMPLETED; abort handling skips finished jobs.
    self.assertEqual(analysis_status.COMPLETED, try_job.status)
def testNextCommitPositionNewlyAddedFlakyTest(self, mocked_fn):
    """A test flaky at its introducing commit should be blamed on that commit."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    git_hash = 'r100'
    try_job_id = '123'
    revision = 'r100'
    commit_position = 100
    url = 'url'

    # The mocked change log identifies r100 as the culprit candidate.
    change_log = ChangeLog(None, None, revision, commit_position, None, None,
                           url, None)
    mocked_fn.return_value = change_log

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)
    try_job.try_job_ids.append(try_job_id)
    try_job.put()

    try_job_data = FlakeTryJobData.Create(try_job_id)
    try_job_data.try_job_key = try_job.key
    try_job_data.put()

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.try_job_status = analysis_status.RUNNING
    # Flaky at commit 100 but nonexistent (-1 pass rate) at 99, i.e. the test
    # was introduced flaky at r100.
    analysis.data_points = [
        _GenerateDataPoint(pass_rate=0.9,
                           commit_position=commit_position,
                           build_number=12345,
                           previous_build_commit_position=98,
                           blame_list=['r99', 'r100']),
        _GenerateDataPoint(pass_rate=-1,
                           commit_position=99,
                           try_job_url='id1')
    ]
    analysis.suspected_flake_build_number = 12345
    analysis.algorithm_parameters = DEFAULT_CONFIG_DATA[
        'check_flake_settings']
    analysis.Save()

    self.MockPipeline(
        recursive_flake_try_job_pipeline.RecursiveFlakeTryJobPipeline,
        '',
        expected_args=[])

    pipeline = NextCommitPositionPipeline(analysis.key.urlsafe(),
                                          try_job.key.urlsafe(), 98,
                                          _DEFAULT_CACHE_NAME, None)
    pipeline.start(queue_name=constants.DEFAULT_QUEUE)
    self.execute_queued_tasks()

    culprit = analysis.culprit
    self.assertEqual(git_hash, culprit.revision)
    self.assertEqual(100, culprit.commit_position)
def testNextCommitPositionPipeline(self):
    """The pipeline should bisect to the next commit position to analyze."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    revision = 'r99'
    try_job_id = '123'

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)
    try_job.try_job_ids.append(try_job_id)
    try_job.put()

    try_job_data = FlakeTryJobData.Create(try_job_id)
    try_job_data.try_job_key = try_job.key
    try_job_data.put()

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.try_job_status = analysis_status.RUNNING
    # Data points cover commits 91-100; the algorithm should pick 97 next.
    analysis.data_points = [
        _GenerateDataPoint(pass_rate=0.9,
                           commit_position=100,
                           build_number=12345,
                           previous_build_commit_position=90,
                           blame_list=[
                               'r91', 'r92', 'r93', 'r94', 'r95', 'r96',
                               'r97', 'r98', 'r99', 'r100'
                           ]),
        _GenerateDataPoint(pass_rate=0.9, commit_position=99,
                           try_job_url='u')
    ]
    analysis.suspected_flake_build_number = 12345
    analysis.algorithm_parameters = DEFAULT_CONFIG_DATA[
        'check_flake_settings']
    analysis.Save()

    # Expect a recursive try job at commit 97 / revision r97.
    self.MockPipeline(
        recursive_flake_try_job_pipeline.RecursiveFlakeTryJobPipeline,
        '',
        expected_args=[
            analysis.key.urlsafe(), 97, 'r97', 90, _DEFAULT_CACHE_NAME, None
        ],
        expected_kwargs={})

    pipeline = NextCommitPositionPipeline(analysis.key.urlsafe(),
                                          try_job.key.urlsafe(), 90,
                                          _DEFAULT_CACHE_NAME, None)
    pipeline.start(queue_name=constants.DEFAULT_QUEUE)
    self.execute_queued_tasks()
def testProperties(self):
    """FlakeTryJob derived properties should mirror the creation arguments."""
    master_name = 'm'
    builder_name = 'b'
    step_name = 's'
    test_name = 't'
    git_hash = 'a1b2c3'

    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, git_hash)

    self.assertEqual(master_name, try_job.master_name)
    self.assertEqual(builder_name, try_job.builder_name)
    self.assertEqual(step_name, try_job.step_name)
    self.assertEqual(test_name, try_job.test_name)
    self.assertEqual(git_hash, try_job.git_hash)
def testProcessFlakeTryJobResultPipeline(self):
    """Processing a try job result should append a data point to the analysis."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    revision = 'r4'
    commit_position = 4
    try_job_id = 'try_job_id'
    url = 'url'

    # 20 passes out of 100 runs -> expected pass rate of 0.2.
    try_job_result = {
        'report': {
            'result': {
                revision: {
                    step_name: {
                        'status': 'failed',
                        'failures': [test_name],
                        'valid': True,
                        'pass_fail_counts': {
                            test_name: {
                                'pass_count': 20,
                                'fail_count': 80
                            }
                        }
                    }
                }
            }
        }
    }

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.Save()

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)
    try_job.flake_results = [{
        'url': url,
        'report': try_job_result,
        'try_job_id': try_job_id
    }]
    try_job.try_job_ids = [try_job_id]
    try_job.put()

    ProcessFlakeTryJobResultPipeline().run(revision, commit_position,
                                           try_job_result,
                                           try_job.key.urlsafe(),
                                           analysis.key.urlsafe())

    resulting_data_point = analysis.data_points[-1]
    self.assertEqual(0.2, resulting_data_point.pass_rate)
    self.assertEqual(commit_position, resulting_data_point.commit_position)
    self.assertEqual(url, resulting_data_point.try_job_url)
def testGetLastAttemptedTryJobDetailsNoTryJobID(self):
    """With no try job ids recorded, details should come back empty."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 123
    step_name = 's'
    test_name = 't'
    revision = 'r1'

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.last_attempted_revision = revision

    # Try job exists for the revision but has no try_job_ids.
    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, revision)
    try_job.put()

    self.assertEqual({},
                     check_flake._GetLastAttemptedTryJobDetails(analysis))
def testGet(self):
    """FlakeTryJob.Get should round-trip an entity stored via put()."""
    master_name = 'm'
    builder_name = 'b'
    step_name = 's'
    test_name = 't'
    git_hash = 'a1b2c3'
    try_job_id = 'try_job_id'

    try_job_before = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, git_hash)
    try_job_before.try_job_ids = [try_job_id]
    try_job_before.put()

    try_job_after = FlakeTryJob.Get(
        master_name, builder_name, step_name, test_name, git_hash)
    self.assertEqual([try_job_id], try_job_after.try_job_ids)
def testScheduleFlakeTryJob(self, mock_module):
    """Scheduling a flake try job should record ids and link try-job data."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 1
    step_name = 's'
    test_name = 't'
    git_hash = 'a1b2c3d4'
    build_id = '1'
    url = 'url'
    analysis_key = ndb.Key('key', 1)

    # Build with parent master/builder properties, as expected by the
    # scheduling pipeline.
    build = WfBuild.Create(master_name, builder_name, build_number)
    build.data = {
        'properties': {
            'parent_mastername': 'pm',
            'parent_buildername': 'pb'
        }
    }
    build.put()

    # Fake buildbucket response for the triggered try job.
    response = {
        'build': {
            'id': build_id,
            'url': url,
            'status': 'SCHEDULED',
        }
    }
    results = [(None,
                buildbucket_client.BuildbucketBuild(response['build']))]
    mock_module.TriggerTryJobs.return_value = results

    FlakeTryJob.Create(master_name, builder_name, step_name, test_name,
                       git_hash).put()

    try_job_pipeline = ScheduleFlakeTryJobPipeline()
    try_job_id = try_job_pipeline.run(master_name, builder_name, step_name,
                                      test_name, git_hash,
                                      analysis_key.urlsafe(), None, None)

    try_job = FlakeTryJob.Get(master_name, builder_name, step_name,
                              test_name, git_hash)
    try_job_data = FlakeTryJobData.Get(build_id)

    self.assertEqual(build_id, try_job_id)
    self.assertEqual(build_id, try_job.flake_results[-1]['try_job_id'])
    self.assertTrue(build_id in try_job.try_job_ids)
    self.assertEqual(try_job_data.try_job_key, try_job.key)
    self.assertEqual(analysis_key, try_job_data.analysis_key)
def testCreateTryJobData(self):
    """_CreateTryJobData should persist data linked back to the try job key."""
    master_name = 'm'
    builder_name = 'b'
    step_name = 's'
    test_name = 't'
    git_hash = 'a1b2c3d4'
    build_id = 'build_id'
    analysis_key = ndb.Key('key', 1)

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, git_hash)

    ScheduleFlakeTryJobPipeline()._CreateTryJobData(
        build_id, try_job.key, analysis_key.urlsafe())

    try_job_data = FlakeTryJobData.Get(build_id)
    self.assertEqual(try_job_data.try_job_key, try_job.key)
def testUpdateFlakeTryJobResult(self):
    """A STARTED buildbucket state should flip the try job to RUNNING."""
    master_name = 'm'
    builder_name = 'b'
    step_name = 's'
    test_name = 't'
    git_hash = 'a1b2c3d4'
    try_job_id = '2'

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, git_hash)
    try_job.put()

    pipeline = MonitorTryJobPipeline()
    pipeline._UpdateTryJobResult(
        try_job.key.urlsafe(), failure_type.FLAKY_TEST, try_job_id, 'url',
        buildbucket_client.BuildbucketBuild.STARTED)

    # Re-fetch to observe the persisted status change.
    try_job = FlakeTryJob.Get(master_name, builder_name, step_name,
                              test_name, git_hash)
    self.assertEqual(analysis_status.RUNNING, try_job.status)
def testProperties(self):
    """FlakeTryJobData properties should be derived from the try job key."""
    master_name = 'm'
    builder_name = 'b'
    step_name = 's'
    test_name = 't'
    git_hash = 'a1b2c3d4'
    try_job_id = 'try_job_id'

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, git_hash)
    try_job_data = FlakeTryJobData.Create(try_job_id)
    try_job_data.try_job_key = try_job.key

    self.assertEqual(master_name, try_job_data.master_name)
    self.assertEqual(builder_name, try_job_data.builder_name)
    self.assertEqual(step_name, try_job_data.step_name)
    self.assertEqual(test_name, try_job_data.test_name)
    self.assertEqual(git_hash, try_job_data.git_hash)
def testNextCommitPositionPipelineForFailedTryJob(self, mocked_pipeline):
    """A failed try job should surface its error and stop the recursion."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    revision = 'r97'
    lower_boundary_commit_position = 96
    try_job_id = '123'
    error = {
        'code': 1,
        'message': 'some failure message',
    }

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)
    try_job.try_job_ids.append(try_job_id)
    try_job.put()

    # Try-job data carries the error that should propagate to the analysis.
    try_job_data = FlakeTryJobData.Create(try_job_id)
    try_job_data.error = error
    try_job_data.try_job_key = try_job.key
    try_job_data.put()

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.put()

    self.MockPipeline(
        recursive_flake_try_job_pipeline.UpdateFlakeBugPipeline,
        '',
        expected_args=[analysis.key.urlsafe()],
        expected_kwargs={})

    pipeline = NextCommitPositionPipeline(analysis.key.urlsafe(),
                                          try_job.key.urlsafe(),
                                          lower_boundary_commit_position,
                                          _DEFAULT_CACHE_NAME, None)
    pipeline.start(queue_name=constants.DEFAULT_QUEUE)
    self.execute_queued_tasks()

    # No further recursive try jobs should have been spawned.
    mocked_pipeline.assert_not_called()
    self.assertEqual(error, analysis.error)
def testRecursiveFlakeTryJobPipelineAborted(self, _):
    """An unexpected abort should mark analysis, try job and data as errored."""
    master_name = 'm'
    builder_name = 'b'
    master_build_number = 100
    step_name = 's'
    test_name = 't'
    revision = 'rev'
    commit_position = 1
    build_id = 'b1'
    lower_boundary_commit_position = 0

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          master_build_number, step_name,
                                          test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.Save()

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)
    try_job.try_job_ids = [build_id]
    try_job.put()

    try_job_data = FlakeTryJobData.Create(build_id)
    try_job_data.try_job_key = try_job.key
    try_job_data.put()

    rftp = RecursiveFlakeTryJobPipeline(analysis.key.urlsafe(),
                                        commit_position, revision,
                                        lower_boundary_commit_position,
                                        _DEFAULT_CACHE_NAME, None)
    rftp._LogUnexpectedAbort()

    expected_error = {
        'error': 'RecursiveFlakeTryJobPipeline was aborted unexpectedly',
        'message': 'RecursiveFlakeTryJobPipeline was aborted unexpectedly'
    }

    self.assertEqual(analysis_status.ERROR, analysis.try_job_status)
    self.assertEqual(expected_error, analysis.error)
    self.assertEqual(analysis_status.ERROR, try_job.status)
    self.assertEqual(expected_error, try_job_data.error)
def testGetFlakeTryJobs(self):
    """Dashboard filtered by category=flake should list only flake try jobs."""
    flake_try_job_completed = FlakeTryJobData.Create(4)
    flake_try_job_completed.try_job_key = FlakeTryJob.Create(
        'm', 'b', 's', 't', 'a1b2c3d4').key
    flake_try_job_completed.start_time = datetime(2016, 5, 4, 0, 0, 1)
    flake_try_job_completed.request_time = datetime(2016, 5, 4, 0, 0, 0)
    flake_try_job_completed.created_time = datetime(2016, 5, 4, 0, 0, 0)
    flake_try_job_completed.end_time = datetime(2016, 5, 4, 0, 0, 2)
    flake_try_job_completed.try_job_url = 'url4'
    flake_try_job_completed.analysis_key = ndb.Key('key', 1)
    flake_try_job_completed.last_buildbucket_response = {
        'status': 'COMPLETED'
    }
    flake_try_job_completed.put()

    expected_flake_try_job_completed_display_data = {
        'master_name': 'm',
        'builder_name': 'b',
        'try_job_type': 'flake',
        'request_time': '2016-05-04 00:00:00 UTC',
        'try_job_url': 'url4',
        'last_buildbucket_response': '{"status": "COMPLETED"}',
        'git_hash': 'a1b2c3d4'
    }

    response = self.test_app.get(
        '/try-job-dashboard?format=json&start_date=2016-05-03&category=flake'
    )
    response_data = response.json_body
    successfully_completed_try_jobs = response_data.get(
        'successfully_completed_try_jobs')

    self.assertEqual(response.status_int, 200)
    self.validateTryJobDisplayData(
        [expected_flake_try_job_completed_display_data],
        successfully_completed_try_jobs)
def testGet(self):
    """Dashboard should bucket try jobs into in-progress, errored, completed."""
    # Compile try job still running.
    try_job_in_progress = WfTryJobData.Create(1)
    try_job_in_progress.try_job_key = WfTryJob.Create('m', 'b', 1).key
    try_job_in_progress.try_job_type = 'compile'
    try_job_in_progress.start_time = datetime(2016, 5, 4, 0, 0, 1)
    try_job_in_progress.request_time = datetime(2016, 5, 4, 0, 0, 0)
    try_job_in_progress.created_time = datetime(2016, 5, 4, 0, 0, 0)
    try_job_in_progress.try_job_url = 'url1'
    try_job_in_progress.last_buildbucket_response = {'status': 'STARTED'}
    try_job_in_progress.put()

    # Compile try job that failed with an infra error.
    try_job_with_error = WfTryJobData.Create(2)
    try_job_with_error.try_job_key = WfTryJob.Create('m', 'b', 2).key
    try_job_with_error.try_job_type = 'compile'
    try_job_with_error.start_time = datetime(2016, 5, 4, 0, 0, 1)
    try_job_with_error.request_time = datetime(2016, 5, 4, 0, 0, 0)
    try_job_with_error.created_time = datetime(2016, 5, 4, 0, 0, 0)
    try_job_with_error.end_time = datetime(2016, 5, 4, 0, 0, 2)
    try_job_with_error.try_job_url = 'url2'
    try_job_with_error.error = {
        'message': 'some error',
        'reason': 'some reason'
    }
    try_job_with_error.last_buildbucket_response = {
        'failure_reason': 'INFRA_FAILURE'
    }
    try_job_with_error.put()

    # Compile try job that completed with a culprit.
    try_job_completed = WfTryJobData.Create(3)
    try_job_completed.try_job_key = WfTryJob.Create('m', 'b', 3).key
    try_job_completed.try_job_type = 'compile'
    try_job_completed.start_time = datetime(2016, 5, 4, 0, 0, 1)
    try_job_completed.request_time = datetime(2016, 5, 4, 0, 0, 0)
    try_job_completed.created_time = datetime(2016, 5, 4, 0, 0, 0)
    try_job_completed.end_time = datetime(2016, 5, 4, 0, 0, 2)
    try_job_completed.try_job_url = 'url3'
    try_job_completed.culprits = {'compile': {'12345': 'failed'}}
    try_job_completed.last_buildbucket_response = {'status': 'COMPLETED'}
    try_job_completed.put()

    # Completed flake try job.
    flake_try_job_completed = FlakeTryJobData.Create(4)
    flake_try_job_completed.try_job_key = FlakeTryJob.Create(
        'm', 'b', 's', 't', 'a1b2c3d4').key
    flake_try_job_completed.start_time = datetime(2016, 5, 4, 0, 0, 1)
    flake_try_job_completed.request_time = datetime(2016, 5, 4, 0, 0, 0)
    flake_try_job_completed.created_time = datetime(2016, 5, 4, 0, 0, 0)
    flake_try_job_completed.end_time = datetime(2016, 5, 4, 0, 0, 2)
    flake_try_job_completed.try_job_url = 'url4'
    flake_try_job_completed.analysis_key = ndb.Key('key', 1)
    flake_try_job_completed.last_buildbucket_response = {
        'status': 'COMPLETED'
    }
    flake_try_job_completed.put()

    expected_try_job_in_progress_display_data = {
        'master_name': 'm',
        'builder_name': 'b',
        'build_number': 1,
        'try_job_type': 'compile',
        'request_time': '2016-05-04 00:00:00 UTC',
        'try_job_url': 'url1',
        'status': 'running',
        'last_buildbucket_response': '{"status": "STARTED"}'
    }

    expected_try_job_with_error_display_data = {
        'master_name': 'm',
        'builder_name': 'b',
        'build_number': 2,
        'try_job_type': 'compile',
        'request_time': '2016-05-04 00:00:00 UTC',
        'try_job_url': 'url2',
        'error': 'some error',
        'last_buildbucket_response': '{"failure_reason": "INFRA_FAILURE"}'
    }

    expected_try_job_completed_display_data = {
        'master_name': 'm',
        'builder_name': 'b',
        'build_number': 3,
        'try_job_type': 'compile',
        'request_time': '2016-05-04 00:00:00 UTC',
        'try_job_url': 'url3',
        'culprit_found': True,
        'last_buildbucket_response': '{"status": "COMPLETED"}'
    }

    expected_flake_try_job_completed_display_data = {
        'master_name': 'm',
        'builder_name': 'b',
        'try_job_type': 'flake',
        'request_time': '2016-05-04 00:00:00 UTC',
        'try_job_url': 'url4',
        'last_buildbucket_response': '{"status": "COMPLETED"}',
        'git_hash': 'a1b2c3d4'
    }

    response = self.test_app.get(
        '/try-job-dashboard?format=json&start_date=2016-05-03')
    response_data = response.json_body
    try_jobs_in_progress = response_data.get('try_jobs_in_progress')
    try_jobs_with_error = response_data.get('try_jobs_with_error')
    successfully_completed_try_jobs = response_data.get(
        'successfully_completed_try_jobs')

    self.assertEqual(response.status_int, 200)
    self.validateTryJobDisplayData(
        [expected_try_job_in_progress_display_data], try_jobs_in_progress)
    self.validateTryJobDisplayData(
        [expected_try_job_with_error_display_data], try_jobs_with_error)
    self.validateTryJobDisplayData([
        expected_try_job_completed_display_data,
        expected_flake_try_job_completed_display_data
    ], successfully_completed_try_jobs)
def testCreate(self):
    """A freshly created FlakeTryJob should start with empty lists."""
    try_job = FlakeTryJob.Create('m', 'b', 's', 't', 'a1b2c3')
    self.assertEqual([], try_job.try_job_ids)
    self.assertEqual([], try_job.flake_results)
def _CreateTryJobEntity(master_name, builder_name, step_name, test_name,
                        revision):
    """Creates, persists and returns a FlakeTryJob entity for tests."""
    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)
    try_job.put()
    return try_job
def testGetTryJobsForFlakeSuccess(self, mock_buildbucket, mock_report):
    """Monitoring a completed flake try job should record its report."""
    master_name = 'm'
    builder_name = 'b'
    step_name = 's'
    test_name = 't'
    git_hash = 'a1b2c3d4'
    try_job_id = '1'

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, git_hash)
    try_job.flake_results = [{
        'report': None,
        'url': 'https://build.chromium.org/p/m/builders/b/builds/1234',
        'try_job_id': '1',
    }]
    try_job.status = analysis_status.RUNNING
    try_job.put()

    try_job_data = FlakeTryJobData.Create(try_job_id)
    try_job_data.try_job_key = try_job.key
    try_job_data.try_job_url = (
        'https://build.chromium.org/p/m/builders/b/builds/1234')
    try_job_data.put()

    # Buildbucket reports the job COMPLETED with a passing test report.
    build_response = {
        'id': '1',
        'url': 'https://build.chromium.org/p/m/builders/b/builds/1234',
        'status': 'COMPLETED',
    }
    report = {
        'result': {
            'r0': {
                'gl_tests': {
                    'status': 'passed',
                    'valid': True,
                    'pass_fail_counts': {
                        'Test.One': {
                            'pass_count': 100,
                            'fail_count': 0
                        }
                    }
                }
            }
        }
    }
    mock_buildbucket.GetTryJobs.return_value = [
        (None, buildbucket_client.BuildbucketBuild(build_response))
    ]
    mock_report.return_value = json.dumps(report)

    pipeline = MonitorTryJobPipeline()
    pipeline.start_test()
    pipeline.run(try_job.key.urlsafe(), failure_type.FLAKY_TEST, try_job_id)
    pipeline.callback(callback_params=pipeline.last_params)

    # Reload from ID to get all internal properties in sync.
    pipeline = MonitorTryJobPipeline.from_id(pipeline.pipeline_id)
    pipeline.finalized()
    flake_result = pipeline.outputs.default.value

    expected_flake_result = {
        'report': {
            'result': {
                'r0': {
                    'gl_tests': {
                        'status': 'passed',
                        'valid': True,
                        'pass_fail_counts': {
                            'Test.One': {
                                'pass_count': 100,
                                'fail_count': 0
                            }
                        }
                    }
                }
            }
        },
        'url': 'https://build.chromium.org/p/m/builders/b/builds/1234',
        'try_job_id': '1',
    }
    self.assertEqual(expected_flake_result, flake_result)

    try_job = FlakeTryJob.Get(master_name, builder_name, step_name,
                              test_name, git_hash)
    self.assertEqual(expected_flake_result, try_job.flake_results[-1])
    self.assertEqual(analysis_status.RUNNING, try_job.status)

    try_job_data = FlakeTryJobData.Get(try_job_id)
    self.assertEqual(try_job_data.last_buildbucket_response, build_response)
def testRecursiveFlakeTryJobPipeline(self):
    """The recursive pipeline should chain schedule/monitor/process/next steps.

    All sub-pipelines are mocked; the test verifies the arguments passed to
    each stage and that a FlakeTryJob entity is created for the revision.
    """
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    revision = 'r1000'
    try_job_id = 'try_job_id'
    lower_boundary_commit_position = 998

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.algorithm_parameters = DEFAULT_CONFIG_DATA[
        'check_flake_settings']
    analysis.Save()

    iterations_to_rerun = analysis.algorithm_parameters.get(
        'try_job_rerun', {}).get('iterations_to_rerun')

    try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                 test_name, revision)

    # Mocked try job report. The pass_fail_counts dict is keyed by the test
    # name, matching the fixture shape used elsewhere in these tests.
    # (Previously this used the literal string 'test_name'; harmless here
    # since the consuming pipelines are mocked, but fixed for consistency.)
    try_job_result = {
        revision: {
            step_name: {
                'status': 'failed',
                'failures': [test_name],
                'valid': True,
                'pass_fail_counts': {
                    test_name: {
                        'pass_count': 28,
                        'fail_count': 72
                    }
                }
            }
        }
    }

    self.MockPipeline(
        recursive_flake_try_job_pipeline.ScheduleFlakeTryJobPipeline,
        try_job_id,
        expected_args=[
            master_name, builder_name, step_name, test_name, revision,
            analysis.key.urlsafe(), _DEFAULT_CACHE_NAME, None,
            iterations_to_rerun
        ])
    self.MockPipeline(
        recursive_flake_try_job_pipeline.MonitorTryJobPipeline,
        try_job_result,
        expected_args=[
            try_job.key.urlsafe(), failure_type.FLAKY_TEST, try_job_id
        ])
    self.MockPipeline(
        recursive_flake_try_job_pipeline.ProcessFlakeTryJobResultPipeline,
        None,
        expected_args=[
            revision, commit_position, try_job_result,
            try_job.key.urlsafe(), analysis.key.urlsafe()
        ])
    self.MockPipeline(
        recursive_flake_try_job_pipeline.NextCommitPositionPipeline,
        '',
        expected_args=[analysis.key.urlsafe(), try_job.key.urlsafe()])

    pipeline = RecursiveFlakeTryJobPipeline(
        analysis.key.urlsafe(), commit_position, revision,
        lower_boundary_commit_position, _DEFAULT_CACHE_NAME, None)
    pipeline.start(queue_name=constants.DEFAULT_QUEUE)
    self.execute_queued_tasks()

    self.assertIsNotNone(
        FlakeTryJob.Get(master_name, builder_name, step_name, test_name,
                        revision))
    self.assertEqual(analysis.last_attempted_revision, revision)
    self.assertIsNone(analysis.last_attempted_swarming_task_id)