def GetTryJob(master_name, builder_name, step_name, test_name, revision):
  """Ensures a FlakeTryJob exists for the configuration and returns it."""
  # TODO(crbug.com/796431): Replace FlakeTryJob with a new try job entity
  # independent of test_name.
  existing = FlakeTryJob.Get(master_name, builder_name, step_name, test_name,
                             revision)
  if existing:
    return existing

  # No entity yet for this configuration/revision: create and persist one.
  new_try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                   test_name, revision)
  new_try_job.put()
  return new_try_job
def testRunImplTriggerSameJobTwice(self, mocked_schedule, _):
  """An already-existing try job for the revision must not be re-scheduled."""
  revision = 'r1000'

  analysis = MasterFlakeAnalysis.Create('m', 'b', 1, 's', 't')
  analysis.Save()

  try_job = FlakeTryJob.Create('m', 'b', 's', 't', revision)
  try_job.put()

  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision=revision,
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())

  RunFlakeTryJobPipeline(pipeline_input).RunImpl(pipeline_input)

  self.assertFalse(mocked_schedule.called)
def testRunImplRetryUponFailure(self, mocked_schedule, mocked_save,
                                mocked_pipeline_id, _):
  """A scheduling failure raises pipeline.Retry and saves no output."""
  revision = 'r1000'

  analysis = MasterFlakeAnalysis.Create('m', 'b', 1, 's', 't')
  analysis.Save()

  try_job = FlakeTryJob.Create('m', 'b', 's', 't', revision)
  try_job.put()

  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision=revision,
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())

  # Pin the pipeline's id so the scheduling call can be asserted on.
  mocked_pipeline_id.__get__ = mock.Mock(return_value='pipeline-id')
  pipeline_job = RunFlakeTryJobPipeline(pipeline_input)

  with self.assertRaises(pipeline.Retry):
    pipeline_job.RunImpl(pipeline_input)

  mocked_schedule.assert_called_once_with(pipeline_input, 'pipeline-id')
  self.assertFalse(mocked_save.called)
def testScheduleFlakeTryJobRaise(self, *_):
  """ScheduleFlakeTryJob surfaces buildbucket errors as RetryException."""
  revision = 'r1000'

  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()

  FlakeTryJob.Create('m', 'b', 's', 't', revision).put()

  parameters = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      revision=revision,
      flake_cache_name=None,
      isolate_target_name='target',
      dimensions=ListOfBasestring())

  with self.assertRaises(exceptions.RetryException):
    flake_try_job.ScheduleFlakeTryJob(parameters, 'pipeline')
def testCallbackImplFailedRun(self, mocked_state_changed):
  """CallbackImpl returns an error tuple when the result update fails."""
  revision = 'r1000'
  try_job_id = 'try_job_id'

  analysis = MasterFlakeAnalysis.Create('m', 'b', 1, 's', 't')
  analysis.Save()

  try_job = FlakeTryJob.Create('m', 'b', 's', 't', revision)
  try_job.put()

  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision=revision,
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())

  pipeline_job = RunFlakeTryJobPipeline(pipeline_input)
  returned_value = pipeline_job.CallbackImpl(pipeline_input, {
      'try_job_id': try_job_id,
      'build_json': '{"k":"v"}'
  })

  self.assertEqual(('Error updating try job result: m', None), returned_value)
  # The parsed build json must be forwarded to the state-change handler.
  mocked_state_changed.assert_called_once_with(try_job_id, {'k': 'v'})
def testGet(self):
  """FlakeTryJob.Get returns the entity previously stored via Create."""
  stored = FlakeTryJob.Create('m', 'b', 's', 't', 'a1b2c3')
  stored.try_job_ids = ['try_job_id']
  stored.put()

  fetched = FlakeTryJob.Get('m', 'b', 's', 't', 'a1b2c3')

  self.assertEqual(['try_job_id'], fetched.try_job_ids)
def testCreateTryJobData(self):
  """CreateTryJobData persists a FlakeTryJobData entity keyed by build id."""
  try_job = FlakeTryJob.Create('m', 'b', 's1', 't1', 'hash')
  try_job.put()

  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's1', 't1')
  analysis.put()

  flake_try_job.CreateTryJobData('build_id', try_job.key,
                                 analysis.key.urlsafe(), 'pipeline_id')

  self.assertIsNotNone(FlakeTryJobData.Get('build_id'))
def testScheduleFlakeTryJobSuccess(self, *_):
  """A scheduled try job records its id on both related entities.

  Verifies ScheduleFlakeTryJob returns the buildbucket build id, appends it
  to the FlakeTryJob's flake_results/try_job_ids, and creates a
  FlakeTryJobData entry carrying the configuration.
  """
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  revision = 'r1000'
  expected_try_job_id = 'id'

  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()

  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, revision)
  try_job.put()

  parameters = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      revision=revision,
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())

  try_job_id = flake_try_job.ScheduleFlakeTryJob(parameters, 'pipeline')

  # Re-fetch to observe the persisted side effects of scheduling.
  try_job = FlakeTryJob.Get(master_name, builder_name, step_name, test_name,
                            revision)
  try_job_data = FlakeTryJobData.Get(expected_try_job_id)

  # Fix: removed a dead re-assignment of expected_try_job_id = 'id' that
  # occurred here, after the value had already been used for the lookup.
  self.assertEqual(expected_try_job_id, try_job_id)
  self.assertEqual(expected_try_job_id,
                   try_job.flake_results[-1]['try_job_id'])
  self.assertTrue(expected_try_job_id in try_job.try_job_ids)
  self.assertIsNotNone(try_job_data)
  self.assertEqual(try_job_data.master_name, master_name)
  self.assertEqual(try_job_data.builder_name, builder_name)
def testProperties(self):
  """FlakeTryJob exposes its key components as properties."""
  try_job = FlakeTryJob.Create('m', 'b', 's', 't', 'a1b2c3')

  self.assertEqual('m', try_job.master_name)
  self.assertEqual('b', try_job.builder_name)
  self.assertEqual('s', try_job.step_name)
  self.assertEqual('t', try_job.test_name)
  self.assertEqual('a1b2c3', try_job.git_hash)
def testGetTryJobExistingTryJob(self):
  """GetTryJob returns the stored entity instead of creating a new one."""
  existing = FlakeTryJob.Create('m', 'b', 's', 't', 'r1000')
  existing.put()

  retrieved = flake_try_job.GetTryJob('m', 'b', 's', 't', 'r1000')

  self.assertEqual(retrieved, existing)
def testProperties(self):
  """FlakeTryJobData derives its properties from the linked try job key."""
  linked_try_job = FlakeTryJob.Create('m', 'b', 's', 't', 'a1b2c3d4')

  try_job_data = FlakeTryJobData.Create('try_job_id')
  try_job_data.try_job_key = linked_try_job.key

  self.assertEqual('m', try_job_data.master_name)
  self.assertEqual('b', try_job_data.builder_name)
  self.assertEqual('s', try_job_data.step_name)
  self.assertEqual('t', try_job_data.test_name)
  self.assertEqual('a1b2c3d4', try_job_data.git_hash)
def testUpdateDataPointsWithExistingDataPoint(self):
  """No new data point is added when one already covers the commit."""
  commit_position = 1000
  revision = 'r1000'
  data_points = [DataPoint.Create(commit_position=commit_position)]

  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.data_points = data_points
  analysis.Save()

  try_job = FlakeTryJob.Create('m', 'b', 's', 't', revision)
  try_job.put()

  flake_try_job.UpdateAnalysisDataPointsWithTryJobResult(
      analysis, try_job, commit_position, revision)

  # The data points list must be unchanged.
  self.assertEqual(data_points, analysis.data_points)
def _GetLastAttemptedTryJobDetails(analysis):
  """Returns status/url details for the analysis' last attempted try job.

  Returns an empty dict when any link in the chain (last attempted revision,
  try job entity, try job ids, try job data) is missing.
  """
  revision = analysis.last_attempted_revision
  if not revision:
    return {}

  try_job = FlakeTryJob.Get(analysis.master_name, analysis.builder_name,
                            analysis.step_name, analysis.test_name, revision)
  if try_job and try_job.try_job_ids:
    # Only the most recent attempt is reported.
    try_job_data = FlakeTryJobData.Get(try_job.try_job_ids[-1])
    if try_job_data:
      return {
          'status': analysis_status.STATUS_TO_DESCRIPTION.get(try_job.status),
          'url': try_job_data.try_job_url
      }
  return {}
def testOnTimeout(self, mocked_OnTryJobTimeout):
  """OnTimeout delegates to the timeout handler with the try job id."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()

  try_job = FlakeTryJob.Create('m', 'b', 's', 't', 'r1000')

  try_job_data = FlakeTryJobData.Create('try_job_id')
  try_job_data.try_job_key = try_job.key
  try_job_data.try_job_url = 'url'
  try_job_data.put()

  try_job.flake_results = [{
      'report': None,
      'url': 'url',
      'try_job_id': 'try_job_id',
  }]
  try_job.status = analysis_status.RUNNING
  try_job.put()

  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision='r1000',
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())

  RunFlakeTryJobPipeline(pipeline_input).OnTimeout(
      pipeline_input, {'try_job_id': 'try_job_id'})

  mocked_OnTryJobTimeout.assert_called_once_with('try_job_id',
                                                 failure_type.FLAKY_TEST)
def testGetTryJobsForFlakeSuccess(self, mocked_schedule, mocked_save,
                                  mocked_pipeline_id, *_):
  """RunImpl schedules a new try job and saves the returned id."""
  revision = 'r1000'
  try_job_id = 'try_job_id'

  # Pin the pipeline id and the id the scheduler hands back.
  mocked_pipeline_id.__get__ = mock.Mock(return_value='pipeline-id')
  mocked_schedule.return_value = try_job_id

  analysis = MasterFlakeAnalysis.Create('m', 'b', 1, 's', 't')
  analysis.Save()

  try_job = FlakeTryJob.Create('m', 'b', 's', 't', revision)
  try_job.put()

  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision=revision,
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())

  RunFlakeTryJobPipeline(pipeline_input).RunImpl(pipeline_input)

  mocked_schedule.assert_called_once_with(pipeline_input, 'pipeline-id')
  mocked_save.assert_called_once_with({'try_job_id': try_job_id})
def testCallbackImplNoTryJobID(self, mocked_pipeline_id,
                               mocked_state_changed):
  """CallbackImpl errors out when the callback carries no try_job_id."""
  mocked_pipeline_id.__get__ = mock.Mock(return_value='pipeline-id')

  analysis = MasterFlakeAnalysis.Create('m', 'b', 1, 's', 't')
  analysis.Save()

  try_job = FlakeTryJob.Create('m', 'b', 's', 't', 'r1000')
  try_job.put()

  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision='r1000',
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())

  pipeline_job = RunFlakeTryJobPipeline(pipeline_input)
  returned_value = pipeline_job.CallbackImpl(pipeline_input,
                                             {'build_json': '{"k":"v"}'})

  self.assertEqual(
      ('Try_job_id not found for pipeline pipeline-id', None), returned_value)
  self.assertFalse(mocked_state_changed.called)
def testGetFlakeTryJobs(self):
  """The dashboard's flake category filter returns only flake try jobs."""
  # Seed one completed flake try job (id 4).
  flake_try_job_completed = FlakeTryJobData.Create(4)
  flake_try_job_completed.try_job_key = FlakeTryJob.Create(
      'm', 'b', 's', 't', 'a1b2c3d4').key
  flake_try_job_completed.start_time = datetime(2016, 5, 4, 0, 0, 1)
  flake_try_job_completed.request_time = datetime(2016, 5, 4, 0, 0, 0)
  flake_try_job_completed.created_time = datetime(2016, 5, 4, 0, 0, 0)
  flake_try_job_completed.end_time = datetime(2016, 5, 4, 0, 0, 2)
  flake_try_job_completed.try_job_url = 'url4'
  flake_try_job_completed.analysis_key = ndb.Key('key', 1)
  flake_try_job_completed.last_buildbucket_response = {
      'status': 'COMPLETED'
  }
  flake_try_job_completed.put()

  # Display data the dashboard is expected to render for the entity above.
  expected_flake_try_job_completed_display_data = {
      'master_name': 'm',
      'builder_name': 'b',
      'try_job_type': 'flake',
      'request_time': '2016-05-04 00:00:00 UTC',
      'try_job_url': 'url4',
      'last_buildbucket_response': '{"status": "COMPLETED"}',
      'git_hash': 'a1b2c3d4'
  }

  # Query the dashboard restricted to the flake category.
  response = self.test_app.get(
      '/try-job-dashboard?format=json&start_date=2016-05-03&category=flake')
  response_data = response.json_body
  successfully_completed_try_jobs = response_data.get(
      'successfully_completed_try_jobs')

  self.assertEqual(response.status_int, 200)
  self.validateTryJobDisplayData(
      [expected_flake_try_job_completed_display_data],
      successfully_completed_try_jobs)
def git_hash(self):
  """Git hash of the revision this try job data belongs to."""
  key = self.try_job_key
  return FlakeTryJob.GetGitHash(key)
def test_name(self):
  """Name of the flaky test, decoded from the try job key."""
  key = self.try_job_key
  return FlakeTryJob.GetTestName(key)
def step_name(self):
  """Name of the step containing the flaky test, from the try job key."""
  key = self.try_job_key
  return FlakeTryJob.GetStepName(key)
def builder_name(self):
  """Builder name encoded in the linked try job key."""
  key = self.try_job_key
  return FlakeTryJob.GetBuilderName(key)
def master_name(self):
  """Master name encoded in the linked try job key."""
  key = self.try_job_key
  return FlakeTryJob.GetMasterName(key)
def testUpdateTryJob(self):
  """UpdateTryJob appends the build id to the try job's id list."""
  FlakeTryJob.Create('m', 'b', 's1', 't1', 'hash').put()

  updated = flake_try_job.UpdateTryJob('m', 'b', 's1', 't1', 'hash',
                                       'build_id')

  self.assertEqual(updated.try_job_ids[0], 'build_id')
def testGet(self):
  """Renders the try-job dashboard with one try job of each state/kind.

  Seeds the datastore with an in-progress compile try job, an errored one,
  a successfully completed one, and a completed flake try job, then checks
  the dashboard endpoint groups and displays each as expected.
  """
  # Compile try job still running: status STARTED, no end_time.
  try_job_in_progress = WfTryJobData.Create(1)
  try_job_in_progress.try_job_key = WfTryJob.Create('m', 'b', 1).key
  try_job_in_progress.try_job_type = 'compile'
  try_job_in_progress.start_time = datetime(2016, 5, 4, 0, 0, 1)
  try_job_in_progress.request_time = datetime(2016, 5, 4, 0, 0, 0)
  try_job_in_progress.created_time = datetime(2016, 5, 4, 0, 0, 0)
  try_job_in_progress.try_job_url = 'url1'
  try_job_in_progress.last_buildbucket_response = {'status': 'STARTED'}
  try_job_in_progress.put()

  # Compile try job that ended with an infra error.
  try_job_with_error = WfTryJobData.Create(2)
  try_job_with_error.try_job_key = WfTryJob.Create('m', 'b', 2).key
  try_job_with_error.try_job_type = 'compile'
  try_job_with_error.start_time = datetime(2016, 5, 4, 0, 0, 1)
  try_job_with_error.request_time = datetime(2016, 5, 4, 0, 0, 0)
  try_job_with_error.created_time = datetime(2016, 5, 4, 0, 0, 0)
  try_job_with_error.end_time = datetime(2016, 5, 4, 0, 0, 2)
  try_job_with_error.try_job_url = 'url2'
  try_job_with_error.error = {
      'message': 'some error',
      'reason': 'some reason'
  }
  try_job_with_error.last_buildbucket_response = {
      'failure_reason': 'INFRA_FAILURE'
  }
  try_job_with_error.put()

  # Compile try job that completed and identified a culprit.
  try_job_completed = WfTryJobData.Create(3)
  try_job_completed.try_job_key = WfTryJob.Create('m', 'b', 3).key
  try_job_completed.try_job_type = 'compile'
  try_job_completed.start_time = datetime(2016, 5, 4, 0, 0, 1)
  try_job_completed.request_time = datetime(2016, 5, 4, 0, 0, 0)
  try_job_completed.created_time = datetime(2016, 5, 4, 0, 0, 0)
  try_job_completed.end_time = datetime(2016, 5, 4, 0, 0, 2)
  try_job_completed.number_of_commits_analyzed = 1
  try_job_completed.try_job_url = 'url3'
  try_job_completed.culprits = {'compile': {'12345': 'failed'}}
  try_job_completed.last_buildbucket_response = {'status': 'COMPLETED'}
  try_job_completed.put()

  # Flake try job that completed successfully.
  flake_try_job_completed = FlakeTryJobData.Create(4)
  flake_try_job_completed.try_job_key = FlakeTryJob.Create(
      'm', 'b', 's', 't', 'a1b2c3d4').key
  flake_try_job_completed.start_time = datetime(2016, 5, 4, 0, 0, 1)
  flake_try_job_completed.request_time = datetime(2016, 5, 4, 0, 0, 0)
  flake_try_job_completed.created_time = datetime(2016, 5, 4, 0, 0, 0)
  flake_try_job_completed.end_time = datetime(2016, 5, 4, 0, 0, 2)
  flake_try_job_completed.try_job_url = 'url4'
  flake_try_job_completed.analysis_key = ndb.Key('key', 1)
  flake_try_job_completed.last_buildbucket_response = {
      'status': 'COMPLETED'
  }
  flake_try_job_completed.put()

  # Expected display dicts, one per seeded entity.
  expected_try_job_in_progress_display_data = {
      'master_name': 'm',
      'builder_name': 'b',
      'build_number': 1,
      'try_job_type': 'compile',
      'request_time': '2016-05-04 00:00:00 UTC',
      'try_job_url': 'url1',
      'status': 'running',
      'last_buildbucket_response': '{"status": "STARTED"}'
  }

  expected_try_job_with_error_display_data = {
      'master_name': 'm',
      'builder_name': 'b',
      'build_number': 2,
      'try_job_type': 'compile',
      'request_time': '2016-05-04 00:00:00 UTC',
      'try_job_url': 'url2',
      'error': 'some error',
      'last_buildbucket_response': '{"failure_reason": "INFRA_FAILURE"}'
  }

  expected_try_job_completed_display_data = {
      'master_name': 'm',
      'builder_name': 'b',
      'build_number': 3,
      'try_job_type': 'compile',
      'request_time': '2016-05-04 00:00:00 UTC',
      'try_job_url': 'url3',
      'culprit_found': True,
      'last_buildbucket_response': '{"status": "COMPLETED"}'
  }

  expected_flake_try_job_completed_display_data = {
      'master_name': 'm',
      'builder_name': 'b',
      'try_job_type': 'flake',
      'request_time': '2016-05-04 00:00:00 UTC',
      'try_job_url': 'url4',
      'last_buildbucket_response': '{"status": "COMPLETED"}',
      'git_hash': 'a1b2c3d4'
  }

  # Query the dashboard over a window covering all seeded jobs.
  response = self.test_app.get(
      '/try-job-dashboard?format=json&start_date=2016-05-03')
  response_data = response.json_body
  try_jobs_in_progress = response_data.get('try_jobs_in_progress')
  try_jobs_with_error = response_data.get('try_jobs_with_error')
  successfully_completed_try_jobs = response_data.get(
      'successfully_completed_try_jobs')

  self.assertEqual(response.status_int, 200)
  self.validateTryJobDisplayData(
      [expected_try_job_in_progress_display_data], try_jobs_in_progress)
  self.validateTryJobDisplayData(
      [expected_try_job_with_error_display_data], try_jobs_with_error)
  # Completed compile and flake try jobs appear together in this bucket.
  self.validateTryJobDisplayData([
      expected_try_job_completed_display_data,
      expected_flake_try_job_completed_display_data
  ], successfully_completed_try_jobs)
def testUpdateAnalysisDataPointsWithTryJobResults(
    self, mocked_get_swarming_task_id):
  """A try job result is converted into a data point on the analysis.

  Seeds a try job whose report has 99 passes out of 100 runs and a
  FlakeTryJobData with a one-hour run window, then checks the resulting
  data point (pass rate, iterations, elapsed seconds, task id).
  """
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  revision = 'r1000'
  try_job_id = 'try_job_id'
  task_id = 'swarming_task_id'
  mocked_get_swarming_task_id.return_value = task_id

  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()

  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, revision)
  try_job.try_job_ids = [try_job_id]
  # Report: 99 passes / 1 failure over 100 runs for the test at revision.
  try_job.flake_results = [{
      'report': {
          'result': {
              revision: {
                  step_name: {
                      'valid': True,
                      'pass_fail_counts': {
                          test_name: {
                              'pass_count': 99,
                              'fail_count': 1
                          }
                      }
                  }
              }
          }
      }
  }]
  try_job.put()

  # One-hour window: elapsed_seconds in the data point should be 3600.
  try_job_data = FlakeTryJobData.Create(try_job_id)
  try_job_data.start_time = datetime(2017, 10, 17, 1, 0, 0)
  try_job_data.end_time = datetime(2017, 10, 17, 2, 0, 0)
  try_job_data.try_job_key = try_job.key
  try_job_data.put()

  flake_try_job.UpdateAnalysisDataPointsWithTryJobResult(
      analysis, try_job, commit_position, revision)

  expected_data_points = [
      DataPoint.Create(
          commit_position=commit_position,
          git_hash=revision,
          iterations=100,
          elapsed_seconds=3600,
          task_ids=[task_id],
          pass_rate=0.99)
  ]
  self.assertEqual(expected_data_points, analysis.data_points)
def testGetIsolateShaForCommitPositionPipelineCommitLevel(
    self, mocked_reference_build, mocked_cache, mocked_dimensions):
  """A commit with no exact isolated target falls back to a try job.

  The requested commit position (1000) differs from the containing build's
  (1001), so the pipeline should run a flake try job at the requested
  revision and extract the isolate sha from its report.
  """
  master_name = 'm'
  builder_name = 'b'
  build_number = 100
  step_name = 's'
  test_name = 't'
  dimensions = ['dimensions']
  requested_commit_position = 1000
  containing_build_commit_position = 1001
  containing_build_revision = 'r1001'
  requested_revision = 'r1000'
  expected_sha = 'sha1'
  cache_name = 'cache'
  try_job_id = 'try_job_id'
  url = 'url'
  isolate_target_name = 'browser_tests'
  step_metadata = StepMetadata(
      canonical_step_name=None,
      dimensions=None,
      full_step_name=None,
      isolate_target_name=isolate_target_name,
      patched=True,
      swarm_task_ids=None,
      waterfall_buildername=None,
      waterfall_mastername=None)
  build_id = 100
  luci_name = 'chromium'
  bucket_name = 'ci'
  gitiles_host = 'chromium.googlesource.com'
  gitiles_project = 'chromium/src'
  gitiles_ref = 'refs/heads/master'
  gerrit_patch = ''
  isolated_hash = 'isolated_hash'

  # An isolated target exists only for the *containing* build's commit
  # position, not the requested one, forcing the try-job path.
  isolated_target = IsolatedTarget.Create(
      build_id, luci_name, bucket_name, master_name, builder_name,
      gitiles_host, gitiles_project, gitiles_ref, gerrit_patch,
      isolate_target_name, isolated_hash, containing_build_commit_position,
      containing_build_revision)
  isolated_target.put()

  mocked_cache.return_value = cache_name
  mocked_dimensions.return_value = dimensions

  # Isolated tests the mocked try job report will expose.
  expected_isolated_tests = IsolatedTests()
  expected_isolated_tests[isolate_target_name] = expected_sha

  build = BuildInfo(master_name, builder_name, build_number)
  build.commit_position = containing_build_commit_position
  mocked_reference_build.return_value = build

  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()

  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, requested_revision)
  try_job.put()

  run_flake_try_job_parameters = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      revision=requested_revision,
      flake_cache_name=cache_name,
      isolate_target_name=isolate_target_name,
      dimensions=ListOfBasestring.FromSerializable(dimensions),
      urlsafe_try_job_key=try_job.key.urlsafe())

  get_sha_input = GetIsolateShaForCommitPositionParameters(
      analysis_urlsafe_key=unicode(analysis.key.urlsafe()),
      commit_position=requested_commit_position,
      revision=requested_revision,
      dimensions=ListOfBasestring.FromSerializable(dimensions),
      step_metadata=step_metadata,
      upper_bound_build_number=analysis.build_number)

  expected_try_job_report = FlakeTryJobReport(
      isolated_tests=expected_isolated_tests,
      last_checked_out_revision=None,
      previously_cached_revision=None,
      previously_checked_out_revision=None,
      metadata=None)

  expected_try_job_result = FlakeTryJobResult(
      report=expected_try_job_report, url=url, try_job_id=try_job_id)

  get_isolate_sha_for_try_job_pipeline = GetIsolateShaForTryJobParameters(
      try_job_result=expected_try_job_result, step_name=step_name)

  # Mock the two sub-pipelines with the inputs/outputs expected above.
  self.MockAsynchronousPipeline(RunFlakeTryJobPipeline,
                                run_flake_try_job_parameters,
                                expected_try_job_report)
  self.MockSynchronousPipeline(GetIsolateShaForTryJobPipeline,
                               get_isolate_sha_for_try_job_pipeline,
                               expected_sha)

  pipeline_job = GetIsolateShaForCommitPositionPipeline(get_sha_input)
  pipeline_job.start()
  self.execute_queued_tasks()
def testCreate(self):
  """A freshly created FlakeTryJob starts with empty id and result lists."""
  new_try_job = FlakeTryJob.Create('m', 'b', 's', 't', 'a1b2c3')

  self.assertEqual([], new_try_job.try_job_ids)
  self.assertEqual([], new_try_job.flake_results)