def testNextCommitPositionPipelineWithHeuristicResults(
    self, mock_heuristic_result, mock_run_heuristic, mock_next_commit):
  """Tests that a heuristic-suggested commit wins over the calculated one.

  An analysis with suspect keys attached should get its next commit id from
  the heuristic result rather than from the regular next-commit-position
  calculation, and no culprit should be reported yet.
  """
  master_name = 'm'
  builder_name = 'b'
  build_number = 105
  step_name = 's'
  test_name = 't'
  start_commit_position = 1000
  suspect_commit_position = 95
  # Heuristic result points just below the suspect's commit position,
  # presumably to verify the commit before the suspect — TODO confirm.
  expected_next_commit_id = CommitID(commit_position=94, revision='r94')
  suspect = FlakeCulprit.Create('repo', 'revision', suspect_commit_position)
  suspect.commit_position = suspect_commit_position
  suspect.put()
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.suspect_urlsafe_keys.append(suspect.key.urlsafe())
  analysis.put()
  # Heuristic analysis itself is not (re)run; only its stored result is used.
  mock_run_heuristic.return_value = False
  mock_heuristic_result.return_value = expected_next_commit_id
  # The regularly-calculated commit id should be ignored in favor of the
  # heuristic one.
  calculated_next_commit_id = CommitID(commit_position=999, revision='r999')
  mock_next_commit.return_value = (calculated_next_commit_id, None)
  next_commit_position_input = NextCommitPositionInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      commit_position_range=IntRange(lower=None, upper=start_commit_position),
      step_metadata=None)
  pipeline_job = NextCommitPositionPipeline(next_commit_position_input)
  pipeline_job.start()
  self.execute_queued_tasks()
  # Reload the finished pipeline to inspect its default output.
  pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
  next_commit_position_output = pipeline_job.outputs.default.value
  self.assertFalse(pipeline_job.was_aborted)
  self.assertIsNone(next_commit_position_output['culprit_commit_id'])
  self.assertEqual(expected_next_commit_id.ToSerializable(),
                   next_commit_position_output['next_commit_id'])
  mock_heuristic_result.assert_called_once_with(analysis.key.urlsafe())
def _ObscureTriageRecordsInMasterFlakeAnalysis():
  """Obscures the user names in MasterFlakeAnalysis triage history.

  Pages through analyses whose triage emails are not yet obscured and whose
  last triage record is older than the retention window, replaces each
  recorded user name with its obscured form, and marks the entity done so it
  is not picked up again.

  Returns:
    The number of entities updated.
  """
  count = 0
  time_limit = _TimeBeforeNow(days=_TRIAGE_RECORD_RENTENSION_DAYS)
  # NOTE: '== False' (not 'is False') is required by ndb query filter syntax.
  query = MasterFlakeAnalysis.query(
      MasterFlakeAnalysis.triage_email_obscured == False,
      MasterFlakeAnalysis.triage_record_last_add < time_limit)
  more = True
  cursor = None
  while more:
    entities, cursor, more = query.fetch_page(_PAGE_SIZE, start_cursor=cursor)
    for entity in entities:
      for triage_record in (entity.triage_history or []):
        # ObscureEmails operates on a list; take the single obscured result.
        triage_record.user_name = email_util.ObscureEmails(
            [triage_record.user_name], ['google.com'])[0]
      # Flip the flag so this entity no longer matches the query above.
      entity.triage_email_obscured = True
    ndb.put_multi(entities)
    count += len(entities)
  return count
def testOnFinalizedNoError(self):
  """OnFinalized with no error should mark the analysis COMPLETED."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()
  pipeline_parameters = AnalyzeFlakeInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      analyze_commit_position_parameters=NextCommitPositionOutput(
          next_commit_id=CommitID(commit_position=1000, revision='rev'),
          culprit_commit_id=None),
      commit_position_range=IntRange(lower=None, upper=None),
      dimensions=ListOfBasestring.FromSerializable(['os:testOS']),
      manually_triggered=False,
      rerun=False,
      retries=0,
      step_metadata=None)
  pipeline_job = AnalyzeFlakePipeline(pipeline_parameters)
  pipeline_job.OnFinalized(pipeline_parameters)
  self.assertEqual(analysis_status.COMPLETED, analysis.status)
def testFindMatchingDataPoint(self):
  """FindMatchingDataPointWithCommitPosition matches only known positions."""
  existing_point = DataPoint.Create(
      commit_position=1, pass_rate=1.0, iterations=10)
  unseen_point = DataPoint.Create(
      commit_position=2, pass_rate=0.5, iterations=10)
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.data_points = [existing_point]
  # No commit position at all yields no match.
  self.assertIsNone(analysis.FindMatchingDataPointWithCommitPosition(None))
  # A position with no corresponding data point yields no match.
  self.assertIsNone(
      analysis.FindMatchingDataPointWithCommitPosition(
          unseen_point.commit_position))
  # A position with a stored data point returns that data point.
  self.assertEqual(
      existing_point,
      analysis.FindMatchingDataPointWithCommitPosition(
          existing_point.commit_position))
def testUpdateDataPointsWithExistingDataPoint(self):
  """An existing data point at the try job's commit position is kept as-is."""
  commit_position = 1000
  revision = 'r1000'
  data_points = [DataPoint.Create(commit_position=commit_position)]
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.data_points = data_points
  analysis.Save()
  try_job = FlakeTryJob.Create('m', 'b', 's', 't', revision)
  try_job.put()
  flake_try_job.UpdateAnalysisDataPointsWithTryJobResult(
      analysis, try_job, commit_position, revision)
  # The pre-existing data points should be unchanged.
  self.assertEqual(data_points, analysis.data_points)
def testShouldFileBugForAnalysis(self, test_exists_fn, id_exists_fn,
                                 sufficient_confidence_fn,
                                 previous_attempt_fn, under_limit_fn):
  """All preconditions hold, so a bug should be filed for the analysis."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 100, 's', 't')
  analysis.Save()
  self.assertTrue(flake_bug_util.ShouldFileBugForAnalysis(analysis))
  # With no bug attached yet, the id-existence check is never reached.
  id_exists_fn.assert_not_called()
  sufficient_confidence_fn.assert_called()
  previous_attempt_fn.assert_called()
  test_exists_fn.assert_called()
  # NOTE(review): assert_called_with() (no args) asserts the last call took
  # no arguments — presumably intentional here; confirm against the helper.
  under_limit_fn.assert_called_with()
def testShouldSendNotification(self, *_):
  """A culprit with sufficient confidence should trigger a notification.

  Fix: the original assigned analysis.confidence_in_culprit = 0.6 twice;
  the duplicate dead store is removed.
  """
  repo_name = 'repo'
  revision = 'r1'
  url = 'code.review.url'
  commit_position = 1000
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  # 0.6 is assumed to meet the notification threshold — confirm in config.
  analysis.confidence_in_culprit = 0.6
  culprit = FlakeCulprit.Create(repo_name, revision, commit_position, url)
  culprit.put()
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  # Pass rate drops sharply at the culprit's commit position.
  analysis.data_points = [
      DataPoint.Create(pass_rate=1.0, commit_position=commit_position - 1),
      DataPoint.Create(pass_rate=0.4, commit_position=commit_position),
  ]
  analysis.put()
  self.assertTrue(culprit_util.ShouldNotifyCulprit(analysis))
def testGetLatestRegressionRangeMultipleDataPoints(self):
  """The latest regression range brackets the newest pass-rate drop."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  # Newest-first data points; the drop from 0.9 (95) to 0.0 (94) is the
  # most recent regression.
  analysis.data_points = [
      DataPoint.Create(commit_position=96, pass_rate=0.8),
      DataPoint.Create(commit_position=95, pass_rate=0.9, git_hash='rev95'),
      DataPoint.Create(commit_position=94, pass_rate=0.0, git_hash='rev94'),
      DataPoint.Create(commit_position=93, pass_rate=0.6),
      DataPoint.Create(commit_position=92, pass_rate=1.0),
      DataPoint.Create(commit_position=91, pass_rate=0.9),
      DataPoint.Create(commit_position=90, pass_rate=1.0),
  ]
  expected_range = CommitIDRange(
      lower=CommitID(commit_position=94, revision='rev94'),
      upper=CommitID(commit_position=95, revision='rev95'))
  self.assertEqual(expected_range, analysis.GetLatestRegressionRange())
def testAbort(self, abort_fn):
  """OnAbort should delegate to the abort handler with the pipeline id."""
  culprit = FlakeCulprit.Create('chromium', 'rev1', 100)
  culprit.put()
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  analysis.put()
  pipeline_input = CreateAndSubmitRevertInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      build_key='mock_build_key')
  pipeline_job = CreateAndSubmitRevertPipeline(pipeline_input)
  pipeline_job.OnAbort(pipeline_input)
  abort_fn.assert_called_once_with(pipeline_input, pipeline_job.pipeline_id)
def testReset(self):
  """Reset should clear all analysis progress and triage fields.

  Fix: the original set correct_culprit to False and then immediately to
  None, so the post-Reset assertIsNone(correct_culprit) could pass even if
  Reset did nothing. Seed it with a non-None value so the clearing is
  actually exercised.
  """
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.status = analysis_status.RUNNING
  analysis.correct_regression_range = True
  analysis.correct_culprit = False
  analysis.data_points = [DataPoint()]
  analysis.suspected_flake_build_number = 123
  analysis.suspect_urlsafe_keys = ['some_key']
  analysis.culprit_urlsafe_key = FlakeCulprit.Create(
      'r', 'a1b2c3d4', 12345, 'url').key.urlsafe()
  analysis.Reset()
  # Every progress/triage field must be back to its pristine state.
  self.assertEqual(analysis_status.PENDING, analysis.status)
  self.assertIsNone(analysis.correct_regression_range)
  self.assertIsNone(analysis.correct_culprit)
  self.assertIsNone(analysis.suspected_flake_build_number)
  self.assertEqual([], analysis.suspect_urlsafe_keys)
  self.assertEqual([], analysis.data_points)
  self.assertIsNone(analysis.culprit_urlsafe_key)
def testUpdateMonorailBugWithCulprit(self, mock_update, mock_comments,
                                     mock_get_issue, *_):
  """Tests that a culprit result is posted to the flake's monorail bug."""
  project = 'chromium'
  bug_id = 12345
  step_name = 's'
  test_name = 't'
  label = 'l'
  flake_issue = FlakeIssue.Create(project, bug_id)
  flake_issue.put()
  flake = Flake.Create(project, step_name, test_name, label)
  flake.flake_issue_key = flake_issue.key
  flake.put()
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.flake_key = flake.key
  analysis.put()
  # Existing comments on the bug; presumably none from Findit, so an update
  # should still be posted — confirm against the dedup logic.
  mock_comments.return_value = [
      Comment({
          'author': {
              'name': '*****@*****.**'
          },
          'content': '',
          'published': None,
          'id': '12345',
      }),
  ]
  # The bug is open and available to receive the culprit update.
  mock_get_issue.return_value = Issue({
      'status': 'Available',
      'projectId': 'chromium',
      'id': str(bug_id),
      'state': 'open'
  })
  flake_analysis_actions.UpdateMonorailBugWithCulprit(
      analysis.key.urlsafe())
  mock_update.assert_called_once_with(bug_id, mock.ANY)
  # The issue should record when analysis results were last posted.
  self.assertIsNotNone(
      flake_issue.last_updated_time_with_analysis_results)
def testShouldUpdateBugForAnalysisNoBugIdWithCulprit(self):
  """Without a bug id, no bug update should happen even with a culprit."""
  master_name = 'm'
  builder_name = 'b'
  build_number = 1
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, 's', 't')
  analysis.original_master_name = master_name
  analysis.original_builder_name = builder_name
  analysis.original_build_number = build_number
  analysis.status = analysis_status.COMPLETED
  analysis.culprit_urlsafe_key = 'c'
  # Three data points and 0.9 confidence would otherwise qualify for an
  # update under the 0.6 threshold configured below.
  analysis.data_points = [DataPoint(), DataPoint(), DataPoint()]
  analysis.confidence_in_culprit = 0.9
  self.UpdateUnitTestConfigSettings(
      'action_settings', {'minimum_confidence_to_update_endpoints': 0.6})
  self.assertFalse(flake_bug_util.ShouldUpdateBugForAnalysis(analysis))
def testUpdateCulpritNewCulprit(self, mocked_fn):
  """Creating a brand-new culprit populates it from the change log."""
  revision = 'a1b2c3d4'
  commit_position = 12345
  url = 'url'
  repo_name = 'repo_name'
  mocked_fn.return_value = ChangeLog(None, None, revision, commit_position,
                                     None, None, url, None)
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  culprit = flake_analysis_util.UpdateCulprit(analysis.key.urlsafe(),
                                              revision, commit_position,
                                              repo_name)
  self.assertIsNotNone(culprit)
  # The new culprit should reference the analysis and carry change-log data.
  self.assertEqual([analysis.key.urlsafe()],
                   culprit.flake_analysis_urlsafe_keys)
  self.assertEqual(url, culprit.url)
  self.assertEqual(repo_name, culprit.repo_name)
  self.assertEqual(revision, culprit.revision)
def testRecursiveAnalyzeFlakePipeline(self):
  """The recursive pipeline spawns a fresh AnalyzeFlakePipeline run."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()
  pipeline_parameters = AnalyzeFlakeInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      analyze_commit_position_parameters=NextCommitPositionOutput(
          next_commit_id=CommitID(commit_position=1000, revision='rev'),
          culprit_commit_id=None),
      commit_position_range=IntRange(lower=None, upper=None),
      dimensions=ListOfBasestring.FromSerializable([]),
      manually_triggered=False,
      rerun=False,
      retries=0,
      step_metadata=None)
  # Expect the inner pipeline to be generated with the same parameters.
  self.MockGeneratorPipeline(AnalyzeFlakePipeline, pipeline_parameters, None)
  pipeline_job = RecursiveAnalyzeFlakePipeline(pipeline_parameters)
  pipeline_job.start()
  self.execute_queued_tasks()
def testOnTimeout(self, mocked_OnTryJobTimeout):
  """Tests that a try job timeout is reported as a FLAKY_TEST failure."""
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()
  revision = 'r1000'
  try_job_url = 'url'
  try_job_id = 'try_job_id'
  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, revision)
  try_job_data = FlakeTryJobData.Create(try_job_id)
  try_job_data.try_job_key = try_job.key
  try_job_data.try_job_url = try_job_url
  try_job_data.put()
  # Simulate a try job still running with no report produced yet.
  try_job.flake_results = [{
      'report': None,
      'url': try_job_url,
      'try_job_id': try_job_id,
  }]
  try_job.status = analysis_status.RUNNING
  try_job.put()
  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision=revision,
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())
  p = RunFlakeTryJobPipeline(pipeline_input)
  p.OnTimeout(pipeline_input, {'try_job_id': try_job_id})
  mocked_OnTryJobTimeout.assert_called_once_with(try_job_id,
                                                 failure_type.FLAKY_TEST)
def testScheduleFlakeTryJobSuccess(self, *_):
  """Scheduling a flake try job records the id and try job data.

  Fix: the original re-assigned expected_try_job_id = 'id' after already
  using it; the redundant dead re-assignment is removed.
  """
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  revision = 'r1000'
  expected_try_job_id = 'id'
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()
  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, revision)
  try_job.put()
  parameters = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      revision=revision,
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      isolate_target_name='target',
      urlsafe_try_job_key=try_job.key.urlsafe())
  try_job_id = flake_try_job.ScheduleFlakeTryJob(parameters, 'pipeline')
  # Reload the try job to verify the scheduled id was persisted.
  try_job = FlakeTryJob.Get(master_name, builder_name, step_name, test_name,
                            revision)
  try_job_data = FlakeTryJobData.Get(expected_try_job_id)
  self.assertEqual(expected_try_job_id, try_job_id)
  self.assertEqual(expected_try_job_id,
                   try_job.flake_results[-1]['try_job_id'])
  self.assertTrue(expected_try_job_id in try_job.try_job_ids)
  self.assertIsNotNone(try_job_data)
  self.assertEqual(try_job_data.master_name, master_name)
  self.assertEqual(try_job_data.builder_name, builder_name)
def testGetDataPointsWithinCommitPositionRange(self):
  """Data points are filtered by optional lower/upper commit bounds."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.data_points = [
      DataPoint.Create(commit_position=1000),
      DataPoint.Create(commit_position=1005),
      DataPoint.Create(commit_position=1007),
      DataPoint.Create(commit_position=1010)
  ]
  # Both bounds: the last two points fall within [1007, 2000].
  self.assertEqual(
      analysis.data_points[-2:],
      analysis.GetDataPointsWithinCommitPositionRange(
          IntRange(lower=1007, upper=2000)))
  # Upper bound only: just the earliest point qualifies.
  self.assertEqual([analysis.data_points[0]],
                   analysis.GetDataPointsWithinCommitPositionRange(
                       IntRange(lower=None, upper=1000)))
  # Lower bound only: just the latest point qualifies.
  self.assertEqual([analysis.data_points[-1]],
                   analysis.GetDataPointsWithinCommitPositionRange(
                       IntRange(lower=1010, upper=None)))
  # No bounds: everything is returned.
  self.assertEqual(
      analysis.data_points,
      analysis.GetDataPointsWithinCommitPositionRange(
          IntRange(lower=None, upper=None)))
def testAbortCreateAndSubmitRevertNothingMatchesNothingChanged(self):
  """Abort with no matching revert state must not modify the culprit.

  Fix: the original assigned pipeline_id = 'foobar' and then shadowed it
  with 'foo'; the dead first assignment is removed.
  """
  build_key = 'buildid'
  pipeline_id = 'foo'
  culprit = FlakeCulprit.Create('chromium', 'rev1', 100)
  culprit.put()
  # Spy on put() to detect any unexpected write after the abort call.
  culprit.put = mock.Mock()
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  analysis.put()
  pipeline_input = CreateAndSubmitRevertInput(
      analysis_urlsafe_key=analysis.key.urlsafe(), build_key=build_key)
  culprit_util.AbortCreateAndSubmitRevert(pipeline_input, pipeline_id)
  culprit.put.assert_not_called()
def testGenerateMessageTextWithCulprit(self):
  """The generated bug comment references the culprit revision and key."""
  master_name = 'm'
  builder_name = 'b'
  build_number = 100
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, 's', 't')
  analysis.original_master_name = master_name
  analysis.original_builder_name = builder_name
  analysis.original_build_number = build_number
  analysis.status = analysis_status.COMPLETED
  analysis.data_points = [DataPoint.Create(task_ids=['task_id'])]
  culprit = FlakeCulprit.Create('c', 'r', 123, 'http://')
  culprit.flake_analysis_urlsafe_keys.append(analysis.key.urlsafe())
  culprit.put()
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  analysis.confidence_in_culprit = 0.6713
  comment = issue_generator._GenerateMessageText(analysis)
  # The comment should mention the culprit's commit and its entity key.
  self.assertIn('r123', comment)
  self.assertIn(culprit.key.urlsafe(), comment)
def testGetCulpritSuccess(self):
  """The culprit endpoint returns the culprit and its associated analyses.

  Fix: datetime(2017, 07, 19, 10, 03, 00) used Python-2-only leading-zero
  integer literals (a syntax error under Python 3); replaced with the
  equivalent decimal literals — same values, since all are < 8.
  """
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.confidence_in_culprit = 0.7
  analysis.put()
  culprit = FlakeCulprit.Create('chromium', 'r1', 1000)
  culprit.flake_analysis_urlsafe_keys.append(analysis.key.urlsafe())
  culprit.cr_notification_status = analysis_status.COMPLETED
  culprit.cr_notification_time = datetime(2017, 7, 19, 10, 3, 0)
  culprit.put()
  expected_result = {
      'project_name': 'chromium',
      'revision': 'r1',
      'commit_position': 1000,
      'cr_notified': True,
      'cr_notification_time': '2017-07-19 10:03:00 UTC',
      'analyses': [{
          'master_name': 'm',
          'builder_name': 'b',
          'step_name': 's',
          'test_name': 't',
          'key': analysis.key.urlsafe(),
          'confidence_in_culprit': 0.7,
      }],
      'key': culprit.key.urlsafe(),
  }
  response = self.test_app.get(
      '/p/chromium/flake-portal/analysis/culprit?key=%s&format=json' %
      culprit.key.urlsafe())
  self.assertEqual(200, response.status_int)
  self.assertEqual(expected_result, response.json_body)
def testOnCulpritIdentifiedAttachCulpritFlakeIssue(self,
                                                   mocked_update_monorail,
                                                   mocked_merge):
  """Tests that the flake adopts the culprit's existing issue.

  The culprit already has a FlakeIssue while the flake has none, so no
  issue merge is needed; the flake simply attaches the culprit's issue.
  """
  project = 'chromium'
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  label = 'l'
  merged_bug_id = 12344
  revision = 'r1000'
  commit_position = 1000
  merged_issue = FlakeIssue.Create(project, merged_bug_id)
  merged_issue.put()
  flake = Flake.Create(project, step_name, test_name, label)
  flake.put()
  culprit = FlakeCulprit.Create(project, revision, commit_position)
  culprit.flake_issue_key = merged_issue.key
  culprit.put()
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.flake_key = flake.key
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  analysis.confidence_in_culprit = 0.9
  analysis.put()
  flake_analysis_actions.OnCulpritIdentified(analysis.key.urlsafe())
  # No merge should occur since the flake had no issue of its own.
  self.assertFalse(mocked_merge.called)
  mocked_update_monorail.assert_called_once_with(analysis.key.urlsafe())
  # Reload the flake and verify it now points at the culprit's issue.
  flake = flake.key.get()
  self.assertEqual(merged_issue.key, flake.flake_issue_key)
def testAbortCreateAndSubmitRevertRevertCreationFails(self):
  """Abort while this pipeline owned a skipped revert marks it as ERROR.

  Fix: the original assigned pipeline_id = 'foobar' and then shadowed it
  with 'foo'; the dead first assignment is removed.
  """
  build_key = 'buildid'
  pipeline_id = 'foo'
  culprit = FlakeCulprit.Create('chromium', 'rev1', 100)
  # The culprit's revert was initiated by this same pipeline but skipped.
  culprit.revert_pipeline_id = pipeline_id
  culprit.revert_status = analysis_status.SKIPPED
  culprit.put()
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  analysis.put()
  pipeline_input = CreateAndSubmitRevertInput(
      analysis_urlsafe_key=analysis.key.urlsafe(), build_key=build_key)
  culprit_util.AbortCreateAndSubmitRevert(pipeline_input, pipeline_id)
  # The abort should clear the pipeline id and flag the revert as errored.
  self.assertIsNone(culprit.revert_pipeline_id)
  self.assertEqual(analysis_status.ERROR, culprit.revert_status)
def testUpdateSuspectedBuildExistingSuspectedBuild(self):
  """An already-set suspected build number is not overwritten."""
  lower_commit_position = 90
  upper_commit_position = 100
  build_id = 1000
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.data_points = [
      DataPoint.Create(commit_position=upper_commit_position),
      DataPoint.Create(commit_position=lower_commit_position),
  ]
  analysis.suspected_flake_build_number = 123
  analysis.Save()
  lower_bound_target = IsolatedTarget.Create(build_id - 1, '', '', 'm', 'b',
                                             '', '', '', '', '', '',
                                             lower_commit_position, '')
  upper_bound_target = IsolatedTarget.Create(build_id, '', '', 'm', 'b', '',
                                             '', '', '', '', '',
                                             upper_commit_position, '')
  analysis.UpdateSuspectedBuild(lower_bound_target, upper_bound_target)
  # The pre-existing suspected build number must remain untouched.
  self.assertEqual(123, analysis.suspected_flake_build_number)
def testGenerateRevertReason(self):
  """Tests the full revert-reason message generated for a culprit."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 2, 's', 't')
  analysis.original_step_name = 's'
  analysis.original_test_name = 't'
  analysis.put()
  culprit = FlakeCulprit.Create('chromium', 'r1', 123)
  culprit.flake_analysis_urlsafe_keys = [analysis.key.urlsafe()]
  # The '%s' placeholders are filled with the commit position, the culprit's
  # urlsafe key, and the sample build URL; the trailing '\n' escapes keep
  # blank lines in the dedented template.
  expected_reason = textwrap.dedent("""
      Findit (https://goo.gl/kROfz5) identified CL at revision %s as the
      culprit for flakes in the build cycles as shown on:
      https://analysis.chromium.org/p/chromium/flake-portal/analysis/culprit?key=%s\n
      Sample Failed Build: %s\n
      Sample Failed Step: s\n
      Sample Flaky Test: t""") % (
          123,
          culprit.key.urlsafe(),
          'https://ci.chromium.org/buildbot/m/b/2',
      )
  self.assertEqual(
      expected_reason,
      culprit.GenerateRevertReason('m/b/2', 123, 'r123', 's'))
def _CreateAndSaveMasterFlakeAnalysis(self,
                                      master_name,
                                      builder_name,
                                      build_number,
                                      step_name,
                                      test_name,
                                      request_time,
                                      status_code=None,
                                      culprit_urlsafe_key=None):
  """Creates, populates and persists a completed MasterFlakeAnalysis.

  Args:
    master_name, builder_name, build_number, step_name, test_name: Identify
        the analysis; each is also copied into the matching original_* field.
    request_time: Time at which the analysis was requested.
    status_code: Optional result status to store on the analysis.
    culprit_urlsafe_key: Optional urlsafe key of an identified culprit.

  Returns:
    The saved MasterFlakeAnalysis entity.
  """
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  for attribute, value in (
      ('original_master_name', master_name),
      ('original_builder_name', builder_name),
      ('original_build_number', build_number),
      ('original_step_name', step_name),
      ('original_test_name', test_name),
      ('request_time', request_time),
      ('status', analysis_status.COMPLETED),
      ('result_status', status_code),
      ('culprit_urlsafe_key', culprit_urlsafe_key)):
    setattr(analysis, attribute, value)
  analysis.put()
  return analysis
def testAnalyzeRecentFlakinessPipelineAlreadyUpToDate(
    self, mocked_commit_position, mocked_step_metadata):
  """Tests that the pipeline completes without work when already current.

  The analysis already has a data point (1001) beyond the latest commit
  position (1000), so no new flakiness analysis should be needed.
  """
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.data_points = [DataPoint.Create(commit_position=1001)]
  analysis.Save()
  latest_revision = 'r'
  latest_commit_position = 1000
  mocked_commit_position.return_value = (latest_commit_position,
                                         latest_revision)
  step_metadata = StepMetadata(
      canonical_step_name=step_name,
      dimensions=None,
      full_step_name='s',
      patched=False,
      swarm_task_ids=None,
      waterfall_buildername=builder_name,
      waterfall_mastername=master_name,
      isolate_target_name=step_name)
  mocked_step_metadata.return_value = step_metadata.ToSerializable()
  analyze_recent_flakiness_input = AnalyzeRecentFlakinessInput(
      analysis_urlsafe_key=analysis.key.urlsafe())
  pipeline_job = AnalyzeRecentFlakinessPipeline(
      analyze_recent_flakiness_input)
  pipeline_job.start()
  self.execute_queued_tasks()
  self.assertEqual(analysis_status.COMPLETED,
                   analysis.analyze_recent_flakiness_status)
def testCallbackImplNoTryJobID(self, mocked_pipeline_id,
                               mocked_state_changed):
  """Tests the callback's error path when no try_job_id is in the params."""
  # pipeline_id is a property on the pipeline; mock its getter directly.
  mocked_pipeline_id.__get__ = mock.Mock(return_value='pipeline-id')
  master_name = 'm'
  builder_name = 'b'
  build_number = 1
  step_name = 's'
  test_name = 't'
  isolate_target_name = 'target'
  revision = 'r1000'
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()
  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, revision)
  try_job.put()
  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision=revision,
      isolate_target_name=isolate_target_name,
      urlsafe_try_job_key=try_job.key.urlsafe())
  pipeline_job = RunFlakeTryJobPipeline(pipeline_input)
  # Callback parameters carry build_json but no try_job_id.
  returned_value = pipeline_job.CallbackImpl(pipeline_input,
                                             {'build_json': '{"k":"v"}'})
  self.assertEqual(
      ('Try_job_id not found for pipeline pipeline-id', None),
      returned_value)
  # Without a try job id there is no state change to propagate.
  self.assertFalse(mocked_state_changed.called)
def testConvertAnalysisToDict(self):
  """_ConvertAnalysisToDict serializes the key fields of an analysis."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.confidence_in_culprit = 0.9
  analysis.put()
  self.assertEqual(
      {
          'master_name': 'm',
          'builder_name': 'b',
          'step_name': 's',
          'test_name': 't',
          'key': analysis.key.urlsafe(),
          'confidence_in_culprit': 0.9,
      },
      flake_culprit._ConvertAnalysisToDict(analysis.key.urlsafe()))
def testGetTryJobsForFlakeSuccess(self, mocked_schedule, mocked_save,
                                  mocked_pipeline_id, *_):
  """Tests that RunImpl schedules the try job and saves its id."""
  master_name = 'm'
  builder_name = 'b'
  build_number = 1
  step_name = 's'
  test_name = 't'
  revision = 'r1000'
  isolate_target_name = 'target'
  try_job_id = 'try_job_id'
  # pipeline_id is a property on the pipeline; mock its getter directly.
  mocked_pipeline_id.__get__ = mock.Mock(return_value='pipeline-id')
  mocked_schedule.return_value = try_job_id
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()
  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, revision)
  try_job.put()
  pipeline_input = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flake_cache_name=None,
      dimensions=ListOfBasestring(),
      revision=revision,
      isolate_target_name=isolate_target_name,
      urlsafe_try_job_key=try_job.key.urlsafe())
  try_job_pipeline = RunFlakeTryJobPipeline(pipeline_input)
  try_job_pipeline.RunImpl(pipeline_input)
  # Scheduling uses the mocked pipeline id; the returned try job id is then
  # persisted via the callback-parameter save.
  mocked_schedule.assert_called_once_with(pipeline_input, 'pipeline-id')
  mocked_save.assert_called_once_with({'try_job_id': try_job_id})
def testCreateAndSubmitRevertSubmitFailed(self, under_limit, can_revert,
                                          revert_fn, commit_fn):
  """Tests a revert being created but its submission failing."""
  build_key = 'mock_build_key'
  repo = 'chromium'
  rev = 'rev1'
  commit_position = 100
  pipeline_id = 'foo'
  culprit = FlakeCulprit.Create(repo, rev, commit_position)
  culprit.put()
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  analysis.put()
  # Expected parameters passed to the revert-creation step.
  revert_expected = CreateRevertCLParameters(
      cl_key=culprit.key.urlsafe(),
      build_key=build_key,
      failure_type=failure_type.FLAKY_TEST)
  # Expected parameters passed to the revert-submission step.
  submit_expected = SubmitRevertCLParameters(
      cl_key=culprit.key.urlsafe(),
      revert_status=constants.CREATED_BY_FINDIT,
      failure_type=failure_type.FLAKY_TEST)
  pipeline_input = CreateAndSubmitRevertInput(
      analysis_urlsafe_key=analysis.key.urlsafe(), build_key=build_key)
  culprit_util.CreateAndSubmitRevert(pipeline_input, pipeline_id)
  under_limit.assert_called_once()
  can_revert.assert_called_once_with(analysis)
  revert_fn.assert_called_once_with(revert_expected, pipeline_id)
  commit_fn.assert_called_once_with(submit_expected, pipeline_id)
  # The revert was created, but submission failed: no submit flag or time.
  self.assertTrue(analysis.has_created_autorevert)
  self.assertFalse(analysis.has_submitted_autorevert)
  self.assertIsNone(analysis.autorevert_submission_time)