def testUpdateAnalysisDataPointsExistingDataPointWithErrorSalvagable(
    self, _):
  """Tests merging a salvagable errored task's results into existing data."""
  commit_position = 1000
  revision = 'r1000'
  iterations = 100
  pass_count = 50
  completed_time = datetime(2018, 1, 1, 0, 1, 0)
  error = SwarmingTaskError(code=1, message='m')
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  task_id_1 = 'task_1'
  task_id_2 = 'task_2'
  build_url = 'url'
  try_job_url = None

  # The incoming task errored, but still produced usable iteration data.
  swarming_task_output = FlakeSwarmingTaskOutput(
      completed_time=completed_time,
      error=error,
      iterations=iterations,
      pass_count=pass_count,
      started_time=started_time,
      task_id=task_id_2)

  initial_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=60,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=50,
      pass_rate=0.5,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([task_id_1]))

  expected_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=120,
      error=None,  # Only set error if no more retries.
      failed_swarming_task_attempts=0,  # Task was salvaged.
      iterations=150,
      pass_rate=0.5,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([task_id_1, task_id_2]))

  resulting_flakiness = flakiness_util.UpdateFlakiness(initial_flakiness,
                                                       swarming_task_output)

  self.assertEqual(expected_flakiness, resulting_flakiness)
def testUpdateAnalysisDataPointsExistingDataPointNoError(self):
  """Tests merging an error-free task's results into existing flakiness."""
  commit_position = 1000
  revision = 'r1000'
  iterations = 100
  pass_count = 60
  failed_swarming_task_attempts = 2
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  error = None
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  task_id = 'task_2'
  build_url = None
  try_job_url = 'url'

  initial_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=1800,
      error=None,
      failed_swarming_task_attempts=failed_swarming_task_attempts,
      iterations=iterations,
      pass_rate=0.5,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_1']))

  swarming_task_output = FlakeSwarmingTaskOutput(
      completed_time=completed_time,
      error=error,
      iterations=iterations,
      pass_count=pass_count,
      started_time=started_time,
      task_id=task_id)

  resulting_flakiness = flakiness_util.UpdateFlakiness(initial_flakiness,
                                                       swarming_task_output)

  # 50/100 existing + 60/100 incoming -> 110/200 = 0.55 overall pass rate.
  expected_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=5400,
      error=None,
      failed_swarming_task_attempts=failed_swarming_task_attempts,
      iterations=200,
      pass_rate=0.55,
      revision=revision,
      task_ids=ListOfBasestring.FromSerializable(['task_1', 'task_2']),
      try_job_url=try_job_url)

  self.assertEqual(expected_flakiness, resulting_flakiness)
def testUpdateExistingFlakinessWithErrorWithSuccessfulRun(self, _):
  """Tests a successful run updating flakiness whose first run had failed."""
  commit_position = 1000
  revision = 'r1000'
  iterations = 10
  pass_count = 5
  completed_time = datetime(2018, 1, 1, 0, 1, 0)
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  task_id_1 = 'task_1'
  task_id_2 = 'task_2'
  build_url = 'url'
  try_job_url = None

  swarming_task_output = FlakeSwarmingTaskOutput(
      completed_time=completed_time,
      error=None,
      iterations=iterations,
      pass_count=pass_count,
      started_time=started_time,
      task_id=task_id_2)

  # Simulate first run failing.
  initial_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=60,
      error=None,
      failed_swarming_task_attempts=1,
      iterations=0,
      pass_rate=None,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([task_id_1]))

  expected_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=120,
      # No change due to unrecoverable error.
      error=None,
      failed_swarming_task_attempts=1,
      iterations=10,
      pass_rate=0.5,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([task_id_1, task_id_2]))

  resulting_flakiness = flakiness_util.UpdateFlakiness(initial_flakiness,
                                                       swarming_task_output)

  self.assertEqual(expected_flakiness, resulting_flakiness)
def testUpdateFlakinessWithErrorUnsalvagable(self, _):
  """Tests that an unsalvagable errored task only bumps the attempt count."""
  commit_position = 1000
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  error = SwarmingTaskError(code=1, message='message')
  iterations = None
  pass_count = None
  revision = 'r1000'
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  task_id = 'task_id'
  build_url = 'url'
  try_job_url = None

  # The task produced no usable iteration data at all.
  swarming_task_output = FlakeSwarmingTaskOutput(
      completed_time=completed_time,
      error=error,
      iterations=iterations,
      pass_count=pass_count,
      started_time=started_time,
      task_id=task_id)

  flakiness_to_update = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=0,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=0,
      pass_rate=None,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([]))

  expected_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=0,
      error=None,
      failed_swarming_task_attempts=1,
      iterations=0,
      pass_rate=None,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([task_id]))

  resulting_flakiness = flakiness_util.UpdateFlakiness(flakiness_to_update,
                                                       swarming_task_output)

  self.assertEqual(expected_flakiness, resulting_flakiness)
def testUpdateFlakinessNewFlakinessNoError(self):
  """Tests populating a brand-new Flakiness from a clean task run."""
  commit_position = 1000
  completed_time = datetime(2018, 1, 1, 0, 1, 0)
  error = None
  iterations = 100
  pass_count = 50
  revision = 'r1000'
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  task_id = 'task_id'
  build_url = None
  try_job_url = 'url'

  swarming_task_output = FlakeSwarmingTaskOutput(
      completed_time=completed_time,
      error=error,
      iterations=iterations,
      pass_count=pass_count,
      started_time=started_time,
      task_id=task_id)

  # Fresh flakiness: no runs accumulated yet.
  initial_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=None,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=None,
      pass_rate=None,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([]))

  expected_flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=60,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=0.5,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([task_id]))

  resulting_flakiness = flakiness_util.UpdateFlakiness(initial_flakiness,
                                                       swarming_task_output)

  self.assertEqual(expected_flakiness, resulting_flakiness)
def testUpdateFlakeAnalysisDataPointsPipelineTooManyErrors(
    self, _, mocked_error_reporting):
  """Tests that the pipeline reports an error after too many failed tasks."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()

  # Three failed attempts should trip the error-reporting path.
  flakiness = Flakiness(
      build_number=None,
      build_url='url',
      commit_position=1000,
      total_test_run_seconds=100,
      error=None,
      failed_swarming_task_attempts=3,
      iterations=50,
      pass_rate=0.5,
      revision='r1000',
      try_job_url=None,
      task_ids=ListOfBasestring.FromSerializable(['task_id']))

  update_data_points_input = UpdateFlakeAnalysisDataPointsInput(
      analysis_urlsafe_key=analysis.key.urlsafe(), flakiness=flakiness)

  pipeline_job = UpdateFlakeAnalysisDataPointsPipeline(
      update_data_points_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  self.assertTrue(mocked_error_reporting.called)
def testDetermineApproximatePassRateConverged(self, *_):
  """Tests the pipeline completing when the pass rate has converged."""
  master_name = 'm'
  builder_name = 'b'
  reference_build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  incoming_pass_count = 15
  iterations = 30
  incoming_pass_rate = 0.5
  isolate_sha = 'sha1'
  revision = 'r1000'
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  build_url = 'url'
  try_job_url = None

  isolate_sha_output = GetIsolateShaOutput(
      build_number=None,
      build_url=None,
      isolate_sha=isolate_sha,
      try_job_url='url')

  flake_swarming_task_output = FlakeSwarmingTaskOutput(
      error=None,
      pass_count=incoming_pass_count,
      iterations=iterations,
      started_time=started_time,
      completed_time=completed_time,
      task_id='task_id')

  flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=3600,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      get_isolate_sha_output=isolate_sha_output,
      flakiness_thus_far=flakiness_thus_far,
      previous_swarming_task_output=flake_swarming_task_output,
      master_name=master_name,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  pipeline_job = DetermineApproximatePassRatePipeline(
      determine_approximate_pass_rate_input)
  pipeline_job.start()
  self.execute_queued_tasks()
def testCalculateRunParametersForSwarmingTaskExceedsMaxTasks(self):
  """Tests iteration/timeout capping when requested work exceeds the max."""
  flakiness = Flakiness(
      commit_position=1000,
      iterations=100,
      total_test_run_seconds=100,
      pass_rate=1.0)
  self.assertEqual(
      (200, 3600),
      run_swarming_util.CalculateRunParametersForSwarmingTask(
          flakiness, None))
def testEstimateSwarmingIterationTimeoutWithDefaultValues(self):
  """Tests the default per-iteration timeout when no timing data exists."""
  flakiness = Flakiness(
      build_url=None,
      commit_position=1000,
      total_test_run_seconds=0,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=0,
      pass_rate=None,
      revision='r1000',
      try_job_url='url')
  self.assertEqual(
      180, run_swarming_util._EstimateSwarmingIterationTimeout(flakiness))
def testCalculateRunParametersForSwarmingTaskWithError(self):
  """Tests that a prior timeout error reduces the requested iterations."""
  expected_iterations_to_run_after_timeout = 10
  flakiness = Flakiness(
      commit_position=1000,
      pass_rate=1.0,
      iterations=1,
      total_test_run_seconds=400)
  self.assertEqual(
      (expected_iterations_to_run_after_timeout, 3600),
      run_swarming_util.CalculateRunParametersForSwarmingTask(
          flakiness,
          SwarmingTaskError(code=swarming_task_error.TIMED_OUT, message='m')))
def testDetermineApproximatePassRatePipelineWrapper(self):
  """Tests that the wrapper pipeline forwards its input unchanged."""
  master_name = 'm'
  builder_name = 'b'
  reference_build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  incoming_pass_rate = 0.5
  isolate_sha = 'sha1'
  revision = 'r1000'
  build_url = None
  try_job_url = 'url'

  isolate_sha_output = GetIsolateShaOutput(
      build_number=None,
      build_url=build_url,
      isolate_sha=isolate_sha,
      try_job_url=try_job_url)

  flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=60,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=10,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      get_isolate_sha_output=isolate_sha_output,
      flakiness_thus_far=flakiness_thus_far,
      master_name=master_name,
      previous_swarming_task_output=None,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  self.MockGeneratorPipeline(DetermineApproximatePassRatePipeline,
                             determine_approximate_pass_rate_input, None)

  pipeline_job = DetermineApproximatePassRatePipelineWrapper(
      determine_approximate_pass_rate_input)
  pipeline_job.start()
  self.execute_queued_tasks()
def testCalculateRunParametersForSwarmingTaskDefault(self):
  """Tests default run parameters when no prior timing data is available."""
  flakiness = Flakiness(
      build_url=None,
      commit_position=1000,
      total_test_run_seconds=None,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=None,
      pass_rate=None,
      revision='r1000',
      try_job_url='url')
  self.assertEqual(
      (20, 3600),
      run_swarming_util.CalculateRunParametersForSwarmingTask(
          flakiness, None))
def testUpdateFlakeAnalysisDataPointsPipeline(self, mocked_change_log):
  """Tests converting flakiness into a data point stored on the analysis."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()

  commit_position = 1000
  pass_rate = 0.5
  revision = 'r1000'
  expected_time = datetime(2018, 9, 18, 0, 0, 0)

  # Stub out the change log so the data point gets a commit timestamp.
  committer = Contributor(name='name', email='email', time=expected_time)
  change_log = ChangeLog(None, committer, revision, None, None, None, None,
                         None)
  mocked_change_log.return_value = change_log

  flakiness = Flakiness(
      build_number=123,
      build_url='url',
      commit_position=commit_position,
      total_test_run_seconds=100,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=50,
      pass_rate=pass_rate,
      revision=revision,
      try_job_url=None,
      task_ids=ListOfBasestring.FromSerializable(['task_id']))

  expected_data_point = DataPoint.Create(
      build_number=123,
      build_url='url',
      commit_position=commit_position,
      elapsed_seconds=100,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=50,
      pass_rate=pass_rate,
      git_hash=revision,
      try_job_url=None,
      task_ids=['task_id'],
      commit_timestamp=expected_time)

  update_data_points_input = UpdateFlakeAnalysisDataPointsInput(
      analysis_urlsafe_key=analysis.key.urlsafe(), flakiness=flakiness)

  pipeline_job = UpdateFlakeAnalysisDataPointsPipeline(
      update_data_points_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  self.assertEqual(1, len(analysis.data_points))
  self.assertEqual(expected_data_point, analysis.data_points[0])
def testUpdateFlakiness(self):
  """Tests that updating with no swarming output is a no-op."""
  flakiness = Flakiness(
      build_number=None,
      build_url='url',
      commit_position=1000,
      total_test_run_seconds=0,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=0,
      pass_rate=None,
      revision='r1000',
      try_job_url=None,
      task_ids=ListOfBasestring.FromSerializable([]))
  self.assertEqual(flakiness,
                   flakiness_util.UpdateFlakiness(flakiness, None))
def testConvertFlakinessToDataPoint(self):
  """Tests the field-by-field mapping from Flakiness to DataPoint."""
  build_url = 'url'
  commit_position = 1000
  total_test_run_seconds = 60
  failed_swarming_task_attempts = 0
  iterations = 10
  pass_rate = 0.3
  revision = 'r1000'
  try_job_url = None
  task_id = 'task_id'

  flakiness = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=total_test_run_seconds,
      error=None,
      failed_swarming_task_attempts=failed_swarming_task_attempts,
      iterations=iterations,
      pass_rate=pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable([task_id]))

  # Note the renames: total_test_run_seconds -> elapsed_seconds,
  # revision -> git_hash.
  expected_data_point = DataPoint.Create(
      build_url=build_url,
      commit_position=commit_position,
      elapsed_seconds=total_test_run_seconds,
      failed_swarming_task_attempts=failed_swarming_task_attempts,
      iterations=iterations,
      pass_rate=pass_rate,
      git_hash=revision,
      try_job_url=try_job_url,
      task_ids=[task_id])

  data_point = data_point_util.ConvertFlakinessToDataPoint(flakiness)
  self.assertEqual(expected_data_point, data_point)
def testAnalyzeRecentFlakinessPipeline(self, mocked_commit_position,
                                       mocked_step_metadata):
  """Tests the recent-flakiness pipeline wiring its three sub-pipelines."""
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.data_points = [DataPoint.Create(commit_position=999)]
  analysis.Save()

  isolate_sha = 'sha'
  latest_revision = 'r'
  latest_commit_position = 1000
  mocked_commit_position.return_value = (latest_commit_position,
                                         latest_revision)
  pass_rate = 0.5

  get_sha_output = GetIsolateShaOutput(
      isolate_sha=isolate_sha, build_url='url', try_job_url=None)

  step_metadata = StepMetadata(
      canonical_step_name=step_name,
      dimensions=None,
      full_step_name='s',
      patched=False,
      swarm_task_ids=None,
      waterfall_buildername=builder_name,
      waterfall_mastername=master_name,
      isolate_target_name=step_name)
  mocked_step_metadata.return_value = step_metadata.ToSerializable()

  expected_flakiness = Flakiness(
      build_url='url',
      commit_position=latest_commit_position,
      revision=latest_revision,
      pass_rate=pass_rate)

  analyze_recent_flakiness_input = AnalyzeRecentFlakinessInput(
      analysis_urlsafe_key=analysis.key.urlsafe())

  expected_isolate_sha_input = GetIsolateShaForCommitPositionParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      commit_position=latest_commit_position,
      dimensions=None,
      revision=latest_revision,
      step_metadata=step_metadata,
      upper_bound_build_number=analysis.build_number)

  expected_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=analysis.builder_name,
      commit_position=latest_commit_position,
      flakiness_thus_far=None,
      get_isolate_sha_output=get_sha_output,
      master_name=analysis.master_name,
      previous_swarming_task_output=None,
      reference_build_number=analysis.build_number,
      revision=latest_revision,
      step_name=analysis.step_name,
      test_name=analysis.test_name)

  expected_save_flakiness_verification_input = SaveFlakinessVerificationInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flakiness=expected_flakiness)

  self.MockGeneratorPipeline(GetIsolateShaForCommitPositionPipeline,
                             expected_isolate_sha_input, get_sha_output)
  self.MockGeneratorPipeline(DetermineApproximatePassRatePipeline,
                             expected_pass_rate_input, expected_flakiness)
  self.MockGeneratorPipeline(SaveFlakinessVerificationPipeline,
                             expected_save_flakiness_verification_input, None)

  pipeline_job = AnalyzeRecentFlakinessPipeline(
      analyze_recent_flakiness_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  mocked_step_metadata.assert_called_with(master_name, builder_name,
                                          build_number, step_name)
  mocked_commit_position.assert_called_with(master_name, builder_name,
                                            step_name)
def testMaximumSwarmingTaskRetriesReached(self, _):
  """Tests that 4 failed attempts counts as having exhausted retries."""
  flakiness = Flakiness(failed_swarming_task_attempts=4)
  self.assertTrue(
      flakiness_util.MaximumSwarmingTaskRetriesReached(flakiness))
def testAnalyzeFlakePipelineCanStartAnalysisImmediately(self, _):
  """Tests the analyze-flake pipeline fanning out and recursing."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()

  start_commit_position = 1000
  start_revision = 'r1000'
  isolate_sha = 'sha1'
  next_commit_position = 999
  pass_rate = 0.5
  build_url = 'url'
  try_job_url = None

  get_sha_output = GetIsolateShaOutput(
      isolate_sha=isolate_sha, build_url=build_url, try_job_url=try_job_url)

  step_metadata = StepMetadata(
      canonical_step_name='s',
      dimensions=None,
      full_step_name='s',
      patched=False,
      swarm_task_ids=None,
      waterfall_buildername='b',
      waterfall_mastername='w',
      isolate_target_name='s')

  expected_flakiness = Flakiness(
      build_url=build_url,
      commit_position=start_commit_position,
      revision=start_revision,
      pass_rate=pass_rate)

  analyze_flake_input = AnalyzeFlakeInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      analyze_commit_position_parameters=NextCommitPositionOutput(
          next_commit_id=CommitID(
              commit_position=start_commit_position,
              revision=start_revision),
          culprit_commit_id=None),
      commit_position_range=IntRange(lower=None, upper=None),
      dimensions=ListOfBasestring.FromSerializable(['os:testOS']),
      manually_triggered=False,
      rerun=False,
      retries=0,
      step_metadata=step_metadata)

  expected_isolate_sha_input = GetIsolateShaForCommitPositionParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      commit_position=start_commit_position,
      dimensions=ListOfBasestring.FromSerializable(['os:testOS']),
      revision=start_revision,
      step_metadata=step_metadata,
      upper_bound_build_number=analysis.build_number)

  expected_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=analysis.builder_name,
      commit_position=start_commit_position,
      flakiness_thus_far=None,
      get_isolate_sha_output=get_sha_output,
      master_name=analysis.master_name,
      previous_swarming_task_output=None,
      reference_build_number=analysis.build_number,
      revision=start_revision,
      step_name=analysis.step_name,
      test_name=analysis.test_name)

  expected_update_data_points_input = UpdateFlakeAnalysisDataPointsInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      flakiness=expected_flakiness)

  expected_next_commit_position_input = NextCommitPositionInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      commit_position_range=IntRange(lower=None, upper=None),
      step_metadata=step_metadata)

  next_commit_id = CommitID(
      commit_position=next_commit_position, revision='r999')
  expected_next_commit_position_output = NextCommitPositionOutput(
      next_commit_id=next_commit_id, culprit_commit_id=None)

  expected_recursive_analyze_flake_input = AnalyzeFlakeInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      analyze_commit_position_parameters=expected_next_commit_position_output,
      commit_position_range=IntRange(lower=None, upper=None),
      dimensions=ListOfBasestring.FromSerializable(['os:testOS']),
      manually_triggered=False,
      rerun=False,
      retries=0,
      step_metadata=step_metadata)

  self.MockGeneratorPipeline(GetIsolateShaForCommitPositionPipeline,
                             expected_isolate_sha_input, get_sha_output)
  self.MockGeneratorPipeline(DetermineApproximatePassRatePipeline,
                             expected_pass_rate_input, expected_flakiness)
  self.MockSynchronousPipeline(UpdateFlakeAnalysisDataPointsPipeline,
                               expected_update_data_points_input, None)
  self.MockSynchronousPipeline(NextCommitPositionPipeline,
                               expected_next_commit_position_input,
                               expected_next_commit_position_output)
  self.MockGeneratorPipeline(RecursiveAnalyzeFlakePipeline,
                             expected_recursive_analyze_flake_input, None)

  pipeline_job = AnalyzeFlakePipeline(analyze_flake_input)
  pipeline_job.start()
  self.execute_queued_tasks()
def RunImpl(self, parameters):
  """Pipeline to find the true pass rate of a test at a commit position.

  Repeatedly schedules swarming tasks until the pass rate converges, the
  iteration cap is hit, the test is found not to exist, or too many task
  errors accumulate. Yields an AggregateFlakinessPipeline as the terminal
  output in each of those cases, otherwise recurses via
  DetermineApproximatePassRatePipelineWrapper.

  Args:
    parameters (DetermineApproximatePassRateInput): Identifies the test,
        the commit position/isolate under analysis, the flakiness
        accumulated so far, and the previous swarming task's output.
  """
  master_name = parameters.master_name
  builder_name = parameters.builder_name
  reference_build_number = parameters.reference_build_number
  step_name = parameters.step_name
  test_name = parameters.test_name
  commit_position = parameters.commit_position
  get_isolate_sha_output = parameters.get_isolate_sha_output
  build_url = get_isolate_sha_output.build_url
  try_job_url = get_isolate_sha_output.try_job_url
  flakiness_thus_far = parameters.flakiness_thus_far
  previous_swarming_task_output = parameters.previous_swarming_task_output

  # Extract pass rate and iterations already-completed up to this point.
  if previous_swarming_task_output:
    assert flakiness_thus_far, (
        'Previous swarming task output not captured properly')
    error = previous_swarming_task_output.error
    pass_rate_at_commit_position = flakiness_thus_far.pass_rate
    previous_pass_count = previous_swarming_task_output.pass_count
    previous_iterations = previous_swarming_task_output.iterations
    # Convert to float BEFORE dividing: float(a / b) truncates under
    # Python 2 integer division (e.g. float(15 / 30) == 0.0).
    previous_pass_rate = (
        float(previous_pass_count) / previous_iterations
        if previous_iterations else None)
  else:
    # First run at this commit position: nothing completed yet.
    error = None
    pass_rate_at_commit_position = None
    previous_iterations = 0
    previous_pass_count = 0
    previous_pass_rate = None

    # Create a fresh Flakiness instance to aggregate swarming rerun data.
    flakiness_thus_far = Flakiness(
        build_number=get_isolate_sha_output.build_number,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=0,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=0,
        pass_rate=None,
        revision=parameters.revision,
        task_ids=ListOfBasestring.FromSerializable([]),
        try_job_url=try_job_url)

  # Bail out if there were too many errors.
  if (error and flakiness_util.MaximumSwarmingTaskRetriesReached(
      flakiness_thus_far)):
    logging.error(
        'Swarming task ended in error after %d failed attempts. Giving '
        'up' % flakiness_thus_far.failed_swarming_task_attempts)
    flakiness_thus_far.error = error
    yield AggregateFlakinessPipeline(
        self.CreateInputObjectInstance(
            AggregateFlakinessInput,
            flakiness_thus_far=flakiness_thus_far,
            incoming_swarming_task_output=None))
    return

  # Move on if the maximum number of iterations has been reached or exceeded.
  if flakiness_util.MaximumIterationsReached(flakiness_thus_far):
    logging.info('Max iterations reached for commit_position %d' %
                 commit_position)
    yield AggregateFlakinessPipeline(
        self.CreateInputObjectInstance(
            AggregateFlakinessInput,
            flakiness_thus_far=flakiness_thus_far,
            incoming_swarming_task_output=None))
    return

  # Move on if the test doesn't exist.
  if pass_rate_util.TestDoesNotExist(pass_rate_at_commit_position):
    logging.info('No test found at commit position %d' % commit_position)
    yield AggregateFlakinessPipeline(
        self.CreateInputObjectInstance(
            AggregateFlakinessInput,
            flakiness_thus_far=flakiness_thus_far,
            incoming_swarming_task_output=None))
    return

  # Move on if there is sufficient information about the pass rate.
  if pass_rate_util.HasSufficientInformation(
      pass_rate_at_commit_position, flakiness_thus_far.iterations,
      previous_pass_rate, previous_iterations):
    logging.info(
        'There is sufficient information for commit position %d with pass '
        'rate %s after %d iterations' %
        (commit_position, pass_rate_at_commit_position,
         flakiness_thus_far.iterations))
    yield AggregateFlakinessPipeline(
        self.CreateInputObjectInstance(
            AggregateFlakinessInput,
            flakiness_thus_far=flakiness_thus_far,
            incoming_swarming_task_output=None))
    return

  # Another swarming task is needed. Determine parameters for it to run.
  iterations_for_task, time_for_task_seconds = (
      run_swarming_util.CalculateRunParametersForSwarmingTask(
          flakiness_thus_far, error))

  # Run swarming task, update data points with results, and recurse.
  with pipeline.InOrder():
    swarming_task_output = yield RunFlakeSwarmingTaskPipeline(
        self.CreateInputObjectInstance(
            RunFlakeSwarmingTaskInput,
            master_name=master_name,
            builder_name=builder_name,
            reference_build_number=reference_build_number,
            step_name=step_name,
            test_name=test_name,
            commit_position=commit_position,
            isolate_sha=get_isolate_sha_output.isolate_sha,
            iterations=iterations_for_task,
            timeout_seconds=time_for_task_seconds))

    aggregated_flakiness = yield AggregateFlakinessPipeline(
        self.CreateInputObjectInstance(
            AggregateFlakinessInput,
            flakiness_thus_far=flakiness_thus_far,
            incoming_swarming_task_output=swarming_task_output))

    yield DetermineApproximatePassRatePipelineWrapper(
        self.CreateInputObjectInstance(
            DetermineApproximatePassRateInput,
            builder_name=parameters.builder_name,
            commit_position=commit_position,
            flakiness_thus_far=aggregated_flakiness,
            get_isolate_sha_output=get_isolate_sha_output,
            master_name=parameters.master_name,
            previous_swarming_task_output=swarming_task_output,
            reference_build_number=parameters.reference_build_number,
            revision=parameters.revision,
            step_name=parameters.step_name,
            test_name=parameters.test_name))
def testMaximumIterationsReached(self, _):
  """Tests that 150 iterations counts as having reached the cap."""
  flakiness = Flakiness(iterations=150)
  self.assertTrue(flakiness_util.MaximumIterationsReached(flakiness))
def testDetermineApproximatePassRateNotYetConverged(self, *_):
  """Tests the pipeline scheduling another task when not yet converged."""
  master_name = 'm'
  builder_name = 'b'
  reference_build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  incoming_pass_count = 15
  iterations_completed = 30
  expected_iterations = 15
  incoming_pass_rate = 0.5
  isolate_sha = 'sha1'
  revision = 'r1000'
  timeout_seconds = 3600
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  build_url = None
  try_job_url = 'url'

  isolate_sha_output = GetIsolateShaOutput(
      build_number=None,
      build_url=build_url,
      isolate_sha=isolate_sha,
      try_job_url=try_job_url)

  flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=timeout_seconds,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=iterations_completed,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  incoming_flake_swarming_task_output = FlakeSwarmingTaskOutput(
      error=None,
      pass_count=incoming_pass_count,
      iterations=iterations_completed,
      started_time=started_time,
      completed_time=completed_time,
      task_id='task_id_2')

  expected_aggregate_flakiness_input = AggregateFlakinessInput(
      flakiness_thus_far=flakiness_thus_far,
      incoming_swarming_task_output=incoming_flake_swarming_task_output)

  expected_aggregate_flakiness_output = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=timeout_seconds,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=45,
      pass_rate=0.5,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      flakiness_thus_far=flakiness_thus_far,
      get_isolate_sha_output=isolate_sha_output,
      master_name=master_name,
      previous_swarming_task_output=incoming_flake_swarming_task_output,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  flake_swarming_task_input = RunFlakeSwarmingTaskInput(
      builder_name=builder_name,
      commit_position=commit_position,
      isolate_sha=isolate_sha,
      iterations=expected_iterations,
      master_name=master_name,
      reference_build_number=reference_build_number,
      step_name=step_name,
      test_name=test_name,
      timeout_seconds=timeout_seconds)

  recursive_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      flakiness_thus_far=expected_aggregate_flakiness_output,
      get_isolate_sha_output=isolate_sha_output,
      master_name=master_name,
      previous_swarming_task_output=incoming_flake_swarming_task_output,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  self.MockAsynchronousPipeline(RunFlakeSwarmingTaskPipeline,
                                flake_swarming_task_input,
                                incoming_flake_swarming_task_output)
  self.MockSynchronousPipeline(AggregateFlakinessPipeline,
                               expected_aggregate_flakiness_input,
                               expected_aggregate_flakiness_output)
  self.MockGeneratorPipeline(DetermineApproximatePassRatePipelineWrapper,
                             recursive_input, None)

  pipeline_job = DetermineApproximatePassRatePipeline(
      determine_approximate_pass_rate_input)
  pipeline_job.start()
  self.execute_queued_tasks()
def testDetermineApproximatePassRateMaximumRetriesPerSwarmingTaskReached(
    self, _):
  """Tests the pipeline giving up and outputting flakiness with the error.

  When the previous task errored and retries are exhausted, the pipeline
  should output the accumulated flakiness with the error attached and
  otherwise unchanged.
  """
  master_name = 'm'
  builder_name = 'b'
  reference_build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  incoming_pass_count = 15
  iterations = 30
  # Divide as floats: float(15 / 30) would be 0.0 under Python 2's integer
  # division, silently changing the fixture. 15.0 / 30 == 0.5 everywhere.
  incoming_pass_rate = float(incoming_pass_count) / iterations
  isolate_sha = 'sha1'
  revision = 'r1000'
  task_id = 'task_id_2'
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  build_url = 'url'
  try_job_url = None
  swarming_task_error = SwarmingTaskError(code=1, message='error')

  isolate_sha_output = GetIsolateShaOutput(
      build_number=None,
      build_url=build_url,
      isolate_sha=isolate_sha,
      try_job_url=try_job_url)

  flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=3600,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  # Identical to the input flakiness except the error is now attached.
  expected_flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=3600,
      error=swarming_task_error,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  incoming_flake_swarming_task_output = FlakeSwarmingTaskOutput(
      completed_time=completed_time,
      error=swarming_task_error,
      pass_count=incoming_pass_count,
      iterations=iterations,
      started_time=started_time,
      task_id=task_id)

  determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      flakiness_thus_far=flakiness_thus_far,
      get_isolate_sha_output=isolate_sha_output,
      master_name=master_name,
      previous_swarming_task_output=incoming_flake_swarming_task_output,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  pipeline_job = DetermineApproximatePassRatePipeline(
      determine_approximate_pass_rate_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  pipeline_job = pipelines.pipeline.Pipeline.from_id(
      pipeline_job.pipeline_id)
  self.assertEqual(expected_flakiness_thus_far.ToSerializable(),
                   pipeline_job.outputs.default.value)
def testDetermineApproximatePassRateTestDoesNotExist(self, *_):
  """Tests the pipeline completing early when the test is not found."""
  master_name = 'm'
  builder_name = 'b'
  reference_build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  incoming_pass_count = 0
  iterations = 10
  incoming_pass_rate = flake_constants.PASS_RATE_TEST_NOT_FOUND
  isolate_sha = 'sha1'
  revision = 'r1000'
  task_id = 'task_id'
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  build_url = 'url'
  try_job_url = None

  flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=3600,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  isolate_sha_output = GetIsolateShaOutput(
      build_number=None,
      build_url=build_url,
      isolate_sha=isolate_sha,
      try_job_url=try_job_url)

  flake_swarming_task_output = FlakeSwarmingTaskOutput(
      error=None,
      pass_count=incoming_pass_count,
      iterations=iterations,
      task_id=task_id,
      started_time=started_time,
      completed_time=completed_time)

  determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      flakiness_thus_far=flakiness_thus_far,
      get_isolate_sha_output=isolate_sha_output,
      master_name=master_name,
      previous_swarming_task_output=flake_swarming_task_output,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  pipeline_job = DetermineApproximatePassRatePipeline(
      determine_approximate_pass_rate_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  pipeline_job = pipelines.pipeline.Pipeline.from_id(
      pipeline_job.pipeline_id)
  self.assertEqual(flakiness_thus_far.ToSerializable(),
                   pipeline_job.outputs.default.value)