def testDetermineApproximatePassRateTestDoesNotExist(self, *_):
  master_name = 'm'
  builder_name = 'b'
  reference_build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  incoming_pass_count = 0
  iterations = 10
  incoming_pass_rate = flake_constants.PASS_RATE_TEST_NOT_FOUND
  isolate_sha = 'sha1'
  revision = 'r1000'
  task_id = 'task_id'
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  build_url = 'url'
  try_job_url = None

  flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=3600,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  isolate_sha_output = GetIsolateShaOutput(
      build_number=None,
      build_url=build_url,
      isolate_sha=isolate_sha,
      try_job_url=try_job_url)

  flake_swarming_task_output = FlakeSwarmingTaskOutput(
      error=None,
      pass_count=incoming_pass_count,
      iterations=iterations,
      task_id=task_id,
      started_time=started_time,
      completed_time=completed_time)

  determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      flakiness_thus_far=flakiness_thus_far,
      get_isolate_sha_output=isolate_sha_output,
      master_name=master_name,
      previous_swarming_task_output=flake_swarming_task_output,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  pipeline_job = DetermineApproximatePassRatePipeline(
      determine_approximate_pass_rate_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)

  # When the pass rate indicates the test was not found, the pipeline should
  # return the accumulated flakiness unchanged.
  self.assertEqual(flakiness_thus_far.ToSerializable(),
                   pipeline_job.outputs.default.value)
def testDetermineApproximatePassRateMaximumRetriesPerSwarmingTaskReached(
    self, _):
  master_name = 'm'
  builder_name = 'b'
  reference_build_number = 123
  step_name = 's'
  test_name = 't'
  commit_position = 1000
  incoming_pass_count = 15
  iterations = 30
  # Use float division so the pass rate is 0.5 rather than truncating to 0.
  incoming_pass_rate = float(incoming_pass_count) / iterations
  isolate_sha = 'sha1'
  revision = 'r1000'
  task_id = 'task_id_2'
  started_time = datetime(2018, 1, 1, 0, 0, 0)
  completed_time = datetime(2018, 1, 1, 1, 0, 0)
  build_url = 'url'
  try_job_url = None
  swarming_task_error = SwarmingTaskError(code=1, message='error')

  isolate_sha_output = GetIsolateShaOutput(
      build_number=None,
      build_url=build_url,
      isolate_sha=isolate_sha,
      try_job_url=try_job_url)

  flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=3600,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  # Once the maximum number of swarming task retries is reached, the
  # accumulated flakiness should be returned unchanged except for the
  # swarming task error being recorded.
  expected_flakiness_thus_far = Flakiness(
      build_number=None,
      build_url=build_url,
      commit_position=commit_position,
      total_test_run_seconds=3600,
      error=swarming_task_error,
      failed_swarming_task_attempts=0,
      iterations=iterations,
      pass_rate=incoming_pass_rate,
      revision=revision,
      try_job_url=try_job_url,
      task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

  incoming_flake_swarming_task_output = FlakeSwarmingTaskOutput(
      completed_time=completed_time,
      error=swarming_task_error,
      pass_count=incoming_pass_count,
      iterations=iterations,
      started_time=started_time,
      task_id=task_id)

  determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
      builder_name=builder_name,
      commit_position=commit_position,
      flakiness_thus_far=flakiness_thus_far,
      get_isolate_sha_output=isolate_sha_output,
      master_name=master_name,
      previous_swarming_task_output=incoming_flake_swarming_task_output,
      reference_build_number=reference_build_number,
      revision=revision,
      step_name=step_name,
      test_name=test_name)

  pipeline_job = DetermineApproximatePassRatePipeline(
      determine_approximate_pass_rate_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)

  self.assertEqual(expected_flakiness_thus_far.ToSerializable(),
                   pipeline_job.outputs.default.value)