def testGetElapsedSecondsNoStartEndTimes(self):
    task_output = FlakeSwarmingTaskOutput(
        completed_time=None,
        error=None,
        iterations=50,
        pass_count=25,
        started_time=None,
        task_id='task_id')
    self.assertIsNone(task_output.GetElapsedSeconds())

def testGetElapsedSeconds(self):
    task_output = FlakeSwarmingTaskOutput(
        completed_time=datetime(2018, 2, 21, 0, 1, 0),
        error=None,
        iterations=50,
        pass_count=25,
        started_time=datetime(2018, 2, 21, 0, 0, 0),
        task_id='task_id')
    self.assertEqual(60, task_output.GetElapsedSeconds())
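
The two tests above pin down the contract of GetElapsedSeconds: the wall-clock difference in whole seconds when both timestamps are present, and None when either is missing. A minimal standalone sketch of that behavior; the elapsed_seconds helper below is a hypothetical stand-in, not the project's implementation:

from datetime import datetime


def elapsed_seconds(started_time, completed_time):
    # Hypothetical stand-in mirroring the behavior asserted above.
    if started_time is None or completed_time is None:
        return None
    return int((completed_time - started_time).total_seconds())


assert elapsed_seconds(None, None) is None
assert elapsed_seconds(datetime(2018, 2, 21, 0, 0, 0),
                       datetime(2018, 2, 21, 0, 1, 0)) == 60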
Example #3
def _ParseFlakeSwarmingTaskOutput(task_data, output_json, error, parameters):
    """Returns swarming task results as a FlakeswarmingTaskOutput object.

  Assumption: only 1 test was run in the Swarming task.
  """
    assert task_data

    iterations = parameters.iterations

    if output_json:
        # Gets the total number of runs and the number of successful runs from
        # the test results.
        tries, successes = flake_test_results.GetCountsFromSwarmingRerun(
            output_json)

        if tries is None or successes is None:
            # Something went wrong that prevented even a single test from being
            # processed, which counts as an error.
            error = error or SwarmingTaskError.GenerateError(
                code=swarming_task_error.UNKNOWN)
            tries = None
            successes = None
        elif (tries == 1 and task_data['state'] == constants.STATE_COMPLETED
              and not task_data.get('failure')
              and not task_data.get('infra_failure')):
            # webkit_layout_tests special case: test results are combined into
            # one entry if all runs have the same result. Use the requested
            # iteration count instead, assuming the test repeated that many
            # times. Currently this is done only if the task completed
            # successfully.
            tries = iterations
            successes = iterations * successes

        return FlakeSwarmingTaskOutput(
            completed_time=time_util.DatetimeFromString(
                task_data.get('completed_ts')),
            error=error,
            iterations=tries,
            pass_count=successes,
            started_time=time_util.DatetimeFromString(
                task_data.get('started_ts')),
            task_id=task_data['task_id'])
    else:
        return FlakeSwarmingTaskOutput(
            completed_time=time_util.DatetimeFromString(
                task_data.get('completed_ts')),
            error=error or
            SwarmingTaskError.GenerateError(code=swarming_task_error.UNKNOWN),
            iterations=None,
            pass_count=None,
            started_time=time_util.DatetimeFromString(
                task_data.get('started_ts')),
            task_id=task_data['task_id'])
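
The webkit_layout_tests special case above can be checked with concrete numbers: a task asked to run 50 iterations that reports a single consolidated passing result is expanded to 50 tries and 50 passes. A standalone sketch of just that arithmetic:

# Requested iteration count for the swarming task (from the input parameters).
iterations = 50

# Consolidated result as reported for webkit_layout_tests: one try, one pass.
tries, successes = 1, 1

# Expansion applied by the special case above.
tries = iterations                  # 50 iterations are assumed to have run.
successes = iterations * successes  # 50 passes if the single result passed, else 0.

assert (tries, successes) == (50, 50)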
Example #4
  def testDetermineApproximatePassRateConverged(self, *_):
    master_name = 'm'
    builder_name = 'b'
    reference_build_number = 123
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    incoming_pass_count = 15
    iterations = 30
    incoming_pass_rate = 0.5
    isolate_sha = 'sha1'
    revision = 'r1000'
    started_time = datetime(2018, 1, 1, 0, 0, 0)
    completed_time = datetime(2018, 1, 1, 1, 0, 0)
    build_url = 'url'
    try_job_url = None

    isolate_sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=None,
        isolate_sha=isolate_sha,
        try_job_url='url')

    flake_swarming_task_output = FlakeSwarmingTaskOutput(
        error=None,
        pass_count=incoming_pass_count,
        iterations=iterations,
        started_time=started_time,
        completed_time=completed_time,
        task_id='task_id')

    flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=3600,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=iterations,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        get_isolate_sha_output=isolate_sha_output,
        flakiness_thus_far=flakiness_thus_far,
        previous_swarming_task_output=flake_swarming_task_output,
        master_name=master_name,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    pipeline_job = DetermineApproximatePassRatePipeline(
        determine_approximate_pass_rate_input)
    pipeline_job.start()
    self.execute_queued_tasks()
Example #5
    def testParseFlakeSwarmingTaskOutputNoOutput(self):
        task_data = {
            'created_ts': '2018-04-02T18:32:06.538220',
            'started_ts': '2018-04-02T19:32:06.538220',
            'completed_ts': '2018-04-02T20:32:06.538220',
            'task_id': 'task_id'
        }
        error = SwarmingTaskError(code=1, message='m')

        commit_position = 1000
        isolate_sha = 'sha1'
        iterations = 50
        timeout_seconds = 1200
        parameters = RunFlakeSwarmingTaskInput(builder_name='b',
                                               commit_position=commit_position,
                                               isolate_sha=isolate_sha,
                                               iterations=iterations,
                                               master_name='m',
                                               reference_build_number=123,
                                               step_name='s',
                                               test_name='t',
                                               timeout_seconds=timeout_seconds)

        expected_result = FlakeSwarmingTaskOutput(
            task_id='task_id',
            started_time=datetime(2018, 4, 2, 19, 32, 6, 538220),
            completed_time=datetime(2018, 4, 2, 20, 32, 6, 538220),
            iterations=None,
            error=error,
            pass_count=None)

        self.assertEqual(
            expected_result,
            flake_swarming._ParseFlakeSwarmingTaskOutput(
                task_data, None, error, parameters))
Example #6
def testGetPassRate(self):
    swarming_task_output = FlakeSwarmingTaskOutput(
        error=None,
        iterations=10,
        pass_count=4,
        started_time=None,
        completed_time=None,
        task_id='task_id')
    self.assertEqual(0.4, pass_rate_util.GetPassRate(swarming_task_output))
Example #7
def testGetPassRateNonexistentTest(self):
    swarming_task_output = FlakeSwarmingTaskOutput(
        error=None,
        iterations=0,
        pass_count=0,
        started_time=None,
        completed_time=None,
        task_id='task_id')
    self.assertEqual(flake_constants.PASS_RATE_TEST_NOT_FOUND,
                     pass_rate_util.GetPassRate(swarming_task_output))
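
Taken together, the two tests above describe pass_rate_util.GetPassRate as a plain ratio of passes to iterations, with a sentinel result when the test never ran. A minimal sketch of that contract; the helper and the stubbed constant value below are stand-ins, not the project's code:

# Stand-in for flake_constants.PASS_RATE_TEST_NOT_FOUND; the real constant may differ.
PASS_RATE_TEST_NOT_FOUND = -1


def get_pass_rate(pass_count, iterations):
    # Hypothetical mirror of the behavior asserted by the two tests above.
    if not iterations:
        # No iterations means the test never ran (e.g. it does not exist).
        return PASS_RATE_TEST_NOT_FOUND
    return float(pass_count) / iterations


assert get_pass_rate(4, 10) == 0.4
assert get_pass_rate(0, 0) == PASS_RATE_TEST_NOT_FOUND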
Example #8
def OnSwarmingTaskError(task_id, error):
    """Returns a FlakeSwarmingTaskOutput object representing a failed task."""
    return FlakeSwarmingTaskOutput(
        completed_time=None,
        error=error
        or SwarmingTaskError.GenerateError(code=swarming_task_error.UNKNOWN),
        iterations=None,
        pass_count=None,
        started_time=None,
        task_id=task_id)
Example #9
    def testCanFailedSwarmingTaskBeSalvaged(self):
        completed_time = datetime(2018, 1, 1)
        started_time = completed_time - timedelta(hours=1)
        error = swarming_task_error.SwarmingTaskError(code=1, message='test')
        tries = 100
        successes = 50
        task_id = 'task'
        task_output = FlakeSwarmingTaskOutput(completed_time=completed_time,
                                              error=error,
                                              iterations=tries,
                                              pass_count=successes,
                                              started_time=started_time,
                                              task_id=task_id)
        self.assertTrue(
            flake_analysis_util.CanFailedSwarmingTaskBeSalvaged(task_output))

        task_output.iterations = None
        self.assertFalse(
            flake_analysis_util.CanFailedSwarmingTaskBeSalvaged(task_output))
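
The test above implies that a failed task is salvageable only while all of its result fields are still populated; clearing iterations alone makes it unsalvageable. A hedged approximation of that check, using a throwaway namedtuple instead of the real FlakeSwarmingTaskOutput:

from collections import namedtuple
from datetime import datetime

# Minimal stand-in for FlakeSwarmingTaskOutput, for illustration only.
TaskOutput = namedtuple(
    'TaskOutput',
    ['task_id', 'started_time', 'completed_time', 'iterations', 'pass_count'])


def can_failed_task_be_salvaged(task_output):
    # Hypothetical approximation of the check the test above exercises.
    return all(value is not None for value in task_output)


salvageable = TaskOutput('task', datetime(2017, 12, 31, 23, 0, 0),
                         datetime(2018, 1, 1), 100, 50)
assert can_failed_task_be_salvaged(salvageable)
assert not can_failed_task_be_salvaged(salvageable._replace(iterations=None))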
Example #10
    def testOnSwarmingTaskError(self):
        task_id = 'task_id'
        error = SwarmingTaskError(code=1000, message='Unknown error')
        expected_result = FlakeSwarmingTaskOutput(task_id=task_id,
                                                  started_time=None,
                                                  completed_time=None,
                                                  iterations=None,
                                                  error=error,
                                                  pass_count=None)

        self.assertEqual(expected_result,
                         flake_swarming.OnSwarmingTaskError(task_id, error))
Example #11
    def testUpdateAnalysisDataPointsExistingDataPointWithErrorSalvagable(
            self, _):
        commit_position = 1000
        revision = 'r1000'
        iterations = 100
        pass_count = 50
        completed_time = datetime(2018, 1, 1, 0, 1, 0)
        error = SwarmingTaskError(code=1, message='m')
        started_time = datetime(2018, 1, 1, 0, 0, 0)
        task_id_1 = 'task_1'
        task_id_2 = 'task_2'
        build_url = 'url'
        try_job_url = None

        swarming_task_output = FlakeSwarmingTaskOutput(
            completed_time=completed_time,
            error=error,
            iterations=iterations,
            pass_count=pass_count,
            started_time=started_time,
            task_id=task_id_2)

        initial_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=60,
            error=None,
            failed_swarming_task_attempts=0,
            iterations=50,
            pass_rate=0.5,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([task_id_1]))

        expected_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=120,
            error=None,  # Only set error if no more retries.
            failed_swarming_task_attempts=0,  # Task was salvaged.
            iterations=150,
            pass_rate=0.5,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([task_id_1, task_id_2]))

        resulting_flakiness = flakiness_util.UpdateFlakiness(
            initial_flakiness, swarming_task_output)

        self.assertEqual(expected_flakiness, resulting_flakiness)
Example #12
    def testUpdateAnalysisDataPointsExistingDataPointNoError(self):
        commit_position = 1000
        revision = 'r1000'
        iterations = 100
        pass_count = 60
        failed_swarming_task_attempts = 2
        completed_time = datetime(2018, 1, 1, 1, 0, 0)
        error = None
        started_time = datetime(2018, 1, 1, 0, 0, 0)
        task_id = 'task_2'
        build_url = None
        try_job_url = 'url'

        initial_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=1800,
            error=None,
            failed_swarming_task_attempts=failed_swarming_task_attempts,
            iterations=iterations,
            pass_rate=0.5,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable(['task_1']))

        swarming_task_output = FlakeSwarmingTaskOutput(
            completed_time=completed_time,
            error=error,
            iterations=iterations,
            pass_count=pass_count,
            started_time=started_time,
            task_id=task_id)

        resulting_flakiness = flakiness_util.UpdateFlakiness(
            initial_flakiness, swarming_task_output)

        expected_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=5400,
            error=None,
            failed_swarming_task_attempts=failed_swarming_task_attempts,
            iterations=200,
            pass_rate=0.55,
            revision=revision,
            task_ids=ListOfBasestring.FromSerializable(['task_1', 'task_2']),
            try_job_url=try_job_url)

        self.assertEqual(expected_flakiness, resulting_flakiness)
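
The expected values in this test follow from a straightforward aggregation: iterations and run time add up, and the pass rate becomes the combined ratio of passes to iterations. The arithmetic implied by the test data, spelled out as a standalone sketch (not the project's implementation):

# Running totals before the new task (from initial_flakiness above).
prev_iterations, prev_pass_rate, prev_seconds = 100, 0.5, 1800

# The new swarming task: 100 iterations, 60 passes, one hour of runtime.
new_iterations, new_pass_count, new_seconds = 100, 60, 3600

combined_iterations = prev_iterations + new_iterations               # 200
combined_passes = prev_iterations * prev_pass_rate + new_pass_count  # 50 + 60 = 110
combined_pass_rate = combined_passes / combined_iterations           # 0.55
combined_seconds = prev_seconds + new_seconds                        # 5400

assert (combined_iterations, combined_pass_rate,
        combined_seconds) == (200, 0.55, 5400)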
Example #13
    def testUpdateExistingFlakinessWithErrorWithSuccessfulRun(self, _):
        commit_position = 1000
        revision = 'r1000'
        iterations = 10
        pass_count = 5
        completed_time = datetime(2018, 1, 1, 0, 1, 0)
        started_time = datetime(2018, 1, 1, 0, 0, 0)
        task_id_1 = 'task_1'
        task_id_2 = 'task_2'
        build_url = 'url'
        try_job_url = None

        swarming_task_output = FlakeSwarmingTaskOutput(
            completed_time=completed_time,
            error=None,
            iterations=iterations,
            pass_count=pass_count,
            started_time=started_time,
            task_id=task_id_2)

        # Simulate first run failing.
        initial_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=60,
            error=None,
            failed_swarming_task_attempts=1,
            iterations=0,
            pass_rate=None,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([task_id_1]))

        expected_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=120,  # 60 existing + 60 from the new task.
            error=None,
            failed_swarming_task_attempts=1,
            iterations=10,
            pass_rate=0.5,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([task_id_1, task_id_2]))

        resulting_flakiness = flakiness_util.UpdateFlakiness(
            initial_flakiness, swarming_task_output)

        self.assertEqual(expected_flakiness, resulting_flakiness)
Example #14
    def testOnSwarmingTaskTimeoutNoData(self, mocked_result):
        error = SwarmingTaskError(code=1000, message='Unknown error')
        mocked_result.return_value = None, None, error
        task_id = 'task_id'

        expected_result = FlakeSwarmingTaskOutput(task_id=task_id,
                                                  started_time=None,
                                                  completed_time=None,
                                                  iterations=None,
                                                  error=error,
                                                  pass_count=None)

        self.assertEqual(expected_result,
                         flake_swarming.OnSwarmingTaskTimeout(None, task_id))
Example #15
    def testOnSwarmingTaskTimeoutNoTaskId(self, mocked_result):
        error = SwarmingTaskError(
            code=350, message='Runner to run swarming task timed out')
        mocked_result.return_value = None, None, error

        expected_result = FlakeSwarmingTaskOutput(task_id=None,
                                                  started_time=None,
                                                  completed_time=None,
                                                  iterations=None,
                                                  error=error,
                                                  pass_count=None)

        self.assertEqual(expected_result,
                         flake_swarming.OnSwarmingTaskTimeout(None, None))
Example #16
    def testUpdateFlakinessNewFlakinessNoError(self):
        commit_position = 1000
        completed_time = datetime(2018, 1, 1, 0, 1, 0)
        error = None
        iterations = 100
        pass_count = 50
        revision = 'r1000'
        started_time = datetime(2018, 1, 1, 0, 0, 0)
        task_id = 'task_id'
        build_url = None
        try_job_url = 'url'

        swarming_task_output = FlakeSwarmingTaskOutput(
            completed_time=completed_time,
            error=error,
            iterations=iterations,
            pass_count=pass_count,
            started_time=started_time,
            task_id=task_id)

        initial_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=None,
            error=None,
            failed_swarming_task_attempts=0,
            iterations=None,
            pass_rate=None,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([]))

        expected_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=60,
            error=None,
            failed_swarming_task_attempts=0,
            iterations=iterations,
            pass_rate=0.5,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([task_id]))

        resulting_flakiness = flakiness_util.UpdateFlakiness(
            initial_flakiness, swarming_task_output)

        self.assertEqual(expected_flakiness, resulting_flakiness)
Example #17
    def testUpdateFlakinessWithErrorUnsalvagable(self, _):
        commit_position = 1000
        completed_time = datetime(2018, 1, 1, 1, 0, 0)
        error = SwarmingTaskError(code=1, message='message')
        iterations = None
        pass_count = None
        revision = 'r1000'
        started_time = datetime(2018, 1, 1, 0, 0, 0)
        task_id = 'task_id'
        build_url = 'url'
        try_job_url = None

        swarming_task_output = FlakeSwarmingTaskOutput(
            completed_time=completed_time,
            error=error,
            iterations=iterations,
            pass_count=pass_count,
            started_time=started_time,
            task_id=task_id)

        flakiness_to_update = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=0,
            error=None,
            failed_swarming_task_attempts=0,
            iterations=0,
            pass_rate=None,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([]))

        expected_flakiness = Flakiness(
            build_number=None,
            build_url=build_url,
            commit_position=commit_position,
            total_test_run_seconds=0,
            error=None,
            failed_swarming_task_attempts=1,
            iterations=0,
            pass_rate=None,
            revision=revision,
            try_job_url=try_job_url,
            task_ids=ListOfBasestring.FromSerializable([task_id]))

        resulting_flakiness = flakiness_util.UpdateFlakiness(
            flakiness_to_update, swarming_task_output)

        self.assertEqual(expected_flakiness, resulting_flakiness)
Example #18
  def testParseFlakeSwarmingTaskOutput(self, mocked_pass_fail):
    iterations = 50
    pass_count = 25
    task_data = {
        'created_ts': '2018-04-02T18:32:06.538220',
        'started_ts': '2018-04-02T19:32:06.538220',
        'completed_ts': '2018-04-02T20:32:06.538220',
        'task_id': 'task_id'
    }

    mocked_pass_fail.return_value = (iterations, pass_count)

    commit_position = 1000
    isolate_sha = 'sha1'
    iterations = 50
    timeout_seconds = 1200

    parameters = RunFlakeSwarmingTaskInput(
        builder_name='b',
        commit_position=commit_position,
        isolate_sha=isolate_sha,
        iterations=iterations,
        master_name='m',
        reference_build_number=123,
        step_name='s',
        test_name='t',
        timeout_seconds=timeout_seconds)

    expected_result = FlakeSwarmingTaskOutput(
        task_id='task_id',
        started_time=datetime(2018, 4, 2, 19, 32, 6, 538220),
        completed_time=datetime(2018, 4, 2, 20, 32, 6, 538220),
        iterations=iterations,
        error=None,
        pass_count=pass_count)

    self.assertEqual(
        expected_result,
        flake_swarming._ParseFlakeSwarmingTaskOutput(task_data, 'content', None,
                                                     parameters))
Example #19
    def testParseFlakeSwarmingTaskOutputConsolidatedResult(self, _):
        task_data = {
            'created_ts': '2018-04-02T18:32:06.538220',
            'started_ts': '2018-04-02T19:32:06.538220',
            'task_id': 'task_id',
            'state': 'COMPLETED',
            'failure': False,
            'internal_failure': False
        }

        commit_position = 1000
        isolate_sha = 'sha1'
        iterations = 50
        timeout_seconds = 1200
        parameters = RunFlakeSwarmingTaskInput(builder_name='b',
                                               commit_position=commit_position,
                                               isolate_sha=isolate_sha,
                                               iterations=iterations,
                                               master_name='m',
                                               reference_build_number=123,
                                               step_name='s',
                                               test_name='t',
                                               timeout_seconds=timeout_seconds)

        expected_result = FlakeSwarmingTaskOutput(task_id='task_id',
                                                  started_time=datetime(
                                                      2018, 4, 2, 19, 32, 6,
                                                      538220),
                                                  completed_time=None,
                                                  iterations=50,
                                                  error=None,
                                                  pass_count=0)

        self.assertEqual(
            expected_result,
            flake_swarming._ParseFlakeSwarmingTaskOutput(
                task_data, {'bla': 'bla'}, None, parameters))
Example #20
  def testCallbackImplCompleted(self, mocked_output):
    master_name = 'm'
    builder_name = 'b'
    build_number = 123
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    isolate_sha = 'sha1'
    iterations = 50
    timeout_seconds = 1200

    run_flake_swarming_task_input = RunFlakeSwarmingTaskInput(
        builder_name=builder_name,
        commit_position=commit_position,
        isolate_sha=isolate_sha,
        iterations=iterations,
        master_name=master_name,
        reference_build_number=build_number,
        step_name=step_name,
        test_name=test_name,
        timeout_seconds=timeout_seconds)

    flake_swarming_task_output = FlakeSwarmingTaskOutput(
        completed_time=datetime(2018, 4, 1, 0, 1, 0),
        error=None,
        iterations=iterations,
        pass_count=iterations,
        started_time=datetime(2018, 4, 1, 0, 0, 0),
        task_id='task_id')

    mocked_output.return_value = flake_swarming_task_output

    pipeline_job = RunFlakeSwarmingTaskPipeline(run_flake_swarming_task_input)
    result = pipeline_job.CallbackImpl(run_flake_swarming_task_input,
                                       {'task_id': 'task_id'})
    self.assertEqual((None, flake_swarming_task_output), result)
Example #21
  def testDetermineApproximatePassRateMaximumRetriesPerSwarmingTaskReached(
      self, _):
    master_name = 'm'
    builder_name = 'b'
    reference_build_number = 123
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    incoming_pass_count = 15
    iterations = 30
    incoming_pass_rate = float(incoming_pass_count / iterations)
    isolate_sha = 'sha1'
    revision = 'r1000'
    task_id = 'task_id_2'
    started_time = datetime(2018, 1, 1, 0, 0, 0)
    completed_time = datetime(2018, 1, 1, 1, 0, 0)
    build_url = 'url'
    try_job_url = None
    swarming_task_error = SwarmingTaskError(code=1, message='error')

    isolate_sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=build_url,
        isolate_sha=isolate_sha,
        try_job_url=try_job_url)

    flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=3600,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=iterations,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    expected_flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=3600,
        error=swarming_task_error,
        failed_swarming_task_attempts=0,
        iterations=iterations,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    incoming_flake_swarming_task_output = FlakeSwarmingTaskOutput(
        completed_time=completed_time,
        error=swarming_task_error,
        pass_count=incoming_pass_count,
        iterations=iterations,
        started_time=started_time,
        task_id=task_id)

    determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        flakiness_thus_far=flakiness_thus_far,
        get_isolate_sha_output=isolate_sha_output,
        master_name=master_name,
        previous_swarming_task_output=incoming_flake_swarming_task_output,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    pipeline_job = DetermineApproximatePassRatePipeline(
        determine_approximate_pass_rate_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
    self.assertEqual(expected_flakiness_thus_far.ToSerializable(),
                     pipeline_job.outputs.default.value)
Example #22
  def testDetermineApproximatePassRateNotYetConverged(self, *_):
    master_name = 'm'
    builder_name = 'b'
    reference_build_number = 123
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    incoming_pass_count = 15
    iterations_completed = 30
    expected_iterations = 15
    incoming_pass_rate = 0.5
    isolate_sha = 'sha1'
    revision = 'r1000'
    timeout_seconds = 3600
    started_time = datetime(2018, 1, 1, 0, 0, 0)
    completed_time = datetime(2018, 1, 1, 1, 0, 0)
    build_url = None
    try_job_url = 'url'

    isolate_sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=build_url,
        isolate_sha=isolate_sha,
        try_job_url=try_job_url)

    flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=timeout_seconds,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=iterations_completed,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    incoming_flake_swarming_task_output = FlakeSwarmingTaskOutput(
        error=None,
        pass_count=incoming_pass_count,
        iterations=iterations_completed,
        started_time=started_time,
        completed_time=completed_time,
        task_id='task_id_2')

    expected_aggregate_flakiness_input = AggregateFlakinessInput(
        flakiness_thus_far=flakiness_thus_far,
        incoming_swarming_task_output=incoming_flake_swarming_task_output)

    expected_aggregate_flakiness_output = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=timeout_seconds,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=45,
        pass_rate=0.5,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        flakiness_thus_far=flakiness_thus_far,
        get_isolate_sha_output=isolate_sha_output,
        master_name=master_name,
        previous_swarming_task_output=incoming_flake_swarming_task_output,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    flake_swarming_task_input = RunFlakeSwarmingTaskInput(
        builder_name=builder_name,
        commit_position=commit_position,
        isolate_sha=isolate_sha,
        iterations=expected_iterations,
        master_name=master_name,
        reference_build_number=reference_build_number,
        step_name=step_name,
        test_name=test_name,
        timeout_seconds=timeout_seconds)

    recursive_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        flakiness_thus_far=expected_aggregate_flakiness_output,
        get_isolate_sha_output=isolate_sha_output,
        master_name=master_name,
        previous_swarming_task_output=incoming_flake_swarming_task_output,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    self.MockAsynchronousPipeline(RunFlakeSwarmingTaskPipeline,
                                  flake_swarming_task_input,
                                  incoming_flake_swarming_task_output)
    self.MockSynchronousPipeline(AggregateFlakinessPipeline,
                                 expected_aggregate_flakiness_input,
                                 expected_aggregate_flakiness_output)
    self.MockGeneratorPipeline(DetermineApproximatePassRatePipelineWrapper,
                               recursive_input, None)

    pipeline_job = DetermineApproximatePassRatePipeline(
        determine_approximate_pass_rate_input)
    pipeline_job.start()
    self.execute_queued_tasks()
Example #23
  def testDetermineApproximatePassRateTestDoesNotExist(self, *_):
    master_name = 'm'
    builder_name = 'b'
    reference_build_number = 123
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    incoming_pass_count = 0
    iterations = 10
    incoming_pass_rate = flake_constants.PASS_RATE_TEST_NOT_FOUND
    isolate_sha = 'sha1'
    revision = 'r1000'
    task_id = 'task_id'
    started_time = datetime(2018, 1, 1, 0, 0, 0)
    completed_time = datetime(2018, 1, 1, 1, 0, 0)
    build_url = 'url'
    try_job_url = None

    flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=3600,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=iterations,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    isolate_sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=build_url,
        isolate_sha=isolate_sha,
        try_job_url=try_job_url)

    flake_swarming_task_output = FlakeSwarmingTaskOutput(
        error=None,
        pass_count=incoming_pass_count,
        iterations=iterations,
        task_id=task_id,
        started_time=started_time,
        completed_time=completed_time)

    determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        flakiness_thus_far=flakiness_thus_far,
        get_isolate_sha_output=isolate_sha_output,
        master_name=master_name,
        previous_swarming_task_output=flake_swarming_task_output,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    pipeline_job = DetermineApproximatePassRatePipeline(
        determine_approximate_pass_rate_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
    self.assertEqual(flakiness_thus_far.ToSerializable(),
                     pipeline_job.outputs.default.value)