Example no. 1
  def testDetermineApproximatePassRateConverged(self, *_):
    """Runs the pass-rate pipeline with a task output matching the
    accumulated flakiness (presumably the converged case -- no sub-pipelines
    are mocked, so none are expected to be spawned)."""
    started = datetime(2018, 1, 1, 0, 0, 0)
    completed = datetime(2018, 1, 1, 1, 0, 0)

    sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=None,
        isolate_sha='sha1',
        try_job_url='url')

    previous_task_output = FlakeSwarmingTaskOutput(
        error=None,
        pass_count=15,
        iterations=30,
        started_time=started,
        completed_time=completed,
        task_id='task_id')

    accumulated_flakiness = Flakiness(
        build_number=None,
        build_url='url',
        commit_position=1000,
        total_test_run_seconds=3600,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=30,
        pass_rate=0.5,
        revision='r1000',
        try_job_url=None,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    pipeline_input = DetermineApproximatePassRateInput(
        builder_name='b',
        commit_position=1000,
        get_isolate_sha_output=sha_output,
        flakiness_thus_far=accumulated_flakiness,
        previous_swarming_task_output=previous_task_output,
        master_name='m',
        reference_build_number=123,
        revision='r1000',
        step_name='s',
        test_name='t')

    pipeline_job = DetermineApproximatePassRatePipeline(pipeline_input)
    pipeline_job.start()
    self.execute_queued_tasks()
Example no. 2
  def testDetermineApproximatePassRatePipelineWrapper(self):
    """The wrapper pipeline should delegate to the wrapped pipeline,
    passing through the exact input it was given."""
    sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=None,
        isolate_sha='sha1',
        try_job_url='url')

    accumulated_flakiness = Flakiness(
        build_number=None,
        build_url=None,
        commit_position=1000,
        total_test_run_seconds=60,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=10,
        pass_rate=0.5,
        revision='r1000',
        try_job_url='url',
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    pipeline_input = DetermineApproximatePassRateInput(
        builder_name='b',
        commit_position=1000,
        get_isolate_sha_output=sha_output,
        flakiness_thus_far=accumulated_flakiness,
        master_name='m',
        previous_swarming_task_output=None,
        reference_build_number=123,
        revision='r1000',
        step_name='s',
        test_name='t')

    # The wrapper is expected to spawn the real pipeline with the same
    # input it received; the mock asserts on that input.
    self.MockGeneratorPipeline(DetermineApproximatePassRatePipeline,
                               pipeline_input, None)

    pipeline_job = DetermineApproximatePassRatePipelineWrapper(pipeline_input)
    pipeline_job.start()
    self.execute_queued_tasks()
Example no. 3
    def testGetIsolateShaForTryJobPipeline(self):
        """The pipeline should surface the isolated-test sha reported in a
        try job's result for the requested step."""
        expected_sha = 'ddd3e494ed97366f99453e5a8321b77449f26a58'
        url = ('https://ci.chromium.org/p/chromium/builders/'
               'luci.chromium.findit/findit_variable/1391')

        expected_output = GetIsolateShaOutput(
            isolate_sha=expected_sha,
            build_number=None,
            build_url=None,
            try_job_url=url).ToSerializable()

        try_job_result = FlakeTryJobResult.FromSerializable({
            'report': {
                'previously_checked_out_revision':
                    'f5dc74a384d48f1a0929dc056cadae2a0019f8b5',
                'previously_cached_revision':
                    '17b7ff2ff8e107b0e9cebcd9d6894072acc98639',
                'isolated_tests': {
                    'interactive_ui_tests': expected_sha,
                },
                'last_checked_out_revision': None,
                'metadata': {},
            },
            'url': url,
            'try_job_id': '8951342990533358272',
        })

        pipeline_input = GetIsolateShaForTryJobParameters(
            try_job_result=try_job_result,
            step_name='interactive_ui_tests')

        pipeline_job = GetIsolateShaForTryJobPipeline(pipeline_input)
        pipeline_job.start()
        self.execute_queued_tasks()

        finished_job = pipelines.pipeline.Pipeline.from_id(
            pipeline_job.pipeline_id)
        self.assertEqual(expected_output, finished_job.outputs.default.value)
    def testAnalyzeRecentFlakinessPipeline(self, mocked_commit_position,
                                           mocked_step_metadata):
        """Checks AnalyzeRecentFlakinessPipeline wires up its sub-pipelines.

        The step-metadata and commit-position helpers are mocked (injected
        as the two mock arguments); the Mock*Pipeline registrations assert
        that each sub-pipeline is spawned with the expected input and
        produces the given output.
        """
        master_name = 'm'
        builder_name = 'b'
        build_number = 123
        step_name = 's'
        test_name = 't'
        # Analysis with one existing data point at commit 999; the mocked
        # "latest" commit position below is 1000, i.e. newer than that.
        analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                              build_number, step_name,
                                              test_name)
        analysis.data_points = [DataPoint.Create(commit_position=999)]
        analysis.Save()
        isolate_sha = 'sha'
        latest_revision = 'r'
        latest_commit_position = 1000
        mocked_commit_position.return_value = (latest_commit_position,
                                               latest_revision)
        pass_rate = 0.5

        get_sha_output = GetIsolateShaOutput(isolate_sha=isolate_sha,
                                             build_url='url',
                                             try_job_url=None)

        step_metadata = StepMetadata(canonical_step_name=step_name,
                                     dimensions=None,
                                     full_step_name='s',
                                     patched=False,
                                     swarm_task_ids=None,
                                     waterfall_buildername=builder_name,
                                     waterfall_mastername=master_name,
                                     isolate_target_name=step_name)

        mocked_step_metadata.return_value = step_metadata.ToSerializable()

        expected_flakiness = Flakiness(build_url='url',
                                       commit_position=latest_commit_position,
                                       revision=latest_revision,
                                       pass_rate=pass_rate)

        analyze_recent_flakiness_input = AnalyzeRecentFlakinessInput(
            analysis_urlsafe_key=analysis.key.urlsafe())

        # Expected inputs for the three sub-pipelines spawned by the
        # pipeline under test.
        expected_isolate_sha_input = GetIsolateShaForCommitPositionParameters(
            analysis_urlsafe_key=analysis.key.urlsafe(),
            commit_position=latest_commit_position,
            dimensions=None,
            revision=latest_revision,
            step_metadata=step_metadata,
            upper_bound_build_number=analysis.build_number)

        expected_pass_rate_input = DetermineApproximatePassRateInput(
            builder_name=analysis.builder_name,
            commit_position=latest_commit_position,
            flakiness_thus_far=None,
            get_isolate_sha_output=get_sha_output,
            master_name=analysis.master_name,
            previous_swarming_task_output=None,
            reference_build_number=analysis.build_number,
            revision=latest_revision,
            step_name=analysis.step_name,
            test_name=analysis.test_name)

        expected_save_flakiness_verification_input = SaveFlakinessVerificationInput(
            analysis_urlsafe_key=analysis.key.urlsafe(),
            flakiness=expected_flakiness)

        self.MockGeneratorPipeline(GetIsolateShaForCommitPositionPipeline,
                                   expected_isolate_sha_input, get_sha_output)

        self.MockGeneratorPipeline(DetermineApproximatePassRatePipeline,
                                   expected_pass_rate_input,
                                   expected_flakiness)

        self.MockGeneratorPipeline(SaveFlakinessVerificationPipeline,
                                   expected_save_flakiness_verification_input,
                                   None)

        pipeline_job = AnalyzeRecentFlakinessPipeline(
            analyze_recent_flakiness_input)
        pipeline_job.start()
        self.execute_queued_tasks()
        # The mocked helpers must have been consulted with the analysis'
        # identifying coordinates.
        mocked_step_metadata.assert_called_with(master_name, builder_name,
                                                build_number, step_name)
        mocked_commit_position.assert_called_with(master_name, builder_name,
                                                  step_name)
Example no. 5
  def testDetermineApproximatePassRateNotYetConverged(self, *_):
    """Checks the recursion path when the pass rate has not yet converged.

    The pipeline is expected to schedule another swarming task, aggregate
    the new results with the flakiness accumulated so far, and then recurse
    via the wrapper pipeline with the aggregated flakiness.
    """
    master_name = 'm'
    builder_name = 'b'
    reference_build_number = 123
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    incoming_pass_count = 15
    iterations_completed = 30
    expected_iterations = 15
    incoming_pass_rate = 0.5
    isolate_sha = 'sha1'
    revision = 'r1000'
    timeout_seconds = 3600
    started_time = datetime(2018, 1, 1, 0, 0, 0)
    completed_time = datetime(2018, 1, 1, 1, 0, 0)
    build_url = None
    try_job_url = 'url'

    isolate_sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=build_url,
        isolate_sha=isolate_sha,
        try_job_url=try_job_url)

    # Flakiness accumulated from earlier sampling rounds.
    flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=timeout_seconds,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=iterations_completed,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    # Result of the swarming task the mocked RunFlakeSwarmingTaskPipeline
    # will report.
    incoming_flake_swarming_task_output = FlakeSwarmingTaskOutput(
        error=None,
        pass_count=incoming_pass_count,
        iterations=iterations_completed,
        started_time=started_time,
        completed_time=completed_time,
        task_id='task_id_2')

    expected_aggregate_flakiness_input = AggregateFlakinessInput(
        flakiness_thus_far=flakiness_thus_far,
        incoming_swarming_task_output=incoming_flake_swarming_task_output)

    # Aggregated flakiness the mocked AggregateFlakinessPipeline returns;
    # these values are test fixtures, not computed by the test itself.
    expected_aggregate_flakiness_output = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=timeout_seconds,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=45,
        pass_rate=0.5,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        flakiness_thus_far=flakiness_thus_far,
        get_isolate_sha_output=isolate_sha_output,
        master_name=master_name,
        previous_swarming_task_output=incoming_flake_swarming_task_output,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    # Input the pipeline is expected to build for the next swarming run.
    flake_swarming_task_input = RunFlakeSwarmingTaskInput(
        builder_name=builder_name,
        commit_position=commit_position,
        isolate_sha=isolate_sha,
        iterations=expected_iterations,
        master_name=master_name,
        reference_build_number=reference_build_number,
        step_name=step_name,
        test_name=test_name,
        timeout_seconds=timeout_seconds)

    # The recursion should carry the aggregated flakiness forward.
    recursive_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        flakiness_thus_far=expected_aggregate_flakiness_output,
        get_isolate_sha_output=isolate_sha_output,
        master_name=master_name,
        previous_swarming_task_output=incoming_flake_swarming_task_output,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    self.MockAsynchronousPipeline(RunFlakeSwarmingTaskPipeline,
                                  flake_swarming_task_input,
                                  incoming_flake_swarming_task_output)
    self.MockSynchronousPipeline(AggregateFlakinessPipeline,
                                 expected_aggregate_flakiness_input,
                                 expected_aggregate_flakiness_output)
    self.MockGeneratorPipeline(DetermineApproximatePassRatePipelineWrapper,
                               recursive_input, None)

    pipeline_job = DetermineApproximatePassRatePipeline(
        determine_approximate_pass_rate_input)
    pipeline_job.start()
    self.execute_queued_tasks()
Example no. 6
  def testDetermineApproximatePassRateTestDoesNotExist(self, *_):
    """When the pass rate marks the test as not found, the pipeline should
    emit the accumulated flakiness unchanged."""
    started = datetime(2018, 1, 1, 0, 0, 0)
    completed = datetime(2018, 1, 1, 1, 0, 0)

    accumulated_flakiness = Flakiness(
        build_number=None,
        build_url='url',
        commit_position=1000,
        total_test_run_seconds=3600,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=10,
        pass_rate=flake_constants.PASS_RATE_TEST_NOT_FOUND,
        revision='r1000',
        try_job_url=None,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url='url',
        isolate_sha='sha1',
        try_job_url=None)

    previous_task_output = FlakeSwarmingTaskOutput(
        error=None,
        pass_count=0,
        iterations=10,
        task_id='task_id',
        started_time=started,
        completed_time=completed)

    pipeline_input = DetermineApproximatePassRateInput(
        builder_name='b',
        commit_position=1000,
        flakiness_thus_far=accumulated_flakiness,
        get_isolate_sha_output=sha_output,
        master_name='m',
        previous_swarming_task_output=previous_task_output,
        reference_build_number=123,
        revision='r1000',
        step_name='s',
        test_name='t')

    pipeline_job = DetermineApproximatePassRatePipeline(pipeline_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    # The default output should be the flakiness that was passed in.
    finished_job = pipelines.pipeline.Pipeline.from_id(
        pipeline_job.pipeline_id)
    self.assertEqual(accumulated_flakiness.ToSerializable(),
                     finished_job.outputs.default.value)
Example no. 7
  def testDetermineApproximatePassRateMaximumRetriesPerSwarmingTaskReached(
      self, _):
    """Checks the error is attached when swarming retries are exhausted.

    The pipeline is expected to output the accumulated flakiness with the
    incoming task's error recorded, rather than scheduling further tasks.
    """
    master_name = 'm'
    builder_name = 'b'
    reference_build_number = 123
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    incoming_pass_count = 15
    iterations = 30
    # Fix: the original `float(incoming_pass_count / iterations)` performs
    # the division on ints first, which truncates to 0 under Python 2 (this
    # codebase targets Python 2 -- see the `unicode` usages elsewhere in the
    # file) and only then converts 0 to 0.0. Convert the denominator before
    # dividing so the intended 0.5 is produced.
    incoming_pass_rate = incoming_pass_count / float(iterations)
    isolate_sha = 'sha1'
    revision = 'r1000'
    task_id = 'task_id_2'
    started_time = datetime(2018, 1, 1, 0, 0, 0)
    completed_time = datetime(2018, 1, 1, 1, 0, 0)
    build_url = 'url'
    try_job_url = None
    swarming_task_error = SwarmingTaskError(code=1, message='error')

    isolate_sha_output = GetIsolateShaOutput(
        build_number=None,
        build_url=build_url,
        isolate_sha=isolate_sha,
        try_job_url=try_job_url)

    flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=3600,
        error=None,
        failed_swarming_task_attempts=0,
        iterations=iterations,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    # Identical to flakiness_thus_far except the incoming task's error is
    # expected to be recorded on the output.
    expected_flakiness_thus_far = Flakiness(
        build_number=None,
        build_url=build_url,
        commit_position=commit_position,
        total_test_run_seconds=3600,
        error=swarming_task_error,
        failed_swarming_task_attempts=0,
        iterations=iterations,
        pass_rate=incoming_pass_rate,
        revision=revision,
        try_job_url=try_job_url,
        task_ids=ListOfBasestring.FromSerializable(['task_id_1']))

    incoming_flake_swarming_task_output = FlakeSwarmingTaskOutput(
        completed_time=completed_time,
        error=swarming_task_error,
        pass_count=incoming_pass_count,
        iterations=iterations,
        started_time=started_time,
        task_id=task_id)

    determine_approximate_pass_rate_input = DetermineApproximatePassRateInput(
        builder_name=builder_name,
        commit_position=commit_position,
        flakiness_thus_far=flakiness_thus_far,
        get_isolate_sha_output=isolate_sha_output,
        master_name=master_name,
        previous_swarming_task_output=incoming_flake_swarming_task_output,
        reference_build_number=reference_build_number,
        revision=revision,
        step_name=step_name,
        test_name=test_name)

    pipeline_job = DetermineApproximatePassRatePipeline(
        determine_approximate_pass_rate_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
    self.assertEqual(expected_flakiness_thus_far.ToSerializable(),
                     pipeline_job.outputs.default.value)
Example no. 8
    def testGetIsolateShaForCommitPositionPipelineFallbackBuildLevel(
            self, _, mocked_reference_build, mocked_url, mocked_build_info):
        """Resolves the isolate sha at the build level via the mocked
        GetIsolateShaForBuildPipeline (the fallback path)."""
        master_name = 'm'
        builder_name = 'b'
        build_number = 100
        step_name = 's'
        build_url = 'url'

        step_metadata = StepMetadata(canonical_step_name=None,
                                     dimensions=None,
                                     full_step_name=None,
                                     isolate_target_name='browser_tests',
                                     patched=True,
                                     swarm_task_ids=None,
                                     waterfall_buildername=None,
                                     waterfall_mastername=None)

        mocked_url.return_value = build_url

        expected_output = GetIsolateShaOutput(isolate_sha='sha1',
                                              build_number=build_number,
                                              build_url=build_url,
                                              try_job_url=None)

        # A build whose commit position exactly matches the request.
        build = BuildInfo(master_name, builder_name, build_number)
        build.commit_position = 1000
        mocked_build_info.return_value = (None, build)
        mocked_reference_build.return_value = build

        analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                              build_number, step_name, 't')
        analysis.Save()

        build_sha_input = GetIsolateShaForBuildParameters(
            master_name=master_name,
            builder_name=builder_name,
            build_number=build_number,
            url=build_url,
            step_name=step_name)

        sha_input = GetIsolateShaForCommitPositionParameters(
            analysis_urlsafe_key=unicode(analysis.key.urlsafe()),
            commit_position=1000,
            dimensions=ListOfBasestring.FromSerializable([]),
            revision='r1000',
            step_metadata=step_metadata,
            upper_bound_build_number=analysis.build_number)

        self.MockSynchronousPipeline(GetIsolateShaForBuildPipeline,
                                     build_sha_input, expected_output)

        pipeline_job = GetIsolateShaForCommitPositionPipeline(sha_input)
        pipeline_job.start()
        self.execute_queued_tasks()

        finished_job = pipelines.pipeline.Pipeline.from_id(
            pipeline_job.pipeline_id)
        self.assertEqual(expected_output.ToSerializable(),
                         finished_job.outputs.default.value)
Example no. 9
    def testGetIsolateShaForCommitPositionPipelineMatchingTarget(
            self, mocked_reference_build):
        """Resolves the sha via an IsolatedTarget stored for the reference
        build's parent builder at the requested commit position."""
        master_name = 'm'
        builder_name = 'b'
        parent_mastername = 'p_m'
        parent_buildername = 'p_b'
        build_number = 100
        commit_position = 1000
        revision = 'r1000'
        isolate_target_name = 'browser_tests'

        expected_output = GetIsolateShaOutput(isolate_sha='sha1',
                                              build_number=None,
                                              build_url='url',
                                              try_job_url=None)

        analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                              build_number, 's', 't')
        analysis.Save()

        # The reference build points at a parent builder, which is what the
        # stored IsolatedTarget below is keyed on.
        build = BuildInfo(master_name, builder_name, build_number)
        build.commit_position = commit_position
        build.parent_mastername = parent_mastername
        build.parent_buildername = parent_buildername
        mocked_reference_build.return_value = build

        isolated_target = IsolatedTarget.Create(
            123, 'chromium', 'ci', parent_mastername, parent_buildername,
            'chromium.googlesource.com', 'chromium/src', 'refs/heads/master',
            '', isolate_target_name, 'isolated_hash', commit_position,
            revision)
        isolated_target.put()

        step_metadata = StepMetadata(canonical_step_name=None,
                                     dimensions=None,
                                     full_step_name=None,
                                     isolate_target_name=isolate_target_name,
                                     patched=True,
                                     swarm_task_ids=None,
                                     waterfall_buildername=None,
                                     waterfall_mastername=None)

        sha_input = GetIsolateShaForCommitPositionParameters(
            analysis_urlsafe_key=unicode(analysis.key.urlsafe()),
            commit_position=commit_position,
            dimensions=ListOfBasestring.FromSerializable([]),
            revision=revision,
            step_metadata=step_metadata,
            upper_bound_build_number=analysis.build_number)

        target_input = GetIsolateShaForTargetInput(
            isolated_target_urlsafe_key=isolated_target.key.urlsafe())

        self.MockSynchronousPipeline(GetIsolateShaForTargetPipeline,
                                     target_input, expected_output)

        pipeline_job = GetIsolateShaForCommitPositionPipeline(sha_input)
        pipeline_job.start()
        self.execute_queued_tasks()

        finished_job = pipelines.pipeline.Pipeline.from_id(
            pipeline_job.pipeline_id)
        self.assertEqual(expected_output.ToSerializable(),
                         finished_job.outputs.default.value)
Example no. 10
  def testAnalyzeFlakePipelineCanStartAnalysisImmediately(self, _):
    """Checks the full fan-out of AnalyzeFlakePipeline for one iteration.

    Mocks each sub-pipeline (sha resolution, pass-rate determination, data
    point update, next-commit selection, and the recursive call) and
    verifies the pipeline runs to completion; the Mock*Pipeline
    registrations assert each is spawned with the expected input.
    """
    analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
    analysis.Save()

    start_commit_position = 1000
    start_revision = 'r1000'
    isolate_sha = 'sha1'
    next_commit_position = 999
    pass_rate = 0.5
    build_url = 'url'
    try_job_url = None

    get_sha_output = GetIsolateShaOutput(
        isolate_sha=isolate_sha, build_url=build_url, try_job_url=try_job_url)

    step_metadata = StepMetadata(
        canonical_step_name='s',
        dimensions=None,
        full_step_name='s',
        patched=False,
        swarm_task_ids=None,
        waterfall_buildername='b',
        waterfall_mastername='w',
        isolate_target_name='s')

    # Flakiness the mocked pass-rate pipeline reports for the starting
    # commit position.
    expected_flakiness = Flakiness(
        build_url=build_url,
        commit_position=start_commit_position,
        revision=start_revision,
        pass_rate=pass_rate)

    analyze_flake_input = AnalyzeFlakeInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        analyze_commit_position_parameters=NextCommitPositionOutput(
            next_commit_id=CommitID(
                commit_position=start_commit_position, revision=start_revision),
            culprit_commit_id=None),
        commit_position_range=IntRange(lower=None, upper=None),
        dimensions=ListOfBasestring.FromSerializable(['os:testOS']),
        manually_triggered=False,
        rerun=False,
        retries=0,
        step_metadata=step_metadata)

    expected_isolate_sha_input = GetIsolateShaForCommitPositionParameters(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        commit_position=start_commit_position,
        dimensions=ListOfBasestring.FromSerializable(['os:testOS']),
        revision=start_revision,
        step_metadata=step_metadata,
        upper_bound_build_number=analysis.build_number)

    expected_pass_rate_input = DetermineApproximatePassRateInput(
        builder_name=analysis.builder_name,
        commit_position=start_commit_position,
        flakiness_thus_far=None,
        get_isolate_sha_output=get_sha_output,
        master_name=analysis.master_name,
        previous_swarming_task_output=None,
        reference_build_number=analysis.build_number,
        revision=start_revision,
        step_name=analysis.step_name,
        test_name=analysis.test_name)

    expected_update_data_points_input = UpdateFlakeAnalysisDataPointsInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        flakiness=expected_flakiness)

    expected_next_commit_position_input = NextCommitPositionInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        commit_position_range=IntRange(lower=None, upper=None),
        step_metadata=step_metadata)

    # The mocked next-commit pipeline points at commit 999, so the analysis
    # is expected to recurse with that as the next position to sample.
    next_commit_id = CommitID(
        commit_position=next_commit_position, revision='r999')
    expected_next_commit_position_output = NextCommitPositionOutput(
        next_commit_id=next_commit_id, culprit_commit_id=None)

    expected_recursive_analyze_flake_input = AnalyzeFlakeInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        analyze_commit_position_parameters=expected_next_commit_position_output,
        commit_position_range=IntRange(lower=None, upper=None),
        dimensions=ListOfBasestring.FromSerializable(['os:testOS']),
        manually_triggered=False,
        rerun=False,
        retries=0,
        step_metadata=step_metadata)

    self.MockGeneratorPipeline(GetIsolateShaForCommitPositionPipeline,
                               expected_isolate_sha_input, get_sha_output)

    self.MockGeneratorPipeline(DetermineApproximatePassRatePipeline,
                               expected_pass_rate_input, expected_flakiness)

    self.MockSynchronousPipeline(UpdateFlakeAnalysisDataPointsPipeline,
                                 expected_update_data_points_input, None)

    self.MockSynchronousPipeline(NextCommitPositionPipeline,
                                 expected_next_commit_position_input,
                                 expected_next_commit_position_output)

    self.MockGeneratorPipeline(RecursiveAnalyzeFlakePipeline,
                               expected_recursive_analyze_flake_input, None)

    pipeline_job = AnalyzeFlakePipeline(analyze_flake_input)
    pipeline_job.start()
    self.execute_queued_tasks()