  def testNextCommitPositionPipelineNotReproducible(self, mock_next_commit):
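    """Tests that the pipeline finds nothing when the flake is not reproducible.

    With the next-commit calculation mocked to return (None, None), the
    pipeline should finish without a culprit or a next commit to analyze.
    """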
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    start_commit_position = 1000

    # Simulate the bisect finding no next commit and no culprit.
    mock_next_commit.return_value = (None, None)

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.Save()

    next_commit_position_input = NextCommitPositionInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        commit_position_range=IntRange(lower=None, upper=start_commit_position),
        step_metadata=None)

    pipeline_job = NextCommitPositionPipeline(next_commit_position_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
    next_commit_position_output = pipeline_job.outputs.default.value

    self.assertFalse(pipeline_job.was_aborted)
    self.assertIsNone(next_commit_position_output['culprit_commit_id'])
    self.assertIsNone(next_commit_position_output['next_commit_id'])

  def testNextCommitPositionPipelineWithHeuristicResults(
      self, mock_heuristic_result, mock_run_heuristic, mock_next_commit):
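    """Tests that a heuristic-suggested suspect overrides the calculated one.

    The bisect calculation suggests commit position 999, but the heuristic
    result at commit position 94 should be returned as the next commit.
    """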
    master_name = 'm'
    builder_name = 'b'
    build_number = 105
    step_name = 's'
    test_name = 't'
    start_commit_position = 1000
    suspect_commit_position = 95
    expected_next_commit_id = CommitID(commit_position=94, revision='r94')

    suspect = FlakeCulprit.Create('repo', 'revision', suspect_commit_position)
    suspect.commit_position = suspect_commit_position
    suspect.put()

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.suspect_urlsafe_keys.append(suspect.key.urlsafe())
    analysis.put()

    # Heuristic analysis should not be re-run, and a heuristic-suggested
    # commit is available to take precedence over the calculated one.
    mock_run_heuristic.return_value = False
    mock_heuristic_result.return_value = expected_next_commit_id

    # The bisect calculation would otherwise suggest commit position 999.
    calculated_next_commit_id = CommitID(commit_position=999, revision='r999')
    mock_next_commit.return_value = (calculated_next_commit_id, None)

    next_commit_position_input = NextCommitPositionInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        commit_position_range=IntRange(lower=None, upper=start_commit_position),
        step_metadata=None)

    pipeline_job = NextCommitPositionPipeline(next_commit_position_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
    next_commit_position_output = pipeline_job.outputs.default.value

    self.assertFalse(pipeline_job.was_aborted)
    self.assertIsNone(next_commit_position_output['culprit_commit_id'])
    self.assertEqual(expected_next_commit_id.ToSerializable(),
                     next_commit_position_output['next_commit_id'])
    mock_heuristic_result.assert_called_once_with(analysis.key.urlsafe())

    def RunImpl(self, parameters):
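        """Acts on the next calculated commit position for a flake analysis.

        When there is no further commit position to analyze, finalizes the
        analysis: records a not-found result, or creates a FlakeCulprit,
        associates it with the analysis, and triggers auto actions and event
        reporting. Otherwise, measures the pass rate at the next commit
        position and recurses, or reschedules itself with a delay when no
        bot is available to start the analysis immediately.
        """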
        analysis_urlsafe_key = parameters.analysis_urlsafe_key
        analysis = ndb.Key(urlsafe=analysis_urlsafe_key).get()
        assert analysis, 'Cannot retrieve analysis entry from datastore'
        if analysis.request_time:
            # Track the total time since the analysis was requested.
            monitoring.pipeline_times.increment_by(
                int((time_util.GetUTCNow() -
                     analysis.request_time).total_seconds()),
                {'type': 'flake'})

        commit_position_parameters = parameters.analyze_commit_position_parameters
        commit_position_to_analyze = (
            commit_position_parameters.next_commit_id.commit_position
            if commit_position_parameters.next_commit_id else None)

        if commit_position_to_analyze is None:
            # No further commit position to analyze; the analysis is complete.
            culprit_commit_position = (
                commit_position_parameters.culprit_commit_id.commit_position
                if commit_position_parameters.culprit_commit_id else None)

            if culprit_commit_position is None:
                analysis.LogInfo('Analysis completed with no findings')
                analysis.Update(
                    result_status=result_status.NOT_FOUND_UNTRIAGED)

                if not parameters.rerun:  # pragma: no branch
                    # Don't double report for reruns.
                    yield ReportAnalysisEventPipeline(
                        self.CreateInputObjectInstance(
                            ReportEventInput,
                            analysis_urlsafe_key=analysis_urlsafe_key))
                return

            # Create a FlakeCulprit.
            culprit_revision = commit_position_parameters.culprit_commit_id.revision
            assert culprit_revision, 'No revision for commit {}'.format(
                culprit_commit_position)
            culprit = flake_analysis_util.UpdateCulprit(
                analysis_urlsafe_key, culprit_revision,
                culprit_commit_position)
            confidence_score = confidence_score_util.CalculateCulpritConfidenceScore(
                analysis, culprit_commit_position)

            # Associate FlakeCulprit with the analysis.
            analysis.Update(confidence_in_culprit=confidence_score,
                            culprit_urlsafe_key=culprit.key.urlsafe(),
                            result_status=result_status.FOUND_UNTRIAGED)

            with pipeline.InOrder():
                if flake_analysis_util.ShouldTakeAutoAction(
                        analysis, parameters.rerun):  # pragma: no branch

                    # Check recent flakiness.
                    yield AnalyzeRecentFlakinessPipeline(
                        self.CreateInputObjectInstance(
                            AnalyzeRecentFlakinessInput,
                            analysis_urlsafe_key=analysis_urlsafe_key))

                    # Perform auto actions after checking recent flakiness.
                    yield _PerformAutoActionsPipeline(
                        self.CreateInputObjectInstance(
                            _PerformAutoActionsInput,
                            analysis_urlsafe_key=analysis_urlsafe_key))

                if not parameters.rerun:  # pragma: no branch
                    # Report events to BQ.
                    yield ReportAnalysisEventPipeline(
                        self.CreateInputObjectInstance(
                            ReportEventInput,
                            analysis_urlsafe_key=analysis_urlsafe_key))
                return

        revision_to_analyze = commit_position_parameters.next_commit_id.revision
        assert revision_to_analyze, 'No revision for commit {}'.format(
            commit_position_to_analyze)

        # Check for bot availability. If this is a user rerun or the maximum retries
        # have been reached, continue regardless of bot availability.
        if flake_analysis_util.CanStartAnalysisImmediately(
                parameters.step_metadata, parameters.retries,
                parameters.manually_triggered):

            # Set analysis status to RUNNING if not already.
            analysis.InitializeRunning()

            analysis.LogInfo('Analyzing commit position {}'.format(
                commit_position_to_analyze))

            with pipeline.InOrder():
                # Determine isolate sha to run swarming tasks on.
                upper_bound_build_number = analysis.GetLowestUpperBoundBuildNumber(
                    commit_position_to_analyze)
                get_sha_output = yield GetIsolateShaForCommitPositionPipeline(
                    self.CreateInputObjectInstance(
                        GetIsolateShaForCommitPositionParameters,
                        analysis_urlsafe_key=analysis_urlsafe_key,
                        commit_position=commit_position_to_analyze,
                        dimensions=parameters.dimensions,
                        step_metadata=parameters.step_metadata,
                        revision=revision_to_analyze,
                        upper_bound_build_number=upper_bound_build_number))

                # Determine approximate pass rate at the commit position/isolate sha.
                flakiness = yield DetermineApproximatePassRatePipeline(
                    self.CreateInputObjectInstance(
                        DetermineApproximatePassRateInput,
                        builder_name=analysis.builder_name,
                        commit_position=commit_position_to_analyze,
                        flakiness_thus_far=None,
                        get_isolate_sha_output=get_sha_output,
                        master_name=analysis.master_name,
                        previous_swarming_task_output=None,
                        reference_build_number=analysis.build_number,
                        revision=revision_to_analyze,
                        step_name=analysis.step_name,
                        test_name=analysis.test_name))

                yield UpdateFlakeAnalysisDataPointsPipeline(
                    self.CreateInputObjectInstance(
                        UpdateFlakeAnalysisDataPointsInput,
                        analysis_urlsafe_key=analysis_urlsafe_key,
                        flakiness=flakiness))

                # Determine the next commit position to analyze.
                next_commit_position_output = yield NextCommitPositionPipeline(
                    self.CreateInputObjectInstance(
                        NextCommitPositionInput,
                        analysis_urlsafe_key=analysis_urlsafe_key,
                        commit_position_range=parameters.commit_position_range,
                        step_metadata=parameters.step_metadata))

                # Recurse on the new commit position.
                yield RecursiveAnalyzeFlakePipeline(
                    self.CreateInputObjectInstance(
                        AnalyzeFlakeInput,
                        analysis_urlsafe_key=analysis_urlsafe_key,
                        analyze_commit_position_parameters=
                        next_commit_position_output,
                        commit_position_range=parameters.commit_position_range,
                        dimensions=parameters.dimensions,
                        manually_triggered=parameters.manually_triggered,
                        rerun=parameters.rerun,
                        retries=0,
                        step_metadata=parameters.step_metadata))
        else:
            # Can't start the analysis just yet; reschedule after a delay.
            parameters.retries += 1
            delay_seconds = flake_analysis_util.CalculateDelaySecondsBetweenRetries(
                analysis, parameters.retries, parameters.manually_triggered)
            delay = yield DelayPipeline(delay_seconds)

            with pipeline.After(delay):
                yield RecursiveAnalyzeFlakePipeline(parameters)

  def testNextCommitPositionPipelineContinueAnalysisFallbackToBuildInfo(
      self, mock_update, mock_heuristic, mock_targets, mock_bounding_builds,
      mock_next_commit, mock_reference_build):
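    """Tests bisecting by build info when IsolatedTargets cannot be found.

    The IsolatedTarget lookup is mocked to raise, so the pipeline should
    fall back to locating bounding builds via build info and return the
    lower bound build's commit as the next commit to analyze.
    """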
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    start_commit_position = 1000
    expected_next_commit_id = CommitID(commit_position=990, revision='r990')

    mock_heuristic.return_value = False

    calculated_next_commit_id = CommitID(commit_position=999, revision='r999')
    mock_next_commit.return_value = (calculated_next_commit_id, None)

    target_name = 'browser_tests'
    step_metadata = StepMetadata(
        canonical_step_name=None,
        dimensions=None,
        full_step_name=None,
        isolate_target_name=target_name,
        patched=True,
        swarm_task_ids=None,
        waterfall_buildername=None,
        waterfall_mastername=None)

    reference_build = BuildInfo(master_name, builder_name, build_number)
    reference_build.commit_position = start_commit_position
    mock_reference_build.return_value = reference_build

    lower_bound_build = BuildInfo(master_name, builder_name, build_number - 1)
    lower_bound_build.commit_position = expected_next_commit_id.commit_position
    lower_bound_build.chromium_revision = expected_next_commit_id.revision
    upper_bound_build = BuildInfo(master_name, builder_name, build_number)
    upper_bound_build.commit_position = start_commit_position
    mock_bounding_builds.return_value = (lower_bound_build, upper_bound_build)

    # Force the IsolatedTarget lookup to fail so the pipeline falls back to
    # locating bounding builds via build info.
    mock_targets.side_effect = AssertionError()

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.data_points = [
        DataPoint.Create(commit_position=start_commit_position)
    ]
    analysis.Save()

    next_commit_position_input = NextCommitPositionInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        commit_position_range=IntRange(lower=None, upper=start_commit_position),
        step_metadata=step_metadata)

    pipeline_job = NextCommitPositionPipeline(next_commit_position_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    mock_update.assert_called_once_with(lower_bound_build, upper_bound_build)
    pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
    next_commit_position_output = pipeline_job.outputs.default.value

    self.assertFalse(pipeline_job.was_aborted)
    self.assertIsNone(next_commit_position_output['culprit_commit_id'])
    self.assertEqual(expected_next_commit_id.ToSerializable(),
                     next_commit_position_output['next_commit_id'])

  def testNextCommitPositionPipelineContinueAnalysis(
      self, mock_reference_build, mock_heuristic, mock_next_commit,
      mock_bound_commits):
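    """Tests narrowing the next commit position using IsolatedTargets.

    Bounding IsolatedTargets surround the calculated next commit position
    (999), so the pipeline should return the lower bound target's commit
    position (990) instead.
    """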
    master_name = 'm'
    builder_name = 'b'
    parent_mastername = 'p_m'
    parent_buildername = 'p_b'
    build_number = 100
    build_id = 10000
    step_name = 's'
    test_name = 't'
    start_commit_position = 1000
    expected_next_commit_id = CommitID(commit_position=990, revision='r990')

    reference_build = BuildInfo(master_name, builder_name, build_number)
    reference_build.commit_position = start_commit_position
    reference_build.parent_mastername = parent_mastername
    reference_build.parent_buildername = parent_buildername
    mock_reference_build.return_value = reference_build
    mock_heuristic.return_value = False

    calculated_next_commit_id = CommitID(commit_position=999, revision='r999')
    mock_next_commit.return_value = (calculated_next_commit_id, None)

    target_name = 'browser_tests'
    step_metadata = StepMetadata(
        canonical_step_name=None,
        dimensions=None,
        full_step_name=None,
        isolate_target_name=target_name,
        patched=True,
        swarm_task_ids=None,
        waterfall_buildername=None,
        waterfall_mastername=None)

    luci_name = 'chromium'
    bucket_name = 'ci'
    gitiles_host = 'chromium.googlesource.com'
    gitiles_project = 'chromium/src'
    gitiles_ref = 'refs/heads/master'
    gerrit_patch = ''

    lower_bound_target = IsolatedTarget.Create(
        build_id - 1, luci_name, bucket_name, parent_mastername,
        parent_buildername, gitiles_host, gitiles_project, gitiles_ref,
        gerrit_patch, target_name, 'hash_1',
        expected_next_commit_id.commit_position, None)
    lower_bound_target.put()

    upper_bound_target = IsolatedTarget.Create(
        build_id, luci_name, bucket_name, parent_mastername, parent_buildername,
        gitiles_host, gitiles_project, gitiles_ref, gerrit_patch, target_name,
        'hash_2', start_commit_position, None)
    upper_bound_target.put()
    # Map the bounding IsolatedTargets back to their commit IDs.
    mock_bound_commits.return_value = (
        expected_next_commit_id,
        CommitID(commit_position=start_commit_position, revision='r1000'))

    analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                          build_number, step_name, test_name)
    analysis.data_points = [
        DataPoint.Create(commit_position=start_commit_position)
    ]
    analysis.Save()

    next_commit_position_input = NextCommitPositionInput(
        analysis_urlsafe_key=analysis.key.urlsafe(),
        commit_position_range=IntRange(lower=None, upper=start_commit_position),
        step_metadata=step_metadata)

    pipeline_job = NextCommitPositionPipeline(next_commit_position_input)
    pipeline_job.start()
    self.execute_queued_tasks()

    pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
    next_commit_position_output = pipeline_job.outputs.default.value

    self.assertFalse(pipeline_job.was_aborted)
    self.assertIsNone(next_commit_position_output['culprit_commit_id'])
    self.assertEqual(expected_next_commit_id.ToSerializable(),
                     next_commit_position_output['next_commit_id'])
    mock_bound_commits.assert_called_once_with(
        analysis.data_points, lower_bound_target, upper_bound_target)