Example #1
0
  def testGet(self):
    """FlakeTryJob.Get returns the entity stored under the same key fields."""
    master = 'm'
    builder = 'b'
    step = 's'
    test = 't'
    revision = 'a1b2c3'
    job_id = 'try_job_id'

    # Persist a try job with one recorded try job id.
    stored = FlakeTryJob.Create(master, builder, step, test, revision)
    stored.try_job_ids = [job_id]
    stored.put()

    # Fetching by the identical key fields must yield the same data.
    fetched = FlakeTryJob.Get(master, builder, step, test, revision)
    self.assertEqual([job_id], fetched.try_job_ids)
Example #2
0
    def testScheduleFlakeTryJob(self, mock_module):
        """Running the pipeline triggers a try job and records its metadata."""
        master_name = 'm'
        builder_name = 'b'
        build_number = 1
        step_name = 's'
        test_name = 't'
        git_hash = 'a1b2c3d4'
        build_id = '1'
        url = 'url'
        analysis_key = ndb.Key('key', 1)

        # Parent build whose properties are forwarded to the try job request.
        build = WfBuild.Create(master_name, builder_name, build_number)
        build.data = {
            'properties': {
                'parent_mastername': 'pm',
                'parent_buildername': 'pb'
            }
        }
        build.put()

        # Buildbucket reports the requested try job as scheduled.
        scheduled = {
            'build': {
                'id': build_id,
                'url': url,
                'status': 'SCHEDULED',
            }
        }
        mock_module.TriggerTryJobs.return_value = [
            (None, buildbucket_client.BuildbucketBuild(scheduled['build']))
        ]

        FlakeTryJob.Create(master_name, builder_name, step_name, test_name,
                           git_hash).put()

        try_job_id = ScheduleFlakeTryJobPipeline().run(
            master_name, builder_name, step_name, test_name, git_hash,
            analysis_key.urlsafe(), None, None)

        try_job = FlakeTryJob.Get(master_name, builder_name, step_name,
                                  test_name, git_hash)
        try_job_data = FlakeTryJobData.Get(build_id)

        # The triggered build id must be returned, appended to the try job's
        # results/ids, and linked from the FlakeTryJobData entity.
        self.assertEqual(build_id, try_job_id)
        self.assertEqual(build_id, try_job.flake_results[-1]['try_job_id'])
        self.assertTrue(build_id in try_job.try_job_ids)
        self.assertEqual(try_job_data.try_job_key, try_job.key)
        self.assertEqual(analysis_key, try_job_data.analysis_key)
    def testUpdateFlakeTryJobResult(self):
        """_UpdateTryJobResult marks a started flake try job as RUNNING."""
        master_name = 'm'
        builder_name = 'b'
        step_name = 's'
        test_name = 't'
        git_hash = 'a1b2c3d4'
        try_job_id = '2'

        original = FlakeTryJob.Create(master_name, builder_name, step_name,
                                      test_name, git_hash)
        original.put()

        MonitorTryJobPipeline()._UpdateTryJobResult(
            original.key.urlsafe(), failure_type.FLAKY_TEST, try_job_id,
            'url', buildbucket_client.BuildbucketBuild.STARTED)

        # Re-fetch to observe the persisted status change.
        updated = FlakeTryJob.Get(master_name, builder_name, step_name,
                                  test_name, git_hash)
        self.assertEqual(analysis_status.RUNNING, updated.status)
  def run(self, master_name, builder_name, canonical_step_name,
          test_name, git_hash, urlsafe_analysis_key, cache_name, dimensions,
          iterations_to_rerun=None):
    """Triggers a flake try job.

    Args:
      master_name (str): The master name of a flaky test.
      builder_name (str): The builder name of a flaky test.
      canonical_step_name (str): The canonical name of the step the flaky test
          occurred on.
      test_name (str): The name of the flaky test.
      git_hash (str): The git hash of the revision to run the try job against.
      urlsafe_analysis_key (str): The urlsafe key of the original
          MasterFlakeAnalysis that triggered this try job.
      cache_name (str): A string to identify separate directories for different
          waterfall bots on the trybots.
      dimensions (list): A list of strings in the format
          ["key1:value1", "key2:value2"].
      iterations_to_rerun (int): The number of iterations to rerun.

    Returns:
      build_id (str): Id of the triggered try job.
    """
    properties = self._GetBuildProperties(
        master_name, builder_name, canonical_step_name, test_name, git_hash,
        iterations_to_rerun)
    build_id = self._TriggerTryJob(
        master_name, builder_name, properties, {},
        failure_type.GetDescriptionForFailureType(failure_type.FLAKY_TEST),
        cache_name, dimensions)

    # Record the triggered build on the pre-existing FlakeTryJob entity.
    try_job = FlakeTryJob.Get(
        master_name, builder_name, canonical_step_name, test_name, git_hash)
    try_job.flake_results.append({'try_job_id': build_id})
    try_job.try_job_ids.append(build_id)
    try_job.put()

    # Create a corresponding try job data entity (keyed by build_id) to
    # capture as much metadata as early as possible.
    self._CreateTryJobData(build_id, try_job.key, urlsafe_analysis_key)

    return build_id
def _GetLastAttemptedTryJobDetails(analysis):
    """Returns status/url details of the analysis' most recent try job.

    Walks the chain revision -> FlakeTryJob -> FlakeTryJobData and returns
    an empty dict as soon as any link in that chain is missing.
    """
    revision = analysis.last_attempted_revision
    if not revision:
        return {}

    try_job = FlakeTryJob.Get(analysis.master_name, analysis.builder_name,
                              analysis.step_name, analysis.test_name,
                              revision)
    if not try_job or not try_job.try_job_ids:
        return {}

    # Only the most recently triggered try job is of interest.
    try_job_data = FlakeTryJobData.Get(try_job.try_job_ids[-1])
    if not try_job_data:
        return {}

    return {
        'status': analysis_status.STATUS_TO_DESCRIPTION.get(try_job.status),
        'url': try_job_data.try_job_url,
    }
Example #6
0
    def _LogUnexpectedAbort(self):
        """Marks the analysis and its try job entities as errored when this
        pipeline was aborted unexpectedly; no-op otherwise."""
        if not self.was_aborted:
            return

        analysis = ndb.Key(urlsafe=self.urlsafe_flake_analysis_key).get()
        assert analysis

        abort_message = 'RecursiveFlakeTryJobPipeline was aborted unexpectedly'

        analysis.try_job_status = analysis_status.ERROR
        if not analysis.error:
            # Preserve any pre-existing error; only fill in the abort reason.
            analysis.error = {
                'error': abort_message,
                'message': abort_message
            }
        analysis.end_time = time_util.GetUTCNow()
        analysis.put()

        try_job = FlakeTryJob.Get(analysis.master_name, analysis.builder_name,
                                  analysis.step_name, analysis.test_name,
                                  self.revision)

        if try_job and not try_job.completed:
            try_job.status = analysis_status.ERROR
            try_job.put()

        if not try_job or not try_job.try_job_ids:
            return

        # Annotate the data entity of the most recently triggered try job.
        try_job_data = FlakeTryJobData.Get(try_job.try_job_ids[-1])
        if try_job_data:  # pragma: no branch
            if not try_job_data.error:
                try_job_data.error = {
                    'error': abort_message,
                    'message': abort_message
                }
            try_job_data.put()
Example #7
0
    def testRecursiveFlakeTryJobPipeline(self):
        """End-to-end run of RecursiveFlakeTryJobPipeline with every
        sub-pipeline mocked, verifying the try job is created and the
        analysis records the attempted revision."""
        master_name = 'm'
        builder_name = 'b'
        build_number = 100
        step_name = 's'
        test_name = 't'
        commit_position = 1000
        revision = 'r1000'
        try_job_id = 'try_job_id'
        lower_boundary_commit_position = 998

        # Completed analysis whose algorithm parameters come from the
        # default config's check_flake_settings.
        analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                              build_number, step_name,
                                              test_name)
        analysis.status = analysis_status.COMPLETED
        analysis.algorithm_parameters = DEFAULT_CONFIG_DATA[
            'check_flake_settings']
        analysis.Save()

        iterations_to_rerun = analysis.algorithm_parameters.get(
            'try_job_rerun', {}).get('iterations_to_rerun')

        # Created (not put) only to derive the expected urlsafe key below.
        try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                     test_name, revision)

        # Canned try job output: the step ran with a 28/72 pass/fail split.
        # NOTE(review): the pass_fail_counts key is the literal string
        # 'test_name', not the test_name variable ('t') — confirm intended.
        try_job_result = {
            revision: {
                step_name: {
                    'status': 'failed',
                    'failures': [test_name],
                    'valid': True,
                    'pass_fail_counts': {
                        'test_name': {
                            'pass_count': 28,
                            'fail_count': 72
                        }
                    }
                }
            }
        }

        # Mock the four sub-pipelines, asserting the exact args each
        # receives: schedule -> monitor -> process result -> next commit.
        self.MockPipeline(
            recursive_flake_try_job_pipeline.ScheduleFlakeTryJobPipeline,
            try_job_id,
            expected_args=[
                master_name, builder_name, step_name, test_name, revision,
                analysis.key.urlsafe(), _DEFAULT_CACHE_NAME, None,
                iterations_to_rerun
            ])
        self.MockPipeline(
            recursive_flake_try_job_pipeline.MonitorTryJobPipeline,
            try_job_result,
            expected_args=[
                try_job.key.urlsafe(), failure_type.FLAKY_TEST, try_job_id
            ])
        self.MockPipeline(
            recursive_flake_try_job_pipeline.ProcessFlakeTryJobResultPipeline,
            None,
            expected_args=[
                revision, commit_position, try_job_result,
                try_job.key.urlsafe(),
                analysis.key.urlsafe()
            ])
        self.MockPipeline(
            recursive_flake_try_job_pipeline.NextCommitPositionPipeline,
            '',
            expected_args=[analysis.key.urlsafe(),
                           try_job.key.urlsafe()])

        # Drive the pipeline through the task queue to completion.
        pipeline = RecursiveFlakeTryJobPipeline(
            analysis.key.urlsafe(), commit_position, revision,
            lower_boundary_commit_position, _DEFAULT_CACHE_NAME, None)
        pipeline.start(queue_name=constants.DEFAULT_QUEUE)
        self.execute_queued_tasks()

        # The pipeline should have persisted a FlakeTryJob for the revision
        # and recorded it as the analysis' last attempted revision.
        self.assertIsNotNone(
            FlakeTryJob.Get(master_name, builder_name, step_name, test_name,
                            revision))
        self.assertEqual(analysis.last_attempted_revision, revision)
        self.assertIsNone(analysis.last_attempted_swarming_task_id)
    def testGetTryJobsForFlakeSuccess(self, mock_buildbucket, mock_report):
        """MonitorTryJobPipeline surfaces a completed flake try job's report
        as its output and appends it to the FlakeTryJob's flake_results."""
        master_name = 'm'
        builder_name = 'b'
        step_name = 's'
        test_name = 't'
        git_hash = 'a1b2c3d4'
        try_job_id = '1'

        # In-flight try job with a placeholder (report-less) result entry.
        try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                                     test_name, git_hash)
        try_job.flake_results = [{
            'report': None,
            'url': 'https://build.chromium.org/p/m/builders/b/builds/1234',
            'try_job_id': '1',
        }]
        try_job.status = analysis_status.RUNNING
        try_job.put()

        try_job_data = FlakeTryJobData.Create(try_job_id)
        try_job_data.try_job_key = try_job.key
        try_job_data.try_job_url = (
            'https://build.chromium.org/p/m/builders/b/builds/1234')
        try_job_data.put()

        # Buildbucket reports the build COMPLETED, with a report in which
        # Test.One passed 100/100 iterations.
        build_response = {
            'id': '1',
            'url': 'https://build.chromium.org/p/m/builders/b/builds/1234',
            'status': 'COMPLETED',
        }
        report = {
            'result': {
                'r0': {
                    'gl_tests': {
                        'status': 'passed',
                        'valid': True,
                        'pass_fail_counts': {
                            'Test.One': {
                                'pass_count': 100,
                                'fail_count': 0
                            }
                        }
                    }
                }
            }
        }
        mock_buildbucket.GetTryJobs.return_value = [
            (None, buildbucket_client.BuildbucketBuild(build_response))
        ]
        mock_report.return_value = json.dumps(report)

        # Run the pipeline and deliver its polling callback manually.
        pipeline = MonitorTryJobPipeline()
        pipeline.start_test()
        pipeline.run(try_job.key.urlsafe(), failure_type.FLAKY_TEST,
                     try_job_id)
        pipeline.callback(callback_params=pipeline.last_params)

        # Reload from ID to get all internal properties in sync.
        pipeline = MonitorTryJobPipeline.from_id(pipeline.pipeline_id)
        pipeline.finalized()
        flake_result = pipeline.outputs.default.value

        # The pipeline output is the original result entry with the parsed
        # report filled in.
        expected_flake_result = {
            'report': {
                'result': {
                    'r0': {
                        'gl_tests': {
                            'status': 'passed',
                            'valid': True,
                            'pass_fail_counts': {
                                'Test.One': {
                                    'pass_count': 100,
                                    'fail_count': 0
                                }
                            }
                        }
                    }
                }
            },
            'url': 'https://build.chromium.org/p/m/builders/b/builds/1234',
            'try_job_id': '1',
        }

        self.assertEqual(expected_flake_result, flake_result)

        # The same result is persisted on the try job; status stays RUNNING
        # (this pipeline only monitors, it does not complete the try job).
        try_job = FlakeTryJob.Get(master_name, builder_name, step_name,
                                  test_name, git_hash)
        self.assertEqual(expected_flake_result, try_job.flake_results[-1])
        self.assertEqual(analysis_status.RUNNING, try_job.status)

        # The raw buildbucket response is cached on the try job data entity.
        try_job_data = FlakeTryJobData.Get(try_job_id)
        self.assertEqual(try_job_data.last_buildbucket_response,
                         build_response)