  def testGetCoordinatesData(self):
    master_name = 'm'
    builder_name = 'b'
    build_number = 123
    step_name = 's'
    test_name = 't'
    success_rate = .9
    try_job_url = 'try_job_url'
    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    data_point_1 = DataPoint()
    data_point_1.build_number = build_number
    data_point_1.pass_rate = success_rate
    data_point_1.commit_position = 5
    data_point_1.git_hash = 'git_hash_5'
    data_point_1.previous_build_commit_position = 4
    data_point_1.previous_build_git_hash = 'git_hash_4'
    data_point_1.try_job_url = try_job_url
    analysis.data_points.append(data_point_1)

    data_point_2 = DataPoint()
    data_point_2.build_number = build_number - 3
    data_point_2.pass_rate = success_rate
    data_point_2.commit_position = 2
    data_point_2.git_hash = 'git_hash_2'
    data_point_2.previous_build_commit_position = 1
    data_point_2.previous_build_git_hash = 'git_hash_1'
    data_point_2.try_job_url = try_job_url
    analysis.data_points.append(data_point_2)
    analysis.Save()

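    # _GetCoordinatesData should order data points by ascending commit position
    # and attach lower-bound info taken from the preceding data point.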
    expected_result = [
        {
            'commit_position': 2,
            'pass_rate': success_rate,
            'task_id': None,
            'build_number': build_number - 3,
            'git_hash': 'git_hash_2',
            'try_job_url': try_job_url
        },
        {
            'commit_position': 5,
            'pass_rate': success_rate,
            'task_id': None,
            'build_number': build_number,
            'git_hash': 'git_hash_5',
            'lower_bound_commit_position': 2,
            'lower_bound_git_hash': 'git_hash_2',
            'try_job_url': try_job_url
        }
    ]
    self.assertEqual(expected_result,
                     check_flake._GetCoordinatesData(analysis))

  def testGetSuspectedFlakeInfo(self):
    analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
    analysis.suspected_flake_build_number = 123
    data_point = DataPoint()
    data_point.build_number = 123
    data_point.pass_rate = 0.9
    data_point.commit_position = 2
    data_point.git_hash = 'git_hash_2'
    data_point.previous_build_commit_position = 1
    data_point.previous_build_git_hash = 'git_hash_1'
    analysis.data_points.append(data_point)
    analysis.confidence_in_suspected_build = 0
    analysis.Save()

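    # The lower bound fields should come from the suspected build's
    # previous-build commit position and git hash.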
    expected_result = {
        'confidence': 0,
        'build_number': analysis.suspected_flake_build_number,
        'commit_position': 2,
        'git_hash': 'git_hash_2',
        'lower_bound_commit_position': 1,
        'lower_bound_git_hash': 'git_hash_1',
        'triage_result': 0
    }
    self.assertEqual(expected_result,
                     check_flake._GetSuspectedFlakeInfo(analysis))

  def testGetNumbersOfDataPointGroups(self):
    data_point1 = DataPoint()
    data_point1.try_job_url = 'try_job_url'

    data_point2 = DataPoint()
    data_point2.build_number = 1

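    # One data point came from a try job (try_job_url set) and one from a
    # build-level rerun (build_number set), so each group should count 1.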
    data_points = [data_point1, data_point2]
    self.assertEqual((1, 1),
                     check_flake._GetNumbersOfDataPointGroups(data_points))
Example #4
    def _UpdateMasterFlakeAnalysis(self,
                                   master_name,
                                   builder_name,
                                   build_number,
                                   step_name,
                                   master_build_number,
                                   test_name,
                                   version_number,
                                   pass_rate,
                                   flake_swarming_task,
                                   has_valid_artifact=True):
        """Update MasterFlakeAnalysis to include result of the swarming task."""
        master_flake_analysis = MasterFlakeAnalysis.GetVersion(
            master_name,
            builder_name,
            master_build_number,
            step_name,
            test_name,
            version=version_number)
        logging.info(
            'Updating MasterFlakeAnalysis swarming task data %s/%s/%s/%s/%s',
            master_name, builder_name, master_build_number, step_name,
            test_name)

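        # Record the result of this swarming rerun as a new data point.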
        data_point = DataPoint()
        data_point.build_number = build_number
        data_point.pass_rate = pass_rate
        data_point.task_id = flake_swarming_task.task_id
        data_point.has_valid_artifact = has_valid_artifact

        # Include git information about each build that was run.
        build_info = build_util.GetBuildInfo(master_name, builder_name,
                                             build_number)
        data_point.commit_position = build_info.commit_position
        data_point.git_hash = build_info.chromium_revision

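        # Build 0 has no previous build to diff against, so fall back to the
        # build's own blame list.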
        if build_number > 0:
            previous_build = build_util.GetBuildInfo(master_name, builder_name,
                                                     build_number - 1)
            data_point.previous_build_commit_position = previous_build.commit_position
            data_point.previous_build_git_hash = previous_build.chromium_revision
            data_point.blame_list = _GetCommitsBetweenRevisions(
                previous_build.chromium_revision, build_info.chromium_revision)
        else:
            data_point.blame_list = build_info.blame_list

        master_flake_analysis.data_points.append(data_point)

        results = flake_swarming_task.GetFlakeSwarmingTaskData()
        # TODO(lijeffrey): Determine whether or not this flake swarming task
        # was a cache hit (already ran results for more iterations than were
        # requested) and update results['cache_hit'] accordingly.
        master_flake_analysis.swarming_rerun_results.append(results)
        master_flake_analysis.put()
Example #5
    def testGetDataPointOfSuspectedBuildNoDataPoint(self):
        # This scenario should not happen.
        expected_build_number = 123
        unexpected_build_number = 124
        data_point = DataPoint()
        data_point.build_number = expected_build_number

        analysis = MasterFlakeAnalysis.Create('m', 'b', 125, 's', 't')
        analysis.suspected_flake_build_number = unexpected_build_number
        analysis.data_points.append(data_point)

        self.assertIsNone(analysis.GetDataPointOfSuspectedBuild())
Example #6
    def testGetDataPointOfSuspectedBuild(self):
        expected_build_number = 123
        data_point = DataPoint()
        data_point.build_number = expected_build_number

        analysis = MasterFlakeAnalysis.Create('m', 'b', 125, 's', 't')
        analysis.suspected_flake_build_number = expected_build_number
        analysis.data_points.append(data_point)

        suspected_data_point = analysis.GetDataPointOfSuspectedBuild()
        self.assertEqual(expected_build_number,
                         suspected_data_point.build_number)
Example #7
def _GenerateDataPoint(pass_rate=None,
                       build_number=None,
                       task_id=None,
                       try_job_url=None,
                       commit_position=None,
                       git_hash=None,
                       previous_build_commit_position=None,
                       previous_build_git_hash=None,
                       blame_list=None):
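    """Returns a DataPoint with the given fields set, for use in tests."""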
    data_point = DataPoint()
    data_point.pass_rate = pass_rate
    data_point.build_number = build_number
    data_point.task_id = task_id
    data_point.try_job_url = try_job_url
    data_point.commit_position = commit_position
    data_point.git_hash = git_hash
    data_point.previous_build_commit_position = previous_build_commit_position
    data_point.previous_build_git_hash = previous_build_git_hash
    data_point.blame_list = blame_list if blame_list else []
    return data_point

  def testAnyoneCanViewScheduledAnalysis(self, *_):
    master_name = 'm'
    builder_name = 'b'
    build_number = '123'
    step_name = 's'
    test_name = 't'
    success_rate = .9

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    data_point = DataPoint()
    data_point.build_number = int(build_number)
    data_point.pass_rate = success_rate
    data_point.task_id = '1'
    analysis.data_points.append(data_point)
    analysis.status = analysis_status.COMPLETED
    analysis.suspected_flake_build_number = 100
    analysis.request_time = datetime.datetime(2016, 10, 1, 12, 10, 0)
    analysis.start_time = datetime.datetime(2016, 10, 1, 12, 10, 5)
    analysis.end_time = datetime.datetime(2016, 10, 1, 13, 10, 0)
    analysis.algorithm_parameters = {'iterations_to_rerun': 100}
    analysis.Save()

    self.mock_current_user(user_email='*****@*****.**')

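    # Any logged-in user should be able to fetch the scheduled analysis.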
    response = self.test_app.get('/waterfall/flake', params={
        'key': analysis.key.urlsafe(),
        'format': 'json'})

    expected_check_flake_result = {
        'key': analysis.key.urlsafe(),
        'pass_rates': [[12345, 0.9, '1', 100, 'git_hash_2', 12344,
                        'git_hash_1']],
        'analysis_status': STATUS_TO_DESCRIPTION.get(analysis.status),
        'master_name': master_name,
        'builder_name': builder_name,
        'build_number': int(build_number),
        'step_name': step_name,
        'test_name': test_name,
        'request_time': '2016-10-01 12:10:00 UTC',
        'build_level_number': 1,
        'revision_level_number': 0,
        'error': None,
        'iterations_to_rerun': 100,
        'pending_time': '00:00:05',
        'duration': '00:59:55',
        'suspected_flake': {
            'build_number': 100,
            'commit_position': 12345,
            'git_hash': 'git_hash_1',
            'triage_result': 0
        },
        'version_number': 1,
        'show_input_ui': False,
        'culprit': {},
        'try_job_status': None,
        'last_attempted_swarming_task': {
            'task_id': None,
            'build_number': None
        },
        'last_attempted_try_job': {},
        'user_email': '*****@*****.**'
    }

    self.assertEqual(200, response.status_int)
    self.assertEqual(expected_check_flake_result, response.json_body)

  def testRequestExistingAnalysis(self, *_):
    master_name = 'm'
    builder_name = 'b'
    build_number = 123
    step_name = 's'
    test_name = 't'
    success_rate = 0.9

    previous_analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number - 1, step_name, test_name)
    data_point = DataPoint()
    data_point.build_number = build_number - 1
    data_point.pass_rate = success_rate
    previous_analysis.data_points.append(data_point)
    previous_analysis.status = analysis_status.COMPLETED
    previous_analysis.suspected_flake_build_number = 100
    previous_analysis.request_time = datetime.datetime(2016, 10, 1, 12, 10, 0)
    previous_analysis.start_time = datetime.datetime(2016, 10, 1, 12, 10, 5)
    previous_analysis.end_time = datetime.datetime(2016, 10, 1, 13, 10, 0)
    previous_analysis.algorithm_parameters = {'iterations_to_rerun': 100}
    previous_analysis.Save()

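    # Register the completed analysis under a FlakeAnalysisRequest so a new
    # request for the same test resolves to it.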
    previous_request = FlakeAnalysisRequest.Create(test_name, False, None)
    build_step = BuildStep.Create(
        master_name, builder_name, build_number, step_name, None)
    build_step.wf_master_name = build_step.master_name
    build_step.wf_builder_name = build_step.builder_name
    build_step.wf_build_number = build_step.build_number
    build_step.wf_step_name = build_step.step_name
    previous_request.build_steps.append(build_step)
    previous_request.analyses.append(previous_analysis.key)
    previous_request.Save()

    self.mock_current_user(user_email='*****@*****.**')

    response = self.test_app.get('/waterfall/flake', params={
        'url': buildbot.CreateBuildUrl(master_name, builder_name, build_number),
        'step_name': step_name,
        'test_name': test_name,
        'format': 'json'})

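    # The response should reflect the previously completed analysis
    # (note build_number - 1), not a newly created one.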
    expected_check_flake_result = {
        'key': previous_analysis.key.urlsafe(),
        'pass_rates': [[12345, 0.9, '1', 100, 'git_hash_2', 12344,
                        'git_hash_1']],
        'analysis_status': STATUS_TO_DESCRIPTION.get(previous_analysis.status),
        'master_name': master_name,
        'builder_name': builder_name,
        'build_number': build_number - 1,
        'step_name': step_name,
        'test_name': test_name,
        'request_time': '2016-10-01 12:10:00 UTC',
        'build_level_number': 1,
        'revision_level_number': 0,
        'error': None,
        'iterations_to_rerun': 100,
        'pending_time': '00:00:05',
        'duration': '00:59:55',
        'suspected_flake': {
            'build_number': 100,
            'commit_position': 12345,
            'git_hash': 'a_git_hash',
            'triage_result': 0
        },
        'version_number': 1,
        'show_input_ui': False,
        'culprit': {},
        'try_job_status': None,
        'last_attempted_swarming_task': {
            'task_id': None,
            'build_number': None
        },
        'last_attempted_try_job': {},
        'user_email': '*****@*****.**'
    }

    self.assertEqual(200, response.status_int)
    self.assertEqual(expected_check_flake_result, response.json_body)