def testGetCoordinatesData(self):
  """Tests that _GetCoordinatesData serializes data points in commit order."""
  build_number = 123
  pass_rate = .9
  try_job_url = 'try_job_url'
  analysis = MasterFlakeAnalysis.Create('m', 'b', build_number, 's', 't')

  # Each spec: (build number, commit position, git hash,
  #             previous build's commit position, previous build's git hash).
  point_specs = [
      (build_number, 5, 'git_hash_5', 4, 'git_hash_4'),
      (build_number - 3, 2, 'git_hash_2', 1, 'git_hash_1'),
  ]
  for number, position, git_hash, prev_position, prev_hash in point_specs:
    point = DataPoint()
    point.build_number = number
    point.pass_rate = pass_rate
    point.commit_position = position
    point.git_hash = git_hash
    point.previous_build_commit_position = prev_position
    point.previous_build_git_hash = prev_hash
    point.try_job_url = try_job_url
    analysis.data_points.append(point)
  analysis.Save()

  # The earliest point carries no lower bound; later points are bounded by
  # their predecessor's commit position/hash.
  expected_result = [
      {
          'commit_position': 2,
          'pass_rate': pass_rate,
          'task_id': None,
          'build_number': build_number - 3,
          'git_hash': 'git_hash_2',
          'try_job_url': try_job_url
      },
      {
          'commit_position': 5,
          'pass_rate': pass_rate,
          'task_id': None,
          'build_number': build_number,
          'git_hash': 'git_hash_5',
          'lower_bound_commit_position': 2,
          'lower_bound_git_hash': 'git_hash_2',
          'try_job_url': try_job_url
      },
  ]
  self.assertEqual(expected_result,
                   check_flake._GetCoordinatesData(analysis))
def testGetSuspectedFlakeInfo(self):
  """Tests that _GetSuspectedFlakeInfo reports the suspected build's data."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.suspected_flake_build_number = 123
  analysis.confidence_in_suspected_build = 0

  point = DataPoint()
  point.build_number = 123
  point.pass_rate = 0.9
  point.commit_position = 2
  point.git_hash = 'git_hash_2'
  point.previous_build_commit_position = 1
  point.previous_build_git_hash = 'git_hash_1'
  analysis.data_points.append(point)
  analysis.Save()

  self.assertEqual(
      {
          'confidence': 0,
          'build_number': analysis.suspected_flake_build_number,
          'commit_position': 2,
          'git_hash': 'git_hash_2',
          'lower_bound_commit_position': 1,
          'lower_bound_git_hash': 'git_hash_1',
          'triage_result': 0
      },
      check_flake._GetSuspectedFlakeInfo(analysis))
def testGetRevisionAtCommitPosition(self):
  """Tests mapping commit positions back to revisions in the blame list."""
  point = DataPoint()
  point.blame_list = ['r1', 'r2', 'r3']
  point.commit_position = 100
  # commit_position refers to the last revision; earlier positions count
  # backwards through the blame list.
  for offset, expected_revision in enumerate(['r1', 'r2', 'r3']):
    self.assertEqual(expected_revision,
                     point.GetRevisionAtCommitPosition(98 + offset))
def testGetDictOfCommitPositionAndRevision(self):
  """Tests expanding a blame list into a commit-position -> revision dict."""
  point = DataPoint()
  point.blame_list = ['r1', 'r2', 'r3']
  point.commit_position = 100
  self.assertEqual(
      {98: 'r1', 99: 'r2', 100: 'r3'},
      point.GetDictOfCommitPositionAndRevision())
def testGetCommitPosition(self):
  """Tests mapping revisions in the blame list to commit positions."""
  point = DataPoint()
  point.blame_list = ['r1', 'r2', 'r3']
  point.commit_position = 100
  point.previous_build_commit_position = 97
  # Positions start just after the previous build's commit position.
  for expected_position, revision in zip((98, 99, 100), ('r1', 'r2', 'r3')):
    self.assertEqual(expected_position, point.GetCommitPosition(revision))
def _UpdateMasterFlakeAnalysis(self, master_name, builder_name, build_number,
                               step_name, master_build_number, test_name,
                               version_number, pass_rate, flake_swarming_task,
                               has_valid_artifact=True):
  """Update MasterFlakeAnalysis to include result of the swarming task.

  Appends a new DataPoint (pass rate plus git information for the analyzed
  build) and the raw swarming task results to the analysis, then persists it.

  Args:
    master_name (str): Name of the master the analyzed build ran on.
    builder_name (str): Name of the builder the analyzed build ran on.
    build_number (int): The build number the swarming task ran against.
    step_name (str): The step containing the flaky test.
    master_build_number (int): Build number identifying the analysis entity.
    test_name (str): The name of the flaky test.
    version_number (int): Version of the MasterFlakeAnalysis to update.
    pass_rate (float): Pass rate measured by the swarming task.
    flake_swarming_task: Completed swarming task whose results to record.
    has_valid_artifact (bool): Whether the build's test artifact was valid.
  """
  analysis = MasterFlakeAnalysis.GetVersion(
      master_name, builder_name, master_build_number, step_name, test_name,
      version=version_number)
  logging.info(
      'Updating MasterFlakeAnalysis swarming task data %s/%s/%s/%s/%s',
      master_name, builder_name, master_build_number, step_name, test_name)

  point = DataPoint()
  point.build_number = build_number
  point.pass_rate = pass_rate
  point.task_id = flake_swarming_task.task_id
  point.has_valid_artifact = has_valid_artifact

  # Attach git information about the build that was run.
  build_info = build_util.GetBuildInfo(master_name, builder_name, build_number)
  point.commit_position = build_info.commit_position
  point.git_hash = build_info.chromium_revision

  if build_number > 0:
    # Bound this build's blame list by the preceding build's revision.
    previous_build = build_util.GetBuildInfo(master_name, builder_name,
                                             build_number - 1)
    point.previous_build_commit_position = previous_build.commit_position
    point.previous_build_git_hash = previous_build.chromium_revision
    point.blame_list = _GetCommitsBetweenRevisions(
        previous_build.chromium_revision, build_info.chromium_revision)
  else:
    # Build 0 has no predecessor; use its own blame list as-is.
    point.blame_list = build_info.blame_list

  analysis.data_points.append(point)

  results = flake_swarming_task.GetFlakeSwarmingTaskData()
  # TODO(lijeffrey): Determine whether or not this flake swarming task
  # was a cache hit (already ran results for more iterations than were
  # requested) and update results['cache_hit'] accordingly.
  analysis.swarming_rerun_results.append(results)
  analysis.put()
def _GenerateDataPoint(pass_rate=None, build_number=None, task_id=None,
                       try_job_url=None, commit_position=None, git_hash=None,
                       previous_build_commit_position=None,
                       previous_build_git_hash=None, blame_list=None):
  """Returns a DataPoint populated with the given field values.

  A falsy blame_list (None or empty) is normalized to a fresh empty list.
  """
  data_point = DataPoint()
  # Assign in a fixed order; pairs mirror the keyword arguments above.
  for attribute, value in (
      ('pass_rate', pass_rate),
      ('build_number', build_number),
      ('task_id', task_id),
      ('try_job_url', try_job_url),
      ('commit_position', commit_position),
      ('git_hash', git_hash),
      ('previous_build_commit_position', previous_build_commit_position),
      ('previous_build_git_hash', previous_build_git_hash),
      ('blame_list', blame_list or []),
  ):
    setattr(data_point, attribute, value)
  return data_point
def run(self, revision, commit_position, try_job_result, urlsafe_try_job_key,
        urlsafe_flake_analysis_key):
  """Extracts pass rate information and updates flake analysis.

  Marks the try job entity completed, computes the test's pass rate at
  |revision| from the try job's report, appends a corresponding DataPoint
  to the flake analysis, and persists both entities.

  Args:
    revision (str): The git hash the try job was run against.
    commit_position (int): The commit position corresponding to |revision|.
    try_job_result (dict): The result dict reported by buildbucket. The code
      below reads the report under the top-level 'report' key. Example:
      {
          'report': {
              'metadata': {},
              'result': {
                  'cafed52c5f3313646b8e04e05601b5cb98f305b3': {
                      'browser_tests': {
                          'status': 'failed',
                          'failures': ['TabCaptureApiTest.FullscreenEvents'],
                          'valid': True,
                          'pass_fail_counts': {
                              'TabCaptureApiTest.FullscreenEvents': {
                                  'pass_count': 28,
                                  'fail_count': 72
                              }
                          },
                          'step_metadata': {
                              'task_ids': [],
                              ...
                          }
                      }
                  }
              }
          }
      }
    urlsafe_try_job_key (str): The urlsafe key to the corresponding try
        job entity.
    urlsafe_flake_analysis_key (str): The urlsafe key for the master flake
        analysis entity to be updated.
  """
  flake_analysis = ndb.Key(urlsafe=urlsafe_flake_analysis_key).get()
  try_job = ndb.Key(urlsafe=urlsafe_try_job_key).get()
  assert flake_analysis
  assert try_job

  # The try job itself is done regardless of what the test results show.
  try_job.status = analysis_status.COMPLETED
  try_job.put()

  step_name = flake_analysis.canonical_step_name
  test_name = flake_analysis.test_name
  result = try_job_result['report']['result']
  pass_fail_counts = result[revision][step_name].get('pass_fail_counts', {})

  if pass_fail_counts:
    # NOTE(review): assumes test_name is present whenever pass_fail_counts
    # is non-empty; a missing entry would raise KeyError here -- confirm
    # upstream guarantees this.
    test_results = pass_fail_counts[test_name]
    pass_count = test_results['pass_count']
    fail_count = test_results['fail_count']
    tries = pass_count + fail_count
    pass_rate = float(pass_count) / tries
  else:
    # Test does not exist at this revision; -1 is the sentinel pass rate.
    pass_rate = -1

  data_point = DataPoint()
  data_point.commit_position = commit_position
  data_point.git_hash = revision
  data_point.pass_rate = pass_rate
  # The most recent flake_results entry corresponds to this try job run.
  data_point.try_job_url = try_job.flake_results[-1].get('url')
  data_point.task_id = _GetSwarmingTaskIdForTryJob(
      try_job.flake_results[-1].get('report'), revision, step_name, test_name)
  flake_analysis.data_points.append(data_point)
  flake_analysis.put()