def AddFlakeIfBelong(self, flake, occurrences):
  """Adds the flake, also updates flakes_with_same_occurrences and
  num_occurrences if needed.

  Args:
    flake (Flake): Flake entity to be added in the group.
    occurrences (list): A list of occurrences of the flake.

  Returns:
    (bool), True if the flake is added to the group otherwise False.
  """
  flake_issue = GetFlakeIssue(flake)
  # A flake may only join a group whose flakes all link to the same issue;
  # a mismatch indicates a caller bug, so fail loudly with both links.
  assert flake_issue == self.flake_issue, (
      'Tried to add flake {flake} to group with issue {issue}, while flake '
      'links to another issue {a_issue}'.format(
          flake=flake.key.urlsafe(),
          issue=FlakeIssue.GetLinkForIssue(self.flake_issue.monorail_project,
                                           self.flake_issue.issue_id),
          a_issue=FlakeIssue.GetLinkForIssue(
              flake_issue.monorail_project, flake_issue.issue_id)
          if flake_issue else None))
  self.flakes.append(flake)
  if len(occurrences) < self.num_occurrences:
    # Only maintains a minimum num_occurrences to show in bug comments.
    self.num_occurrences = len(occurrences)
    self.flakes_with_same_occurrences = False
  return True
def _GetOrCreateFlakeIssue(bug_id):
  """Returns the chromium FlakeIssue for |bug_id|, creating one if absent.

  Newly created issues whose bug id is at least 123458 also get a fixed
  monorail creation time of 2018-09-01.
  """
  project = 'chromium'
  existing = FlakeIssue.Get(project, bug_id)
  if existing:
    return existing
  created = FlakeIssue.Create(project, bug_id)
  if not bug_id < 123458:
    created.create_time_in_monorail = datetime(2018, 9, 1)
  created.put()
  return created
def testMergeOrSplitFlakeIssueByCulpritIssueAlreadyMerged(
    self, mocked_get_issue, mocked_merge_issues, _):
  """Tests merging when the culprit's issue was itself already merged."""
  # Culprit's flake issue 12344 was already merged into 12346.
  # Incoming flake issue's id is 12345 and is expected to be merged as well.
  project = 'chromium'
  merged_bug_id = 12344
  open_bug_id = 12345
  destination_bug_id = 12346
  revision = 'r1000'
  commit_position = 1000
  flake_issue = FlakeIssue.Create(project, open_bug_id)
  flake_issue.put()
  destination_issue = FlakeIssue.Create(project, destination_bug_id)
  destination_issue.put()
  culprit_flake_issue = FlakeIssue.Create(project, merged_bug_id)
  culprit_flake_issue.status = 'Merged'
  culprit_flake_issue.merge_destination_key = destination_issue.key
  culprit_flake_issue.put()
  flake_culprit = FlakeCulprit.Create(project, revision, commit_position)
  flake_culprit.flake_issue_key = culprit_flake_issue.key
  flake_culprit.put()
  flake_monorail_issue = Issue({
      'status': 'Available',
      'projectId': 'chromium',
      'id': str(open_bug_id)
  })
  destination_monorail_issue = Issue({
      'status': 'Available',
      'projectId': 'chromium',
      'id': str(destination_bug_id)
  })
  # The mocked GetIssue returns the destination issue first, then the
  # incoming flake's issue.
  mocked_get_issue.side_effect = [
      destination_monorail_issue,
      flake_monorail_issue,
  ]
  (duplicate,
   destination) = flake_analysis_actions.MergeOrSplitFlakeIssueByCulprit(
       flake_issue.key, flake_culprit.key)
  mocked_merge_issues.assert_called_once_with(
      flake_monorail_issue, destination_monorail_issue, mock.ANY)
  flake_issue = flake_issue.key.get()
  self.assertEqual(flake_issue.key, duplicate)
  self.assertEqual(destination_issue.key, destination)
  self.assertEqual(destination_issue.key, flake_issue.merge_destination_key)
def _CreateIssueForFlake(issue_generator, target_flake, create_or_update_bug):
  """Creates a monorail bug for a single flake.

  This function is used to create bugs for detected flakes and flake
  analysis results.

  Args:
    issue_generator: Generator object for the issue's content; also supplies
      the monorail project via GetMonorailProject().
    target_flake (Flake): The flake to attach the found/created bug to.
    create_or_update_bug (bool): True to create or update monorail bug,
      otherwise False. Should always look for existing bugs for flakes, even
      if cannot update the bug.

  Returns:
    The id of the attached or newly created issue, or None if no bug was
    found and one could not (or may not) be created.
  """
  monorail_project = issue_generator.GetMonorailProject()
  # Re-uses an existing open bug if possible.
  issue_id = SearchOpenIssueIdForFlakyTest(target_flake.normalized_test_name,
                                           monorail_project)
  if not issue_id:
    # Reopens a recently closed bug if possible.
    issue_id = SearchRecentlyClosedIssueIdForFlakyTest(
        target_flake.normalized_test_name, monorail_project)
  if issue_id:
    logging.info('An existing issue %s was found, attach it to flake: %s.',
                 FlakeIssue.GetLinkForIssue(monorail_project, issue_id),
                 target_flake.key)
    _AssignIssueToFlake(issue_id, target_flake)
    if create_or_update_bug:
      monorail_util.UpdateIssueWithIssueGenerator(
          issue_id=issue_id, issue_generator=issue_generator, reopen=True)
    return issue_id
  if not create_or_update_bug:
    # No existing bug found, and cannot create bug, bail out.
    return None
  logging.info('No existing open issue was found, create a new one.')
  issue_id = monorail_util.CreateIssueWithIssueGenerator(
      issue_generator=issue_generator)
  if not issue_id:
    logging.warning('Failed to create monorail bug for flake: %s.',
                    target_flake.key)
    return None
  logging.info('%s was created for flake: %s.',
               FlakeIssue.GetLinkForIssue(monorail_project, issue_id),
               target_flake.key)
  _AssignIssueToFlake(issue_id, target_flake)
  return issue_id
def testMergeOrSplitFlakeIssueByCulpritMergeIntoManuallyCreated(
    self, mocked_get_issue, mocked_merge_issues, _):
  """Tests that a culprit's auto bug is merged into a manually-filed bug."""
  project = 'chromium'
  duplicate_bug_id = 12344
  manually_created_bug_id = 12345
  revision = 'r1000'
  commit_position = 1000
  flake_issue = FlakeIssue.Create(project, manually_created_bug_id)
  flake_issue.status = 'Assigned'
  flake_issue.put()
  culprit_flake_issue = FlakeIssue.Create(project, duplicate_bug_id)
  culprit_flake_issue.put()
  flake_culprit = FlakeCulprit.Create(project, revision, commit_position)
  flake_culprit.flake_issue_key = culprit_flake_issue.key
  flake_culprit.put()
  # Even though the flake issue associated with the culprit was identified
  # first, the incoming flake issue was manually created. Merge into the
  # manually created one.
  flake_monorail_issue = Issue({
      'status': 'Available',
      'projectId': 'chromium',
      'id': str(manually_created_bug_id)
  })
  culprit_monorail_issue = Issue({
      'status': 'Available',
      'projectId': 'chromium',
      'id': str(duplicate_bug_id)
  })
  # The mocked GetIssue returns the culprit's issue first, then the
  # incoming flake's issue.
  mocked_get_issue.side_effect = [
      culprit_monorail_issue, flake_monorail_issue
  ]
  (duplicate,
   destination) = flake_analysis_actions.MergeOrSplitFlakeIssueByCulprit(
       flake_issue.key, flake_culprit.key)
  mocked_merge_issues.assert_called_once_with(culprit_monorail_issue,
                                              flake_monorail_issue, mock.ANY)
  flake_culprit = flake_culprit.key.get()
  flake_issue = flake_issue.key.get()
  culprit_flake_issue = culprit_flake_issue.key.get()
  self.assertEqual(culprit_flake_issue.key, duplicate)
  self.assertEqual(flake_issue.key, destination)
  self.assertEqual(flake_issue.flake_culprit_key, flake_culprit.key)
  self.assertEqual(flake_issue.key, culprit_flake_issue.merge_destination_key)
def testMergeOrSplitFlakeIssueByCulpritIssueClosed(self, mocked_get_issue,
                                                   mocked_merge_issues, *_):
  """Tests that no merge happens when the culprit's bug is already closed."""
  project = 'chromium'
  closed_bug_id = 12344
  open_bug_id = 12345
  revision = 'r1000'
  commit_position = 1000
  flake_issue = FlakeIssue.Create(project, open_bug_id)
  flake_issue.put()
  culprit_flake_issue = FlakeIssue.Create(project, closed_bug_id)
  culprit_flake_issue.status = 'Fixed'
  culprit_flake_issue.last_updated_time_in_monorail = datetime(2019, 1, 1)
  culprit_flake_issue.put()
  flake_culprit = FlakeCulprit.Create(project, revision, commit_position)
  flake_culprit.flake_issue_key = culprit_flake_issue.key
  flake_culprit.put()
  # Even though the flake issue associated with the culprit was identified
  # first, it has been closed. FlakeCulprit should have its flake issue
  # updated to the incoming one.
  flake_monorail_issue = Issue({
      'status': 'Available',
      'projectId': 'chromium',
      'id': str(open_bug_id)
  })
  culprit_monorail_issue = Issue({
      'status': 'Fixed',
      'projectId': 'chromium',
      'id': str(closed_bug_id)
  })
  mocked_get_issue.side_effect = [
      culprit_monorail_issue, flake_monorail_issue
  ]
  (duplicate,
   destination) = flake_analysis_actions.MergeOrSplitFlakeIssueByCulprit(
       flake_issue.key, flake_culprit.key)
  mocked_merge_issues.assert_not_called()
  flake_culprit = flake_culprit.key.get()
  self.assertIsNone(duplicate)
  self.assertIsNone(destination)
  self.assertIsNone(flake_issue.merge_destination_key)
  self.assertEqual(flake_issue.key, flake_culprit.flake_issue_key)
def testGetMostUpdatedIssue(self):
  """GetMostUpdatedIssue should follow merge_destination_key."""
  project = 'chromium'
  destination = FlakeIssue.Create(monorail_project=project, issue_id=67890)
  destination.put()
  source = FlakeIssue.Create(monorail_project=project, issue_id=12345)
  source.merge_destination_key = destination.key
  source.put()
  self.assertEqual(destination, source.GetMostUpdatedIssue())
def testCreate(self):
  """Creating a FlakeIssue persists exactly one queryable entity."""
  project = 'chromium'
  bug_id = 123
  FlakeIssue.Create(monorail_project=project, issue_id=bug_id).put()
  stored = FlakeIssue.Get(project, bug_id)
  all_issues = FlakeIssue.query().fetch()
  self.assertEqual(1, len(all_issues))
  self.assertEqual(stored, all_issues[0])
  self.assertIsNone(all_issues[0].last_updated_time_by_flake_detection)
  self.assertEqual(project, stored.monorail_project)
  self.assertEqual(bug_id, stored.issue_id)
  self.assertIsNone(stored.merge_destination_key)
def _UpdateFlakeIssueWithMonorailIssue(flake_issue, monorail_issue):
  """Updates a FlakeIssue with its corresponding Monorail issue.

  Args:
    flake_issue (FlakeIssue): Datastore entity to refresh.
    monorail_issue: Monorail issue data; may be None when unavailable.
  """
  issue_id = flake_issue.issue_id
  monorail_project = flake_issue.monorail_project
  issue_link = FlakeIssue.GetLinkForIssue(monorail_project, issue_id)
  if not monorail_issue:
    # Possible reason: Findit doesn't have access to the monorail issue.
    return
  assert monorail_issue.status is not None, (
      'Failed to get issue.status from {}'.format(issue_link))
  assert monorail_issue.updated or monorail_issue.closed, (
      'Failed to get updated time from {}'.format(issue_link))
  if monorail_issue.status == 'Duplicate':
    # Impacted |merge_destination_key|s need to be updated.
    merged_monorail_issue = monorail_util.GetMergedDestinationIssueForId(
        issue_id, monorail_project)
    if not merged_monorail_issue.id:
      logging.warning('Failed to get merged monorail issue %s', issue_link)
    # NOTE(review): the propagation below still runs even when the lookup
    # above failed (only a warning is logged) — confirm this is intended.
    _UpdateMergeDestinationAndIssueLeaves(flake_issue, merged_monorail_issue)
  flake_issue.Update(
      status=monorail_issue.status,
      labels=monorail_issue.labels,
      # Prefer the close time when the issue is closed.
      last_updated_time_in_monorail=(monorail_issue.closed or
                                     monorail_issue.updated),
      create_time_in_monorail=monorail_issue.created)
def testGetLinkForStagingIssue(self, _):
  """Issue links should point at monorail-staging in the staging env."""
  link = FlakeIssue.GetLinkForIssue('chromium', 12345)
  self.assertEqual(
      'https://monorail-staging.appspot.com/p/chromium/issues/detail?'
      'id=12345', link)
def _GetIssueGenerator(self, new_issue=True):
  """Builds a FlakeDetectionGroupIssueGenerator test fixture.

  Args:
    new_issue (bool): True to return a generator for a brand new bug;
      False to return one tied to an existing FlakeIssue (id 12345).

  Returns:
    A FlakeDetectionGroupIssueGenerator.
  """
  luci_project = 'chromium'
  normalized_step_name = 'step'
  flake0 = Flake.Create(luci_project, normalized_step_name, 'suite.test0',
                        'suite.test0')
  flake0.tags = ['component::Blink']
  flake0.put()
  flake1 = Flake.Create(luci_project, normalized_step_name, 'suite.test1',
                        'suite.test1')
  flake1.tags = ['component::Blink/Infra']
  flake1.put()
  flake2 = Flake.Create(luci_project, normalized_step_name, 'suite.test2',
                        'suite.test2')
  flake2.put()
  # flake3 belongs to a different step; only used by the old-issue group.
  flake3 = Flake.Create(luci_project, 'other_step', 'other_test',
                        'other_test')
  flake3.put()
  issue_generator_new = issue_generator.FlakeDetectionGroupIssueGenerator(
      flakes=[flake0, flake1, flake2],
      num_occurrences=5,
      canonical_step_name=normalized_step_name)
  flake_issue = FlakeIssue.Create(luci_project, 12345)
  flake_issue.put()
  issue_generator_old = issue_generator.FlakeDetectionGroupIssueGenerator(
      flakes=[flake1, flake2, flake3],
      num_occurrences=5,
      flake_issue=flake_issue,
      flakes_with_same_occurrences=False)
  return issue_generator_new if new_issue else issue_generator_old
def SyncOpenFlakeIssuesWithMonorail():
  """Updates open FlakeIssues to reflect the latest state in Monorail."""
  flake_issues_needing_updating = _GetFlakeIssuesNeedingUpdating()
  for flake_issue in flake_issues_needing_updating:
    issue_id = flake_issue.issue_id
    monorail_project = flake_issue.monorail_project
    # TODO(crbug.com/914160): Monorail has a maximum of 300 requests per
    # minute within any 5 minute window. Should the limit be exceeded,
    # requests will result in 4xx errors and exponential backoff should be
    # used.
    monorail_issue = monorail_util.GetMonorailIssueForIssueId(
        issue_id, monorail_project)
    if (not monorail_issue or monorail_issue.id is None or
        int(monorail_issue.id) != issue_id):  # pragma: no cover
      # No cover due to being unexpected, but log a warning regardless and
      # skip.
      link = FlakeIssue.GetLinkForIssue(monorail_project, issue_id)
      logging.warning('Failed to get issue %s', link)
      continue
    _UpdateFlakeIssueWithMonorailIssue(flake_issue, monorail_issue)
    if monorail_issue.status in issue_constants.CLOSED_STATUSES_NO_DUPLICATE:
      # Issue is closed, detaches it from flakes.
      _ArchiveFlakesForClosedIssue(flake_issue)
def GetFlakeInformation(flake, max_occurrence_count, with_occurrences=True):
  """Gets information for a detected flake.

  Gets occurrences of the flake and the attached monorail issue.

  Args:
    flake(Flake): Flake object for a flaky test.
    max_occurrence_count(int): Maximum number of occurrences to fetch.
    with_occurrences(bool): If the flake must be with occurrences or not.
      For flakes reported by Flake detection, there should always be
      occurrences, but it's not always true for flakes reported by
      Flake Analyzer, ignore those flakes for now.

  Returns:
    flake_dict(dict): A dict of information for the test. Including data
    from its Flake entity, its flake issue information and information of
    all its flake occurrences. Returns None when occurrences are required
    but none were found.
  """
  occurrences = []
  for flake_type in [
      FlakeType.CQ_FALSE_REJECTION, FlakeType.RETRY_WITH_PATCH,
      FlakeType.CI_FAILED_STEP, FlakeType.CQ_HIDDEN_FLAKE
  ]:
    typed_occurrences = _FetchFlakeOccurrences(flake, flake_type,
                                               max_occurrence_count)
    occurrences.extend(typed_occurrences)
    if max_occurrence_count:
      # Shrink the remaining budget by what this type consumed.
      max_occurrence_count = max_occurrence_count - len(typed_occurrences)
      if max_occurrence_count == 0:
        # Bails out if the number of occurrences with higher impact has hit
        # the cap.
        break
  if not occurrences and with_occurrences:
    # Flake must be with occurrences, but there is no occurrence, bail out.
    return None
  # Makes sure occurrences are sorted by time_happened in descending order,
  # regardless of types.
  occurrences.sort(key=lambda x: x.time_happened, reverse=True)
  flake_dict = flake.to_dict()
  flake_dict['occurrences'] = _GetGroupedOccurrencesByBuilder(occurrences)
  flake_dict['flake_counts_last_week'] = _GetFlakeCountsList(
      flake.flake_counts_last_week)
  flake_issue = GetFlakeIssue(flake)
  # Only expose issue data when the attached bug is still open.
  if flake_issue and flake_issue.status and flake_issue.status in OPEN_STATUSES:
    flake_dict['flake_issue'] = flake_issue.to_dict()
    flake_dict['flake_issue']['issue_link'] = FlakeIssue.GetLinkForIssue(
        flake_issue.monorail_project, flake_issue.issue_id)
    flake_dict['flake_issue'][
        'last_updated_time_in_monorail'] = _GetLastUpdatedTimeDelta(
            flake_issue)
    flake_dict['culprits'], flake_dict['sample_analysis'] = (
        _GetFlakeAnalysesResults(flake_issue.issue_id))
  return flake_dict
def testOnCulpritIdentified(self, mocked_update_monorail, mocked_update_issues,
                            mocked_merge):
  """Tests the culprit-identified flow end to end with merged issues."""
  project = 'chromium'
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 's'
  test_name = 't'
  label = 'l'
  bug_id = 12345
  merged_bug_id = 12344
  revision = 'r1000'
  commit_position = 1000
  merged_issue = FlakeIssue.Create(project, merged_bug_id)
  merged_issue.put()
  # The flake's issue was already merged into |merged_issue|.
  issue = FlakeIssue.Create(project, bug_id)
  issue.merge_destination_key = merged_issue.key
  issue.put()
  flake = Flake.Create(project, step_name, test_name, label)
  flake.flake_issue_key = issue.key
  flake.put()
  culprit = FlakeCulprit.Create(project, revision, commit_position)
  culprit.flake_issue_key = merged_issue.key
  culprit.put()
  mocked_merge.return_value = (issue.key, merged_issue.key)
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.flake_key = flake.key
  analysis.culprit_urlsafe_key = culprit.key.urlsafe()
  analysis.confidence_in_culprit = 0.9
  analysis.put()
  flake_analysis_actions.OnCulpritIdentified(analysis.key.urlsafe())
  mocked_merge.assert_called_once_with(issue.key, culprit.key)
  mocked_update_issues.assert_called_once_with(issue.key,
                                               issue.merge_destination_key)
  mocked_update_monorail.assert_called_once_with(analysis.key.urlsafe())
def testGetMostUpdatedIssueNoMergeKeyOnly(self):
  """With no merge destination, key_only returns the issue's own key."""
  issue = FlakeIssue.Create(monorail_project='chromium', issue_id=12345)
  issue.put()
  self.assertEqual(issue.key, issue.GetMostUpdatedIssue(key_only=True))
def _GetOrCreateFlakeIssue(issue_id, monorail_project):
  """Gets or creates a FlakeIssue entity for the monorail issue.

  Args:
    issue_id (int): Id of the issue.
    monorail_project (str): Monorail project of the issue.

  Returns:
    (FlakeIssue): a FlakeIssue entity of the issue.
  """
  existing = FlakeIssue.Get(monorail_project, issue_id)
  if existing:
    return existing
  new_issue = FlakeIssue.Create(monorail_project, issue_id)
  new_issue.put()
  # Backfill the freshly created entity from monorail right away.
  monorail_data = monorail_util.GetMonorailIssueForIssueId(
      issue_id, monorail_project)
  _UpdateFlakeIssueWithMonorailIssue(new_issue, monorail_data)
  return new_issue
def GetRemainingPostAnalysisDailyBugUpdatesCount():
  """Returns how many FlakeIssue updates can be made by Flake Analyzer."""
  settings = waterfall_config.GetActionSettings()
  daily_limit = settings.get('max_flake_analysis_bug_updates_per_day',
                             flake_constants.DEFAULT_MAX_BUG_UPDATES_PER_DAY)
  # Count issues that received analysis-result updates in the last 24 hours.
  cutoff = time_util.GetUTCNow() - datetime.timedelta(days=1)
  used_today = FlakeIssue.query(
      FlakeIssue.last_updated_time_with_analysis_results > cutoff).count()
  return daily_limit - used_today
def GetAndUpdateMergedIssue(flake_issue):
  """Gets the most up-to-date merged issue and update data in data store.

  Args:
    flake_issue (FlakeIssue): FlakeIssue to check its merge destination and
      update.

  Returns:
    merged_issue (monorail_api.Issue): Merge destination of the flake_issue.
  """
  monorail_project = flake_issue.monorail_project
  merged_issue = monorail_util.GetMergedDestinationIssueForId(
      flake_issue.issue_id, monorail_project)
  if merged_issue and flake_issue.issue_id != merged_issue.id:
    # The monorail issue was merged into a different one; propagate the new
    # destination to this entity and its merged leaves.
    logging.info(
        'Flake issue %s was merged to %s, updates this issue and'
        ' all issues were merged into it.',
        FlakeIssue.GetLinkForIssue(monorail_project, flake_issue.issue_id),
        FlakeIssue.GetLinkForIssue(monorail_project, merged_issue.id))
    _UpdateMergeDestinationAndIssueLeaves(flake_issue, merged_issue)
  return merged_issue
def testGetFlakeIssue(self):
  """GetIssue(key_only=True) returns the attached FlakeIssue key."""
  issue = FlakeIssue.Create(monorail_project='chromium', issue_id=12345)
  issue.put()
  flake = Flake.Create(
      luci_project='chromium',
      normalized_step_name='step',
      normalized_test_name='suite.test',
      test_label_name='*/suite.test/*')
  flake.flake_issue_key = issue.key
  flake.put()
  self.assertEqual(issue.key, flake.GetIssue(up_to_date=True, key_only=True))
def _GetFlakesByBug(monorail_project, bug_id):
  """Gets flakes link to the same bug.

  Gets flakes directly link to the bug and also flakes link to bugs that
  are merged into this bug.
  """
  flake_issue = FlakeIssue.Get(monorail_project, bug_id)
  assert flake_issue, 'Requested FlakeIssue {} not found.'.format(bug_id)
  # The bug itself plus every issue merged into it.
  issue_keys = [flake_issue.key]
  issue_keys.extend(
      FlakeIssue.query(
          FlakeIssue.merge_destination_key == flake_issue.key).fetch(
              keys_only=True))
  flakes = []
  for key in issue_keys:
    flakes.extend(Flake.query(Flake.flake_issue_key == key).fetch())
  # Highest-impact flakes first.
  flakes.sort(key=lambda f: f.flake_score_last_week, reverse=True)
  return flakes
def testGetFlakeIssueDataInconsistent(self):
  """GetIssue returns None when the referenced FlakeIssue was deleted."""
  issue = FlakeIssue.Create(monorail_project='chromium', issue_id=12345)
  issue.put()
  issue_key = issue.key
  flake = Flake.Create(
      luci_project='chromium',
      normalized_step_name='step',
      normalized_test_name='suite.test',
      test_label_name='*/suite.test/*')
  flake.flake_issue_key = issue_key
  flake.put()
  # Simulate a dangling reference by deleting the issue entity.
  issue_key.delete()
  self.assertIsNone(flake.GetIssue())
def testGetIssue(self):
  """GetIssue returns the FlakeIssue entity linked to the flake."""
  project = 'chromium'
  linked_issue = FlakeIssue.Create(project, 12345)
  linked_issue.put()
  flake = Flake.Create(
      luci_project=project,
      normalized_step_name='normalized_step',
      normalized_test_name='a/b.html',
      test_label_name='test_label')
  flake.flake_issue_key = linked_issue.key
  self.assertEqual(linked_issue, flake.GetIssue())
def _AssignIssueToFlake(issue_id, flake):
  """Assigns an issue id to a flake, creating a FlakeIssue if necessary.

  Args:
    issue_id: Id of a Monorail issue.
    flake: A Flake Model entity.

  Returns:
    The FlakeIssue entity now attached to the flake.
  """
  assert flake, 'The flake entity cannot be None.'
  project = FlakeIssue.GetMonorailProjectFromLuciProject(flake.luci_project)
  attached_issue = _GetOrCreateFlakeIssue(issue_id, project)
  flake.flake_issue_key = attached_issue.key
  flake.put()
  return attached_issue
def testMergeOrSplitFlakeIssueByCulpritFlakeIssueClosedLongAgo(self, _):
  """No merge/split should happen when the flake's bug closed long ago."""
  project = 'chromium'
  flake_issue = FlakeIssue.Create(project, 12345)
  flake_issue.status = 'Fixed'
  flake_issue.last_updated_time_in_monorail = datetime(2019, 1, 1)
  flake_issue.put()
  culprit_issue = FlakeIssue.Create(project, 12344)
  culprit_issue.put()
  culprit = FlakeCulprit.Create(project, 'r1000', 1000)
  culprit.flake_issue_key = culprit_issue.key
  culprit.put()
  duplicate, destination = (
      flake_analysis_actions.MergeOrSplitFlakeIssueByCulprit(
          flake_issue.key, culprit.key))
  self.assertIsNone(duplicate)
  self.assertIsNone(destination)
def GenerateDisabledTestsData(disabled_tests):
  """Processes disabled test data to make them ready to be displayed on pages.

  Args:
    disabled_tests ([LuciTest]): A list of LuciTest entities.

  Returns:
    [dict]: A list of dicts containing each disabled test's data.
    Dictionaries are of the format:
    {
      'luci_project' : str,
      'normalized_step_name': str,
      'normalized_test_name': str,
      'disabled_test_variants': [[str]],
      'disabled': bool,
      'issue_keys: [ndb.Key],
      'issues': [
        {
          'issue_id': str,
          'issue_link': str,
        },
      ]
      'tags': [str],
      'last_updated_time': datetime,
    }
  """
  results = []
  for disabled_test in disabled_tests:
    test_dict = disabled_test.to_dict()
    test_dict['disabled_test_variants'] = LuciTest.SummarizeDisabledVariants(
        test_dict['disabled_test_variants'])
    issues = []
    for issue_key in disabled_test.issue_keys:
      issue = issue_key.get()
      if not issue:
        # Skip dangling issue keys whose entity no longer exists.
        continue
      issues.append({
          'issue_link': FlakeIssue.GetLinkForIssue(issue.monorail_project,
                                                   issue.issue_id),
          'issue_id': issue.issue_id,
      })
    test_dict['issues'] = issues
    results.append(test_dict)
  return results
def testGetFirstCommentWhenBugJustCreated(self):
  """Tests the first comment rendered for a newly filed group bug."""
  issue_generator_new = self._GetIssueGenerator()
  flake_issue = FlakeIssue.Create('chromium', 12345)
  flake_issue.put()
  issue_generator_new.SetFlakeIssue(flake_issue)
  # Pre-filled, URL-encoded "wrong result" feedback link embedded in the
  # expected comment.
  wrong_result_link = (
      'https://bugs.chromium.org/p/chromium/issues/entry?'
      'status=Unconfirmed&labels=Pri-1,Test-Findit-Wrong&'
      'components=Infra%3ETest%3EFlakiness&'
      'summary=%5BFindit%5D%20Flake%20Detection%20-%20Wrong%20result%3A%20'
      'Tests in step&comment=Link%20to%20flake%20details%3A%20'
      'https://analysis.chromium.org/p/chromium/flake-portal/flakes?bug_id={}'
  ).format(flake_issue.issue_id)
  expected_description = _EXPECTED_GROUP_FIRST_COMMENT.format(
      flake_issue.issue_id, wrong_result_link)
  self.assertEqual(expected_description,
                   issue_generator_new.GetFirstCommentWhenBugJustCreated())
def testGenerateDisabledTestsData(self):
  """Tests converting LuciTest entities into display dictionaries."""
  disabled_test_key = LuciTest.CreateKey('a', 'b', 'c')
  disabled_tests = [
      LuciTest(
          key=disabled_test_key,
          disabled_test_variants={('os:Mac1234', ), ('Unknown', )},
          last_updated_time=datetime(2019, 6, 29, 0, 0, 0),
          issue_keys=[ndb.Key('FlakeIssue', 'chromium@123')])
  ]
  flake_issue = FlakeIssue.Create('chromium', 123)
  flake_issue.put()
  expected_disabled_test_dictionaries = [{
      'luci_project': 'a',
      'normalized_step_name': 'b',
      'normalized_test_name': 'c',
      # Variants are expected to be summarized (e.g. 'os:Mac1234' -> 'os:Mac').
      'disabled_test_variants': [
          [
              'os:Mac',
          ],
          [
              'Unknown',
          ],
      ],
      'issue_keys': [ndb.Key('FlakeIssue', 'chromium@123')],
      'issues': [
          {
              'issue_id': 123,
              'issue_link':
                  'https://monorail-prod.appspot.com/p/chromium/issues/detail?id=123',
          },
      ],
      'tags': [],
      'disabled': True,
      'last_updated_time': datetime(2019, 6, 29, 0, 0, 0),
  }]
  self.assertEqual(
      expected_disabled_test_dictionaries,
      disabled_test_detection_utils.GenerateDisabledTestsData(disabled_tests))
def _GetFlakeIssuesNeedingUpdating():
  """Returns a list of all FlakeIssue entities needing updating."""
  statuses = _GetIssueStatusesNeedingUpdating()
  # Query and update issues by oldest first that's still open in case there
  # are exceptions when trying to update issues.
  query = FlakeIssue.query().filter(FlakeIssue.status.IN(statuses)).order(
      FlakeIssue.last_updated_time_in_monorail, FlakeIssue.key)
  results = []
  cursor = None
  more = True
  while more:
    page, cursor, more = query.fetch_page(_PAGE_SIZE, start_cursor=cursor)
    results.extend(page)
  return results
def testMergeOrSplitFlakeIssueByCulprit(self):
  """A fresh issue and culprit should be cross-linked to each other."""
  project = 'chromium'
  issue = FlakeIssue.Create(project, 12345)
  issue.put()
  culprit = FlakeCulprit.Create(project, 'r1000', 1000)
  culprit.put()
  flake_analysis_actions.MergeOrSplitFlakeIssueByCulprit(
      issue.key, culprit.key)
  refreshed_issue = issue.key.get()
  refreshed_culprit = culprit.key.get()
  self.assertEqual(refreshed_culprit.key, refreshed_issue.flake_culprit_key)
  self.assertEqual(refreshed_issue.key, refreshed_culprit.flake_issue_key)
def GenerateFlakesData(flakes, include_closed_bug=False):
  """Processes flakes data to make them ready to be displayed on pages.

  Args:
    flakes ([Flake]): A list of Flake objects.
    include_closed_bug (bool): True to include info about closed bugs.
      Otherwise False.

  Returns:
    [dict]: A list of dicts containing each flake's data.
  """
  flakes_data = []
  for flake in flakes:
    flake_dict = flake.to_dict()
    # Tries to use merge_destination first, then falls back to the bug that
    # directly associates to the flake.
    flake_issue = GetFlakeIssue(flake)
    if (flake_issue and
        (include_closed_bug or
         (flake_issue.status and
          flake_issue.status in OPEN_STATUSES))):  # pragma: no branch.
      # Only show open bugs on dashboard.
      # Unless told otherwise.
      flake_dict['flake_issue'] = flake_issue.to_dict()
      flake_dict['flake_issue']['issue_link'] = FlakeIssue.GetLinkForIssue(
          flake_issue.monorail_project, flake_issue.issue_id)
      flake_dict['flake_issue'][
          'last_updated_time_in_monorail'] = _GetLastUpdatedTimeDelta(
              flake_issue)
    flake_dict['flake_urlsafe_key'] = flake.key.urlsafe()
    flake_dict['time_delta'] = time_util.FormatTimedelta(
        time_util.GetUTCNow() - flake.last_occurred_time,
        with_days=True) if flake.last_occurred_time else None
    flake_dict['flake_counts_last_week'] = _GetFlakeCountsList(
        flake.flake_counts_last_week)
    flakes_data.append(flake_dict)
  return flakes_data