def AddFlakeIfBelong(self, flake, occurrences):
    """Adds the flake to the group, updating flakes_with_same_occurrences and
  num_occurrences if needed.

  Args:
    flake (Flake): Flake entity to be added to the group.
    occurrences (list): A list of occurrences of the flake.

  Returns:
    (bool), True if the flake is added to the group, otherwise False.
  """

    flake_issue = GetFlakeIssue(flake)
    assert flake_issue == self.flake_issue, (
        'Tried to add flake {flake} to group with issue {issue}, while flake '
        'links to another issue {a_issue}'.format(
            flake=flake.key.urlsafe(),
            issue=FlakeIssue.GetLinkForIssue(
                self.flake_issue.monorail_project,
                self.flake_issue.issue_id),
            a_issue=FlakeIssue.GetLinkForIssue(
                flake_issue.monorail_project, flake_issue.issue_id)
            if flake_issue else None))

    self.flakes.append(flake)
    if len(occurrences) < self.num_occurrences:
        # Only maintains a minimum num_occurrences to show in bug comments.
        self.num_occurrences = len(occurrences)
        self.flakes_with_same_occurrences = False
    return True
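
# --- Illustrative sketch, not part of the original source ---
# AddFlakeIfBelong keeps the smallest occurrence count seen so far, so a bug
# comment never overstates how often every flake in the group occurred;
# flakes_with_same_occurrences flips to False the first time a smaller count
# shows up. The stand-alone helper below mirrors that bookkeeping without the
# datastore entities; its name is hypothetical.
def _example_track_min_occurrences(occurrence_counts):
    """Returns (num_occurrences, flakes_with_same_occurrences) for a group."""
    num_occurrences = occurrence_counts[0]
    flakes_with_same_occurrences = True
    for count in occurrence_counts[1:]:
        if count < num_occurrences:
            num_occurrences = count
            flakes_with_same_occurrences = False
    return num_occurrences, flakes_with_same_occurrences


assert _example_track_min_occurrences([5, 3, 4]) == (3, False)
assert _example_track_min_occurrences([2, 2]) == (2, True)
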
def _CreateIssueForFlake(issue_generator, target_flake, create_or_update_bug):
    """Creates a monorail bug for a single flake.

  This function is used to create bugs for detected flakes and flake analysis
  results.

  Args:
    issue_generator: An issue generator that provides the bug content for the
      flake.
    target_flake (Flake): The flake to create or attach a bug for.
    create_or_update_bug (bool): True to create or update a monorail bug,
      otherwise False. Existing bugs are always searched for, even if they
      cannot be updated.
  """
    monorail_project = issue_generator.GetMonorailProject()

    # Re-uses an existing open bug if possible.
    issue_id = SearchOpenIssueIdForFlakyTest(target_flake.normalized_test_name,
                                             monorail_project)

    if not issue_id:
        # Reopens a recently closed bug if possible.
        issue_id = SearchRecentlyClosedIssueIdForFlakyTest(
            target_flake.normalized_test_name, monorail_project)

    if issue_id:
        logging.info('An existing issue %s was found, attach it to flake: %s.',
                     FlakeIssue.GetLinkForIssue(monorail_project, issue_id),
                     target_flake.key)
        _AssignIssueToFlake(issue_id, target_flake)

        if create_or_update_bug:
            monorail_util.UpdateIssueWithIssueGenerator(
                issue_id=issue_id,
                issue_generator=issue_generator,
                reopen=True)
        return issue_id

    if not create_or_update_bug:
        # No existing bug was found and a new bug cannot be created; bail out.
        return None

    logging.info('No existing open issue was found, creating a new one.')
    issue_id = monorail_util.CreateIssueWithIssueGenerator(
        issue_generator=issue_generator)

    if not issue_id:
        logging.warning('Failed to create monorail bug for flake: %s.',
                        target_flake.key)
        return None
    logging.info('%s was created for flake: %s.',
                 FlakeIssue.GetLinkForIssue(monorail_project, issue_id),
                 target_flake.key)
    _AssignIssueToFlake(issue_id, target_flake)
    return issue_id
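
# --- Illustrative sketch, not part of the original source ---
# _CreateIssueForFlake looks for an issue in this order: reuse an open bug,
# otherwise reopen a recently closed one, otherwise create a new bug (and only
# then when create_or_update_bug is True). The helper below is a hypothetical
# condensation of that decision, with plain callables standing in for the
# monorail searches.
def _example_pick_issue_id(search_open, search_recently_closed, create_new,
                           create_or_update_bug):
    """Returns an issue id following the reuse-reopen-create order."""
    issue_id = search_open() or search_recently_closed()
    if issue_id:
        return issue_id
    if not create_or_update_bug:
        return None
    return create_new()


# A recently closed bug is reused even though no open bug exists.
assert _example_pick_issue_id(lambda: None, lambda: 42, lambda: 99, True) == 42
# Without permission to create bugs, no new issue id is produced.
assert _example_pick_issue_id(
    lambda: None, lambda: None, lambda: 99, False) is None
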
def testGetLinkForStagingIssue(self, _):
    monorail_project = 'chromium'
    issue_id = 12345
    self.assertEqual(
        ('https://monorail-staging.appspot.com/p/chromium/issues/detail?'
         'id=12345'),
        FlakeIssue.GetLinkForIssue(monorail_project, issue_id))
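
# --- Illustrative sketch, not part of the original source ---
# Based on the URL the test above expects, FlakeIssue.GetLinkForIssue
# presumably formats a monorail issue-detail URL from a host, a project and an
# issue id. The helper and its host argument below are assumptions used only
# to make that expected format explicit.
def _example_link_for_issue(monorail_project, issue_id,
                            host='monorail-staging.appspot.com'):
    """Builds an issue link in the format the tests in this file expect."""
    return 'https://%s/p/%s/issues/detail?id=%d' % (host, monorail_project,
                                                    issue_id)


assert _example_link_for_issue('chromium', 12345) == (
    'https://monorail-staging.appspot.com/p/chromium/issues/detail?id=12345')
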
def _UpdateFlakeIssueWithMonorailIssue(flake_issue, monorail_issue):
    """Updates a FlakeIssue with its corresponding Monorail issue."""
    issue_id = flake_issue.issue_id
    monorail_project = flake_issue.monorail_project
    issue_link = FlakeIssue.GetLinkForIssue(monorail_project, issue_id)

    if not monorail_issue:
        # Possible reason: Findit doesn't have access to the monorail issue.
        return

    assert monorail_issue.status is not None, (
        'Failed to get issue.status from {}'.format(issue_link))
    assert monorail_issue.updated or monorail_issue.closed, (
        'Failed to get updated time from {}'.format(issue_link))

    if monorail_issue.status == 'Duplicate':
        # Impacted |merge_destination_key|s need to be updated.
        merged_monorail_issue = monorail_util.GetMergedDestinationIssueForId(
            issue_id, monorail_project)
        if not merged_monorail_issue.id:
            logging.warning('Failed to get merged monorail issue %s',
                            issue_link)

        _UpdateMergeDestinationAndIssueLeaves(flake_issue,
                                              merged_monorail_issue)

    flake_issue.Update(
        status=monorail_issue.status,
        labels=monorail_issue.labels,
        last_updated_time_in_monorail=(monorail_issue.closed
                                       or monorail_issue.updated),
        create_time_in_monorail=monorail_issue.created)
def SyncOpenFlakeIssuesWithMonorail():
    """Updates open FlakeIssues to reflect the latest state in Monorail."""
    flake_issues_needing_updating = _GetFlakeIssuesNeedingUpdating()

    for flake_issue in flake_issues_needing_updating:
        issue_id = flake_issue.issue_id
        monorail_project = flake_issue.monorail_project

        # TODO(crbug.com/914160): Monorail has a maximum of 300 requests per minute
        # within any 5 minute window. Should the limit be exceeded, requests will
        # result in 4xx errors and exponential backoff should be used.
        monorail_issue = monorail_util.GetMonorailIssueForIssueId(
            issue_id, monorail_project)
        if (not monorail_issue or monorail_issue.id is None
                or int(monorail_issue.id) != issue_id):  # pragma: no cover
            # Not expected in practice (hence no coverage), but log a warning
            # and skip.
            link = FlakeIssue.GetLinkForIssue(monorail_project, issue_id)
            logging.warning('Failed to get issue %s', link)
            continue

        _UpdateFlakeIssueWithMonorailIssue(flake_issue, monorail_issue)

        if monorail_issue.status in issue_constants.CLOSED_STATUSES_NO_DUPLICATE:
            # The issue is closed; detach it from its flakes.
            _ArchiveFlakesForClosedIssue(flake_issue)
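
# --- Illustrative sketch, not part of the original source ---
# The TODO in SyncOpenFlakeIssuesWithMonorail notes that Monorail allows at
# most 300 requests per minute within any 5-minute window, and that requests
# beyond the limit fail with 4xx errors which should be retried with
# exponential backoff. A minimal, hypothetical retry helper could look like
# the one below; it is not a Findit API, and real code should catch only the
# specific rate-limit error instead of a bare Exception.
import random
import time


def _example_fetch_with_backoff(fetch, max_attempts=5, base_delay_seconds=1.0):
    """Calls |fetch|, retrying with exponential backoff plus jitter."""
    for attempt in range(max_attempts):
        try:
            return fetch()
        except Exception:
            if attempt == max_attempts - 1:
                raise
            delay = base_delay_seconds * (2 ** attempt) + random.uniform(0, 1)
            time.sleep(delay)
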
def GetFlakeInformation(flake, max_occurrence_count, with_occurrences=True):
    """Gets information for a detected flakes.
  Gets occurrences of the flake and the attached monorail issue.

  Args:
    flake(Flake): Flake object for a flaky test.
    max_occurrence_count(int): Maximum number of occurrences to fetch.
    with_occurrences(bool): Whether the flake must have occurrences.
      Flakes reported by Flake Detection should always have occurrences, but
      that is not always true for flakes reported by Flake Analyzer; ignore
      those flakes for now.
  Returns:
    flake_dict(dict): A dict of information for the test, including data from
      its Flake entity, its flake issue and all of its flake occurrences.
  """
    occurrences = []
    for flake_type in [
            FlakeType.CQ_FALSE_REJECTION, FlakeType.RETRY_WITH_PATCH,
            FlakeType.CI_FAILED_STEP, FlakeType.CQ_HIDDEN_FLAKE
    ]:
        typed_occurrences = _FetchFlakeOccurrences(flake, flake_type,
                                                   max_occurrence_count)
        occurrences.extend(typed_occurrences)

        if max_occurrence_count:
            max_occurrence_count -= len(typed_occurrences)
            if max_occurrence_count == 0:
                # Bail out once higher-impact occurrence types have used up
                # the cap.
                break

    if not occurrences and with_occurrences:
        # The flake must have occurrences but none were found; bail out.
        return None

    # Makes sure occurrences are sorted by time_happened in descending order,
    # regardless of types.
    occurrences.sort(key=lambda x: x.time_happened, reverse=True)
    flake_dict = flake.to_dict()
    flake_dict['occurrences'] = _GetGroupedOccurrencesByBuilder(occurrences)
    flake_dict['flake_counts_last_week'] = _GetFlakeCountsList(
        flake.flake_counts_last_week)

    flake_issue = GetFlakeIssue(flake)
    if flake_issue and flake_issue.status and flake_issue.status in OPEN_STATUSES:
        flake_dict['flake_issue'] = flake_issue.to_dict()
        flake_dict['flake_issue']['issue_link'] = FlakeIssue.GetLinkForIssue(
            flake_issue.monorail_project, flake_issue.issue_id)
        flake_dict['flake_issue'][
            'last_updated_time_in_monorail'] = _GetLastUpdatedTimeDelta(
                flake_issue)

        flake_dict['culprits'], flake_dict['sample_analysis'] = (
            _GetFlakeAnalysesResults(flake_issue.issue_id))
    return flake_dict
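
# --- Illustrative sketch, not part of the original source ---
# GetFlakeInformation spreads max_occurrence_count over the flake types in
# priority order: each type consumes part of the cap and only the remainder is
# passed on to lower-impact types. The function below is a hypothetical
# stand-in for that budgeting, using plain counts instead of datastore queries.
def _example_cap_budgeting(counts_by_type, max_occurrence_count):
    """Returns how many occurrences each type contributes under the cap."""
    fetched = []
    for available in counts_by_type:  # Ordered from highest to lowest impact.
        taken = min(available, max_occurrence_count)
        fetched.append(taken)
        max_occurrence_count -= taken
        if max_occurrence_count == 0:
            break
    return fetched


# With a cap of 5 and [3, 4, 2] occurrences per type, only [3, 2] are fetched.
assert _example_cap_budgeting([3, 4, 2], 5) == [3, 2]
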
def GetAndUpdateMergedIssue(flake_issue):
    """Gets the most up-to-date merged issue and update data in data store.

  Args:
    flake_issue (FlakeIssue): FlakeIssue to check its merge destination and
       update.

  Returns:
    merged_issue (monorail_api.Issue): Merge destination of the flake_issue.
  """
    monorail_project = flake_issue.monorail_project
    merged_issue = monorail_util.GetMergedDestinationIssueForId(
        flake_issue.issue_id, monorail_project)
    if merged_issue and flake_issue.issue_id != merged_issue.id:
        logging.info(
            'Flake issue %s was merged into %s, updating this issue and'
            ' all issues merged into it.',
            FlakeIssue.GetLinkForIssue(monorail_project, flake_issue.issue_id),
            FlakeIssue.GetLinkForIssue(monorail_project, merged_issue.id))
        _UpdateMergeDestinationAndIssueLeaves(flake_issue, merged_issue)

    return merged_issue
def GenerateDisabledTestsData(disabled_tests):
    """Processes disabled test data to make them ready to be displayed on pages.

  Args:
    disabled_tests ([LuciTest]): A list of LuciTest entities.

  Returns:
    [dict]: A list of dicts containing each disabled test's data.
      Dictionaries are of the format:
      {
        'luci_project' : str,
        'normalized_step_name': str,
        'normalized_test_name': str,
        'disabled_test_variants': [[str]],
        'disabled': bool,
        'issue_keys': [ndb.Key],
        'issues': [
          {
          'issue_id': str,
          'issue_link': str,
          },
        ],
        'tags': [str],
        'last_updated_time': datetime,
      }
  """
    disabled_tests_data = []
    for disabled_test in disabled_tests:
        disabled_test_dict = disabled_test.to_dict()
        disabled_test_dict[
            'disabled_test_variants'] = LuciTest.SummarizeDisabledVariants(
                disabled_test_dict['disabled_test_variants'])
        disabled_test_dict['issues'] = []
        for issue_key in disabled_test.issue_keys:
            issue = issue_key.get()
            if not issue:
                continue
            issue_dict = {
                'issue_link':
                FlakeIssue.GetLinkForIssue(issue.monorail_project,
                                           issue.issue_id),
                'issue_id':
                issue.issue_id
            }
            disabled_test_dict['issues'].append(issue_dict)
        disabled_tests_data.append(disabled_test_dict)
    return disabled_tests_data
def GenerateFlakesData(flakes, include_closed_bug=False):
    """Processes flakes data to make them ready to be displayed on pages.

  Args:
    flakes ([Flake]): A list of Flake objects.
    include_closed_bug (bool): True to include info about closed bugs. Otherwise
      False.

  Returns:
    [dict]: A list of dicts containing each flake's data.
  """
    flakes_data = []
    for flake in flakes:
        flake_dict = flake.to_dict()

        # Tries to use merge_destination first, then falls back to the bug
        # that is directly associated with the flake.
        flake_issue = GetFlakeIssue(flake)
        if (flake_issue and
            (include_closed_bug or
             (flake_issue.status
              and flake_issue.status in OPEN_STATUSES))):  # pragma: no branch.
            # Only show open bugs on the dashboard, unless told otherwise.
            flake_dict['flake_issue'] = flake_issue.to_dict()
            flake_dict['flake_issue'][
                'issue_link'] = FlakeIssue.GetLinkForIssue(
                    flake_issue.monorail_project, flake_issue.issue_id)
            flake_dict['flake_issue'][
                'last_updated_time_in_monorail'] = _GetLastUpdatedTimeDelta(
                    flake_issue)

        flake_dict['flake_urlsafe_key'] = flake.key.urlsafe()
        flake_dict['time_delta'] = time_util.FormatTimedelta(
            time_util.GetUTCNow() - flake.last_occurred_time,
            with_days=True) if flake.last_occurred_time else None

        flake_dict['flake_counts_last_week'] = _GetFlakeCountsList(
            flake.flake_counts_last_week)

        flakes_data.append(flake_dict)
    return flakes_data
def _UpdateMergeDestinationAndIssueLeaves(flake_issue, merged_monorail_issue):
    """Updates flake_issue and all other issues that are merged into it to
    store the new merging_destination.

  Args:
    flake_issue (FlakeIssue): A FlakeIssue to update.
    merged_monorail_issue (Issue): The merged Monorail issue.
  """
    merged_flake_issue = _GetOrCreateFlakeIssue(int(merged_monorail_issue.id),
                                                flake_issue.monorail_project)
    assert merged_flake_issue, (
        'Failed to get or create FlakeIssue for merged_issue %s' %
        FlakeIssue.GetLinkForIssue(flake_issue.monorail_project,
                                   merged_monorail_issue.id))

    merged_flake_issue_key = merged_flake_issue.key
    flake_issue.merge_destination_key = merged_flake_issue_key
    flake_issue.put()

    UpdateIssueLeaves(flake_issue.key, merged_flake_issue_key)
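
# --- Illustrative sketch, not part of the original source ---
# Conceptually, when issue A is merged into issue B, A's merge_destination_key
# should point at B, and every issue whose merge chain previously ended at A
# should be re-pointed at B as well (which is what UpdateIssueLeaves
# presumably does). The dict below is a hypothetical stand-in for the
# datastore entities, used only to show the re-pointing.
def _example_repoint_merge_destinations(destinations, merged_issue,
                                        new_destination):
    """Points merged_issue, and every issue pointing at it, to the new dest."""
    destinations[merged_issue] = new_destination
    for issue, destination in destinations.items():
        if destination == merged_issue:
            destinations[issue] = new_destination


# Issues 1 and 2 were merged into issue 3; now 3 has been merged into 4.
example_destinations = {1: 3, 2: 3}
_example_repoint_merge_destinations(example_destinations, 3, 4)
assert example_destinations == {1: 4, 2: 4, 3: 4}
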
def _CreateIssueForFlakeGroup(flake_group):
    """Creates an issue for a flake group.

  Args:
    flake_group (FlakeGroupByOccurrences): A flake group without an issue.

  Returns:
    Id of the issue that was eventually created or linked.
  """

    assert isinstance(flake_group, FlakeGroupByOccurrences), (
        'flake_group is not a FlakeGroupByOccurrences instance.')

    issue_generator = FlakeDetectionGroupIssueGenerator(
        flake_group.flakes,
        flake_group.num_occurrences,
        canonical_step_name=flake_group.canonical_step_name)
    issue_id = monorail_util.CreateIssueWithIssueGenerator(
        issue_generator=issue_generator)
    if not issue_id:
        logging.warning('Failed to create monorail bug for flake group: %s.',
                        flake_group.canonical_step_name)
        return None
    logging.info(
        '%s was created for flake_group: %s.',
        FlakeIssue.GetLinkForIssue(issue_generator.GetMonorailProject(),
                                   issue_id), flake_group.canonical_step_name)

    flake_issue = _AssignIssueToFlake(issue_id, flake_group.flakes[0])
    for i in xrange(1, len(flake_group.flakes)):
        flake = flake_group.flakes[i]
        flake.flake_issue_key = flake_issue.key
        flake.put()

    issue_generator.SetFlakeIssue(flake_issue)

    monorail_util.PostCommentOnMonorailBug(
        issue_id, issue_generator,
        issue_generator.GetFirstCommentWhenBugJustCreated())

    return issue_id
def UpdateMonorailBugWithCulprit(analysis_urlsafe_key):
    """Updates a bug in monorail with the culprit of a MasterFlakeAnalsyis"""
    analysis = entity_util.GetEntityFromUrlsafeKey(analysis_urlsafe_key)
    assert analysis, 'Analysis {} missing unexpectedly!'.format(
        analysis_urlsafe_key)

    if not analysis.flake_key:  # pragma: no cover.
        logging.warning(
            'Analysis %s has no flake key. Bug updates should only be '
            'routed through Flake and FlakeIssue', analysis_urlsafe_key)
        return

    flake = analysis.flake_key.get()
    assert flake, 'Analysis\' associated Flake {} missing unexpectedly!'.format(
        analysis.flake_key)

    flake_urlsafe_key = flake.key.urlsafe()
    if flake.archived:
        logging.info(
            'Flake %s has been archived when flake analysis %s completes.',
            flake_urlsafe_key, analysis_urlsafe_key)
        return

    if not flake.flake_issue_key:  # pragma: no cover.
        logging.warning(
            'Flake %s has no flake_issue_key. Bug updates should only'
            ' be routed through Flake and FlakeIssue', flake_urlsafe_key)
        return

    flake_issue = flake.flake_issue_key.get()
    assert flake_issue, 'Flake issue {} missing unexpectedly!'.format(
        flake.flake_issue_key)

    # Only comment on the latest flake issue.
    flake_issue_to_update = flake_issue.GetMostUpdatedIssue()
    issue_link = FlakeIssue.GetLinkForIssue(
        flake_issue_to_update.monorail_project, flake_issue_to_update.issue_id)

    # Don't comment if the issue is closed.
    latest_merged_monorail_issue = monorail_util.GetMonorailIssueForIssueId(
        flake_issue_to_update.issue_id)
    if not latest_merged_monorail_issue or not latest_merged_monorail_issue.open:
        logging.info(
            'Skipping updating issue %s which is not accessible or closed',
            issue_link)
        return

    # Don't comment if there are existing updates by Findit to prevent spamming.
    if flake_issue_to_update.last_updated_time_with_analysis_results:
        logging.info(
            'Skipping updating issue %s as it has already been updated',
            issue_link)
        return

    # Don't comment if Findit has filled the daily quota of monorail updates.
    if flake_issue_util.GetRemainingPostAnalysisDailyBugUpdatesCount() <= 0:
        logging.info(
            'Skipping updating issue %s due to maximum daily bug limit being '
            'reached', issue_link)
        return

    # Comment with link to FlakeCulprit.
    monorail_util.UpdateIssueWithIssueGenerator(
        flake_issue_to_update.issue_id,
        issue_generator.FlakeAnalysisIssueGenerator(analysis))
    flake_issue_to_update.last_updated_time_with_analysis_results = (
        time_util.GetUTCNow())
    flake_issue_to_update.last_updated_time_in_monorail = time_util.GetUTCNow()
    flake_issue_to_update.put()

    monitoring.flake_analyses.increment({
        'result': 'culprit-identified',
        'action_taken': 'bug-updated',
        'reason': ''
    })
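
# --- Illustrative sketch, not part of the original source ---
# UpdateMonorailBugWithCulprit only posts a comment when its last three guards
# pass: the merged bug is open and accessible, Findit has not already
# commented with analysis results, and the daily bug-update quota is not
# exhausted. The predicate below is a hypothetical condensation of those
# guards, not a Findit API.
def _example_should_comment_with_culprit(issue_is_open, already_commented,
                                         remaining_daily_updates):
    """Mirrors the three early-return guards above as a single predicate."""
    return (issue_is_open and not already_commented
            and remaining_daily_updates > 0)


assert _example_should_comment_with_culprit(True, False, 10)
assert not _example_should_comment_with_culprit(True, True, 10)  # No spam.
assert not _example_should_comment_with_culprit(True, False, 0)  # Quota hit.
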
    def setUp(self):
        super(RankFlakesTest, self).setUp()

        self.flake_issue0 = FlakeIssue.Create(monorail_project='chromium',
                                              issue_id=900)
        self.flake_issue0.last_updated_time_in_monorail = datetime.datetime(
            2018, 1, 1)
        self.flake_issue0.status = 'Assigned'
        self.flake_issue0.put()

        self.flake_issue1 = FlakeIssue.Create(monorail_project='chromium',
                                              issue_id=1000)
        self.flake_issue1.last_updated_time_in_monorail = datetime.datetime(
            2018, 1, 1)
        self.flake_issue1.merge_destination_key = self.flake_issue0.key
        self.flake_issue1.put()

        self.luci_project = 'chromium'
        self.normalized_step_name = 'normalized_step_name'
        self.flake1 = Flake.Create(
            luci_project=self.luci_project,
            normalized_step_name=self.normalized_step_name,
            normalized_test_name='normalized_test_name',
            test_label_name='normalized_test_name')

        self.flake1.flake_issue_key = self.flake_issue1.key
        self.flake1.false_rejection_count_last_week = 3
        self.flake1.impacted_cl_count_last_week = 2
        self.flake1.flake_score_last_week = 0
        self.flake1.last_occurred_time = datetime.datetime(2018, 10, 1)
        self.flake1.put()

        self.flake2 = Flake.Create(
            luci_project=self.luci_project,
            normalized_step_name=self.normalized_step_name,
            normalized_test_name='suite.test1',
            test_label_name='suite.test1')
        self.flake2.put()

        self.flake3 = Flake.Create(
            luci_project=self.luci_project,
            normalized_step_name=self.normalized_step_name,
            normalized_test_name='suite.test2',
            test_label_name='suite.test2')
        self.flake3.false_rejection_count_last_week = 5
        self.flake3.impacted_cl_count_last_week = 3
        self.flake3.flake_score_last_week = 10800
        self.flake3.last_occurred_time = datetime.datetime(2018, 10, 1)
        self.flake3.flake_issue_key = self.flake_issue0.key
        self.flake3.tags = ['suite::suite', 'test_type::flavored_tests']
        self.flake3.put()

        self.flake4 = Flake.Create(
            luci_project=self.luci_project,
            normalized_step_name=self.normalized_step_name,
            normalized_test_name='suite.test3',
            test_label_name='suite.test3')
        self.flake4.false_rejection_count_last_week = 5
        self.flake4.impacted_cl_count_last_week = 3
        self.flake4.flake_score_last_week = 1080
        self.flake4.last_occurred_time = datetime.datetime(2018, 10, 1)
        self.flake4.tags = ['test_type::tests']
        self.flake4.put()

        self.flake5 = Flake.Create(
            luci_project=self.luci_project,
            normalized_step_name=self.normalized_step_name,
            normalized_test_name='suite.test5',
            test_label_name='suite.test5')
        self.flake5.false_rejection_count_last_week = 5
        self.flake5.impacted_cl_count_last_week = 3
        self.flake5.flake_score_last_week = 10800
        self.flake5.last_occurred_time = datetime.datetime(2018, 10, 1)
        self.flake5.flake_issue_key = self.flake_issue0.key
        self.flake5.tags = ['suite::suite', 'test_type::flavored_tests']
        self.flake5.archived = True
        self.flake5.put()

        self.flake6 = Flake.Create(
            luci_project=self.luci_project,
            normalized_step_name=self.normalized_step_name,
            normalized_test_name='suite.test6',
            test_label_name='suite.test6')
        self.flake6.false_rejection_count_last_week = 5
        self.flake6.impacted_cl_count_last_week = 3
        self.flake6.flake_score_last_week = 108
        self.flake6.last_occurred_time = datetime.datetime(2018, 10, 1)
        self.flake6.flake_issue_key = self.flake_issue0.key
        self.flake6.tags = ['suite::suite', 'test_type::flavored_tests']
        self.flake6.put()

        flake_issue0_dict = self.flake_issue0.to_dict()
        flake_issue0_dict['issue_link'] = FlakeIssue.GetLinkForIssue(
            self.flake_issue0.monorail_project, self.flake_issue0.issue_id)
        flake_issue0_dict['last_updated_time_in_monorail'] = (
            '274 days, 01:00:00')
        flake_issue0_dict['status'] = 'Assigned'

        self.flake1_dict = self.flake1.to_dict()
        self.flake1_dict['flake_issue'] = flake_issue0_dict

        self.flake3_dict = self.flake3.to_dict()
        self.flake3_dict['flake_issue'] = flake_issue0_dict

        self.flake4_dict = self.flake4.to_dict()

        self.flake5_dict = self.flake5.to_dict()
        self.flake5_dict['flake_issue'] = flake_issue0_dict

        self.flake6_dict = self.flake6.to_dict()
        self.flake6_dict['flake_issue'] = flake_issue0_dict

        for data, flake in ((self.flake1_dict, self.flake1),
                            (self.flake3_dict, self.flake3),
                            (self.flake4_dict, self.flake4),
                            (self.flake5_dict, self.flake5),
                            (self.flake6_dict, self.flake6)):
            data['flake_urlsafe_key'] = flake.key.urlsafe()
            data['time_delta'] = '1 day, 01:00:00'
            data['flake_counts_last_week'] = [{
                'flake_type': 'cq false rejection',
                'impacted_cl_count': 0,
                'occurrence_count': 0
            }, {
                'flake_type': 'cq step level retry',
                'impacted_cl_count': 0,
                'occurrence_count': 0
            }, {
                'flake_type': 'cq hidden flake',
                'impacted_cl_count': 0,
                'occurrence_count': 0
            }, {
                'flake_type': 'ci failed step',
                'impacted_cl_count': 0,
                'occurrence_count': 0
            }]
def testGetLinkForProdIssue(self, _):
    monorail_project = 'chromium'
    issue_id = 12345
    self.assertEqual(
        'https://monorail-prod.appspot.com/p/chromium/issues/detail?id=12345',
        FlakeIssue.GetLinkForIssue(monorail_project, issue_id))