Example #1
 def testGetPagedResultsForDirectionPrevious(self):
   """Tests getting previous page."""
   entities_on_page1, _, bottom_cursor1 = dashboard_util.GetPagedResults(
       Entity.query(), [(Entity.time, 'desc')], direction='next', page_size=1)
   _, top_cursor2, _ = dashboard_util.GetPagedResults(
       Entity.query(), [(Entity.time, 'desc')],
       cursor=bottom_cursor1,
       direction='next',
       page_size=1)
   back_to_page1_entities, _, _ = dashboard_util.GetPagedResults(
       Entity.query(), [(Entity.time, 'desc')],
       cursor=top_cursor2,
       direction='previous',
       page_size=1)
   self.assertListEqual(entities_on_page1, back_to_page1_entities)
Example #2
 def testGetPagedResultsForDirectionNext(self):
   """Tests getting next page."""
   entities, _, _ = dashboard_util.GetPagedResults(
       Entity.query(),
       [(Entity.time, 'desc')], direction='next', page_size=1)
   self.assertEqual(len(entities), 1)
   self.assertEqual(entities, [self.entities[2]])
Example #3
def _GetDisabledTestsQueryResults(cursor, direction, page_size):
  """Gets queried results of disabled tests.

  Args:
    cursor (None or str): The urlsafe-encoded cursor marking the current
      position in the query results, from which the next set of entities is
      retrieved.
    direction (str): Either 'previous' or 'next'.
    page_size (int): Number of entities to show per page.

  Returns:
    A tuple of (disabled_tests, prev_cursor, cursor).
    disabled_tests ([LuciTest]): List of disabled tests to be displayed on the
      current page.
    prev_cursor (str): The urlsafe encoding of the cursor, which is at the
      top position of entities of the current page.
    cursor (str): The urlsafe encoding of the cursor, which is at the
      bottom position of entities of the current page.
  """

  disabled_tests_query = LuciTest.query(LuciTest.disabled == True)  # pylint: disable=singleton-comparison

  return dashboard_util.GetPagedResults(
      disabled_tests_query,
      order_properties=[
          (LuciTest.last_updated_time, dashboard_util.DESC),
          (LuciTest.normalized_test_name, dashboard_util.ASC),
      ],
      cursor=cursor,
      direction=direction,
      page_size=page_size)
Example #4
def _GetFlakeQueryResults(luci_project, cursor, direction, page_size):
    """Gets queried results of flakes.

  Args:
    luci_project (str): Luci project of the flakes.
    cursor (None or str): The urlsafe-encoded cursor marking the current
      position in the query results, from which the next set of entities is
      retrieved.
    direction (str): Either 'previous' or 'next'.
    page_size (int): Number of entities to show per page.

  Returns:
    A tuple of (flakes, prev_cursor, cursor).
    flakes (list): List of flakes to be displayed on the current page.
    prev_cursor (str): The urlsafe encoding of the cursor, which is at the
      top position of entities of the current page.
    cursor (str): The urlsafe encoding of the cursor, which is at the
      bottom position of entities of the current page.
  """
    flake_query = Flake.query(
        ndb.AND(Flake.luci_project == luci_project, Flake.archived == False))  # pylint: disable=singleton-comparison
    # Only include flakes with a positive flake_score_last_week, the property
    # used as the primary sort key below.
    flake_query = flake_query.filter(Flake.flake_score_last_week > 0)
    first_sort_property = Flake.flake_score_last_week

    return dashboard_util.GetPagedResults(
        flake_query,
        order_properties=[
            (first_sort_property, dashboard_util.DESC),
            (Flake.last_occurred_time, dashboard_util.DESC),
            (Flake.normalized_step_name, dashboard_util.ASC),
            (Flake.test_label_name, dashboard_util.ASC),
        ],
        cursor=cursor,
        direction=direction,
        page_size=page_size)
Example #5
 def testFetchPreviousPages(self):
     query = _GetFlakeAnalysisFilterQuery(MasterFlakeAnalysis.query())
     result2, _, _ = dashboard_util.GetPagedResults(
         query, [(MasterFlakeAnalysis.request_time, dashboard_util.DESC)],
         cursor=self.cursor,
         direction='previous')
     self.assertEqual(len(result2), 1)
     self.assertEqual(self.results, result2)
Example #6
  def HandleGet(self):
    """Shows crash analysis results in an HTML page."""
    start_date, end_date = dashboard_util.GetStartAndEndDates(
        self.request.get('start_date'), self.request.get('end_date'))

    query = self.Filter(start_date, end_date)

    try:
      page_size = int(self.request.get('n'))
    except (ValueError, TypeError):
      page_size = _PAGE_SIZE

    crash_analyses, top_cursor, bottom_cursor = dashboard_util.GetPagedResults(
        query,
        [(self.crash_analysis_cls.requested_time, dashboard_util.DESC)],
        cursor=self.request.get('cursor'),
        direction=self.request.get('direction', 'next'), page_size=page_size)

    # TODO(katesonia): An optimization is to index analysis.status.
    crash_analyses = [analysis for analysis in crash_analyses
                      if analysis.completed]
    data = {
        'start_date': time_util.FormatDatetime(start_date),
        'end_date': time_util.FormatDatetime(end_date),
        'found_suspects': self.request.get('found_suspects', '-1'),
        'has_regression_range': self.request.get('has_regression_range', '-1'),
        'suspected_cls_triage_status': self.request.get(
            'suspected_cls_triage_status', '-1'),
        'regression_range_triage_status': self.request.get(
            'regression_range_triage_status', '-1'),
        'client': self.client,
        'crashes': self.CrashDataToDisplay(crash_analyses),
        'signature': self.request.get('signature'),
        'top_cursor': top_cursor,
        'bottom_cursor': bottom_cursor,
    }

    return {
        'template': self.template,
        'data': data
    }
Example #7
 def _MockCursor(self):
     results, prev_cursor, cursor = dashboard_util.GetPagedResults(
         MasterFlakeAnalysis.query(),
         [(MasterFlakeAnalysis.request_time, dashboard_util.DESC)],
         page_size=1)
     return results, prev_cursor, cursor
Example #8
def GetFlakesByFilter(flake_filter,
                      luci_project,
                      cursor,
                      direction,
                      page_size=None):  # pragma: no cover.
    """Gets flakes by the given filter, then sorts them by the flake score.

  Args:
    flake_filter (str): It could be a test name, or a tag-based filter in the
      following forms:
      * tag::value
      * tag1::value1@tag2::value2
      * tag1::value1@-tag2::value2
    luci_project (str): The Luci project that the flakes are for.
    cursor (None or str): The urlsafe-encoded cursor marking the current
      position in the query results, from which the next set of entities is
      retrieved.
    direction (str): Either 'previous' or 'next'.
    page_size (int): Limit of results required in one page.

  Returns:
    (flakes, prev_cursor, cursor, grouping_search, error_message)
    flakes (list): A list of Flakes filtered by tags.
    prev_cursor (str): The urlsafe encoding of the cursor, which is at the
      top position of entities of the current page.
    cursor (str): The urlsafe encoding of the cursor, which is at the
      bottom position of entities of the current page.
    grouping_search (bool): Whether this is a tag-based group search rather
      than a single-test search.
    error_message (str): An error message if there is one; otherwise None.
  """
    logging.info('Searching filter: %s', flake_filter)

    flakes = []
    error_message = None

    grouping_search = True
    filters = [f.strip() for f in flake_filter.split('@') if f.strip()]

    # The resulting flakes are those that:
    # * Match all of the positive filters.
    # * Do not match any of the negative filters.
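    # For example, with hypothetical tag names, the filter string
    # 'os::Mac@-suite::foo_tests' would keep flakes tagged os::Mac and then
    # drop any of them also tagged suite::foo_tests.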
    positive_filters = []
    negative_filters = []
    invalid_filters = []
    for f in filters:
        parts = [p.strip() for p in f.split(TAG_DELIMITER)]
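        # A valid filter must split into exactly two parts around the
        # delimiter (e.g. 'tag::value') and have a non-empty value.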
        if len(parts) != 2 or not parts[1]:
            invalid_filters.append(f)
            continue

        if parts[0] == _TEST_FILTER_NAME:
            # Search for a specific test.
            grouping_search = False
            flakes = Flake.query(
                Flake.normalized_test_name == Flake.NormalizeTestName(parts[1])
            ).filter(Flake.luci_project == luci_project).fetch()
            return flakes, '', '', grouping_search, error_message

        negative = False
        if parts[0][0] == '-':
            parts[0] = parts[0][1:]
            negative = True

        if parts[0] not in SUPPORTED_TAGS:
            invalid_filters.append(f)
            continue

        if negative:
            negative_filters.append(TAG_DELIMITER.join(parts))
        else:
            positive_filters.append(TAG_DELIMITER.join(parts))

    if invalid_filters:
        error_message = 'Unsupported tag filters: %s' % ', '.join(
            invalid_filters)
        return flakes, '', '', grouping_search, error_message

    if not positive_filters:
        # At least one positive filter should be given.
        error_message = 'At least one positive filter required'
        return flakes, '', '', grouping_search, error_message

    logging.info('Positive filters: %r', positive_filters)
    logging.info('Negative filters: %r', negative_filters)

    query = Flake.query(
        ndb.AND(Flake.luci_project == luci_project, Flake.archived == False))  # pylint: disable=singleton-comparison
    for tag in positive_filters:
        query = query.filter(Flake.tags == tag)
    query = query.filter(Flake.flake_score_last_week > 0)
    minimum_flake_count_in_page = max(
        1, page_size / 2) if page_size else DEFAULT_PAGE_SIZE / 2

    while True:
        results, prev_cursor, cursor = dashboard_util.GetPagedResults(
            query,
            order_properties=[
                (Flake.flake_score_last_week, dashboard_util.DESC),
                (Flake.last_occurred_time, dashboard_util.DESC),
                (Flake.normalized_step_name, dashboard_util.ASC),
                (Flake.test_label_name, dashboard_util.ASC),
            ],
            cursor=cursor,
            direction=direction,
            page_size=page_size or DEFAULT_PAGE_SIZE)

        for result in results:
            if negative_filters and any(t in result.tags
                                        for t in negative_filters):
                continue
            flakes.append(result)

        if ((direction == dashboard_util.PREVIOUS and prev_cursor == '')
                or cursor == '' or len(flakes) >= minimum_flake_count_in_page):
            # Stop when there are no more results or when enough flakes have
            # been collected for this page. Ideally the page would show exactly
            # page_size flakes, but with negative_filters the number of flakes
            # left after filtering is unknown ahead of time, so
            # minimum_flake_count_in_page bounds the flake count per page to
            # roughly 0.5 * page_size to 1.5 * page_size.
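            # For example (hypothetical numbers): with page_size=10 the loop
            # stops once at least 5 flakes have been kept; since a single
            # iteration can add up to 10 more results, a page ends up with
            # between 5 and roughly 15 flakes.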
            break

    return flakes, prev_cursor, cursor, grouping_search, error_message
Example #9
    def HandleGet(self):
        status_code = int(
            self.request.get('result_status', result_status.UNSPECIFIED))
        step_name = self.request.get('step_name').strip()
        test_name = self.request.get('test_name').strip()
        triage = self.request.get('triage') == '1'

        # Only allow querying by start/end dates for admins during triage to avoid
        # overcomplicating the UI for other users.
        start_date, end_date = self._GetStartAndEndDates(triage)

        master_flake_analysis_query = _GetFlakeAnalysisFilterQuery(
            MasterFlakeAnalysis.query(), step_name, test_name, start_date,
            end_date, status_code)

        # If filtering by step_name and/or test_name, don't paginate.
        if step_name or test_name:
            analyses = master_flake_analysis_query.order(
                -MasterFlakeAnalysis.request_time).fetch()
            prev_cursor = ''
            cursor = ''
        else:
            analyses, prev_cursor, cursor = dashboard_util.GetPagedResults(
                master_flake_analysis_query,
                [(MasterFlakeAnalysis.request_time, dashboard_util.DESC)],
                self.request.get('cursor'),
                self.request.get('direction').strip(),
                page_size=PAGE_SIZE)

        data = {
            'master_flake_analyses': [],
            'result_status_filter': status_code,
            'step_name_filter': step_name,
            'test_name_filter': test_name,
            'prev_cursor': prev_cursor,
            'cursor': cursor,
        }

        if triage:  # pragma: no cover
            data['triage'] = triage
            data['start_date'] = start_date
            data['end_date'] = end_date

        for master_flake_analysis in analyses:
            data['master_flake_analyses'].append({
                'build_analysis_status':
                master_flake_analysis.status_description,
                'build_number':
                master_flake_analysis.build_number,
                'builder_name':
                master_flake_analysis.builder_name,
                'confidence_in_suspected_build':
                (master_flake_analysis.confidence_in_suspected_build),
                'culprit': (master_flake_analysis.culprit.ToDict()
                            if master_flake_analysis.culprit else {}),
                'key':
                master_flake_analysis.key.urlsafe(),
                'master_name':
                master_flake_analysis.master_name,
                'request_time':
                time_util.FormatDatetime(master_flake_analysis.request_time),
                'result_status':
                result_status.RESULT_STATUS_TO_DESCRIPTION.get(
                    master_flake_analysis.result_status),
                'step_name':
                master_flake_analysis.step_name,
                'suspected_build':
                master_flake_analysis.suspected_flake_build_number,
                'test_name':
                master_flake_analysis.test_name,
                'try_job_status':
                analysis_status.STATUS_TO_DESCRIPTION.get(
                    master_flake_analysis.try_job_status),
            })

        return {'template': 'flake/dashboard.html', 'data': data}
Example #10
  def HandleGet(self):
    status_code = int(
        self.request.get('result_status', result_status.UNSPECIFIED))
    step_name = self.request.get('step_name').strip()
    test_name = self.request.get('test_name').strip()
    triage = self.request.get('triage') == '1'

    # Only allow querying by start/end dates for admins during triage to avoid
    # overcomplicating the UI for other users.
    start_date, end_date = self._GetStartAndEndDates(triage)

    master_flake_analysis_query = _GetFlakeAnalysisFilterQuery(
        MasterFlakeAnalysis.query(), step_name, test_name, start_date, end_date,
        status_code)

    # If filtering by step_name and/or test_name, don't paginate.
    if step_name or test_name:
      analyses = master_flake_analysis_query.order(
          -MasterFlakeAnalysis.request_time).fetch()
      prev_cursor = ''
      cursor = ''
    else:
      analyses, prev_cursor, cursor = dashboard_util.GetPagedResults(
          master_flake_analysis_query,
          [(MasterFlakeAnalysis.request_time, dashboard_util.DESC)],
          self.request.get('cursor'),
          self.request.get('direction').strip(),
          page_size=PAGE_SIZE)

    data = {
        'master_flake_analyses': [],
        'result_status_filter': status_code,
        'step_name_filter': step_name,
        'test_name_filter': test_name,
        'prev_cursor': prev_cursor,
        'cursor': cursor,
    }

    if triage:  # pragma: no cover
      data['triage'] = triage
      data['start_date'] = start_date
      data['end_date'] = end_date

    for analysis in analyses:
      culprit = None
      if analysis.culprit_urlsafe_key:
        culprit_key = ndb.Key(urlsafe=analysis.culprit_urlsafe_key)
        # TODO(crbug.com/799308): Remove this hack when bug is fixed.
        assert culprit_key.pairs()[0]
        assert culprit_key.pairs()[0][0]  # Name of the model.
        assert culprit_key.pairs()[0][1]  # Id of the model.
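        # Rebuild a top-level key from just the first (kind, id) pair of the
        # culprit's key and fetch the entity it points to.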
        culprit = ndb.Key(culprit_key.pairs()[0][0],
                          culprit_key.pairs()[0][1]).get()

      status = analysis.status
      if analysis.heuristic_analysis_status == analysis_status.ERROR:
        status = analysis_status.ERROR

      timestamp = (
          time_util.ConvertToTimestamp(analysis.request_time)
          if analysis.request_time else 'None')

      data['master_flake_analyses'].append({
          'master_name':
              analysis.original_master_name or analysis.master_name,
          'builder_name':
              analysis.original_builder_name or analysis.builder_name,
          'build_number':
              analysis.original_build_number or analysis.build_number,
          'step_name':
              analysis.original_step_name or analysis.step_name,
          'test_name':
              analysis.original_test_name or analysis.test_name,
          'bug_id':
              analysis.bug_id,
          'confidence_in_culprit':
              analysis.confidence_in_culprit,
          'culprit':
              culprit.to_dict() if culprit else {},
          'key':
              analysis.key.urlsafe(),
          'request_utc_timestamp':
              timestamp,
          'result_status':
              result_status.RESULT_STATUS_TO_DESCRIPTION.get(
                  analysis.result_status),
          'suspected_build':
              analysis.suspected_flake_build_number,
          'status':
              analysis_status.STATUS_TO_DESCRIPTION.get(status),
      })

    return {'template': 'flake/dashboard.html', 'data': data}
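
A minimal end-to-end paging sketch pieced together from the examples above,
assuming the GetPagedResults signature they all share (query, order_properties,
cursor, direction, page_size) and its (entities, top_cursor, bottom_cursor)
return value; the model, its field, and the import path below are hypothetical.

from google.appengine.ext import ndb

from gae_libs import dashboard_util  # assumed import path; adjust as needed


class Item(ndb.Model):  # hypothetical model, for illustration only
  created_time = ndb.DateTimeProperty()


def GetItemPage(cursor=None, direction='next', page_size=20):
  """Fetches one page of Items, newest first."""
  items, top_cursor, bottom_cursor = dashboard_util.GetPagedResults(
      Item.query(),
      order_properties=[(Item.created_time, dashboard_util.DESC)],
      cursor=cursor,
      direction=direction,
      page_size=page_size)
  # Both cursors are urlsafe strings: pass bottom_cursor back with
  # direction='next' to advance, or top_cursor with direction='previous' to go
  # back, as demonstrated in Example #1.
  return items, top_cursor, bottom_cursor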