Example #1
def _GetFlakeQueryResults(luci_project, cursor, direction, page_size):
    """Gets queried results of flakes.

  Args:
    luci_project (str): Luci project of the flakes.
    cursor (None or str): The cursor provides a cursor in the current query
      results, allowing you to retrieve the next set based on the offset.
    direction (str): Either previous or next.
    page_size (int): Number of entities to show per page.

  Returns:
    A tuple of (flakes, prev_cursor, cursor).
    flakes (list): List of flakes to be displayed at the current page.
    prev_cursor (str): The urlsafe encoding of the cursor, which is at the
      top position of entities of the current page.
    cursor (str): The urlsafe encoding of the cursor, which is at the
      bottom position of entities of the current page.
  """
    flake_query = Flake.query(
        ndb.AND(Flake.luci_project == luci_project, Flake.archived == False))  # pylint: disable=singleton-comparison
    # Orders flakes by flake_score_last_week.
    flake_query = flake_query.filter(Flake.flake_score_last_week > 0)
    first_sort_property = Flake.flake_score_last_week

    return dashboard_util.GetPagedResults(
        flake_query,
        order_properties=[
            (first_sort_property, dashboard_util.DESC),
            (Flake.last_occurred_time, dashboard_util.DESC),
            (Flake.normalized_step_name, dashboard_util.ASC),
            (Flake.test_label_name, dashboard_util.ASC),
        ],
        cursor=cursor,
        direction=direction,
        page_size=page_size)
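A hypothetical call sketch for the pager above; the project name, page size, and the fact that this runs inside the defining module (the function is private) are assumptions, not taken from the source.

# Illustrative only: page through flakes scored last week, newest first.
# Assumes an active ndb context and the module-private function above.
flakes, prev_cursor, cursor = _GetFlakeQueryResults(
    luci_project='chromium',  # assumed project
    cursor=None,              # None means start at the first page
    direction='next',
    page_size=50)

# Feeding the returned bottom cursor back in retrieves the following page.
next_flakes, _, _ = _GetFlakeQueryResults('chromium', cursor, 'next', 50)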
Example #2
def _ArchiveFlakesForClosedIssue(flake_issue):
    """Archives flakes with closed issue.

  Flakes with closed issue should be archived since they are fixed or not
  actionable.
  """
    flakes = Flake.query(Flake.flake_issue_key == flake_issue.key).fetch()
    for flake in flakes:
        flake.archived = True
    ndb.put_multi(flakes)
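A hedged sketch of how this helper might be driven once a bug is known to be closed; the monorail project and bug id are invented, and FlakeIssue.Get is assumed to behave as shown in Example #7.

# Illustrative only: archive every flake attached to a bug believed closed.
flake_issue = FlakeIssue.Get('chromium', 123456)  # hypothetical bug id
if flake_issue:
    _ArchiveFlakesForClosedIssue(flake_issue)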
Example #3
    def testCreate(self):
        luci_project = 'chromium'
        normalized_step_name = 'normalized_step'
        normalized_test_name = 'normalized_test'
        test_label_name = 'test_label'

        flake = Flake.Create(luci_project=luci_project,
                             normalized_step_name=normalized_step_name,
                             normalized_test_name=normalized_test_name,
                             test_label_name=test_label_name)

        flake.put()

        fetched_flakes = Flake.query().fetch()
        self.assertEqual(1, len(fetched_flakes))
        self.assertEqual(flake, fetched_flakes[0])
Example #4
    def HandleGet(self):
        total = self.request.get('total').strip()
        component = self.request.get('component').strip()
        luci_project = self.request.get(
            'luci_project').strip() or DEFAULT_LUCI_PROJECT
        if not component and not total:
            return self.CreateError(
                'A component is required to show its flake report, or add total=1 to '
                'show total numbers.',
                return_code=404)

        if component:
            component_reports = _QueryComponentReports(component, luci_project)
            if not component_reports:
                return self.CreateError(
                    'Didn\'t find reports for project {}, component {}.'.format(
                        luci_project, component),
                    return_code=404)
            report_json = _GenerateComponentReportJson(component_reports)
            top_flakes, _, _, _, _ = flake_detection_utils.GetFlakesByFilter(
                ComponentFlakinessReport.GenerateTag('component', component),
                luci_project,
                cursor=None,
                direction='next',
                page_size=_DEFAULT_TOP_FLAKE_NUM)

        else:
            total_reports = _QueryTotalReports(luci_project)
            report_json = _GenerateComponentReportJson(total_reports)
            top_flakes = Flake.query().order(
                -Flake.flake_score_last_week).fetch(_DEFAULT_TOP_FLAKE_NUM)

        flakes_data = flake_detection_utils.GenerateFlakesData(top_flakes)

        data = {
            'report_json': report_json,
            'component': component if component else 'All',
            'top_flakes': flakes_data,
            'total': total,
            'luci_project': (luci_project
                             if luci_project != DEFAULT_LUCI_PROJECT else ''),
        }
        return {'template': 'flake/report/component_report.html', 'data': data}
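For orientation, the handler accepts either a component or a total=1 query parameter and returns the template name plus the data dict built above. The sketch below shows the rough response shape; the request paths and the component value are placeholders, not the real routes.

# Placeholder request forms (real routes may differ):
#   .../flake/report/component?component=Blink>JavaScript
#   .../flake/report/component?total=1
#
# Either form resolves to roughly this structure:
example_response = {
    'template': 'flake/report/component_report.html',
    'data': {
        'report_json': {},                # serialized report counts
        'component': 'Blink>JavaScript',  # or 'All' when total=1
        'top_flakes': [],                 # flakes_data from GenerateFlakesData
        'total': '',                      # '1' when total=1 was requested
        'luci_project': '',               # empty for the default project
    },
}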
Example #5
def _UpdateCountsForNewFlake(start_date):
    """Updates counts for new, re-occurred or rare flakes.

    Args:
      start_date (datetime): Earliest time to check.
    """
    more = True
    cursor = None

    while more:
        ndb.get_context().clear_cache()
        flakes, cursor, more = Flake.query().filter(
            Flake.last_occurred_time > start_date).filter(
                Flake.flake_score_last_week == 0).fetch_page(
                    100, start_cursor=cursor)
        for flake in flakes:
            _UpdateFlakeCountsAndScore(flake, start_date)

        ndb.put_multi(flakes)
Example #6
def _UpdateCountsForOldFlake(start_date):
    """Updates counts for old flakes - flakes with a score greater than 0.

    a. If the flake has 1+ occurrences within the time range, updates counts.
    b. If the flake didn't occur within the time range, resets counts.

    Args:
      start_date (datetime): Earliest time to check.
    """
    more = True
    cursor = None

    while more:
        ndb.get_context().clear_cache()
        flakes, cursor, more = Flake.query().filter(
            Flake.flake_score_last_week > 0).fetch_page(100,
                                                        start_cursor=cursor)
        for flake in flakes:
            _UpdateFlakeCountsAndScore(flake, start_date)

        ndb.put_multi(flakes)
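Examples #5 and #6 share the same cursor-driven paging loop over fetch_page (clearing the ndb in-context cache each iteration, presumably to keep memory bounded). Below is a minimal self-contained sketch of that loop, with a fake in-memory page source standing in for the ndb query.

def _fake_fetch_page(items, page_size, start_cursor=None):
    """Stand-in for ndb Query.fetch_page(): returns (page, cursor, more)."""
    start = start_cursor or 0
    page = items[start:start + page_size]
    next_cursor = start + len(page)
    return page, next_cursor, next_cursor < len(items)


def process_in_batches(items, handle, page_size=100):
    # Same shape as the loops above: keep fetching pages until `more` is
    # False, handling one batch at a time.
    more = True
    cursor = None
    while more:
        page, cursor, more = _fake_fetch_page(items, page_size, cursor)
        for item in page:
            handle(item)


process_in_batches(list(range(250)), print, page_size=100)  # three batches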
Example #7
def _GetFlakesByBug(monorail_project, bug_id):
    """Gets flakes link to the same bug.

  Gets flakes directly link to the bug and also flakes link to bugs that are
    merged into this bug.
  """
    flake_issue = FlakeIssue.Get(monorail_project, bug_id)
    assert flake_issue, 'Requested FlakeIssue {} not found.'.format(bug_id)

    all_issue_keys = [flake_issue.key]
    issue_leaves = FlakeIssue.query(
        FlakeIssue.merge_destination_key == flake_issue.key).fetch(
            keys_only=True)
    all_issue_keys.extend(issue_leaves)

    flakes = []
    for issue_key in all_issue_keys:
        flakes_to_issue = Flake.query(
            Flake.flake_issue_key == issue_key).fetch()
        flakes.extend(flakes_to_issue)

    flakes.sort(key=lambda flake: flake.flake_score_last_week, reverse=True)
    return flakes
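A hypothetical invocation (the monorail project and bug id are made up) showing the call shape and the ordering produced by the final sort.

# Illustrative only; assumes an ndb context and an existing FlakeIssue.
flakes = _GetFlakesByBug('chromium', 123456)
for flake in flakes:  # already sorted by descending flake_score_last_week
    print(flake.test_label_name, flake.flake_score_last_week)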
Example #8
def _AddFlakesToCounters(counters, flake_info_dict, report_time,
                         save_test_report):
    """Queries all flakes that have happened after report_time and adds their info
    to counters.
  """
    query = Flake.query()
    query = query.filter(Flake.last_occurred_time >= report_time)

    cursor = None
    more = True
    while more:
        flakes, cursor, more = query.fetch_page(500, start_cursor=cursor)
        for flake in flakes:
            luci_project = flake.luci_project
            if luci_project not in counters:
                counters[luci_project] = _NewTally(
                    TotalFlakinessReport.MakeId(report_time, luci_project))
            _AddFlakeToTally(counters[luci_project], flake, report_time)

            component = flake.GetComponent()
            test = flake.normalized_test_name
            flake_info_dict[flake.key] = {
                'luci_project': luci_project,
                'component': component,
                'test': test
            }

            if component not in counters[luci_project]:
                counters[luci_project][component] = _NewTally(component)
            _AddFlakeToTally(counters[luci_project][component], flake,
                             report_time)

            if save_test_report:  # pragma: no branch.
                if test not in counters[luci_project][component]:
                    counters[luci_project][component][test] = _NewTally(test)
                _AddFlakeToTally(counters[luci_project][component][test],
                                 flake, report_time)
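The counters argument grows into a nested tally keyed by project, then component, then (when save_test_report is set) test. The sketch below shows that rough shape for a single flake; the tally fields are placeholders, since _NewTally's internals are not shown here.

# Rough shape of `counters` after one flake is processed (tally fields are
# placeholders; the real _NewTally structure is not shown above).
counters = {
    'chromium': {                       # project-level tally
        '_id': 'total-report-id',       # hypothetical TotalFlakinessReport.MakeId value
        'Blink>JavaScript': {           # component-level tally nested in the project
            '_id': 'Blink>JavaScript',
            'fast/js/example.html': {   # test-level tally, only with save_test_report
                '_id': 'fast/js/example.html',
            },
        },
    },
}

# flake_info_dict stays flat, keyed by the flake's ndb key:
#   {flake.key: {'luci_project': ..., 'component': ..., 'test': ...}}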
Example #9
def GetFlakesByFilter(flake_filter,
                      luci_project,
                      cursor,
                      direction,
                      page_size=None):  # pragma: no cover.
    """Gets flakes by the given filter, then sorts them by the flake score.

  Args:
    flake_filter (str): It could be a test name, or a tag-based filter in the
      following forms:
      * tag::value
      * tag1::value1@tag2::value2
      * tag1::value1@-tag2:value2
    luci_project (str): The Luci project that the flakes are for.
    cursor (None or str): The cursor provides a cursor in the current query
      results, allowing you to retrieve the next set based on the offset.
    direction (str): Either previous or next.
    page_size (int): Limit of results required in one page.

  Returns:
    (flakes, prev_cursor, cursor, grouping_search, error_message)
    flakes (list): A list of Flakes filtered by tags.
    prev_cursor (str): The urlsafe encoding of the cursor, which is at the
      top position of entities of the current page.
    cursor (str): The urlsafe encoding of the cursor, which is at the
      bottom position of entities of the current page.
    grouping_search (bool): Whether it is a group searching.
    error_message (str): An error message if there is one; otherwise None.
  """
    logging.info('Searching filter: %s', flake_filter)

    flakes = []
    error_message = None

    grouping_search = True
    filters = [f.strip() for f in flake_filter.split('@') if f.strip()]

    # The resulting flakes are those that:
    # * match all of the positive filters;
    # * do not match any of the negative filters.
    positive_filters = []
    negative_filters = []
    invalid_filters = []
    for f in filters:
        parts = [p.strip() for p in f.split(TAG_DELIMITER)]
        if len(parts) != 2 or not parts[1]:
            invalid_filters.append(f)
            continue

        if parts[0] == _TEST_FILTER_NAME:
            # Search for a specific test.
            grouping_search = False
            flakes = Flake.query(
                Flake.normalized_test_name == Flake.NormalizeTestName(parts[1])
            ).filter(Flake.luci_project == luci_project).fetch()
            return flakes, '', '', grouping_search, error_message

        negative = False
        if parts[0][0] == '-':
            parts[0] = parts[0][1:]
            negative = True

        if parts[0] not in SUPPORTED_TAGS:
            invalid_filters.append(f)
            continue

        if negative:
            negative_filters.append(TAG_DELIMITER.join(parts))
        else:
            positive_filters.append(TAG_DELIMITER.join(parts))

    if invalid_filters:
        error_message = 'Unsupported tag filters: %s' % ', '.join(
            invalid_filters)
        return flakes, '', '', grouping_search, error_message

    if not positive_filters:
        # At least one positive filter should be given.
        error_message = 'At least one positive filter required'
        return flakes, '', '', grouping_search, error_message

    logging.info('Positive filters: %r', positive_filters)
    logging.info('Negative filters: %r', negative_filters)

    query = Flake.query(
        ndb.AND(Flake.luci_project == luci_project, Flake.archived == False))  # pylint: disable=singleton-comparison
    for tag in positive_filters:
        query = query.filter(Flake.tags == tag)
    query = query.filter(Flake.flake_score_last_week > 0)
    minimum_flake_count_in_page = max(
        1, page_size / 2) if page_size else DEFAULT_PAGE_SIZE / 2

    while True:
        results, prev_cursor, cursor = dashboard_util.GetPagedResults(
            query,
            order_properties=[
                (Flake.flake_score_last_week, dashboard_util.DESC),
                (Flake.last_occurred_time, dashboard_util.DESC),
                (Flake.normalized_step_name, dashboard_util.ASC),
                (Flake.test_label_name, dashboard_util.ASC),
            ],
            cursor=cursor,
            direction=direction,
            page_size=page_size or DEFAULT_PAGE_SIZE)

        for result in results:
            if negative_filters and any(t in result.tags
                                        for t in negative_filters):
                continue
            flakes.append(result)

        if ((direction == dashboard_util.PREVIOUS and prev_cursor == '')
                or cursor == '' or len(flakes) >= minimum_flake_count_in_page):
            # Stop when there are no more results or when this page already
            # has enough flakes.
            # Ideally each page would show page_size flakes, but with
            # negative_filters the number of flakes remaining after filtering
            # is unknown, so minimum_flake_count_in_page is used to keep the
            # per-page count roughly between 0.5 and 1.5 times page_size.
            break

    return flakes, prev_cursor, cursor, grouping_search, error_message
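The tag-filter grammar from the docstring can be exercised on its own. Below is a simplified, self-contained re-implementation of just the parsing step (no datastore query), assuming '::' as the tag delimiter and using an illustrative subset of supported tags, to show how a filter string splits into positive, negative, and invalid parts.

TAG_DELIMITER = '::'                               # assumed value of the real constant
SUPPORTED_TAGS = {'component', 'step', 'suite'}    # illustrative subset only


def parse_tag_filters(flake_filter):
    """Splits e.g. 'component::Blink@-step::webkit_layout_tests' into
    (positive, negative, invalid) tag filters, mirroring the loop above."""
    positive, negative, invalid = [], [], []
    for f in (p.strip() for p in flake_filter.split('@') if p.strip()):
        parts = [p.strip() for p in f.split(TAG_DELIMITER)]
        if len(parts) != 2 or not parts[1]:
            invalid.append(f)
            continue
        is_negative = parts[0].startswith('-')
        tag = parts[0][1:] if is_negative else parts[0]
        if tag not in SUPPORTED_TAGS:
            invalid.append(f)
            continue
        (negative if is_negative else positive).append(
            TAG_DELIMITER.join([tag, parts[1]]))
    return positive, negative, invalid


print(parse_tag_filters('component::Blink@-step::webkit_layout_tests'))
# (['component::Blink'], ['step::webkit_layout_tests'], [])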