def UpdateFlakeCounts():
  """Periodically refreshes the statistical count fields on flakes.

  Only weekly counts are maintained for now; daily or monthly counts may be
  added later.
  """
  window_start = time_util.GetDateDaysBeforeNow(days=constants.DAYS_IN_A_WEEK)
  # Old flakes first, then new ones, matching the original update order.
  for update in (_UpdateCountsForOldFlake, _UpdateCountsForNewFlake):
    update(window_start)
def _FetchFlakeOccurrences(flake, flake_type, max_occurrence_count):
  """Fetches flake occurrences of a certain type within a time range.

  Args:
    flake(Flake): Flake object for a flaky test.
    flake_type(FlakeType): Type of the occurrences.
    max_occurrence_count(int): Maximum number of occurrences to fetch.

  Returns:
    (list): A list of occurrences.
  """
  start_date = time_util.GetDateDaysBeforeNow(days=constants.DAYS_IN_A_WEEK)
  # Chained filters are combined with AND by ndb, newest occurrences first.
  query = (
      FlakeOccurrence.query(ancestor=flake.key)
      .filter(FlakeOccurrence.flake_type == flake_type)
      .filter(FlakeOccurrence.time_happened > start_date)
      .order(-FlakeOccurrence.time_happened))
  return query.fetch(max_occurrence_count)
def IsFlakeIssueActionable(flake_issue):
  """Checks if the flake_issue is actionable.

  If the issue has been closed on monorail over 7 days, the flake_issue is
  not actionable.
  """
  is_closed = (
      flake_issue.status in issue_constants.CLOSED_STATUSES_NO_DUPLICATE)
  last_update = flake_issue.last_updated_time_in_monorail
  week_ago = time_util.GetDateDaysBeforeNow(days=7)
  if is_closed and last_update and last_update < week_ago:
    # Bug is stale: closed on monorail and untouched for over a week.
    logging.info(
        'Flake issue %d has been closed over 7 days, don\'t take action on it.',
        flake_issue.issue_id)
    return False
  return True
def IsIssueClosedWithinAWeek(issue):
  """Checks if a monorail issue is closed (except Merged) within a week.

  Args:
    issue: A monorail Issue with |status| and |closed| (datetime) fields.

  Returns:
    True if the issue is in a closed, non-Duplicate/Merged status and its
    close time falls within the last week; otherwise False.
  """
  # Pass |days| by keyword for consistency with the other
  # GetDateDaysBeforeNow call sites in this codebase.
  return (issue.status in issue_constants.CLOSED_STATUSES_NO_DUPLICATE and
          issue.closed > time_util.GetDateDaysBeforeNow(days=DAYS_IN_A_WEEK))
def testGetDateDaysFromNowBefore(self, _):
  # With the mocked clock, one day before "now" is 2017-04-26 08:00:00.
  expected = datetime(2017, 4, 26, 8, 0, 0)
  self.assertEqual(expected, time_util.GetDateDaysBeforeNow(days=1))
def _UpdateTestLocationAndTags(flake, occurrences, component_mapping,
                               watchlists):
  """Updates the test location and tags of the given flake.

  Currently only support gtests and webkit layout tests in chromium/src.

  Args:
    flake: A Flake entity; its test_location, component, tags and
      last_test_location_based_tag_update_time may be mutated in place.
    occurrences: Flake occurrences; occurrences[0] is used to derive the
      test location for gtests.
    component_mapping: Dict mapping directory paths (with trailing '/') to
      component names.
    watchlists: Dict mapping watchlist names to regex patterns matched
      against the test file path.

  Returns:
    True if flake is updated; otherwise False.
  """
  chromium_tag = 'gerrit_project::chromium/src'
  if chromium_tag not in flake.tags:
    logging.debug('Flake is not from chromium/src: %r', flake)
    return False

  # No need to update if the test location and related tags were updated within
  # the last 7 days.
  if (flake.last_test_location_based_tag_update_time and
      (flake.last_test_location_based_tag_update_time >
       time_util.GetDateDaysBeforeNow(7))):
    logging.debug('Flake test location tags were updated recently : %r', flake)
    return False

  # Update the test definition location, and then components/tags, etc.
  test_location = None
  if 'webkit_layout_tests' in occurrences[0].step_ui_name:
    # For Webkit layout tests, assume that the normalized test name is
    # the directory name.
    # TODO(crbug.com/835960): use new location third_party/blink/web_tests.
    test_location = TestLocation(
        file_path=_NormalizePath(
            'third_party/blink/web_tests/%s' % flake.normalized_test_name))
  elif test_name_util.GTEST_REGEX.match(flake.normalized_test_name):
    # For Gtest, we read the test location from the output.json
    test_location = _GetTestLocation(occurrences[0])

  updated = False
  # Ignore old test-location-based tags; they are recomputed below.
  all_tags = set([
      t for t in (flake.tags or [])
      if not t.startswith(('watchlist::', 'directory::', 'source::',
                           'parent_component::', 'component::'))
  ])
  if test_location:
    updated = True
    flake.test_location = test_location
    file_path = test_location.file_path

    # Use watchlist to set the watchlist tags for the flake.
    for watchlist, pattern in watchlists.iteritems():
      if re.search(pattern, file_path):
        all_tags.add('watchlist::%s' % watchlist)

    component = None
    # Use test file path to find the best matched component in the mapping.
    # Each parent directory will become a tag. Walks '/' separators from the
    # deepest directory upward, so the first (deepest) mapping match wins.
    index = len(file_path)
    while index > 0:
      index = file_path.rfind('/', 0, index)
      if index > 0:
        if not component and file_path[0:index + 1] in component_mapping:
          component = component_mapping[file_path[0:index + 1]]
        all_tags.add('directory::%s' % file_path[0:index + 1])
    all_tags.add('source::%s' % file_path)

    if component:
      flake.component = component
      all_tags.add('component::%s' % component)
      all_tags.add('parent_component::%s' % component)
      # Components are '>'-separated (e.g. 'Blink>DOM'); add a tag for each
      # ancestor component prefix.
      index = len(component)
      while index > 0:
        index = component.rfind('>', 0, index)
        if index > 0:
          all_tags.add('parent_component::%s' % component[0:index])
    else:
      # No mapping matched: fall back to the default component.
      flake.component = DEFAULT_COMPONENT
      all_tags.add('component::%s' % DEFAULT_COMPONENT)
      all_tags.add('parent_component::%s' % DEFAULT_COMPONENT)

    flake.tags = sorted(all_tags)
    flake.last_test_location_based_tag_update_time = time_util.GetUTCNow()
  else:
    if flake.normalized_step_name == 'telemetry_gpu_integration_test':
      # Special case for telemetry_gpu_integration_test: no per-test location
      # is available, so derive components from the step names instead.
      components = []

      for occurrence in occurrences:
        canonical_step_name = step_util.GetCanonicalStepName(
            master_name=occurrence.build_configuration.legacy_master_name,
            builder_name=occurrence.build_configuration.luci_builder,
            build_number=occurrence.build_configuration.legacy_build_number,
            step_name=occurrence.step_ui_name) or occurrence.step_ui_name.split(
            )[0]
        components.extend(
            _MAP_STEP_NAME_TO_COMPONENTS.get(canonical_step_name, []))
      components = list(set(components))  # To remove duplicates.

      if components:
        flake.component = components[0]
        all_tags = all_tags.union(
            set(['component::%s' % component for component in components]))
        flake.tags = sorted(all_tags)
        updated = True

  return updated
def Report(save_test_report=False):
  """Creates report data for a given week.

  Iterates over flakes that have happened in the week, and adds information we
  can directly get from flakes (component, test name, occurrences_counts in
  the week) to the counters.

  Then iterates all flake occurrences happened in the week (using projection
  query to lower latency and cost) to count distinct impacted CLs.

  After the totals are accumulated, persists the entities to datastore.

  Args:
    save_test_report (bool): True if save TestFlakinessReport entries,
      otherwise False. Noted: an error "too much contention on these datastore
      entities" may fire when also save TestFlakinessReport entries.

  Raises:
    ReportExistsException: If a report already exists for the report date.
  """
  report_date = time_util.GetMidnight(
      time_util.GetDateDaysBeforeNow(days=DAYS_IN_A_WEEK))

  if TotalFlakinessReport.Get(report_date, DEFAULT_LUCI_PROJECT):
    raise ReportExistsException('Report already exist for {}'.format(
        time_util.FormatDatetime(report_date, day_only=True)))

  # TODO(crbug.com/920926): Use classes instead of a big dict for temp data.
  # Data structure to accumulate the counts in.
  # After tallying, should look something like this:
  # {
  #   'chromium': {
  #     '_id': '2018-08-27',
  #     # Totals
  #     '_bugs': set([FlakeIssue.key, ...]),
  #     '_impacted_cls': {
  #       FlakeType.CQ_FALSE_REJECTION: set([12345, 12346, ...]),
  #       FlakeType.RETRY_WITH_PATCH: set([12348, ...])
  #     },
  #     '_occurrences': {
  #       FlakeType.CQ_FALSE_REJECTION: 100,
  #       FlakeType.RETRY_WITH_PATCH: 1800},
  #     '_tests': set(['test1', ...]),
  #     'component1': {
  #       # Per-component Totals
  #       '_id': 'component1',
  #       '_bugs': set([FlakeIssue.key, ...]),
  #       '_impacted_cls': {
  #         FlakeType.CQ_FALSE_REJECTION: set([12345, 12346, ...]),
  #         FlakeType.RETRY_WITH_PATCH: set([12348, ...])
  #       },
  #       '_occurrences': {
  #         FlakeType.CQ_FALSE_REJECTION: 10,
  #         FlakeType.RETRY_WITH_PATCH: 100},
  #       '_tests': set(['test1', ...]),
  #       'test1': {
  #         # Per-(component/test) Totals
  #         '_id': 'test1',
  #         '_bugs': set([FlakeIssue.key, ...]),
  #         '_impacted_cls': {
  #           FlakeType.CQ_FALSE_REJECTION: set([12345]),
  #           FlakeType.RETRY_WITH_PATCH: set([12348])
  #         },
  #         '_occurrences': {
  #           FlakeType.CQ_FALSE_REJECTION: 1,
  #           FlakeType.RETRY_WITH_PATCH: 18},
  #         '_tests': set(['test1']),
  #       }, ...<more tests>
  #     }, ...<more components>
  #   },
  #   ... <more project>
  # }
  counters = {}
  # A dict with key as each flake's ndb key and value as each flake's component
  # and normalized_test_name.
  flake_info_dict = {}

  # Use the shared DAYS_IN_A_WEEK constant (instead of a hard-coded 7) so the
  # report window stays consistent with report_date above.
  end = report_date + datetime.timedelta(days=DAYS_IN_A_WEEK)

  _AddFlakesToCounters(counters, flake_info_dict, report_date,
                       save_test_report)
  _AddDistinctCLsToCounters(counters, flake_info_dict, report_date, end,
                            save_test_report)
  SaveReportToDatastore(counters, report_date, save_test_report)