def test_delete_old_flakes(self):
  """Checks that the cron job prunes old occurrence keys from Flakes.

  Expected behavior of /cron/delete_old_flake_occurrences:
   - occurrences older than the retention window are removed, but only while
     the Flake still has more than 100 remaining occurrences;
   - dangling keys (pointing at no FlakyRun entity) are removed;
   - the FlakyRun entities themselves are never deleted.
  """
  now = datetime.datetime.utcnow()

  # Create old FlakyRuns (finished 100+ days ago).
  old_flakes = []
  for i in range(1, 101):  # id can not be 0
    key = FlakyRun(
        key=ndb.Key('FlakyRun', i),
        failure_run_time_finished=now - datetime.timedelta(days=100 + i),
        failure_run=ndb.Key('BuildRun', 1),
        success_run=ndb.Key('BuildRun', 2)).put()
    old_flakes.append(key)

  # Create new FlakyRuns (finished only hours ago).
  new_flakes = []
  for i in range(101, 201):
    key = FlakyRun(
        key=ndb.Key('FlakyRun', i),
        failure_run_time_finished=now - datetime.timedelta(hours=100 + i),
        failure_run=ndb.Key('BuildRun', 1),
        success_run=ndb.Key('BuildRun', 2)).put()
    new_flakes.append(key)

  # A key that does not correspond to any stored FlakyRun entity.
  non_existant_flake = [ndb.Key('FlakyRun', '201')]

  # Create Flakes.
  Flake(key=ndb.Key('Flake', 'foo'), name='foo',
        occurrences=old_flakes + new_flakes, last_time_seen=now).put()
  Flake(key=ndb.Key('Flake', 'bar'), name='bar',
        occurrences=old_flakes + new_flakes[:50], last_time_seen=now).put()
  Flake(key=ndb.Key('Flake', 'baz'), name='baz',
        occurrences=non_existant_flake + new_flakes, last_time_seen=now).put()

  path = '/cron/delete_old_flake_occurrences'
  response = self.test_app.get(path, headers={'X-AppEngine-Cron': 'true'})
  self.assertEqual(200, response.status_int)

  # Removed old flakes.
  self.assertEqual(set(Flake.get_by_id('foo').occurrences), set(new_flakes))

  # Kept old flakes since there are just 50 new flakes.
  self.assertEqual(len(Flake.get_by_id('bar').occurrences), 150)

  # Make sure that non existant flake got removed. The dangling key was only
  # ever added to 'baz', so that is the Flake we must inspect, and we check
  # for the key itself, not the single-element list wrapping it.
  self.assertNotIn(non_existant_flake[0], Flake.get_by_id('baz').occurrences)

  # Make sure that we do not delete any FlakyRun entities.
  self.assertEqual(FlakyRun.query().count(limit=300), 200)
def add_failure_to_flake(name, flaky_run_key, failure_time):
  """Records one flaky-run occurrence under the Flake named |name|.

  Looks up the Flake by id, lazily creating (and persisting) it with a
  sentinel last_time_seen when it does not exist yet, then appends the
  occurrence key, updates the flake's timing via util, and saves it.

  Args:
    name: String id/name of the Flake entity.
    flaky_run_key: ndb.Key of the FlakyRun occurrence to record.
    failure_time: datetime when the failure run finished.
  """
  flake = Flake.get_by_id(name)
  if flake is None:
    # First occurrence for this name: create the entity with the minimum
    # possible timestamp so any real occurrence time supersedes it.
    flake = Flake(name=name, id=name, last_time_seen=datetime.datetime.min)
    flake.put()

  flake.occurrences.append(flaky_run_key)
  util.add_occurrence_time_to_flake(flake, failure_time)
  flake.put()
def add_failure_to_flake(name, flaky_run):
  """Records |flaky_run| as an occurrence of the Flake named |name|.

  Creates and persists the Flake entity on first sight (with a sentinel
  last_time_seen), appends the run's key, folds the failure run's finish
  time into the flake, and saves it.

  Args:
    name: String id/name of the Flake entity.
    flaky_run: FlakyRun entity whose key and failure time are recorded.
  """
  flake = Flake.get_by_id(name)
  if flake is None:
    # New flake: seed last_time_seen with the minimum datetime so the first
    # real occurrence time always replaces it.
    flake = Flake(name=name, id=name, last_time_seen=datetime.datetime.min)
    flake.put()

  flake.occurrences.append(flaky_run.key)
  # Fetch the referenced failure BuildRun to learn when it finished.
  finished_at = flaky_run.failure_run.get().time_finished
  add_occurance_time_to_flake(flake, finished_at)
  flake.put()
def add_failure_to_flake(name, flaky_run_key, failure_time, is_step):
  """Records one flaky-run occurrence under the Flake named |name|.

  Lazily creates (and persists) the Flake entity when it does not exist,
  appends the occurrence key, refreshes is_step, updates the flake's timing
  via util, and saves it.

  Args:
    name: String id/name of the Flake entity.
    flaky_run_key: ndb.Key of the FlakyRun occurrence to record.
    failure_time: datetime when the failure run finished.
    is_step: Whether this flake is a step-level (vs test-level) flake.
  """
  flake = Flake.get_by_id(name)
  if flake is None:
    # First occurrence: seed last_time_seen with the minimum datetime so any
    # real occurrence time supersedes it.
    flake = Flake(name=name, id=name, last_time_seen=datetime.datetime.min,
                  is_step=is_step)
    flake.put()

  flake.occurrences.append(flaky_run_key)
  # TODO(sergiyb): This is necessary to update existing flakes. Remove in July
  # 2016 or later.
  flake.is_step = is_step
  util.add_occurrence_time_to_flake(flake, failure_time)
  flake.put()
def is_duplicate_occurrence(flake_id, flaky_run):
  """Returns true if the given flaky run has already been reported."""
  flake = Flake.get_by_id(flake_id)
  if flake is None:
    # No flake recorded yet, so nothing can be a duplicate.
    return False

  # Identify the incoming run by its changelist/patchset issue and builder.
  candidate_pbr = flaky_run.failure_run.parent().get()
  candidate_issue = candidate_pbr.issue
  candidate_builder = candidate_pbr.builder

  # Scan existing occurrences for one with the same issue and builder.
  for occurrence in ndb.get_multi(flake.occurrences):
    # Skip null occurrences or occurrences without a failure run.
    if not occurrence or not occurrence.failure_run:  # pragma: no cover
      continue
    existing_pbr = occurrence.failure_run.parent().get()
    if (existing_pbr.issue == candidate_issue and
        existing_pbr.builder == candidate_builder):
      return True

  return False