def test_invalidates_snooze_with_buffers(self, mock_processor, send_robust):
    """A count-based snooze is cleared once buffered `times_seen` increments
    push the group past its threshold during post-processing."""
    redis_buffer = RedisBuffer()
    with mock.patch("sentry.buffer.get", redis_buffer.get), mock.patch(
        "sentry.buffer.incr", redis_buffer.incr
    ):
        first_event = self.store_event(
            data={"message": "testing", "fingerprint": ["group-1"]},
            project_id=self.project.id,
        )
        second_event = self.store_event(
            data={"message": "testing", "fingerprint": ["group-1"]},
            project_id=self.project.id,
        )
        group = first_event.group
        group.update(times_seen=50)
        snooze = GroupSnooze.objects.create(
            group=group, count=100, state={"times_seen": 0}
        )

        # First pass: group is still below the snooze count, so the snooze survives.
        post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            cache_key=write_event_to_cache(first_event),
            group_id=first_event.group_id,
        )
        assert GroupSnooze.objects.filter(id=snooze.id).exists()

        # Second pass: a buffered increment pushes times_seen over the
        # threshold, which should invalidate (delete) the snooze.
        buffer.incr(Group, {"times_seen": 60}, filters={"pk": first_event.group.id})
        post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            cache_key=write_event_to_cache(second_event),
            group_id=first_event.group_id,
        )
        assert not GroupSnooze.objects.filter(id=snooze.id).exists()
def test_rule_processor_buffer_values(self):
    """Pending buffered `times_seen` values are applied to the group before
    rule evaluation, so an occurrence-count alert fires once the combined
    (stored + buffered) count crosses the configured threshold."""
    from sentry.models import Rule

    MOCK_RULES = ("sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",)
    redis_buffer = RedisBuffer()
    with mock.patch("sentry.buffer.get", redis_buffer.get), mock.patch(
        "sentry.buffer.incr", redis_buffer.incr
    ), patch("sentry.constants._SENTRY_RULES", MOCK_RULES), patch(
        "sentry.rules.processor.rules", init_registry()
    ) as rules:
        # Register a mock action so we can count how often rules fire.
        MockAction = mock.Mock()
        MockAction.rule_type = "action/event"
        MockAction.id = "tests.sentry.tasks.post_process.tests.MockAction"
        MockAction.return_value.after.return_value = []
        rules.add(MockAction)

        # Replace any existing project rules with a single occurrence-count rule.
        Rule.objects.filter(project=self.project).delete()
        Rule.objects.create(
            project=self.project,
            data={
                "conditions": [
                    {
                        "id": "sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",
                        "value": 10,
                    },
                ],
                "actions": [
                    {"id": "tests.sentry.tasks.post_process.tests.MockAction"}
                ],
            },
        )

        first_event = self.store_event(
            data={"message": "testing", "fingerprint": ["group-1"]},
            project_id=self.project.id,
        )
        second_event = self.store_event(
            data={"message": "testing", "fingerprint": ["group-1"]},
            project_id=self.project.id,
        )

        # First pass: occurrence count is below the threshold, no action fires.
        post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            cache_key=write_event_to_cache(first_event),
            group_id=first_event.group_id,
        )
        first_event.group.update(times_seen=2)
        assert MockAction.return_value.after.call_count == 0

        # Second pass: the buffered increment (15) plus the stored count (2)
        # exceeds the threshold of 10, so the action fires exactly once.
        buffer.incr(Group, {"times_seen": 15}, filters={"pk": first_event.group.id})
        post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            cache_key=write_event_to_cache(second_event),
            group_id=second_event.group_id,
        )
        assert MockAction.return_value.after.call_count == 1
def test_default_host_is_local(self):
    """A RedisBuffer built with no arguments connects to a single local host.

    Uses ``assertEqual`` rather than the deprecated ``assertEquals`` alias,
    which was removed from ``unittest`` in Python 3.12.
    """
    buf = RedisBuffer()
    self.assertEqual(len(buf.conn.hosts), 1)
    self.assertEqual(buf.conn.hosts[0].host, 'localhost')
def setUp(self):
    """Provision a Redis-backed buffer on test database 9 and wipe it clean."""
    test_hosts = {0: {'db': 9}}
    self.buf = RedisBuffer(hosts=test_hosts)
    self.buf.conn.flushdb()
def setUp(self):
    """Create a RedisBuffer using the default connection settings."""
    self.buf = RedisBuffer()
def setUp(self):
    """Create a RedisBuffer pointed at test database 9."""
    test_hosts = {0: {'db': 9}}
    self.buf = RedisBuffer(hosts=test_hosts)