def test_throws_when_matches_discarded_hash(self):
    """Saving an event whose hash points at a tombstone raises HashDiscarded
    and fires event_discarded instead of event_saved."""
    # First event creates the group and its hashes.
    mgr = EventManager(make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]))
    with self.tasks():
        first_event = mgr.save(1)

    grp = Group.objects.get(id=first_event.group_id)
    # Tombstone the group and re-point its hashes at the tombstone.
    stone = GroupTombstone.objects.create(
        project_id=grp.project_id,
        level=grp.level,
        message=grp.message,
        culprit=grp.culprit,
        data=grp.data,
        previous_group_id=grp.id,
    )
    GroupHash.objects.filter(group=grp).update(group=None, group_tombstone_id=stone.id)

    # Second event shares the fingerprint, so it must be discarded.
    mgr = EventManager(make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]))

    discarded_receiver = mock.Mock()
    event_discarded.connect(discarded_receiver)
    saved_receiver = mock.Mock()
    event_saved.connect(saved_receiver)

    with self.tasks():
        with self.assertRaises(HashDiscarded):
            mgr.save(1)

    assert not saved_receiver.called
    assert_mock_called_once_with_partial(
        discarded_receiver, project=grp.project, sender=EventManager, signal=event_discarded
    )
def test_recursion_breaker(settings, post_event_with_sdk):
    """Internal SDK errors while saving must not recurse back into save().

    The only success condition is that the test terminates at all.
    """
    event_id = uuid.uuid4().hex
    settings.SENTRY_INGEST_CONSUMER_APM_SAMPLING = 1.0

    with mock.patch(
        "sentry.event_manager.EventManager.save", side_effect=ValueError("oh no!")
    ) as mocked_save:
        with pytest.raises(ValueError):
            post_event_with_sdk({"message": "internal client test", "event_id": event_id})

    # save() was attempted exactly once for the internal project.
    assert_mock_called_once_with_partial(
        mocked_save, settings.SENTRY_PROJECT, cache_key=f"e:{event_id}:1"
    )
def test_event_saved_signal(self):
    """A normal save emits the event_saved signal for the event's project."""
    receiver = mock.Mock()
    event_saved.connect(receiver)

    mgr = EventManager(make_event(message="foo"))
    mgr.normalize()
    saved = mgr.save(1)

    assert_mock_called_once_with_partial(
        receiver, project=saved.group.project, sender=EventManager, signal=event_saved
    )
def test_recursion_breaker(post_event_with_sdk):
    """Failing internal-event save must not recurse; terminating == success."""
    with mock.patch(
        "sentry.event_manager.EventManager.save", side_effect=ValueError("oh no!")
    ) as mocked_save:
        with pytest.raises(ValueError):
            post_event_with_sdk("internal client test")

    assert_mock_called_once_with_partial(
        mocked_save,
        settings.SENTRY_PROJECT,
        cache_key=u"e:{}:1".format(last_event_id()),
    )
def test_event_accepted_outcome(self):
    """Saving a plain event tracks an ACCEPTED outcome in the DEFAULT category."""
    mgr = EventManager(make_event(message="foo"))
    mgr.normalize()

    outcome_spy = mock.Mock()
    with mock.patch("sentry.event_manager.track_outcome", outcome_spy):
        mgr.save(1)

    assert_mock_called_once_with_partial(
        outcome_spy, outcome=Outcome.ACCEPTED, category=DataCategory.DEFAULT
    )
def test_accepted_signal(self):
    """An accepted store request fires event_accepted with the caller's IP."""
    receiver = Mock()
    event_accepted.connect(receiver)

    resp = self._postWithHeader({"logentry": {"message": u"hello"}})
    assert resp.status_code == 200, resp.content

    assert_mock_called_once_with_partial(
        receiver, ip="127.0.0.1", project=self.project, signal=event_accepted
    )
def test_throws_when_matches_discarded_hash(self):
    """An event hashing to a tombstoned GroupHash raises HashDiscarded and
    emits event_discarded rather than event_saved."""
    # Create the group via an initial save.
    mgr = EventManager(
        self.make_event(
            message='foo',
            event_id='a' * 32,
            fingerprint=['a' * 32],
        )
    )
    with self.tasks():
        first_event = mgr.save(1)

    grp = Group.objects.get(id=first_event.group_id)
    # Tombstone the group, then detach its hashes onto the tombstone.
    stone = GroupTombstone.objects.create(
        project_id=grp.project_id,
        level=grp.level,
        message=grp.message,
        culprit=grp.culprit,
        data=grp.data,
        previous_group_id=grp.id,
    )
    GroupHash.objects.filter(
        group=grp,
    ).update(
        group=None,
        group_tombstone_id=stone.id,
    )

    # Same fingerprint as before: this save must be rejected.
    mgr = EventManager(
        self.make_event(
            message='foo',
            event_id='b' * 32,
            fingerprint=['a' * 32],
        )
    )

    discarded_receiver = mock.Mock()
    event_discarded.connect(discarded_receiver)
    saved_receiver = mock.Mock()
    event_saved.connect(saved_receiver)

    with self.tasks():
        with self.assertRaises(HashDiscarded):
            mgr.save(1)

    assert not saved_receiver.called
    assert_mock_called_once_with_partial(
        discarded_receiver,
        project=grp.project,
        sender=EventManager,
        signal=event_discarded,
    )
def test_event_saved_signal(self):
    """Successful saves fire event_saved for the owning project."""
    receiver = mock.Mock()
    event_saved.connect(receiver)

    mgr = EventManager(self.make_event(message='foo'))
    mgr.normalize()
    saved = mgr.save(1)

    assert_mock_called_once_with_partial(
        receiver,
        project=saved.group.project,
        sender=EventManager,
        signal=event_saved,
    )
def test_accepted_signal(self):
    """An accepted store request emits event_accepted with client IP/project."""
    receiver = Mock()
    event_accepted.connect(receiver)

    resp = self._postWithHeader({'sentry.interfaces.Message': {'message': u'hello'}})
    assert resp.status_code == 200, resp.content

    assert_mock_called_once_with_partial(
        receiver,
        ip='127.0.0.1',
        project=self.project,
        signal=event_accepted,
    )
def test_dropped_signal(self, mock_is_rate_limited):
    """Rate-limited requests get a 429 and fire event_dropped."""
    mock_is_rate_limited.is_limited = True

    receiver = Mock()
    event_dropped.connect(receiver)

    resp = self._postWithHeader({"logentry": {"message": u"hello"}})
    assert resp.status_code == 429, resp.content

    assert_mock_called_once_with_partial(
        receiver, ip="127.0.0.1", project=self.project, signal=event_dropped
    )
def test_filtered_signal(self, mock_should_filter):
    """Filtered requests get a 403 and fire event_filtered."""
    mock_should_filter.return_value = (True, "ip-address")

    receiver = Mock()
    event_filtered.connect(receiver)

    resp = self._postWithHeader({"logentry": {"message": u"hello"}})
    assert resp.status_code == 403, resp.content

    assert_mock_called_once_with_partial(
        receiver, ip="127.0.0.1", project=self.project, signal=event_filtered
    )
def test_dropped_signal(self, mock_is_rate_limited):
    """A rate-limited store request is rejected with 429 and event_dropped."""
    mock_is_rate_limited.is_limited = True

    receiver = Mock()
    event_dropped.connect(receiver)

    resp = self._postWithHeader({'sentry.interfaces.Message': {'message': u'hello'}})
    assert resp.status_code == 429, resp.content

    assert_mock_called_once_with_partial(
        receiver,
        ip='127.0.0.1',
        project=self.project,
        signal=event_dropped,
    )
def test_filtered_signal(self, mock_should_filter):
    """A filtered store request is rejected with 403 and event_filtered."""
    mock_should_filter.return_value = (True, 'ip-address')

    receiver = Mock()
    event_filtered.connect(receiver)

    resp = self._postWithHeader({'sentry.interfaces.Message': {'message': u'hello'}})
    assert resp.status_code == 403, resp.content

    assert_mock_called_once_with_partial(
        receiver,
        ip='127.0.0.1',
        project=self.project,
        signal=event_filtered,
    )
def test_recursion_breaker(self):
    """A failing internal-event save must not re-enter itself.

    Merely finishing proves the recursion breaker worked.
    """
    configure_sdk()
    Hub.current.bind_client(Hub.main.client)

    with self.tasks():
        with mock.patch(
            "sentry.event_manager.EventManager.save", side_effect=ValueError("oh no!")
        ) as mocked_save:
            event_id = raven.captureMessage("internal client test")

    # The save blew up, so nothing should have been persisted.
    stored = nodestore.get(Event.generate_node_id(settings.SENTRY_PROJECT, event_id))
    assert stored is None

    assert_mock_called_once_with_partial(
        mocked_save, settings.SENTRY_PROJECT, cache_key=u"e:{}:1".format(event_id)
    )
def test_throws_when_matches_discarded_hash(self):
    """A save matching a tombstoned hash raises HashDiscarded, records a
    FILTERED/DISCARDED_HASH outcome, and bumps the blacklist TSDB counters."""
    # Seed the group with a first event.
    mgr = EventManager(make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]))
    with self.tasks():
        event = mgr.save(1)

    grp = Group.objects.get(id=event.group_id)
    # Tombstone the group and re-point its hashes at the tombstone.
    stone = GroupTombstone.objects.create(
        project_id=grp.project_id,
        level=grp.level,
        message=grp.message,
        culprit=grp.culprit,
        data=grp.data,
        previous_group_id=grp.id,
    )
    GroupHash.objects.filter(group=grp).update(group=None, group_tombstone_id=stone.id)

    # Same fingerprint again: this one must be discarded.
    mgr = EventManager(make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]))

    from sentry.utils.outcomes import track_outcome

    # Wrap the real outcome tracker so side effects still happen.
    outcome_spy = mock.Mock(wraps=track_outcome)
    with mock.patch("sentry.event_manager.track_outcome", outcome_spy):
        with self.tasks():
            with self.assertRaises(HashDiscarded):
                event = mgr.save(1)

    assert_mock_called_once_with_partial(
        outcome_spy, outcome=Outcome.FILTERED, reason=FilterStatKeys.DISCARDED_HASH
    )

    def query(model, key, **kwargs):
        # Sum of the TSDB series for `key` over the event's timestamp.
        return tsdb.get_sums(model, [key], event.datetime, event.datetime, **kwargs)[key]

    # Both attempts count as received; only the first was stored.
    assert query(tsdb.models.organization_total_received, event.project.organization.id) == 2
    assert query(tsdb.models.project_total_received, event.project.id) == 2
    assert query(tsdb.models.project, event.project.id) == 1
    assert query(tsdb.models.group, event.group.id) == 1
    # The discarded attempt shows up as blacklisted.
    assert query(tsdb.models.organization_total_blacklisted, event.project.organization.id) == 1
    assert query(tsdb.models.project_total_blacklisted, event.project.id) == 1
def test_event_consumer(self, mock_kafka_publisher):
    """An event routed through the Kafka consumer still fires event_accepted."""
    consumer_options = {
        'store.kafka-sample-rate': 1.0,
        'store.process-in-kafka': True,
        'kafka-publisher.raw-event-sample-rate': 0.0,
    }
    with self.options(consumer_options):
        receiver = mock.Mock()
        event_accepted.connect(receiver)

        resp = self._postWithHeader({'logentry': {'message': u'hello'}})
        assert resp.status_code == 200, resp.content

        # Pull the message the publisher would have sent to Kafka...
        publish_args, publish_kwargs = list(mock_kafka_publisher.publish.call_args)
        message_body = publish_kwargs['value']

        # ...and feed it through the consumer path.
        process_event_from_kafka(json.loads(message_body))

        assert_mock_called_once_with_partial(
            receiver,
            ip='127.0.0.1',
            project=self.project,
            signal=event_accepted,
        )
def test_event_consumer(self, mock_kafka_publisher):
    """Events processed via the Kafka consumer path emit event_accepted."""
    with self.options({
        'store.kafka-sample-rate': 1.0,
        'store.process-in-kafka': True,
        'kafka-publisher.raw-event-sample-rate': 0.0,
    }):
        accepted_receiver = mock.Mock()
        event_accepted.connect(accepted_receiver)

        resp = self._postWithHeader({'logentry': {'message': u'hello'}})
        assert resp.status_code == 200, resp.content

        # Recover the payload handed to the Kafka publisher and replay it
        # through the consumer entry point.
        _, kwargs = list(mock_kafka_publisher.publish.call_args)
        process_event_from_kafka(json.loads(kwargs['value']))

        assert_mock_called_once_with_partial(
            accepted_receiver,
            ip='127.0.0.1',
            project=self.project,
            signal=event_accepted,
        )