Example #1
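An end-to-end test of the post-process forwarder: it produces a message to the events topic, advances the committed offset of the synchronizing group on the commit-log topic, runs the synchronized consumer, and verifies that the post-processing task is dispatched exactly once. The @patch decorator that injects dispatch_post_process_group_task is not part of this excerpt.
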
    def test_post_process_forwarder_batch_consumer(self, dispatch_post_process_group_task):
        consumer_group = f"consumer-{uuid.uuid1().hex}"
        synchronize_commit_group = f"sync-consumer-{uuid.uuid1().hex}"

        events_producer = self._get_producer("events")
        commit_log_producer = self._get_producer("snuba-commit-log")
        message = json.dumps(kafka_message_payload()).encode()

        eventstream = KafkaEventStream()
        consumer = eventstream._build_consumer(
            entity="all",
            consumer_group=consumer_group,
            commit_log_topic=self.commit_log_topic,
            synchronize_commit_group=synchronize_commit_group,
            commit_batch_size=1,
            initial_offset_reset="earliest",
        )
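        # The synchronized consumer only advances past an offset once the
        # synchronize_commit_group has committed it on the commit log topic
        # (driven by the commit-log produce below).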

        # produce message to the events topic
        events_producer.produce(self.events_topic, message)
        assert events_producer.flush(5) == 0, "events producer did not successfully flush queue"

        # Move the committed offset forward for our synchronizing group.
        commit_log_producer.produce(
            self.commit_log_topic,
            key=f"{self.events_topic}:0:{synchronize_commit_group}".encode(),
            value=f"{1}".encode(),
        )
        assert (
            commit_log_producer.flush(5) == 0
        ), "snuba-commit-log producer did not successfully flush queue"

        # Run the loop for some time
        for _ in range(3):
            consumer._run_once()
            time.sleep(1)

        # Verify that the task gets called once
        dispatch_post_process_group_task.assert_called_once_with(
            event_id="fe0ee9a2bc3b415497bad68aaf70dc7f",
            project_id=1,
            group_id=43,
            primary_hash="311ee66a5b8e697929804ceb1c456ffe",
            is_new=False,
            is_regression=None,
            is_new_group_environment=False,
        )
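
For reference, a minimal sketch of what the kafka_message_payload() helper used above might return. This is an assumption, reconstructed from the assert_called_once_with(...) check and from the version-2 message format decoded in Examples #3 and #4; the real fixture lives in the test module and may carry additional fields.

    def kafka_message_payload():
        # Hypothetical reconstruction -- values mirror the assertion above;
        # the real fixture may include extra fields.
        return [
            2,  # eventstream wire-format version (cf. Examples #3/#4)
            "insert",
            {
                "event_id": "fe0ee9a2bc3b415497bad68aaf70dc7f",
                "project_id": 1,
                "group_id": 43,
                "primary_hash": "311ee66a5b8e697929804ceb1c456ffe",
            },
            {
                "is_new": False,
                "is_regression": None,
                "is_new_group_environment": False,
            },
        ]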
Example #2
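The shared fixture in isolation: setUp swaps the eventstream's Kafka producer for a Mock so that insert() can run without a broker; Example #3 shows how the recorded produce() call is then inspected.
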
    def setUp(self):
        super(SnubaEventStreamTest, self).setUp()

        self.kafka_eventstream = KafkaEventStream()
        self.kafka_eventstream.producer = Mock()
Example #3
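A full test case built on the same fixture. __produce_event routes insert() through the mocked producer, asserts on the produced topic, key, and version-2 payload, then replays that payload directly into Snuba via SnubaEventStream._send; test_issueless covers events without an associated group.
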
class SnubaEventStreamTest(TestCase, SnubaTestCase):
    def setUp(self):
        super(SnubaEventStreamTest, self).setUp()

        self.kafka_eventstream = KafkaEventStream()
        self.kafka_eventstream.producer = Mock()

    def __build_event(self, timestamp):
        raw_event = {
            "event_id": "a" * 32,
            "message": "foo",
            "timestamp": time.mktime(timestamp.timetuple()),
            "level": logging.ERROR,
            "logger": "default",
            "tags": [],
        }
        manager = EventManager(raw_event)
        manager.normalize()
        return manager.save(self.project.id)

    def __produce_event(self, *insert_args, **insert_kwargs):
        # pass arguments on to Kafka EventManager
        self.kafka_eventstream.insert(*insert_args, **insert_kwargs)

        produce_args, produce_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
        assert not produce_args
        assert produce_kwargs["topic"] == "events"
        assert produce_kwargs["key"] == six.text_type(self.project.id)

        version, type_, payload1, payload2 = json.loads(produce_kwargs["value"])
        assert version == 2
        assert type_ == "insert"

        # insert what would have been the Kafka payload directly
        # into Snuba, expect an HTTP 200 and for the event to now exist
        snuba_eventstream = SnubaEventStream()
        snuba_eventstream._send(self.project.id, "insert", (payload1, payload2))

    @patch("sentry.eventstream.insert")
    @patch("sentry.tagstore.delay_index_event_tags")
    def test(self, mock_delay_index_event_tags, mock_eventstream_insert):
        now = datetime.utcnow()

        def _get_event_count():
            return snuba.query(
                start=now - timedelta(days=1),
                end=now + timedelta(days=1),
                groupby=["project_id"],
                filter_keys={"project_id": [self.project.id]},
            ).get(self.project.id, 0)

        assert _get_event_count() == 0

        event = self.__build_event(now)

        # verify eventstream was called by EventManager
        insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
        assert not insert_args
        assert insert_kwargs == {
            "event": event,
            "group": event.group,
            "is_new_group_environment": True,
            "is_new": True,
            "is_regression": False,
            "is_sample": False,
            "primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
            "skip_consume": False,
        }
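        # (primary_hash above is md5("foo"), i.e. the event message hashed
        # for Sentry's fallback grouping)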

        assert mock_delay_index_event_tags.call_count == 1

        self.__produce_event(*insert_args, **insert_kwargs)
        assert _get_event_count() == 1

    @patch("sentry.eventstream.insert")
    @patch("sentry.tagstore.delay_index_event_tags")
    def test_issueless(self, mock_delay_index_event_tags, mock_eventstream_insert):
        now = datetime.utcnow()
        event = self.__build_event(now)

        event.group_id = None
        insert_args = ()
        insert_kwargs = {
            "event": event,
            "group": None,
            "is_new_group_environment": True,
            "is_new": True,
            "is_regression": False,
            "is_sample": False,
            "primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
            "skip_consume": False,
        }

        self.__produce_event(*insert_args, **insert_kwargs)
        result = snuba.raw_query(
            start=now - timedelta(days=1),
            end=now + timedelta(days=1),
            selected_columns=["event_id", "group_id"],
            groupby=None,
            filter_keys={"project_id": [self.project.id], "event_id": [event.event_id]},
        )
        assert len(result["data"]) == 1
        assert result["data"][0]["group_id"] is None
Example #4
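An earlier revision of the test in Example #3, predating the test_issueless case and the helper methods; the produce-and-verify steps are inlined in the test body.
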
class SnubaEventStreamTest(SnubaTestCase):
    def setUp(self):
        super(SnubaEventStreamTest, self).setUp()

        self.kafka_eventstream = KafkaEventStream()
        self.kafka_eventstream.producer = Mock()

    @patch('sentry.eventstream.insert')
    @patch('sentry.tagstore.delay_index_event_tags')
    def test(self, mock_delay_index_event_tags, mock_eventstream_insert):
        now = datetime.utcnow()

        def _get_event_count():
            return snuba.query(
                start=now - timedelta(days=1),
                end=now + timedelta(days=1),
                groupby=['project_id'],
                filter_keys={'project_id': [self.project.id]},
            ).get(self.project.id, 0)

        assert _get_event_count() == 0

        raw_event = {
            'event_id': 'a' * 32,
            'message': 'foo',
            'timestamp': time.mktime(now.timetuple()),
            'level': logging.ERROR,
            'logger': 'default',
            'tags': [],
        }

        manager = EventManager(raw_event)
        manager.normalize()
        event = manager.save(self.project.id)

        # verify eventstream was called by EventManager
        insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
        assert not insert_args
        assert insert_kwargs == {
            'event': event,
            'group': event.group,
            'is_new_group_environment': True,
            'is_new': True,
            'is_regression': False,
            'is_sample': False,
            'primary_hash': 'acbd18db4cc2f85cedef654fccc4a4d8',
            'skip_consume': False
        }

        assert mock_delay_index_event_tags.call_count == 1

        # pass arguments on to Kafka EventManager
        self.kafka_eventstream.insert(*insert_args, **insert_kwargs)

        produce_args, produce_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
        assert not produce_args
        assert produce_kwargs['topic'] == 'events'
        assert produce_kwargs['key'] == six.text_type(self.project.id)

        version, type_, payload1, payload2 = json.loads(produce_kwargs['value'])
        assert version == 2
        assert type_ == 'insert'

        # insert what would have been the Kafka payload directly
        # into Snuba, expect an HTTP 200 and for the event to now exist
        snuba_eventstream = SnubaEventStream()
        snuba_eventstream._send(self.project.id, 'insert', (payload1, payload2))
        assert _get_event_count() == 1