Example #1
0
    def test(self, mock_delay_index_event_tags, mock_eventstream_insert):
        """Save an event via EventManager, replay the captured eventstream
        call through Kafka, then feed the Kafka payload straight into Snuba
        and verify the event becomes queryable."""
        current = datetime.utcnow()

        def _query_event_count():
            # Count events for this project in a +/- 1 day window.
            result = snuba.query(
                start=current - timedelta(days=1),
                end=current + timedelta(days=1),
                groupby=['project_id'],
                filter_keys={'project_id': [self.project.id]},
            )
            return result.get(self.project.id, 0)

        # Snuba starts out empty for this project.
        assert _query_event_count() == 0

        event_data = {
            'event_id': 'a' * 32,
            'message': 'foo',
            'timestamp': time.mktime(current.timetuple()),
            'level': logging.ERROR,
            'logger': 'default',
            'tags': [],
        }

        event_manager = EventManager(event_data)
        event_manager.normalize()
        event = event_manager.save(self.project.id)

        # EventManager must have published via the eventstream, kwargs-only.
        insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
        assert not insert_args
        expected_kwargs = {
            'event': event,
            'group': event.group,
            'is_new_group_environment': True,
            'is_new': True,
            'is_regression': False,
            'is_sample': False,
            'primary_hash': 'acbd18db4cc2f85cedef654fccc4a4d8',
            'skip_consume': False
        }
        assert insert_kwargs == expected_kwargs

        assert mock_delay_index_event_tags.call_count == 1

        # Forward the same arguments on to the Kafka-backed eventstream.
        self.kafka_eventstream.insert(*insert_args, **insert_kwargs)

        produce_args, produce_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
        assert not produce_args
        assert produce_kwargs['topic'] == 'events'
        assert produce_kwargs['key'] == six.text_type(self.project.id)

        version, type_, payload1, payload2 = json.loads(produce_kwargs['value'])
        assert version == 2
        assert type_ == 'insert'

        # Insert what would have been the Kafka payload directly into
        # Snuba; afterwards the event must be queryable.
        stream = SnubaEventStream()
        stream._send(self.project.id, 'insert', (payload1, payload2))
        assert _query_event_count() == 1
Example #2
0
    def test(self, mock_delay_index_event_tags, mock_eventstream_insert):
        """End-to-end pipeline check: save an event through EventManager,
        replay the captured eventstream call through the Kafka eventstream,
        then insert the resulting Kafka payload directly into Snuba and
        verify the event becomes queryable.
        """
        now = datetime.utcnow()

        def _get_event_count():
            # Event count for this project within a +/- 1 day window.
            return snuba.query(
                start=now - timedelta(days=1),
                end=now + timedelta(days=1),
                groupby=['project_id'],
                filter_keys={'project_id': [self.project.id]},
            ).get(self.project.id, 0)

        # Snuba should start out empty for this project.
        assert _get_event_count() == 0

        raw_event = {
            'event_id': 'a' * 32,
            'message': 'foo',
            # NOTE(review): mktime interprets this naive UTC datetime in
            # local time — confirm that is intended.
            'timestamp': time.mktime(now.timetuple()),
            'level': logging.ERROR,
            'logger': 'default',
            'tags': [],
        }

        manager = EventManager(raw_event)
        manager.normalize()
        event = manager.save(self.project.id)

        # verify eventstream was called by EventManager
        insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
        assert not insert_args
        assert insert_kwargs == {
            'event': event,
            'group': event.group,
            'is_new_group_environment': True,
            'is_new': True,
            'is_regression': False,
            'is_sample': False,
            'primary_hash': 'acbd18db4cc2f85cedef654fccc4a4d8',
            'skip_consume': False
        }

        assert mock_delay_index_event_tags.call_count == 1

        # pass arguments on to Kafka EventManager
        self.kafka_eventstream.insert(*insert_args, **insert_kwargs)

        # The Kafka producer must be called with kwargs only, targeting the
        # 'events' topic and keyed by project id.
        produce_args, produce_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
        assert not produce_args
        assert produce_kwargs['topic'] == 'events'
        assert produce_kwargs['key'] == six.text_type(self.project.id)

        # Message body is a JSON-encoded (version, type, payload1, payload2).
        version, type_, payload1, payload2 = json.loads(produce_kwargs['value'])
        assert version == 2
        assert type_ == 'insert'

        # insert what would have been the Kafka payload directly
        # into Snuba, expect an HTTP 200 and for the event to now exist
        snuba_eventstream = SnubaEventStream()
        snuba_eventstream._send(self.project.id, 'insert', (payload1, payload2))
        assert _get_event_count() == 1
Example #3
0
 def init_snuba(self):
     """Reset Snuba state for tests.

     Creates fresh eventstream/tagstore handles, then hits every init
     endpoint concurrently and asserts each returns HTTP 200.
     """
     self.snuba_eventstream = SnubaEventStream()
     self.snuba_tagstore = SnubaTagStorage()
     # Use the executor as a context manager so its worker threads are
     # shut down; the original `ThreadPoolExecutor(4).map(...)` never
     # called shutdown() and leaked the pool.
     with ThreadPoolExecutor(4) as executor:
         responses = list(executor.map(self.call_snuba, self.init_endpoints))
     assert all(response.status_code == 200 for response in responses)
Example #4
0
 def init_snuba(self):
     """Create Snuba-backed stores and wipe both Snuba test datasets."""
     self.snuba_eventstream = SnubaEventStream()
     self.snuba_tagstore = SnubaTagStorage()
     # Each drop endpoint must answer HTTP 200 for the reset to count.
     events_drop = requests.post(settings.SENTRY_SNUBA + "/tests/events/drop")
     assert events_drop.status_code == 200
     transactions_drop = requests.post(settings.SENTRY_SNUBA + "/tests/transactions/drop")
     assert transactions_drop.status_code == 200
Example #5
0
    def __produce_event(self, *insert_args, **insert_kwargs):
        """Replay an eventstream insert through Kafka and into Snuba."""
        # Hand the captured arguments to the Kafka-backed eventstream.
        self.kafka_eventstream.insert(*insert_args, **insert_kwargs)

        # The producer must have been called kwargs-only, on the 'events'
        # topic, keyed by project id.
        call_args, call_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
        assert not call_args
        assert call_kwargs["topic"] == "events"
        assert call_kwargs["key"] == six.text_type(self.project.id)

        # Message body is JSON: (version, type, payload1, payload2).
        message = json.loads(call_kwargs["value"])
        version, type_, payload1, payload2 = message
        assert version == 2
        assert type_ == "insert"

        # Deliver the would-be Kafka payload straight to Snuba so the
        # event exists for subsequent queries.
        SnubaEventStream()._send(self.project.id, "insert", (payload1, payload2))
Example #6
0
    def test_return_multiple_hashes(self):
        """Merging one group into another should expose both primary hashes
        on the surviving group's /hashes/ endpoint, newest event first."""
        self.login_as(user=self.user)

        # remove microseconds and timezone from iso format cause that's what store_event expects
        min_ago = (timezone.now() - timedelta(minutes=1)).isoformat()[:19]
        two_min_ago = (timezone.now() - timedelta(minutes=2)).isoformat()[:19]

        event1 = self.store_event(
            data={
                'event_id': 'a' * 32,
                'message': 'message',
                'timestamp': two_min_ago,
                'stacktrace': copy.deepcopy(DEFAULT_EVENT_DATA['stacktrace']),
                'fingerprint': ['group-1']
            },
            project_id=self.project.id,
        )

        event2 = self.store_event(
            data={
                'event_id': 'b' * 32,
                'message': 'message2',
                'timestamp': min_ago,
                'fingerprint': ['group-2']
            },
            project_id=self.project.id,
        )

        # Merge the events
        eventstream = SnubaEventStream()
        state = eventstream.start_merge(
            self.project.id,
            [event2.group_id],
            event1.group_id
        )

        eventstream.end_merge(state)

        url = u'/api/0/issues/{}/hashes/'.format(event1.group_id)
        response = self.client.get(url, format='json')

        assert response.status_code == 200, response.content

        # Both hashes are returned, most recent event's hash first.
        assert len(response.data) == 2

        # `hash_entry` instead of `hash` so the builtin is not shadowed.
        primary_hashes = [hash_entry['id'] for hash_entry in response.data]
        assert primary_hashes == [event2.get_primary_hash(), event1.get_primary_hash()]
Example #7
0
    def test_return_multiple_hashes(self):
        """Merging one group into another should expose both primary hashes
        on the surviving group's /hashes/ endpoint, newest event first."""
        self.login_as(user=self.user)

        min_ago = iso_format(before_now(minutes=1))
        two_min_ago = iso_format(before_now(minutes=2))

        event1 = self.store_event(
            data={
                "event_id": "a" * 32,
                "message": "message",
                "timestamp": two_min_ago,
                "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        )

        event2 = self.store_event(
            data={
                "event_id": "b" * 32,
                "message": "message2",
                "timestamp": min_ago,
                "fingerprint": ["group-2"],
            },
            project_id=self.project.id,
        )

        # Merge the events
        eventstream = SnubaEventStream()
        state = eventstream.start_merge(self.project.id, [event2.group_id],
                                        event1.group_id)

        eventstream.end_merge(state)

        url = u"/api/0/issues/{}/hashes/".format(event1.group_id)
        response = self.client.get(url, format="json")

        assert response.status_code == 200, response.content

        # Both hashes are returned, most recent event's hash first.
        assert len(response.data) == 2

        # `hash_entry` instead of `hash` so the builtin is not shadowed.
        primary_hashes = [hash_entry["id"] for hash_entry in response.data]
        assert primary_hashes == [
            event2.get_primary_hash(),
            event1.get_primary_hash()
        ]
Example #8
0
 def init_snuba(self):
     """Create Snuba-backed stores and reset Snuba's test data."""
     self.snuba_eventstream = SnubaEventStream()
     self.snuba_tagstore = SnubaCompatibilityTagStorage()
     # The drop endpoint must answer HTTP 200 for the reset to count.
     drop_response = requests.post(settings.SENTRY_SNUBA + '/tests/drop')
     assert drop_response.status_code == 200
Example #9
0
 def init_snuba(self):
     """Install Snuba-backed eventstream and tagstore handles for tests."""
     self.snuba_eventstream = SnubaEventStream()
     self.snuba_tagstore = SnubaTagStorage()
Example #10
0
 def setUp(self):
     """Prepare Snuba-backed fixtures and wipe Snuba's test dataset."""
     super(SnubaTestCase, self).setUp()
     self.snuba_eventstream = SnubaEventStream()
     self.snuba_tagstore = SnubaCompatibilityTagStorage()
     # The drop endpoint must answer HTTP 200 for the reset to count.
     drop_response = requests.post(settings.SENTRY_SNUBA + '/tests/drop')
     assert drop_response.status_code == 200