Example #1
0
File: tests.py  Project: Kayle009/sentry
    def test_add_tags(self):
        """Adding tags to a group records one value row per distinct
        (key, value) pair in the tag store, each with times_seen == 1.
        """
        event = Group.objects.from_kwargs(1, message='rrr')
        group = event.group
        environment = self.create_environment()

        # Run the (normally asynchronous) tag-writing tasks inline.
        with self.tasks():
            Group.objects.add_tags(
                group,
                environment,
                tags=[
                    ('foo', 'bar'),
                    ('foo', 'baz'),
                    ('biz', 'boz'),
                ],
            )

        # 'foo' was recorded twice, once per distinct value.
        results = sorted(
            tagstore.get_group_tag_values(
                group.project_id,
                group.id,
                environment_id=None,
                key='foo',
            ),
            key=lambda x: x.value,
        )
        assert len(results) == 2
        res = results[0]
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(res.value, 'bar')
        self.assertEqual(res.times_seen, 1)
        res = results[1]
        self.assertEqual(res.value, 'baz')
        self.assertEqual(res.times_seen, 1)

        # 'biz' has a single value.
        results = tagstore.get_group_tag_values(
            group.project_id,
            group.id,
            environment_id=None,
            key='biz'
        )
        assert len(results) == 1
        res = list(results)[0]
        self.assertEqual(res.value, 'boz')
        self.assertEqual(res.times_seen, 1)
Example #2
0
File: tests.py  Project: alshopov/sentry
    def test_add_tags(self):
        """Adding tags to a group records one value row per distinct
        (key, value) pair in the tag store, each with times_seen == 1.

        Older tagstore API variant: group-scoped lookups take (group_id, key).
        """
        event = Group.objects.from_kwargs(1, message='rrr')
        group = event.group

        # Run the (normally asynchronous) tag-writing tasks inline.
        with self.tasks():
            Group.objects.add_tags(group, tags=(('foo', 'bar'), ('foo', 'baz'), ('biz', 'boz')))

        # 'foo' was recorded twice, once per distinct value.
        results = sorted(tagstore.get_group_tag_values(group.id, 'foo'), key=lambda x: x.id)
        assert len(results) == 2
        res = results[0]
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(res.value, 'bar')
        self.assertEqual(res.times_seen, 1)
        res = results[1]
        self.assertEqual(res.value, 'baz')
        self.assertEqual(res.times_seen, 1)

        # 'biz' has a single value.
        results = sorted(tagstore.get_group_tag_values(group.id, 'biz'), key=lambda x: x.id)
        assert len(results) == 1
        res = results[0]
        self.assertEqual(res.value, 'boz')
        self.assertEqual(res.times_seen, 1)
Example #3
0
    def merge_to(self, project):
        """Fold this project's groups, events and tag data into *project*,
        then delete this project.

        NOTE(review): ``Group.objects.get(project=project)`` below filters on
        the project only, so it raises ``MultipleObjectsReturned`` as soon as
        the target project holds more than one group — it looks like a
        narrowing condition (e.g. a checksum/fingerprint match) was lost in
        this snippet; confirm against the original implementation.
        """
        from sentry.models import (Group, Event)

        # Accept either a Project instance or a primary key.
        if not isinstance(project, Project):
            project = Project.objects.get_from_cache(pk=project)

        for group in Group.objects.filter(project=self):
            try:
                other = Group.objects.get(
                    project=project,
                )
            except Group.DoesNotExist:
                # No counterpart in the target project: move the group and
                # repoint its tag data at the new project.
                group.update(project=project)
                tagstore.update_project_for_group(
                    group_id=group.id,
                    old_project_id=self.id,
                    new_project_id=project.id)
            else:
                # A counterpart exists: fold this group's events into it.
                Event.objects.filter(
                    group_id=group.id,
                ).update(group_id=other.id)

                # Merge per-group tag counts.
                # NOTE(review): the tag value is created under ``group.id``
                # even though the events were just moved to ``other`` —
                # presumably ``other.id`` was intended; verify before
                # relying on this behavior.
                for obj in tagstore.get_group_tag_values(group_id=group.id):
                    obj2, created = tagstore.get_or_create_group_tag_value(
                        project_id=project.id,
                        group_id=group.id,
                        key=obj.key,
                        value=obj.value,
                        defaults={'times_seen': obj.times_seen}
                    )
                    if not created:
                        obj2.update(times_seen=F('times_seen') + obj.times_seen)

        # Carry project-level tag values over, then drop this project.
        for fv in tagstore.get_tag_values(self.id):
            tagstore.get_or_create_tag_value(project_id=project.id, key=fv.key, value=fv.value)
            fv.delete()
        self.delete()
Example #4
0
    def test_unmerge(self):
        """End-to-end check of the ``unmerge`` task: events for one of two
        fingerprints are split out of ``source`` into a new destination
        group, and event mappings, user reports, group hashes, releases,
        tag data, time-series counters and similarity features all follow.

        This is Python 2 code (``xrange``, indexable ``dict.keys()``).
        """
        def shift(i):
            # Exponentially growing offsets so every event gets a distinct,
            # strictly increasing timestamp.
            return timedelta(seconds=1 << i)

        now = timezone.now().replace(microsecond=0) - shift(16)

        project = self.create_project()
        source = self.create_group(project)

        # Round-robin generators for event ids, tag values and users.
        sequence = itertools.count(0)
        tag_values = itertools.cycle(['red', 'green', 'blue'])
        user_values = itertools.cycle([
            {
                'id': 1
            },
            {
                'id': 2
            },
        ])

        EnvironmentProject.objects.create(
            environment=Environment.objects.create(
                organization_id=project.organization_id,
                name='production',
            ),
            project=project,
        )

        def create_message_event(template, parameters):
            # Build one synthetic event attached to the source group,
            # including its tag writes, event mapping, user report and
            # similarity features.
            i = next(sequence)

            event_id = uuid.UUID(
                fields=(i, 0x0, 0x1000, 0x80, 0x80, 0x808080808080, ),
            ).hex

            event = Event.objects.create(
                project_id=project.id,
                group_id=source.id,
                event_id=event_id,
                # NOTE(review): ``id`` is the *builtin* function here, so the
                # message is "<built-in function id>" for every event —
                # presumably ``i`` was intended. No assertion below reads
                # this field; confirm before changing.
                message='%s' % (id, ),
                datetime=now + shift(i),
                data={
                    'environment':
                    'production',
                    'type':
                    'default',
                    'metadata': {
                        'title': template % parameters,
                    },
                    'sentry.interfaces.Message': {
                        'message': template,
                        'params': parameters,
                        'formatted': template % parameters,
                    },
                    'sentry.interfaces.User':
                    next(user_values),
                    'tags': [
                        ['color', next(tag_values)],
                        ['environment', 'production'],
                        ['sentry:release', 'version'],
                    ],
                },
            )

            # Run the (normally asynchronous) tag-writing tasks inline.
            with self.tasks():
                Group.objects.add_tags(
                    source,
                    tags=event.get_tags(),
                )

            EventMapping.objects.create(
                project_id=project.id,
                group_id=source.id,
                event_id=event_id,
                date_added=event.datetime,
            )

            UserReport.objects.create(
                project_id=project.id,
                group_id=source.id,
                event_id=event_id,
                name='Log Hat',
                email='*****@*****.**',
                comments='Quack',
            )

            features.record([event])

            return event

        # Two message templates -> two fingerprints: ten events on the
        # first, seven on the second.
        events = OrderedDict()

        for event in (create_message_event('This is message #%s.', i) for i in xrange(10)):
            events.setdefault(get_fingerprint(event), []).append(event)

        for event in (create_message_event('This is message #%s!', i) for i in xrange(10, 17)):
            events.setdefault(get_fingerprint(event), []).append(event)

        assert len(events) == 2
        assert sum(map(len, events.values())) == 17

        # XXX: This is super contrived considering that it doesn't actually go
        # through the event pipeline, but them's the breaks, eh?
        for fingerprint in events.keys():
            GroupHash.objects.create(
                project=project,
                group=source,
                hash=fingerprint,
            )

        # Pre-unmerge tag state: all 17 events on the source group.
        assert set(
            [(gtk.key, gtk.values_seen) for gtk in tagstore.get_group_tag_keys(source.id)]
        ) == set([
            (u'color', 3),
            (u'environment', 1),
            (u'sentry:release', 1)
        ])

        assert set(
            [(gtv.key, gtv.value, gtv.times_seen)
             for gtv in tagstore.get_group_tag_values(source.id)]
        ) == set([
            (u'color', u'red', 6),
            (u'color', u'green', 6),
            (u'color', u'blue', 5),
            (u'environment', u'production', 17),
            (u'sentry:release', u'version', 17),
        ])

        assert features.compare(source) == [
            (source.id, {
                'exception:message:character-shingles': None,
                'exception:stacktrace:application-chunks': None,
                'exception:stacktrace:pairs': None,
                'message:message:character-shingles': 1.0
            }),
        ]

        # Split the second fingerprint's events out of the source group.
        with self.tasks():
            unmerge.delay(
                source.project_id,
                source.id,
                None,
                [events.keys()[1]],
                None,
                batch_size=5,
            )

        # Source keeps the first fingerprint's ten events (shift 0..9).
        assert list(
            Group.objects.filter(id=source.id).values_list(
                'times_seen',
                'first_seen',
                'last_seen',
            )
        ) == [(10, now + shift(0), now + shift(9), )]

        source_activity = Activity.objects.get(
            group_id=source.id,
            type=Activity.UNMERGE_SOURCE,
        )

        # The destination group id is recorded on the source's activity.
        destination = Group.objects.get(
            id=source_activity.data['destination_id'],
        )

        # Destination got the remaining seven events (shift 10..16).
        assert list(
            Group.objects.filter(id=destination.id).values_list(
                'times_seen',
                'first_seen',
                'last_seen',
            )
        ) == [(7, now + shift(10), now + shift(16), )]

        assert source_activity.data == {
            'destination_id': destination.id,
            'fingerprints': [events.keys()[1]],
        }

        assert source.id != destination.id
        assert source.project == destination.project

        assert Activity.objects.get(
            group_id=destination.id,
            type=Activity.UNMERGE_DESTINATION,
        ).data == {
            'source_id': source.id,
            'fingerprints': [events.keys()[1]],
        }

        # Event mappings and user reports stay with the source events.
        source_event_event_ids = map(
            lambda event: event.event_id,
            events.values()[0],
        )

        assert source.event_set.count() == 10

        assert set(
            EventMapping.objects.filter(
                group_id=source.id,
            ).values_list('event_id', flat=True)
        ) == set(source_event_event_ids)

        assert set(
            UserReport.objects.filter(
                group_id=source.id,
            ).values_list('event_id', flat=True)
        ) == set(source_event_event_ids)

        assert set(GroupHash.objects.filter(
            group_id=source.id,
        ).values_list('hash', flat=True)) == set([events.keys()[0]])

        assert set(
            GroupRelease.objects.filter(
                group_id=source.id,
            ).values_list('environment', 'first_seen', 'last_seen')
        ) == set([
            (u'production', now + shift(0), now + shift(9), ),
        ])

        # Post-unmerge source tag state: counts shrink to the ten kept events.
        assert set(
            [(gtk.key, gtk.values_seen) for gtk in tagstore.get_group_tag_keys(source.id)]
        ) == set([
            (u'color', 3),
            (u'environment', 1),
            (u'sentry:release', 1),
        ])

        assert set(
            [(gtv.key, gtv.value, gtv.times_seen,
              gtv.first_seen, gtv.last_seen) for gtv in tagstore.get_group_tag_values(source.id)]
        ) == set([
            (u'color', u'red', 4, now + shift(0), now + shift(9), ),
            (u'color', u'green', 3, now + shift(1), now + shift(7), ),
            (u'color', u'blue', 3, now + shift(2), now + shift(8), ),
            (u'environment', u'production', 10, now + shift(0), now + shift(9), ),
            (u'sentry:release', u'version', 10, now + shift(0), now + shift(9), ),
        ])

        # Mappings, reports, hash and release follow the destination events.
        destination_event_event_ids = map(
            lambda event: event.event_id,
            events.values()[1],
        )

        assert destination.event_set.count() == 7

        assert set(
            EventMapping.objects.filter(
                group_id=destination.id,
            ).values_list('event_id', flat=True)
        ) == set(destination_event_event_ids)

        assert set(
            UserReport.objects.filter(
                group_id=destination.id,
            ).values_list('event_id', flat=True)
        ) == set(destination_event_event_ids)

        assert set(
            GroupHash.objects.filter(
                group_id=destination.id,
            ).values_list('hash', flat=True)
        ) == set([events.keys()[1]])

        assert set(
            GroupRelease.objects.filter(
                group_id=destination.id,
            ).values_list('environment', 'first_seen', 'last_seen')
        ) == set([
            (u'production', now + shift(10), now + shift(16), ),
        ])

        # NOTE(review): this re-asserts the *source* group's tag keys even
        # though the surrounding checks are about the destination — it
        # presumably meant ``destination.id`` (the expected set happens to be
        # identical for both groups, so the mistake is invisible); confirm.
        assert set([(gtk.key, gtk.values_seen) for gtk in tagstore.get_group_tag_keys(source.id)]
                   ) == set(
            [
                (u'color', 3),
                (u'environment', 1),
                (u'sentry:release', 1),
            ]
        )

        assert set(
            [(gtv.key, gtv.value, gtv.times_seen,
              gtv.first_seen, gtv.last_seen) for gtv in tagstore.get_group_tag_values(destination.id)]
        ) == set([
            (u'color', u'red', 2, now + shift(12), now + shift(15), ),
            (u'color', u'green', 3, now + shift(10), now + shift(16), ),
            (u'color', u'blue', 2, now + shift(11), now + shift(14), ),
            (u'environment', u'production', 7, now + shift(10), now + shift(16), ),
            (u'sentry:release', u'version', 7, now + shift(10), now + shift(16), ),
        ])

        # Time-series data (event counts per rollup bucket) must be split
        # between the two groups as well.
        rollup_duration = 3600

        time_series = tsdb.get_range(
            tsdb.models.group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            now + shift(16),
            rollup_duration,
        )

        def get_expected_series_values(rollup, events, function=None):
            # Fold ``events`` into {bucket_timestamp: aggregate} using
            # ``function``; the default aggregate is a simple count.
            # NOTE(review): the ``rollup`` parameter is ignored — the
            # closure's ``rollup_duration`` is used instead. Harmless here
            # because every caller passes that same value.
            if function is None:

                def function(aggregate, event):
                    return (aggregate if aggregate is not None else 0) + 1

            expected = {}
            for event in events:
                k = float((to_timestamp(event.datetime) // rollup_duration) * rollup_duration)
                expected[k] = function(expected.get(k), event)

            return expected

        def assert_series_contains(expected, actual, default=0):
            # Every expected bucket must match, and any extra bucket in
            # ``actual`` must hold the ``default`` (empty) value.
            actual = dict(actual)

            for key, value in expected.items():
                assert actual.get(key, 0) == value

            for key in set(actual.keys()) - set(expected.keys()):
                assert actual.get(key, 0) == default

        assert_series_contains(
            get_expected_series_values(rollup_duration, events.values()[0]),
            time_series[source.id],
            0,
        )

        assert_series_contains(
            get_expected_series_values(rollup_duration, events.values()[1]),
            time_series[destination.id],
            0,
        )

        # Distinct-user counts per bucket.
        time_series = tsdb.get_distinct_counts_series(
            tsdb.models.users_affected_by_group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            now + shift(16),
            rollup_duration,
        )

        def collect_by_user_tag(aggregate, event):
            # Accumulate the set of distinct user tag values per bucket.
            aggregate = aggregate if aggregate is not None else set()
            aggregate.add(
                get_event_user_from_interface(
                    event.data['sentry.interfaces.User'],
                ).tag_value,
            )
            return aggregate

        assert_series_contains(
            {
                timestamp: len(values)
                for timestamp, values in get_expected_series_values(
                    rollup_duration,
                    events.values()[0],
                    collect_by_user_tag,
                ).items()
            },
            time_series[source.id],
        )

        assert_series_contains(
            {
                timestamp: len(values)
                for timestamp, values in get_expected_series_values(
                    rollup_duration,
                    events.values()[1],
                    collect_by_user_tag,
                ).items()
            },
            time_series[destination.id],
        )

        # Per-release frequency counters.
        time_series = tsdb.get_most_frequent_series(
            tsdb.models.frequent_releases_by_group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            now + shift(16),
            rollup_duration,
        )

        def collect_by_release(group, aggregate, event):
            # Accumulate {group_release_id: count} per bucket.
            aggregate = aggregate if aggregate is not None else {}
            release = GroupRelease.objects.get(
                group_id=group.id,
                environment=event.data['environment'],
                release_id=Release.objects.get(
                    organization_id=project.organization_id,
                    version=event.get_tag('sentry:release'),
                ).id,
            ).id
            aggregate[release] = aggregate.get(release, 0) + 1
            return aggregate

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                events.values()[0],
                functools.partial(
                    collect_by_release,
                    source,
                ),
            ),
            time_series[source.id],
            {},
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                events.values()[1],
                functools.partial(
                    collect_by_release,
                    destination,
                ),
            ),
            time_series[destination.id],
            {},
        )

        # Per-environment frequency counters.
        time_series = tsdb.get_most_frequent_series(
            tsdb.models.frequent_environments_by_group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            now + shift(16),
            rollup_duration,
        )

        def collect_by_environment(aggregate, event):
            # Accumulate {environment_id: count} per bucket.
            aggregate = aggregate if aggregate is not None else {}
            environment = Environment.objects.get(
                organization_id=project.organization_id,
                name=event.data['environment'],
            ).id
            aggregate[environment] = aggregate.get(environment, 0) + 1
            return aggregate

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                events.values()[0],
                collect_by_environment,
            ),
            time_series[source.id],
            {},
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                events.values()[1],
                collect_by_environment,
            ),
            time_series[destination.id],
            {},
        )

        # Similarity features: each group is most similar to itself (1.0)
        # and strictly less similar to the other.
        source_similar_items = features.compare(source)
        assert source_similar_items[0] == (source.id, {
            'exception:message:character-shingles': None,
            'exception:stacktrace:application-chunks': None,
            'exception:stacktrace:pairs': None,
            'message:message:character-shingles': 1.0,
        })
        assert source_similar_items[1][0] == destination.id
        assert source_similar_items[1][1]['message:message:character-shingles'] < 1.0

        destination_similar_items = features.compare(destination)
        assert destination_similar_items[0] == (
            destination.id, {
                'exception:message:character-shingles': None,
                'exception:stacktrace:application-chunks': None,
                'exception:stacktrace:pairs': None,
                'message:message:character-shingles': 1.0
            }
        )
        assert destination_similar_items[1][0] == source.id
        assert destination_similar_items[1][1]['message:message:character-shingles'] < 1.0
Example #5
0
    def test_merge_updates_tag_values_seen(self):
        """Merging ``other`` into ``target`` must fold both groups'
        ``values_seen`` / ``times_seen`` counters into the surviving group
        and remove all tag rows belonging to the merged-away group.
        """
        project = self.create_project()
        target, other = [self.create_group(project) for _ in range(0, 2)]

        # Fixture shape: key -> value -> {group: times_seen}.
        data = {
            'sentry:user': {
                'id:1': {
                    target: 2,
                },
                'id:2': {
                    other: 3,
                },
                'id:3': {
                    target: 1,
                    other: 2,
                },
            },
            'key': {
                'foo': {
                    other: 3,
                },
            },
        }

        # Per-group counters to seed, and the combined counters expected
        # on the target group after the merge.
        seeded_tag_keys = defaultdict(int)  # [(group, key)] = values_seen
        seeded_tag_values = defaultdict(int)  # [(group, key, value)] = times_seen
        merged_tag_keys = defaultdict(int)  # [key] = values_seen
        merged_tag_values = defaultdict(int)  # [(key, value)] = times_seen

        for tag_key, by_value in data.items():
            merged_tag_keys[tag_key] = len(by_value)

            for tag_value, per_group in by_value.items():
                for grp, seen in per_group.items():
                    seeded_tag_keys[(grp, tag_key)] += 1
                    seeded_tag_values[(grp, tag_key, tag_value)] += seen
                    merged_tag_values[(tag_key, tag_value)] += seen

        # Write the per-group counters into the tag store.
        for (grp, tag_key), values_seen in seeded_tag_keys.items():
            tagstore.create_group_tag_key(
                project_id=project.id,
                group_id=grp.id,
                environment_id=self.environment.id,
                key=tag_key,
                values_seen=values_seen,
            )

        for (grp, tag_key, tag_value), times_seen in seeded_tag_values.items():
            tagstore.create_group_tag_value(
                project_id=project.id,
                group_id=grp.id,
                environment_id=self.environment.id,
                key=tag_key,
                value=tag_value,
                times_seen=times_seen,
            )

        with self.tasks():
            merge_group(other.id, target.id)

        # The merged-away group and its tag rows are gone.
        assert not Group.objects.filter(id=other.id).exists()
        assert len(tagstore.get_group_tag_keys(other.id)) == 0
        assert len(tagstore.get_group_tag_values(other.id)) == 0

        # The surviving group carries the combined counters.
        for tag_key, values_seen in merged_tag_keys.items():
            assert tagstore.get_group_tag_key(target.id, tag_key).values_seen == values_seen

        for (tag_key, tag_value), times_seen in merged_tag_values.items():
            assert tagstore.get_group_tag_value(
                group_id=target.id,
                key=tag_key,
                value=tag_value,
            ).times_seen == times_seen