def test_simple(self):
    """Indexing the same event's tags twice must be idempotent."""
    group = self.create_group(project=self.project)
    event = self.create_event(group=group)

    expected = {'id__in': {event.id}}

    # Run the indexing task twice with identical arguments; the second
    # run must not duplicate rows or change the resulting filter.
    # (second iteration = "ensure it safely handles repeat runs")
    for _ in range(2):
        with self.tasks():
            index_event_tags.delay(
                event_id=event.id,
                group_id=group.id,
                project_id=self.project.id,
                environment_id=self.environment.id,
                organization_id=self.project.organization_id,
                tags=[('foo', 'bar'), ('biz', 'baz')],
            )

        assert tagstore.get_group_event_filter(
            self.project.id,
            group.id,
            [self.environment.id],
            {'foo': 'bar', 'biz': 'baz'},
            None,
            None,
        ) == expected
def test_simple(self):
    """The tag-indexing task can be re-run for the same event without side effects."""
    group = self.create_group(project=self.project)
    event = self.create_event(group=group)

    def run_task():
        # Enqueue (and, under self.tasks(), synchronously execute) the indexer.
        with self.tasks():
            index_event_tags.delay(
                event_id=event.id,
                group_id=group.id,
                project_id=self.project.id,
                environment_id=self.environment.id,
                organization_id=self.project.organization_id,
                tags=[("foo", "bar"), ("biz", "baz")],
            )

    def current_filter():
        return tagstore.get_group_event_filter(
            self.project.id,
            group.id,
            [self.environment.id],
            {"foo": "bar", "biz": "baz"},
            None,
            None,
        )

    run_task()
    assert current_filter() == {"id__in": {event.id}}

    # ensure it safely handles repeat runs
    run_task()
    assert current_filter() == {"id__in": {event.id}}
def _get_events_legacy(
    self,
    request,
    group,
    environments,
    query,
    tags,
    start,
    end,
):
    """Return a datetime-paginated list of the group's events (legacy path).

    Filters by free-text query, tag lookups (via the tagstore), an optional
    start/end window, and the organization's event-retention horizon.
    """
    queryset = Event.objects.filter(group_id=group.id)

    if query:
        # Match the message text; if the query normalizes to an event ID,
        # also accept an exact event-ID match.
        condition = Q(message__icontains=query)
        event_id = normalize_event_id(query)
        if event_id:
            condition |= Q(event_id__exact=event_id)
        queryset = queryset.filter(condition)

    if tags:
        event_filter = tagstore.get_group_event_filter(
            group.project_id,
            group.id,
            [env.id for env in environments],
            tags,
            start,
            end,
        )
        # An empty filter means the tag lookup matched nothing at all.
        if not event_filter:
            return Response([])
        queryset = queryset.filter(**event_filter)

    # Filter start/end here in case we didn't filter by tags at all
    if start:
        queryset = queryset.filter(datetime__gte=start)
    if end:
        queryset = queryset.filter(datetime__lte=end)

    # filter out events which are beyond the retention period
    retention = quotas.get_event_retention(organization=group.project.organization)
    if retention:
        cutoff = timezone.now() - timedelta(days=retention)
        queryset = queryset.filter(datetime__gte=cutoff)

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by='-datetime',
        on_results=lambda x: serialize(x, request.user),
        paginator_cls=DateTimePaginator,
    )
def _get_events_legacy(
    self,
    request,
    group,
    environments,
    query,
    tags,
    start,
    end,
):
    """Legacy (pre-snuba) paginated event listing for an issue."""
    events = Event.objects.filter(group_id=group.id)

    if query:
        text_match = Q(message__icontains=query)
        # The query string may itself be an exact event identifier.
        if is_event_id(query):
            text_match = text_match | Q(event_id__exact=query)
        events = events.filter(text_match)

    if tags:
        environment_ids = [env.id for env in environments]
        event_filter = tagstore.get_group_event_filter(
            group.project_id,
            group.id,
            environment_ids,
            tags,
            start,
            end,
        )
        if not event_filter:
            # Tag lookup matched no events; short-circuit with an empty page.
            return Response([])
        events = events.filter(**event_filter)

    # Filter start/end here in case we didn't filter by tags at all
    if start:
        events = events.filter(datetime__gte=start)
    if end:
        events = events.filter(datetime__lte=end)

    # filter out events which are beyond the retention period
    retention = quotas.get_event_retention(
        organization=group.project.organization,
    )
    if retention:
        events = events.filter(datetime__gte=timezone.now() - timedelta(days=retention))

    return self.paginate(
        request=request,
        queryset=events,
        order_by='-datetime',
        on_results=lambda x: serialize(x, request.user),
        paginator_cls=DateTimePaginator,
    )
def test_simple(self):
    """Tag indexing produces the expected event filter and is safe to repeat."""
    group = self.create_group(project=self.project)
    event = self.create_event(group=group)

    # Shared arguments for both task invocations.
    task_kwargs = dict(
        event_id=event.id,
        group_id=group.id,
        project_id=self.project.id,
        environment_id=self.environment.id,
        organization_id=self.project.organization_id,
        tags=[('foo', 'bar'), ('biz', 'baz')],
    )

    with self.tasks():
        index_event_tags.delay(**task_kwargs)

    assert tagstore.get_group_event_filter(
        self.project.id,
        group.id,
        [self.environment.id],
        {'foo': 'bar', 'biz': 'baz'},
        None,
        None,
    ) == {'id__in': {event.id}}

    # ensure it safely handles repeat runs
    with self.tasks():
        index_event_tags.delay(**task_kwargs)

    assert tagstore.get_group_event_filter(
        self.project.id,
        group.id,
        [self.environment.id],
        {'foo': 'bar', 'biz': 'baz'},
        None,
        None,
    ) == {'id__in': {event.id}}
def _get_events_legacy(self, request, group, environment, query, tags):
    """Legacy event listing for an issue, optionally scoped to one environment."""
    qs = Event.objects.filter(group_id=group.id)

    if query:
        condition = Q(message__icontains=query)
        if is_event_id(query):
            # Also allow looking an event up by its exact identifier.
            condition |= Q(event_id__exact=query)
        qs = qs.filter(condition)

    if tags:
        env_id = None if environment is None else environment.id
        event_filter = tagstore.get_group_event_filter(
            group.project_id,
            group.id,
            env_id,
            tags,
        )
        if not event_filter:
            # No events carry the requested tags — return an empty page.
            return Response([])
        qs = qs.filter(**event_filter)

    # filter out events which are beyond the retention period
    retention = quotas.get_event_retention(organization=group.project.organization)
    if retention:
        qs = qs.filter(datetime__gte=timezone.now() - timedelta(days=retention))

    return self.paginate(
        request=request,
        queryset=qs,
        order_by='-datetime',
        on_results=lambda x: serialize(x, request.user),
        paginator_cls=DateTimePaginator,
    )
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````

    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """

    def respond(queryset):
        # Single place to paginate whatever queryset we end up with.
        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='-datetime',
            on_results=lambda x: serialize(x, request.user),
            paginator_cls=DateTimePaginator,
        )

    events = Event.objects.filter(group_id=group.id)

    try:
        environment = self._get_environment_from_request(
            request,
            group.project.organization_id,
        )
    except Environment.DoesNotExist:
        return respond(events.none())

    raw_query = request.GET.get('query')
    if not raw_query:
        query, tags = None, {}
    else:
        try:
            query_kwargs = parse_query(group.project, raw_query, request.user)
        except InvalidQuery as exc:
            return Response({'detail': six.text_type(exc)}, status=400)
        query = query_kwargs.pop('query', None)
        tags = query_kwargs.pop('tags', {})

    if environment is not None:
        if 'environment' in tags and tags['environment'] != environment.name:
            # An event can only be associated with a single environment, so
            # if the environment from the request differs from the one given
            # as a tag lookup, the query cannot match anything.
            return respond(events.none())
        tags['environment'] = environment.name

    if query:
        condition = Q(message__icontains=query)
        # NOTE(review): 32-char length looks like an event-ID heuristic
        # (hex UUID without dashes) — confirm; it also matches any other
        # 32-character query string.
        if len(query) == 32:
            condition |= Q(event_id__exact=query)
        events = events.filter(condition)

    # TODO currently snuba can be used to get this filter of event_ids matching
    # the search tags, which is then used to further filter a postgres QuerySet
    # Ideally we would just use snuba to completely replace the fetching of the
    # events.
    if tags:
        event_filter = tagstore.get_group_event_filter(
            group.project_id,
            group.id,
            environment.id if environment is not None else None,
            tags,
        )
        if not event_filter:
            return respond(events.none())
        events = events.filter(**event_filter)

    # filter out events which are beyond the retention period
    retention = quotas.get_event_retention(organization=group.project.organization)
    if retention:
        events = events.filter(datetime__gte=timezone.now() - timedelta(days=retention))

    return respond(events)
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````

    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """

    def respond(queryset):
        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='-datetime',
            on_results=lambda x: serialize(x, request.user),
            paginator_cls=DateTimePaginator,
        )

    events = Event.objects.filter(group_id=group.id)

    # Resolve the environment first; an unknown environment matches nothing.
    try:
        environment = self._get_environment_from_request(
            request,
            group.project.organization_id,
        )
    except Environment.DoesNotExist:
        return respond(events.none())

    query = None
    tags = {}
    raw_query = request.GET.get('query')
    if raw_query:
        try:
            parsed = parse_query([group.project], raw_query, request.user)
        except InvalidQuery as exc:
            return Response({'detail': six.text_type(exc)}, status=400)
        query = parsed.pop('query', None)
        tags = parsed.pop('tags', {})

    if environment is not None:
        if 'environment' in tags and tags['environment'] != environment.name:
            # An event can only be associated with a single environment, so
            # a request-level environment that conflicts with the one given
            # as a tag lookup can never produce results.
            return respond(events.none())
        tags['environment'] = environment.name

    if query:
        message_match = Q(message__icontains=query)
        # NOTE(review): the 32-char check presumably identifies a hex event
        # ID — confirm; it will also fire on any other 32-character query.
        if len(query) == 32:
            message_match |= Q(event_id__exact=query)
        events = events.filter(message_match)

    # TODO currently snuba can be used to get this filter of event_ids matching
    # the search tags, which is then used to further filter a postgres QuerySet
    # Ideally we would just use snuba to completely replace the fetching of the
    # events.
    if tags:
        event_filter = tagstore.get_group_event_filter(
            group.project_id,
            group.id,
            environment.id if environment is not None else None,
            tags,
        )
        if not event_filter:
            return respond(events.none())
        events = events.filter(**event_filter)

    # filter out events which are beyond the retention period
    retention = quotas.get_event_retention(
        organization=group.project.organization,
    )
    if retention:
        cutoff = timezone.now() - timedelta(days=retention)
        events = events.filter(datetime__gte=cutoff)

    return respond(events)