def test_simple(self):
    """Walk GenericOffsetPaginator through two pages and check both cursors."""

    def data_fn(offset=None, limit=None):
        # Fake backend: yields the integers in [offset, limit).
        return list(range(offset, limit))

    paginator = GenericOffsetPaginator(data_fn=data_fn)

    first_page = paginator.get_result(5)
    assert list(first_page) == [0, 1, 2, 3, 4]
    assert first_page.prev == Cursor(0, 0, True, False)
    assert first_page.next == Cursor(0, 5, False, True)

    second_page = paginator.get_result(5, first_page.next)
    assert list(second_page) == [5]
    assert second_page.prev == Cursor(0, 0, True, True)
    assert second_page.next == Cursor(0, 10, False, False)
def get_v2(self, request, organization):
    """Paginate raw snuba rows for the v2 organization events endpoint."""
    try:
        snuba_args = self.get_snuba_query_args_v2(request, organization)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        # No accessible projects is an empty result set, not an error.
        return Response([])
    else:
        def run_query(*args, **kwargs):
            # Pagination only consumes the 'data' rows of the snuba response.
            return transform_aliases_and_query(*args, **kwargs)['data']

        data_fn = partial(
            run_query,
            referrer='api.organization-events-v2',
            **snuba_args)
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
        )
def _get_events_snuba(self, request, group, environments, query, tags, start, end):
    """Paginate this group's events out of snuba, filtered by query and tags."""
    conditions = []
    if query:
        # Case-insensitive substring match against the event message.
        message_condition = [
            ['positionCaseInsensitive', ['message', "'%s'" % (query, )]],
            '!=',
            0,
        ]
        if is_event_id(query):
            # The query may be an event id: match either the id or the message.
            conditions.append([message_condition, ['event_id', '=', query]])
        else:
            conditions.append(message_condition)

    if tags:
        for tag_name, tag_val in tags.items():
            op = 'IN' if isinstance(tag_val, list) else '='
            conditions.append([u'tags[{}]'.format(tag_name), op, tag_val])

    default_end = timezone.now()
    default_start = default_end - timedelta(days=90)

    def run_query(*args, **kwargs):
        # Pagination only consumes the 'data' rows of the raw snuba response.
        return raw_query(*args, **kwargs)['data']

    data_fn = partial(
        run_query,
        # Clamp any requested window into the default 90-day range.
        start=max(start, default_start) if start else default_start,
        end=min(end, default_end) if end else default_end,
        conditions=conditions,
        filter_keys={
            'project_id': [group.project_id],
            'issue': [group.id],
        },
        selected_columns=SnubaEvent.selected_columns + ['tags.key', 'tags.value'],
        orderby='-timestamp',
        referrer='api.group-events',
    )
    return self.paginate(
        request=request,
        on_results=lambda rows: serialize(
            [SnubaEvent(row) for row in rows], request.user),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def _get_events_snuba(self, request, group, environments, query, tags, start, end):
    """Paginate this group's events via eventstore.

    Defaults to a 90-day window when no explicit start/end is given, honours
    an event-id "direct hit" shortcut, and gates boolean search operators
    behind a feature flag.
    """
    default_end = timezone.now()
    default_start = default_end - timedelta(days=90)
    params = {
        'issue.id': [group.id],
        'project_id': [group.project_id],
        'start': start if start else default_start,
        'end': end if end else default_end
    }
    # If the query is an exact event id, short-circuit with that single event.
    direct_hit_resp = get_direct_hit_response(request, query, params, 'api.group-events')
    if direct_hit_resp:
        return direct_hit_resp

    if environments:
        params['environment'] = [env.name for env in environments]

    full = request.GET.get('full', False)
    snuba_args = get_snuba_query_args(request.GET.get('query', None), params)

    # TODO(lb): remove once boolean search is fully functional
    if snuba_args:
        has_boolean_op_flag = features.has(
            'organizations:boolean-search',
            group.project.organization,
            actor=request.user)
        # pop() also strips the marker so it never reaches snuba as an arg.
        if snuba_args.pop('has_boolean_terms', False) and not has_boolean_op_flag:
            raise GroupEventsError(
                'Boolean search operator OR and AND not allowed in this search.'
            )

    # NOTE(review): no additional snuba columns in full mode — presumably the
    # EventSerializer loads the full payload from elsewhere; confirm.
    snuba_cols = None if full else eventstore.full_columns
    data_fn = partial(
        eventstore.get_events,
        additional_columns=snuba_cols,
        referrer='api.group-events',
        **snuba_args)
    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(
            results, request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn))
def get(self, request, organization):
    """List organization events, delegating to the v2 handler when enabled."""
    if features.has('organizations:events-v2', organization, actor=request.user):
        return self.get_v2(request, organization)

    # Check for a direct hit on event ID
    query = request.GET.get('query', '').strip()
    try:
        direct_hit_resp = get_direct_hit_response(
            request,
            query,
            self.get_filter_params(request, organization),
            'api.organization-events')
    except (OrganizationEventsError, NoProjects):
        # Skip the shortcut; the same errors are handled by the main path below.
        pass
    else:
        if direct_hit_resp:
            return direct_hit_resp

    full = request.GET.get('full', False)
    try:
        snuba_args = self.get_snuba_query_args(request, organization)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        # return empty result if org doesn't have projects
        # or user doesn't have access to projects in org
        data_fn = lambda *args, **kwargs: []
    else:
        # NOTE(review): minimal columns in full mode — presumably the
        # EventSerializer fetches the full body itself; confirm.
        snuba_cols = SnubaEvent.minimal_columns if full else SnubaEvent.selected_columns
        data_fn = partial(
            # extract 'data' from raw_query result
            lambda *args, **kwargs: raw_query(*args, **kwargs)['data'],
            selected_columns=snuba_cols,
            orderby='-timestamp',
            referrer='api.organization-events',
            **snuba_args)

    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(
            [SnubaEvent(row) for row in results], request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn))
def get(self, request, organization):
    """List organization events (legacy query syntax) via eventstore."""
    # Check for a direct hit on event ID
    query = request.GET.get("query", "").strip()
    try:
        direct_hit_resp = get_direct_hit_response(
            request,
            query,
            self.get_filter_params(request, organization),
            "api.organization-events-direct-hit",
        )
    except (OrganizationEventsError, NoProjects):
        # Skip the shortcut; the same errors are handled by the main path below.
        pass
    else:
        if direct_hit_resp:
            return direct_hit_resp

    full = request.GET.get("full", False)
    try:
        snuba_args = self.get_snuba_query_args_legacy(request, organization)
    except OrganizationEventsError as e:
        return Response({"detail": six.text_type(e)}, status=400)
    except NoProjects:
        # return empty result if org doesn't have projects
        # or user doesn't have access to projects in org
        data_fn = lambda *args, **kwargs: []
    else:
        data_fn = partial(
            eventstore.get_events,
            referrer="api.organization-events",
            filter=eventstore.Filter(
                start=snuba_args["start"],
                end=snuba_args["end"],
                conditions=snuba_args["conditions"],
                project_ids=snuba_args["filter_keys"].get("project_id", None),
                group_ids=snuba_args["filter_keys"].get("group_id", None),
            ),
        )

    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(results, request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get_v2(self, request, organization):
    """Run a v2 events query with field/grouping validation and pagination."""
    try:
        params = self.get_filter_params(request, organization)
        snuba_args = self.get_snuba_query_args_v2(request, organization, params)

        fields = snuba_args.get('selected_columns')
        groupby = snuba_args.get('groupby', [])

        # Reject requests that ask for nothing at all.
        if not fields and not groupby:
            return Response({'detail': 'No fields or groupings provided'}, status=400)

        if any(field for field in groupby if field not in ALLOWED_GROUPINGS):
            message = (
                'Invalid groupby value requested. Allowed values are ' +
                ', '.join(ALLOWED_GROUPINGS))
            return Response({'detail': message}, status=400)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        return Response([])

    filters = snuba_args.get('filter_keys', {})
    # Multi-project queries require the global-views feature.
    has_global_views = features.has(
        'organizations:global-views', organization, actor=request.user)
    if not has_global_views and len(filters.get('project_id', [])) > 1:
        return Response(
            {'detail': 'You cannot view events from multiple projects.'},
            status=400)

    data_fn = partial(
        # extract 'data' from the snuba response for pagination
        lambda **kwargs: transform_aliases_and_query(
            skip_conditions=True, **kwargs)['data'],
        referrer='api.organization-events-v2',
        **snuba_args)
    return self.paginate(
        request=request,
        paginator=GenericOffsetPaginator(data_fn=data_fn),
        on_results=lambda results: self.handle_results(
            request, organization, params['project_id'], results),
    )
def get(self, request, project):
    """
    List a Project's Events
    ```````````````````````

    Return a list of events bound to a project.

    Note: This endpoint is experimental and may be removed without notice.

    :qparam bool full: if this is set to true then the event payload will
                       include the full event body, including the stacktrace.
                       Set to 1 to enable.

    :pparam string organization_slug: the slug of the organization the
                                      groups belong to.
    :pparam string project_slug: the slug of the project the groups
                                 belong to.
    """
    from sentry.api.paginator import GenericOffsetPaginator

    conditions = []
    query = request.GET.get("query")
    if query:
        # Case-insensitive substring search on the event message.
        message_filter = [
            ["positionCaseInsensitive", ["message", "'%s'" % (query,)]],
            "!=",
            0,
        ]
        conditions.append(message_filter)

    full = request.GET.get("full", False)
    # No additional columns when the full payload was requested.
    extra_columns = None if full else eventstore.full_columns
    event_filter = eventstore.Filter(conditions=conditions, project_ids=[project.id])
    data_fn = partial(
        eventstore.get_events,
        additional_columns=extra_columns,
        filter=event_filter,
        referrer="api.project-events",
    )

    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(results, request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request, project):
    """
    List a Project's Events
    ```````````````````````

    Return a list of events bound to a project.

    Note: This endpoint is experimental and may be removed without notice.

    :pparam string organization_slug: the slug of the organization the
                                      groups belong to.
    :pparam string project_slug: the slug of the project the groups
                                 belong to.
    """
    from sentry.api.paginator import GenericOffsetPaginator
    from sentry.models import SnubaEvent
    from sentry.utils.snuba import raw_query

    conditions = []
    query = request.GET.get('query')
    if query:
        # Case-insensitive substring search on the event message.
        conditions.append(
            [['positionCaseInsensitive', ['message', "'%s'" % (query, )]], '!=', 0])

    full = request.GET.get('full', False)
    # NOTE(review): minimal columns in full mode — presumably the
    # EventSerializer fetches the full body itself; confirm.
    snuba_cols = SnubaEvent.minimal_columns if full else SnubaEvent.selected_columns

    def run_query(*args, **kwargs):
        # Pagination only consumes the 'data' rows of the raw snuba response.
        return raw_query(*args, **kwargs)['data']

    data_fn = partial(
        run_query,
        conditions=conditions,
        filter_keys={'project_id': [project.id]},
        selected_columns=snuba_cols,
        orderby='-timestamp',
        referrer='api.project-events',
    )

    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda rows: serialize(
            [SnubaEvent(row) for row in rows], request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request, organization):
    """Discover (events-v2) query endpoint with optional pagination."""
    if not self.has_feature(organization, request):
        return Response(status=404)

    try:
        params = self.get_snuba_params(request, organization)
    except NoProjects:
        return Response([])

    # Only whitelisted referrers are kept; anything else falls back to the
    # default referrer for attribution.
    referrer = request.GET.get("referrer")
    referrer = (referrer if referrer in ALLOWED_EVENTS_V2_REFERRERS
                else "api.organization-events-v2")

    def data_fn(offset, limit):
        # Invoked by GenericOffsetPaginator for each page.
        return discover.query(
            selected_columns=request.GET.getlist("field")[:],
            query=request.GET.get("query"),
            params=params,
            orderby=self.get_orderby(request),
            offset=offset,
            limit=limit,
            referrer=referrer,
            auto_fields=True,
            auto_aggregations=True,
            use_aggregate_conditions=True,
        )

    with self.handle_query_errors():
        # Don't include cursor headers if the client won't be using them
        if request.GET.get("noPagination"):
            return Response(
                self.handle_results_with_meta(
                    request,
                    organization,
                    params["project_id"],
                    data_fn(0, self.get_per_page(request)),
                ))
        else:
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=lambda results: self.handle_results_with_meta(
                    request, organization, params["project_id"], results),
            )
def get(self, request, organization):
    """Paginate the organization's events straight out of snuba."""
    try:
        snuba_args = self.get_snuba_query_args(request, organization)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)

    def run_query(*args, **kwargs):
        # Pagination only consumes the 'data' rows of the raw snuba response.
        return raw_query(*args, **kwargs)['data']

    data_fn = partial(
        run_query,
        selected_columns=SnubaEvent.selected_columns,
        orderby='-timestamp',
        referrer='api.organization-events',
        **snuba_args)
    return self.paginate(
        request=request,
        on_results=lambda rows: serialize(
            [SnubaEvent(row) for row in rows], request.user),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request: Request, organization: Organization) -> Response:
    """List suspect span groups for span performance analysis, paginated."""
    if not self.has_feature(request, organization):
        return Response(status=404)

    try:
        params = self.get_snuba_params(request, organization)
    except NoProjects:
        return Response(status=404)

    serializer = SpansPerformanceSerializer(data=request.GET)
    if not serializer.is_valid():
        return Response(serializer.errors, status=400)
    serialized = serializer.validated_data

    fields = serialized.get("field", [])
    query = serialized.get("query")
    span_ops = serialized.get("spanOp")
    span_groups = serialized.get("spanGroup")

    direction, orderby_column = self.get_orderby_column(request)

    def data_fn(offset: int, limit: int) -> Any:
        # Invoked by GenericOffsetPaginator for each page.
        suspects = query_suspect_span_groups(
            params,
            fields,
            query,
            span_ops,
            span_groups,
            direction,
            orderby_column,
            limit,
            offset,
        )
        return [suspect.serialize() for suspect in suspects]

    with self.handle_query_errors():
        return self.paginate(
            request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            default_per_page=10,
            max_per_page=100,
        )
def do_query(self, projects, request, **kwargs):
    """Run a discover query, rewriting 'project.name' to 'project.id'.

    'project.name' references in selected columns, groupby, and aggregations
    are replaced with 'project.id' before querying snuba; the original
    request shape is kept in ``requested_query`` for ``handle_results``.
    """
    # kwargs is mutated below; handle_results receives the original shape.
    requested_query = deepcopy(kwargs)

    selected_columns = kwargs['selected_columns']
    groupby_columns = kwargs['groupby']

    if 'project.name' in requested_query['selected_columns']:
        selected_columns.remove('project.name')
        if 'project.id' not in selected_columns:
            selected_columns.append('project.id')

    if 'project.name' in requested_query['groupby']:
        groupby_columns.remove('project.name')
        if 'project.id' not in groupby_columns:
            groupby_columns.append('project.id')

    for aggregation in kwargs['aggregations']:
        if aggregation[1] == 'project.name':
            aggregation[1] = 'project.id'

    if not kwargs['aggregations']:
        # Non-aggregated queries are paginated with offset/limit.
        data_fn = partial(
            snuba.transform_aliases_and_query,
            referrer='discover',
            **kwargs
        )
        return self.paginate(
            request=request,
            on_results=lambda results: self.handle_results(results, requested_query, projects),
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            max_per_page=1000
        )
    else:
        # Aggregated queries return a single, unpaginated result set.
        snuba_results = snuba.transform_aliases_and_query(
            referrer='discover',
            **kwargs
        )
        return Response(self.handle_results(
            snuba_results,
            requested_query,
            projects,
        ), status=200)
def get(self, request, organization):
    """List organization events (legacy query syntax) via eventstore."""
    # Check for a direct hit on event ID
    query = request.GET.get("query", "").strip()
    try:
        direct_hit_resp = get_direct_hit_response(
            request,
            query,
            self.get_filter_params(request, organization),
            "api.organization-events",
        )
    except (OrganizationEventsError, NoProjects):
        # Skip the shortcut; the same errors are handled by the main path below.
        pass
    else:
        if direct_hit_resp:
            return direct_hit_resp

    full = request.GET.get("full", False)
    try:
        snuba_args = self.get_snuba_query_args_legacy(
            request, organization)
    except OrganizationEventsError as exc:
        # NOTE(review): exc.message is a Python 2 era attribute; sibling
        # endpoints use six.text_type(exc) — consider aligning.
        return Response({"detail": exc.message}, status=400)
    except NoProjects:
        # return empty result if org doesn't have projects
        # or user doesn't have access to projects in org
        data_fn = lambda *args, **kwargs: []
    else:
        # No additional columns when the full payload was requested.
        cols = None if full else eventstore.full_columns
        data_fn = partial(
            eventstore.get_events,
            additional_columns=cols,
            referrer="api.organization-events",
            **snuba_args)

    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(results, request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def _get_events_snuba(self, request, group, environment, query, tags):
    """Paginate this group's last-90-days events out of snuba."""
    conditions = []
    if query:
        # Case-insensitive substring match against the event message.
        message_condition = [
            ['positionCaseInsensitive', ['message', "'%s'" % (query, )]],
            '!=',
            0,
        ]
        if is_event_id(query):
            # The query may be an event id: match either the id or the message.
            conditions.append([message_condition, ['event_id', '=', query]])
        else:
            conditions.append(message_condition)

    if tags:
        for tag_key, tag_value in tags.items():
            conditions.append([u'tags[{}]'.format(tag_key), '=', tag_value])

    now = timezone.now()

    def run_query(*args, **kwargs):
        # Pagination only consumes the 'data' rows of the raw snuba response.
        return raw_query(*args, **kwargs)['data']

    data_fn = partial(
        run_query,
        start=now - timedelta(days=90),
        end=now,
        conditions=conditions,
        filter_keys={
            'project_id': [group.project_id],
            'issue': [group.id],
        },
        selected_columns=SnubaEvent.selected_columns + ['tags.key', 'tags.value'],
        orderby='-timestamp',
        referrer='api.group-events',
    )
    return self.paginate(
        request=request,
        on_results=lambda rows: serialize(
            [SnubaEvent(row) for row in rows], request.user),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request, organization):
    """Paginate organization events constrained by date range, projects, envs."""
    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        return Response({'detail': exc.message}, status=400)

    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)

    environments = self.get_environments(request, organization)
    params = {
        'start': start,
        'end': end,
        'project_id': project_ids,
    }
    if environments:
        params['environment'] = environments

    try:
        snuba_args = get_snuba_query_args(query=request.GET.get('query'), params=params)
    except InvalidSearchQuery as exc:
        return Response({'detail': exc.message}, status=400)

    def run_query(*args, **kwargs):
        # Pagination only consumes the 'data' rows of the raw snuba response.
        return raw_query(*args, **kwargs)['data']

    data_fn = partial(
        run_query,
        selected_columns=SnubaEvent.selected_columns,
        orderby='-timestamp',
        referrer='api.organization-events',
        **snuba_args)
    return self.paginate(
        request=request,
        on_results=lambda rows: serialize(
            [SnubaEvent(row) for row in rows], request.user),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request: Request, organization) -> Response: """ Returns a paginated list of members bound to a organization with a SCIM Users GET Request. """ # note that SCIM doesn't care about changing results as they're queried query_params = self.get_query_parameters(request) queryset = (OrganizationMember.objects.filter( Q(invite_status=InviteStatus.APPROVED.value), Q(user__is_active=True) | Q(user__isnull=True), organization=organization, ).select_related("user").order_by("email", "user__email")) if query_params["filter"]: queryset = queryset.filter( Q(email__iexact=query_params["filter"]) | Q(user__email__iexact=query_params["filter"]) ) # not including secondary email vals (dups, etc.) def data_fn(offset, limit): return list(queryset[offset:offset + limit]) def on_results(results): results = serialize( results, None, _scim_member_serializer_with_expansion(organization), ) return self.list_api_format(results, queryset.count(), query_params["start_index"]) return self.paginate( request=request, on_results=on_results, paginator=GenericOffsetPaginator(data_fn=data_fn), default_per_page=query_params["count"], queryset=queryset, cursor_cls=SCIMCursor, )
def get(self, request, project):
    """
    List a Project's Events
    ```````````````````````

    Return a list of events bound to a project.

    Note: This endpoint is experimental and may be removed without notice.

    :pparam string organization_slug: the slug of the organization the
                                      groups belong to.
    :pparam string project_slug: the slug of the project the groups
                                 belong to.
    """
    from sentry.api.paginator import GenericOffsetPaginator

    conditions = []
    query = request.GET.get('query')
    if query:
        # Case-insensitive substring search on the event message.
        conditions.append(
            [['positionCaseInsensitive', ['message', "'%s'" % (query, )]], '!=', 0])

    full = request.GET.get('full', False)
    # No additional columns when the full payload was requested.
    extra_columns = None if full else eventstore.full_columns
    data_fn = partial(
        eventstore.get_events,
        conditions=conditions,
        filter_keys={'project_id': [project.id]},
        additional_columns=extra_columns,
        referrer='api.project-events',
    )

    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda rows: serialize(rows, request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def do_query(self, projects, request, **kwargs):
    """Run a discover query, rewriting 'project.name' to 'project.id'.

    "project.name" references in selected columns, groupby, and aggregations
    are replaced with "project.id" before querying snuba; the original
    request shape is kept in ``requested_query`` for ``handle_results``.
    """
    # kwargs is mutated below; handle_results receives the original shape.
    requested_query = deepcopy(kwargs)

    selected_columns = kwargs["selected_columns"]
    groupby_columns = kwargs["groupby"]

    if "project.name" in requested_query["selected_columns"]:
        selected_columns.remove("project.name")
        if "project.id" not in selected_columns:
            selected_columns.append("project.id")

    if "project.name" in requested_query["groupby"]:
        groupby_columns.remove("project.name")
        if "project.id" not in groupby_columns:
            groupby_columns.append("project.id")

    for aggregation in kwargs["aggregations"]:
        if aggregation[1] == "project.name":
            aggregation[1] = "project.id"

    if not kwargs["aggregations"]:
        # Non-aggregated queries are paginated with offset/limit.
        data_fn = partial(snuba.transform_aliases_and_query, referrer="discover", **kwargs)
        return self.paginate(
            request=request,
            on_results=lambda results: self.handle_results(
                results, requested_query, projects),
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            max_per_page=1000,
        )
    else:
        # Aggregated queries return a single, unpaginated result set.
        snuba_results = snuba.transform_aliases_and_query(
            referrer="discover", **kwargs)
        return Response(self.handle_results(snuba_results, requested_query, projects),
                        status=200)
def _get_events_snuba(self, request, group, environments, query, tags, start, end):
    """Paginate this group's events via eventstore, excluding transactions."""
    default_end = timezone.now()
    default_start = default_end - timedelta(days=90)
    params = {
        "group_ids": [group.id],
        "project_id": [group.project_id],
        "start": start if start else default_start,
        "end": end if end else default_end,
    }
    # If the query is an exact event id, short-circuit with that single event.
    direct_hit_resp = get_direct_hit_response(request, query, params, "api.group-events")
    if direct_hit_resp:
        return direct_hit_resp

    if environments:
        params["environment"] = [env.name for env in environments]

    full = request.GET.get("full", False)
    snuba_filter = get_filter(request.GET.get("query", None), params)
    # Exclude transaction events from this listing.
    snuba_filter.conditions.append(["event.type", "!=", "transaction"])

    # NOTE(review): no additional snuba columns in full mode — presumably the
    # EventSerializer loads the full payload from elsewhere; confirm.
    snuba_cols = None if full else eventstore.full_columns
    data_fn = partial(
        eventstore.get_events,
        additional_columns=snuba_cols,
        referrer="api.group-events",
        filter=snuba_filter,
    )
    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(results, request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def _get_events_snuba(self, request, group, environments, query, tags, start, end):
    """Paginate this group's events out of snuba (raw_query path)."""
    default_end = timezone.now()
    default_start = default_end - timedelta(days=90)
    params = {
        'issue.id': [group.id],
        'project_id': [group.project_id],
        'start': start if start else default_start,
        'end': end if end else default_end
    }
    # If the query is an exact event id, short-circuit with that single event.
    direct_hit_resp = get_direct_hit_response(request, query, params, 'api.group-events')
    if direct_hit_resp:
        return direct_hit_resp

    if environments:
        params['environment'] = [env.name for env in environments]

    full = request.GET.get('full', False)
    snuba_args = get_snuba_query_args(request.GET.get('query', None), params)
    # NOTE(review): minimal columns in full mode — presumably the
    # EventSerializer fetches the full body itself; confirm.
    snuba_cols = SnubaEvent.minimal_columns if full else SnubaEvent.selected_columns
    data_fn = partial(
        # extract 'data' from raw_query result
        lambda *args, **kwargs: raw_query(*args, **kwargs)['data'],
        selected_columns=snuba_cols,
        orderby='-timestamp',
        referrer='api.group-events',
        **snuba_args)
    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(
            [SnubaEvent(row) for row in results], request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn))
def get(self, request, organization, key):
    """Paginate the distinct values (with counts) for a single tag key."""
    if not TAG_KEY_RE.match(key):
        return Response(
            {'detail': 'Invalid tag key format for "%s"' % (key, )},
            status=400)

    try:
        filter_params = self.get_filter_params(request, organization)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)

    try:
        snuba_args = get_snuba_query_args('tags_key:%s' % (key, ), params=filter_params)
    except InvalidSearchQuery as exc:
        return Response({'detail': exc.message}, status=400)

    def run_query(*args, **kwargs):
        # Pagination only consumes the 'data' rows of the raw snuba response.
        return raw_query(*args, **kwargs)['data']

    data_fn = partial(
        run_query,
        aggregations=[
            ('count()', '', 'count'),
        ],
        orderby='-count',
        groupby=['tags_value'],
        referrer='api.organization-tags',
        **snuba_args)

    return self.paginate(
        request=request,
        on_results=lambda rows: [
            {
                'value': row['tags_value'],
                'count': row['count'],
            }
            for row in rows
        ],
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request: Request, organization: Organization) -> Response:
    """List span operations with event counts, paginated (20 per page max)."""
    if not self.has_feature(request, organization):
        return Response(status=404)

    try:
        params = self.get_snuba_params(request, organization)
    except NoProjects:
        return Response(status=404)

    query = request.GET.get("query")

    def data_fn(offset: int, limit: int) -> Any:
        # One page of span ops, counted and ordered by descending frequency.
        builder = QueryBuilder(
            dataset=Dataset.Discover,
            params=params,
            selected_columns=["spans_op", "count()"],
            # Unnest the spans_op array so each op is counted individually.
            array_join="spans_op",
            query=query,
            limit=limit,
            offset=offset,
            orderby="-count",
        )
        snql_query = builder.get_snql_query()
        results = raw_snql_query(snql_query, "api.organization-events-span-ops")
        return [
            SpanOp(op=row["spans_op"], count=row["count"])
            for row in results["data"]
        ]

    with self.handle_query_errors():
        return self.paginate(
            request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            default_per_page=20,
            max_per_page=20,
        )
def _get_events_snuba(self, request, group, environments, query, tags, start, end):
    """Paginate this group's events via eventstore, excluding transactions."""
    default_end = timezone.now()
    default_start = default_end - timedelta(days=90)
    params = {
        "group_ids": [group.id],
        "project_id": [group.project_id],
        "organization_id": group.project.organization_id,
        "start": start if start else default_start,
        "end": end if end else default_end,
    }
    # If the query is an exact event id, short-circuit with that single event.
    direct_hit_resp = get_direct_hit_response(request, query, params, "api.group-events")
    if direct_hit_resp:
        return direct_hit_resp

    if environments:
        params["environment"] = [env.name for env in environments]

    full = request.GET.get("full", False)
    try:
        snuba_filter = get_filter(request.GET.get("query", None), params)
    except InvalidSearchQuery as e:
        # Surface bad search syntax as a 400 rather than a server error.
        raise ParseError(detail=six.text_type(e))

    # Exclude transaction events from this listing.
    snuba_filter.conditions.append(["event.type", "!=", "transaction"])

    data_fn = partial(eventstore.get_events, referrer="api.group-events", filter=snuba_filter)
    serializer = EventSerializer() if full else SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(results, request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request, organization):
    """Discover query endpoint: validate, run, paginate, map snuba errors."""
    if not features.has("organizations:discover-basic", organization, actor=request.user):
        return Response(status=404)

    try:
        params = self.get_filter_params(request, organization)
    except OrganizationEventsError as exc:
        raise ParseError(detail=six.text_type(exc))
    except NoProjects:
        return Response([])
    params["organization_id"] = organization.id

    # Multi-project queries require the global-views feature.
    has_global_views = features.has(
        "organizations:global-views", organization, actor=request.user)
    if not has_global_views and len(params.get("project_id", [])) > 1:
        raise ParseError(
            detail="You cannot view events from multiple projects.")

    def data_fn(offset, limit):
        # Invoked by GenericOffsetPaginator for each page.
        return discover.query(
            selected_columns=request.GET.getlist("field")[:],
            query=request.GET.get("query"),
            params=params,
            reference_event=self.reference_event(request, organization,
                                                 params.get("start"),
                                                 params.get("end")),
            orderby=self.get_orderby(request),
            offset=offset,
            limit=limit,
            referrer="api.organization-events-v2",
            auto_fields=True,
            use_aggregate_conditions=True,
        )

    try:
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=lambda results: self.handle_results_with_meta(
                request, organization, params["project_id"], results),
        )
    except discover.InvalidSearchQuery as error:
        raise ParseError(detail=six.text_type(error))
    except (snuba.SnubaError, snuba.QueryOutsideRetentionError) as error:
        logger.info(
            "organization.events.snuba-error",
            extra={
                "organization_id": organization.id,
                "user_id": request.user.id,
                "error": six.text_type(error),
            },
        )
        # Map known snuba failure modes onto user-facing messages.
        message = "Internal error. Please try again."
        if isinstance(error, snuba.QueryIllegalTypeOfArgument):
            message = "Invalid query. Argument to function is wrong type."
        elif isinstance(error, snuba.QueryOutsideRetentionError):
            message = "Invalid date range. Please try a more recent date range."
        elif isinstance(
            error,
            (
                snuba.RateLimitExceeded,
                snuba.QueryMemoryLimitExceeded,
                snuba.QueryTooManySimultaneous,
            ),
        ):
            message = "Query timeout. Please try again. If the problem persists try a smaller date range or fewer projects."
        elif isinstance(
            error,
            (
                snuba.UnqualifiedQueryError,
                snuba.QueryExecutionError,
                snuba.SchemaValidationError,
            ),
        ):
            message = "Invalid query."
        raise ParseError(detail=message)
def get(self, request, organization):
    """Discover query endpoint: validate, run, paginate, map snuba errors."""
    if not features.has("organizations:discover-basic", organization, actor=request.user):
        return Response(status=404)

    with sentry_sdk.start_span(op="discover.endpoint", description="filter_params") as span:
        span.set_tag("organization", organization)
        try:
            params = self.get_filter_params(request, organization)
        except NoProjects:
            return Response([])
        params = self.quantize_date_params(request, params)

        # Multi-project queries require the global-views feature.
        has_global_views = features.has(
            "organizations:global-views", organization, actor=request.user)
        if not has_global_views and len(params.get("project_id", [])) > 1:
            raise ParseError(
                detail="You cannot view events from multiple projects.")

    def data_fn(offset, limit):
        # Invoked by GenericOffsetPaginator for each page.
        return discover.query(
            selected_columns=request.GET.getlist("field")[:],
            query=request.GET.get("query"),
            params=params,
            reference_event=self.reference_event(request, organization,
                                                 params.get("start"),
                                                 params.get("end")),
            orderby=self.get_orderby(request),
            offset=offset,
            limit=limit,
            referrer=request.GET.get("referrer", "api.organization-events-v2"),
            auto_fields=True,
            use_aggregate_conditions=True,
        )

    try:
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=lambda results: self.handle_results_with_meta(
                request, organization, params["project_id"], results),
        )
    except (discover.InvalidSearchQuery, snuba.QueryOutsideRetentionError) as error:
        raise ParseError(detail=six.text_type(error))
    except snuba.QueryIllegalTypeOfArgument:
        raise ParseError(
            detail="Invalid query. Argument to function is wrong type.")
    except snuba.SnubaError as error:
        # Map known snuba failure modes onto user-facing messages.
        message = "Internal error. Please try again."
        if isinstance(
            error,
            (
                snuba.RateLimitExceeded,
                snuba.QueryMemoryLimitExceeded,
                snuba.QueryTooManySimultaneous,
            ),
        ):
            message = "Query timeout. Please try again. If the problem persists try a smaller date range or fewer projects."
        elif isinstance(
            error,
            (
                snuba.UnqualifiedQueryError,
                snuba.QueryExecutionError,
                snuba.SchemaValidationError,
            ),
        ):
            # Unexpected query failure: report it upstream before responding.
            sentry_sdk.capture_exception(error)
            message = "Internal error. Your query failed to run."
        raise ParseError(detail=message)
def get(self, request, id: str, group: Group):
    """
    Retrieve information about a particular grouping level, including a
    list of issues it would create.

    ```
    GET /api/0/issues/<group_id>/grouping/levels/<level_id>/new-issues/

    [
        {"hash": "...", "latestEvent": ..., "eventCount": 132},
        ...
    ]
    ```

    Available level IDs can be fetched from `GroupingLevelsEndpoint`.

    Each row/array item corresponds to one *new issue* that selecting this
    level would create in place of the *affected issues*. The array items
    are not groups, but groups that will be created, therefore a lot of
    information normally available for groups is missing.

    - `latestEvent`: a sample event in the same format returned by the
      event details endpoint(s).

    - `hash`: The grouping hash, probably insignificant to the user but can
      be shown for diagnostic purposes.

    - `eventCount`: How many events this issue would contain. Note that
      like with any other event count, this number can change all the time
      because events keep coming in.

    The "would-be issues" are returned in-order such that the most recently
    seen "issue" is at the top, i.e. it is sorted in descending order of
    `latestEvent.dateCreated`.

    The *affected issue* (=to-be-deleted issue) is often just the current
    one, however if the previewed grouping level is reduced, this endpoint
    can return a list of entries which together have more events than the
    current issue (meaning issues will be merged together).

    In the future there will be an endpoint that allows you to fetch the
    list of affected issues. For now the UI should simply show a warning if
    the level is decreased (and possibly only if the summed up events of
    the new issues are more than what the current issue has).
    """
    check_feature(group.project.organization, request)

    # The level id arrives as a URL string; _query_snuba takes it numeric.
    parsed_id = int(id)

    def data_fn(offset=None, limit=None):
        # One page of would-be issues; invoked by GenericOffsetPaginator.
        return _query_snuba(group, parsed_id, offset=offset, limit=limit)

    def on_results(results):
        return _process_snuba_results(results, group, parsed_id, request.user)

    return self.paginate(
        request=request,
        on_results=on_results,
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request, organization):
    """Return paginated trend results (percentage change between the first
    and second half of the requested time range) for the organization.

    The response pairs the trending events with their time-series stats so
    the UI can render both the table and the sparkline charts.
    """
    if not self.has_feature(organization, request):
        return Response(status=404)

    with sentry_sdk.start_span(op="discover.endpoint", description="filter_params") as span:
        span.set_tag("organization", organization)

        try:
            params = self.get_filter_params(request, organization)
        except NoProjects:
            # No projects selected/visible: an empty result, not an error.
            return Response([])
        params = self.quantize_date_params(request, params)

        # Cross-project queries require the global-views feature.
        has_global_views = features.has(
            "organizations:global-views", organization, actor=request.user)
        if not has_global_views and len(params.get("project_id", [])) > 1:
            raise ParseError(
                detail="You cannot view events from multiple projects.")

        # Split the queried range in half; trends compare the aggregate in
        # [start, middle) against [middle, end).
        middle = params["start"] + timedelta(
            seconds=(params["end"] - params["start"]).total_seconds() * 0.5)
        start, middle, end = (
            datetime.strftime(params["start"], DateArg.date_format),
            datetime.strftime(middle, DateArg.date_format),
            datetime.strftime(params["end"], DateArg.date_format),
        )

        # The aggregate being trended, e.g. "p50()"; must be one of the
        # pre-declared trend columns.
        trend_function = request.GET.get("trendFunction", "p50()")
        function, columns = parse_function(trend_function)
        trend_column = self.trend_columns.get(function)
        if trend_column is None:
            raise ParseError(detail=u"{} is not a supported trend function".
                             format(trend_function))

        count_column = self.trend_columns.get("count_range")
        percentage_column = self.trend_columns["percentage"]

    # Copy so later list concatenation can't mutate the QueryDict's list.
    selected_columns = request.GET.getlist("field")[:]
    query = request.GET.get("query")
    orderby = self.get_orderby(request)

    def data_fn(offset, limit):
        # One page of trend rows: the user's columns plus the synthesized
        # range aggregates (index "1" = first half, "2" = second half),
        # their percentage change, absolute delta, event-count ratio and
        # correlation score.
        return discover.query(
            selected_columns=selected_columns + [
                trend_column["format"].format(
                    *columns, start=start, end=middle, index="1"),
                trend_column["format"].format(
                    *columns, start=middle, end=end, index="2"),
                percentage_column["format"].format(
                    alias=trend_column["alias"]),
                "minus({alias}2,{alias}1)".format(
                    alias=trend_column["alias"]),
                count_column["format"].format(
                    start=start, end=middle, index="1"),
                count_column["format"].format(
                    start=middle, end=end, index="2"),
                percentage_column["format"].format(
                    alias=count_column["alias"]),
                "absolute_correlation()",
            ],
            query=query,
            params=params,
            orderby=orderby,
            offset=offset,
            limit=limit,
            referrer="api.trends.get-percentage-change",
            auto_fields=True,
            use_aggregate_conditions=True,
        )

    def on_results(events_results):
        # Attach time-series stats for (at most 5 of) the returned events.
        def get_event_stats(query_columns, query, params, rollup,
                            reference_event):
            return discover.top_events_timeseries(
                query_columns,
                selected_columns,
                query,
                params,
                orderby,
                rollup,
                min(5, len(events_results["data"])),
                organization,
                top_events=events_results,
                referrer="api.trends.get-event-stats",
            )

        # Skip the stats query entirely when the page has no rows.
        stats_results = (self.get_event_stats_data(
            request,
            organization,
            get_event_stats,
            top_events=True,
            query_column=trend_function,
        ) if len(events_results["data"]) > 0 else {})

        return {
            "events": self.handle_results_with_meta(
                request, organization, params["project_id"],
                events_results),
            "stats": stats_results,
        }

    with self.handle_query_errors():
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=on_results,
            default_per_page=5,
            max_per_page=5,
        )
def get(self, request, organization): try: params, aggregate_column, filter_query = self.setup( request, organization) except NoProjects: return Response([]) all_tag_keys = None tag_key = None if self.has_tag_page_feature(organization, request): all_tag_keys = request.GET.get("allTagKeys") tag_key = request.GET.get("tagKey") def data_fn(offset, limit): with sentry_sdk.start_span(op="discover.endpoint", description="discover_query"): referrer = "api.organization-events-facets-performance.top-tags" tag_data = query_tag_data( filter_query=filter_query, aggregate_column=aggregate_column, referrer=referrer, params=params, ) if not tag_data: return {"data": []} results = query_facet_performance( tag_data=tag_data, filter_query=filter_query, aggregate_column=aggregate_column, referrer=referrer, orderby=self.get_orderby(request), limit=limit, offset=offset, params=params, all_tag_keys=all_tag_keys, tag_key=tag_key, ) if not results: return {"data": []} for row in results["data"]: row["tags_value"] = tagstore.get_tag_value_label( row["tags_key"], row["tags_value"]) row["tags_key"] = tagstore.get_standardized_key( row["tags_key"]) return results with self.handle_query_errors(): return self.paginate( request=request, paginator=GenericOffsetPaginator(data_fn=data_fn), on_results=lambda results: self.handle_results_with_meta( request, organization, params["project_id"], results), default_per_page=5, max_per_page=20, )
def get(self, request, organization): """ List saved queries for organization """ if not self.has_feature(organization, request): return self.respond(status=404) queryset = (DiscoverSavedQuery.objects.filter( organization=organization).select_related( "created_by").prefetch_related("projects").extra( select={"lower_name": "lower(name)"})) query = request.query_params.get("query") if query: tokens = tokenize_query(query) for key, value in six.iteritems(tokens): if key == "name" or key == "query": value = " ".join(value) queryset = queryset.filter(name__icontains=value) elif key == "version": value = " ".join(value) queryset = queryset.filter(version=value) else: queryset = queryset.none() sort_by = request.query_params.get("sortBy") if sort_by in ("name", "-name"): order_by = [ "-lower_name" if sort_by.startswith("-") else "lower_name", "-date_created", ] elif sort_by in ("dateCreated", "-dateCreated"): order_by = "-date_created" if sort_by.startswith( "-") else "date_created" elif sort_by in ("dateUpdated", "-dateUpdated"): order_by = "-date_updated" if sort_by.startswith( "-") else "date_updated" elif sort_by == "myqueries": order_by = [ Case(When(created_by_id=request.user.id, then=-1), default="created_by_id"), "-date_created", ] else: order_by = "lower_name" if not isinstance(order_by, list): order_by = [order_by] queryset = queryset.order_by(*order_by) # Old discover expects all queries and uses this parameter. if request.query_params.get("all") == "1": saved_queries = list(queryset.all()) return Response(serialize(saved_queries), status=200) def data_fn(offset, limit): return list(queryset[offset:offset + limit]) return self.paginate( request=request, paginator=GenericOffsetPaginator(data_fn=data_fn), on_results=lambda x: serialize(x, request.user), default_per_page=25, )
def get(self, request, organization):
    """Return paginated trend results for the organization, comparing the
    chosen aggregate before and after a pivot ("middle") date.

    The middle date may be supplied via the `middle` query parameter;
    otherwise the midpoint of the queried time range is used.
    """
    if not self.has_feature(organization, request):
        return Response(status=404)

    try:
        params = self.get_snuba_params(request, organization)
    except NoProjects:
        # No projects selected/visible: an empty result, not an error.
        return Response([])

    with sentry_sdk.start_span(op="discover.endpoint", description="trend_dates"):
        middle_date = request.GET.get("middle")
        if middle_date:
            try:
                middle = parse_datetime_string(middle_date)
            except InvalidQuery:
                raise ParseError(detail="{} is not a valid date format".format(middle_date))
            # A custom pivot must fall strictly inside the queried range.
            if middle <= params["start"] or middle >= params["end"]:
                raise ParseError(
                    detail="The middle date should be within the duration of the query"
                )
        else:
            # Default pivot: the midpoint of the queried range.
            middle = params["start"] + timedelta(
                seconds=(params["end"] - params["start"]).total_seconds() * 0.5
            )
        # From here on the boundaries are used as formatted date strings
        # inside column expressions.
        start, middle, end = (
            datetime.strftime(params["start"], DateArg.date_format),
            datetime.strftime(middle, DateArg.date_format),
            datetime.strftime(params["end"], DateArg.date_format),
        )

    # Regression (got slower) vs improvement (got faster).
    trend_type = request.GET.get("trendType", REGRESSION)
    if trend_type not in TREND_TYPES:
        raise ParseError(detail=u"{} is not a supported trend type".format(trend_type))

    params["aliases"] = self.get_function_aliases(trend_type)

    # The aggregate being trended, e.g. "p50()", expanded into the
    # synthesized before/after comparison columns.
    trend_function = request.GET.get("trendFunction", "p50()")
    function, columns = parse_function(trend_function)
    trend_columns = self.get_trend_columns(function, columns, start, middle, end)

    # Copy so the later list concatenation can't mutate the QueryDict's
    # underlying list.
    selected_columns = request.GET.getlist("field")[:]
    orderby = self.get_orderby(request)
    query = request.GET.get("query")

    def data_fn(offset, limit):
        # One page of trend rows: the user's columns plus the trend
        # comparison columns.
        return discover.query(
            selected_columns=selected_columns + trend_columns,
            query=query,
            params=params,
            orderby=orderby,
            offset=offset,
            limit=limit,
            referrer="api.trends.get-percentage-change",
            auto_fields=True,
            auto_aggregations=True,
            use_aggregate_conditions=True,
        )

    with self.handle_query_errors():
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=self.build_result_handler(
                request,
                organization,
                params,
                trend_function,
                selected_columns,
                orderby,
                query
            ),
            default_per_page=5,
            max_per_page=5,
        )