def serialize(self, obj, attrs, user):
    """Serialize an incident activity entry, attaching serialized event
    stats when an event-stats snapshot exists on the activity."""
    incident = obj.incident
    event_stats = None
    snapshot = obj.event_stats_snapshot
    if snapshot:
        ts_serializer = SnubaTSResultSerializer(incident.organization, None, user)
        event_stats = ts_serializer.serialize(
            SnubaTSResult(
                snapshot.snuba_values,
                snapshot.start,
                snapshot.end,
                snapshot.period,
            )
        )
    return {
        'id': six.text_type(obj.id),
        'incidentIdentifier': six.text_type(incident.identifier),
        'user': attrs['user'],
        'type': obj.type,
        'value': obj.value,
        'previousValue': obj.previous_value,
        'comment': obj.comment,
        'eventStats': event_stats,
        'dateCreated': obj.date_added,
    }
def get(self, request, organization):
    """Return a count() time series for the organization's events.

    Query params:
        interval: stats-period string (e.g. ``1h``); defaults to one hour
                  when missing or unparseable.

    Returns 200 with the serialized time series, or 400 when the snuba
    query arguments are invalid.
    """
    try:
        snuba_args = self.get_snuba_query_args(request, organization)
    except OrganizationEventsError as exc:
        # BUG FIX: Exception.message is deprecated (removed in Python 3);
        # stringify the exception the way sibling endpoints do.
        return Response({'detail': six.text_type(exc)}, status=400)

    interval = parse_stats_period(request.GET.get('interval', '1h'))
    if interval is None:
        interval = timedelta(hours=1)
    rollup = int(interval.total_seconds())

    result = raw_query(
        aggregations=[
            ('count()', '', 'count'),
        ],
        orderby='time',
        groupby=['time'],
        rollup=rollup,
        referrer='api.organization-events-stats',
        **snuba_args
    )

    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            SnubaTSResult(result, snuba_args['start'], snuba_args['end'], rollup),
        ),
        status=200,
    )
def get(self, request, organization):
    """Return a single-axis event time series for the organization.

    Falls back to the legacy v1 implementation when the ``events-v2``
    feature flag is not enabled for the requesting user. Raises
    ParseError (400) on an invalid search query.
    """
    if not features.has(
            "organizations:events-v2", organization, actor=request.user):
        return self.get_v1_results(request, organization)
    try:
        # yAxis selects the aggregate to plot; defaults to a plain count.
        column = request.GET.get("yAxis", "count()")
        # Backwards compatibility for incidents which uses the old
        # column aliases as it straddles both versions of events/discover.
        # We will need these aliases until discover2 flags are enabled for all
        # users.
        if column == "user_count":
            column = "count_unique(user)"
        elif column == "event_count":
            column = "count()"
        params = self.get_filter_params(request, organization)
        result = discover.timeseries_query(
            selected_columns=[column],
            query=request.GET.get("query"),
            params=params,
            rollup=self.get_rollup(request),
            reference_event=self.reference_event(request, organization),
            referrer="api.organization-event-stats",
        )
    except InvalidSearchQuery as err:
        raise ParseError(detail=six.text_type(err))
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(serializer.serialize(result), status=200)
def get_v1_results(self, request, organization):
    """Legacy (v1) events-stats implementation backed by
    ``snuba.transform_aliases_and_query``.

    Raises ParseError (400) on invalid query input; returns an empty
    data payload when the user has no accessible projects.
    """
    try:
        snuba_args = self.get_snuba_query_args_legacy(
            request, organization)
    except (OrganizationEventsError, InvalidSearchQuery) as exc:
        raise ParseError(detail=six.text_type(exc))
    except NoProjects:
        # No accessible projects: an empty series, not an error.
        return Response({"data": []})
    rollup = self.get_rollup(request)
    # Resolve the requested yAxis field into snuba aggregations.
    snuba_args = self.get_field(request, snuba_args)
    result = snuba.transform_aliases_and_query(
        aggregations=snuba_args.get("aggregations"),
        conditions=snuba_args.get("conditions"),
        filter_keys=snuba_args.get("filter_keys"),
        start=snuba_args.get("start"),
        end=snuba_args.get("end"),
        orderby="time",
        groupby=["time"],
        rollup=rollup,
        referrer="api.organization-events-stats",
        limit=10000,
    )
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            snuba.SnubaTSResult(result, snuba_args["start"], snuba_args["end"], rollup)),
        status=200,
    )
def get(self, request, organization):
    """Return a count() time series for the organization's events.

    Returns 200 with the serialized series, 400 on invalid query args,
    and an empty data payload when the user has no projects.
    """
    try:
        snuba_args = self.get_snuba_query_args(request, organization)
    except OrganizationEventsError as exc:
        # BUG FIX: Exception.message is deprecated (removed in Python 3);
        # stringify the exception instead, matching sibling endpoints.
        return Response({'detail': six.text_type(exc)}, status=400)
    except NoProjects:
        return Response({'data': []})

    interval = parse_stats_period(request.GET.get('interval', '1h'))
    if interval is None:
        interval = timedelta(hours=1)
    rollup = int(interval.total_seconds())

    result = raw_query(
        aggregations=[
            ('count()', '', 'count'),
        ],
        orderby='time',
        groupby=['time'],
        rollup=rollup,
        referrer='api.organization-events-stats',
        limit=10000,
        **snuba_args
    )

    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            SnubaTSResult(result, snuba_args['start'], snuba_args['end'], rollup),
        ),
        status=200,
    )
def serialize(self, obj, attrs, user):
    """Serialize incident activity, including serialized event stats
    whenever an event-stats snapshot is present on the activity."""
    incident = obj.incident
    snapshot = obj.event_stats_snapshot
    if snapshot:
        ts_serializer = SnubaTSResultSerializer(incident.organization, None, user)
        event_stats = ts_serializer.serialize(
            SnubaTSResult(
                snapshot.snuba_values, snapshot.start, snapshot.end, snapshot.period
            )
        )
    else:
        event_stats = None
    return {
        "id": six.text_type(obj.id),
        "incidentIdentifier": six.text_type(incident.identifier),
        "user": attrs["user"],
        "type": obj.type,
        "value": obj.value,
        "previousValue": obj.previous_value,
        "comment": obj.comment,
        "eventStats": event_stats,
        "dateCreated": obj.date_added,
    }
def get_event_stats_data(self, request, organization, get_event_stats, top_events=False):
    """Shared driver for events-stats style endpoints.

    Parses yAxis/query/interval request parameters, invokes the supplied
    ``get_event_stats`` callable, and serializes the result.

    :param get_event_stats: callable(query_columns, query, params, rollup,
        reference_event); for top-events requests it is expected to return
        a mapping of key -> result, otherwise a single result.
    :param top_events: when truthy, serialize each entry of the result
        mapping separately, keyed as returned.
    :raises ParseError: (400) for invalid search queries or queries
        outside retention.
    """
    try:
        columns = request.GET.getlist("yAxis", ["count()"])
        query = request.GET.get("query")
        params = self.get_filter_params(request, organization)
        rollup = get_rollup_from_request(
            request,
            params,
            "1h",
            InvalidSearchQuery(
                "Your interval and date range would create too many results. "
                "Use a larger interval, or a smaller date range."),
        )
        # Backwards compatibility for incidents which uses the old
        # column aliases as it straddles both versions of events/discover.
        # We will need these aliases until discover2 flags are enabled for all
        # users.
        column_map = {
            "user_count": "count_unique(user)",
            "event_count": "count()",
            "rpm()": "rpm(%d)" % rollup,
            "rps()": "rps(%d)" % rollup,
        }
        query_columns = [
            column_map.get(column, column) for column in columns
        ]
        reference_event = self.reference_event(request, organization,
                                               params.get("start"),
                                               params.get("end"))
        result = get_event_stats(query_columns, query, params, rollup,
                                 reference_event)
    except (discover.InvalidSearchQuery, snuba.QueryOutsideRetentionError) as error:
        raise ParseError(detail=six.text_type(error))
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    if top_events:
        results = {}
        for key, event_result in six.iteritems(result):
            if len(query_columns) > 1:
                results[key] = self.serialize_multiple_axis(
                    serializer, event_result, columns, query_columns)
            else:
                # Need to get function alias if count is a field, but not the axis
                results[key] = serializer.serialize(
                    event_result, get_function_alias(query_columns[0]))
        return results
    elif len(query_columns) > 1:
        return self.serialize_multiple_axis(serializer, result, columns,
                                            query_columns)
    else:
        return serializer.serialize(result)
def get(self, request, organization):
    """
    Returns a time series view over statsPeriod over interval.
    """
    try:
        lookup = SnubaLookup.get(request.GET['tag'])
    except KeyError:
        # Unknown tag -> 404.
        raise ResourceDoesNotExist
    stats_period = parse_stats_period(request.GET.get(
        'statsPeriod', '24h'))
    # Reject periods that are unparseable or outside the supported window.
    if stats_period is None or stats_period < self.MIN_STATS_PERIOD or stats_period >= self.MAX_STATS_PERIOD:
        return Response({'detail': 'Invalid statsPeriod'}, status=400)
    interval = parse_stats_period(request.GET.get('interval', '1h'))
    if interval is None:
        # Unparseable interval falls back to hourly buckets.
        interval = timedelta(hours=1)
    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)
    if not project_ids:
        # Nothing to query; short-circuit with an empty payload.
        return self.empty()
    environment = self.get_environment(request, organization)
    query_condition = self.get_query_condition(request, organization)
    # Window is anchored at "now" and extends back by statsPeriod.
    end = timezone.now()
    start = end - stats_period
    rollup = int(interval.total_seconds())
    data = query(
        end=end,
        start=start,
        rollup=rollup,
        selected_columns=lookup.selected_columns,
        aggregations=[
            ('count()', '', 'count'),
        ],
        filter_keys={
            'project_id': project_ids,
        },
        conditions=lookup.conditions + query_condition + environment,
        groupby=['time'] + lookup.columns,
        orderby='time',
    )
    serializer = SnubaTSResultSerializer(organization, lookup, request.user)
    return Response(
        serializer.serialize(SnubaTSResult(data, start, end, rollup), ),
        status=200,
    )
def get(self, request, organization):
    """
    Returns a time series view over statsPeriod over interval.
    """
    try:
        lookup = SnubaLookup.get(request.GET['tag'])
    except KeyError:
        # Unknown tag -> 404.
        raise ResourceDoesNotExist
    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        # BUG FIX: Exception.message is deprecated (removed in Python 3);
        # stringify the exception instead.
        return Response({'detail': six.text_type(exc)}, status=400)
    interval = parse_stats_period(request.GET.get('interval', '1h'))
    if interval is None:
        # Unparseable interval falls back to hourly buckets.
        interval = timedelta(hours=1)
    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)
    if not project_ids:
        # Nothing to query; short-circuit with an empty payload.
        return self.empty()
    environment = self.get_environment(request, organization)
    query_condition = self.get_query_condition(request, organization)
    rollup = int(interval.total_seconds())
    data = query(
        end=end,
        start=start,
        rollup=rollup,
        selected_columns=lookup.selected_columns,
        aggregations=[
            ('count()', '', 'count'),
        ],
        filter_keys={
            'project_id': project_ids,
        },
        conditions=lookup.conditions + query_condition + environment,
        groupby=['time'] + lookup.columns,
        orderby='time',
    )
    serializer = SnubaTSResultSerializer(organization, lookup, request.user)
    return Response(
        serializer.serialize(
            SnubaTSResult(data, start, end, rollup),
        ),
        status=200,
    )
def get(self, request, organization):
    """
    Returns a time series view over statsPeriod over interval.
    """
    try:
        lookup = SnubaLookup.get(request.GET['tag'])
    except KeyError:
        # Unknown tag -> 404.
        raise ResourceDoesNotExist
    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        # BUG FIX: Exception.message is deprecated (removed in Python 3);
        # stringify the exception instead.
        return Response({'detail': six.text_type(exc)}, status=400)
    interval = parse_stats_period(request.GET.get('interval', '1h'))
    if interval is None:
        # Unparseable interval falls back to hourly buckets.
        interval = timedelta(hours=1)
    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)
    if not project_ids:
        # Nothing to query; short-circuit with an empty payload.
        return self.empty()
    environment_conditions = self.get_environments(request, organization)
    query_condition = self.get_query_condition(request, organization)
    rollup = int(interval.total_seconds())
    data = query(
        end=end,
        start=start,
        rollup=rollup,
        selected_columns=lookup.selected_columns,
        aggregations=[
            ('count()', '', 'count'),
        ],
        filter_keys={'project_id': project_ids},
        conditions=lookup.conditions + query_condition + environment_conditions,
        groupby=['time'] + lookup.columns,
        orderby='time',
    )
    serializer = SnubaTSResultSerializer(organization, lookup, request.user)
    return Response(
        serializer.serialize(SnubaTSResult(data, start, end, rollup), ),
        status=200,
    )
def get(self, request, organization):
    """Return one or more aggregate time series for the organization.

    Supports multiple ``yAxis`` values; when more than one is requested,
    the response is a dict keyed by the originally requested column names.
    Falls back to the legacy v1 endpoint when ``discover-basic`` is off.
    """
    if not features.has("organizations:discover-basic", organization, actor=request.user):
        return self.get_v1_results(request, organization)
    try:
        columns = request.GET.getlist("yAxis", ["count()"])
        params = self.get_filter_params(request, organization)
        rollup = self.get_rollup(request, params)
        # Backwards compatibility for incidents which uses the old
        # column aliases as it straddles both versions of events/discover.
        # We will need these aliases until discover2 flags are enabled for all
        # users.
        column_map = {
            "user_count": "count_unique(user)",
            "event_count": "count()",
            "rpm()": "rpm(%d)" % rollup,
            "rps()": "rps(%d)" % rollup,
        }
        query_columns = [
            column_map.get(column, column) for column in columns
        ]
        result = discover.timeseries_query(
            selected_columns=query_columns,
            query=request.GET.get("query"),
            params=params,
            rollup=rollup,
            reference_event=self.reference_event(request, organization,
                                                 params.get("start"),
                                                 params.get("end")),
            referrer="api.organization-event-stats",
        )
    except InvalidSearchQuery as err:
        raise ParseError(detail=six.text_type(err))
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    if len(columns) > 1:
        # Return with requested yAxis as the key
        data = {
            column: serializer.serialize(
                result,
                get_aggregate_alias(
                    AGGREGATE_PATTERN.search(query_column)))
            for column, query_column in zip(columns, query_columns)
        }
    else:
        data = serializer.serialize(result)
    return Response(data, status=200)
def get(self, request, organization, incident):
    """
    Fetch total event counts, unique user counts and trend graph for an Incident.
    ``````````````````
    :auth: required
    """
    stats = bulk_get_incident_stats([incident], windowed_stats=True)[0]
    ts_serializer = SnubaTSResultSerializer(organization, None, request.user)
    payload = {
        "eventStats": ts_serializer.serialize(stats["event_stats"]),
        "totalEvents": stats["total_events"],
        "uniqueUsers": stats["unique_users"],
    }
    return Response(payload)
def serialize(self, obj, attrs, user):
    """Serialize an incident, including its event time series and
    the precomputed aggregate counts."""
    ts_serializer = SnubaTSResultSerializer(obj.organization, None, user)
    aggregates = attrs['aggregates']
    data = {
        'id': six.text_type(obj.id),
        'identifier': six.text_type(obj.identifier),
        'organizationId': six.text_type(obj.organization_id),
        'projects': attrs['projects'],
        'status': obj.status,
        'title': obj.title,
        'query': obj.query,
        'dateStarted': obj.date_started,
        'dateDetected': obj.date_detected,
        'dateAdded': obj.date_added,
        'dateClosed': obj.date_closed,
        'totalEvents': aggregates['count'],
        'uniqueUsers': aggregates['unique_users'],
    }
    data['eventStats'] = ts_serializer.serialize(attrs['event_stats'])
    return data
def get(self, request, organization):
    """Return an event time series for the requested yAxis aggregate,
    delegating to the legacy v1 endpoint when events-v2 is disabled."""
    has_v2 = features.has(
        "organizations:events-v2", organization, actor=request.user)
    if not has_v2:
        return self.get_v1_results(request, organization)
    try:
        params = self.get_filter_params(request, organization)
        y_axis = request.GET.get("yAxis", "count()")
        result = discover.timeseries_query(
            selected_columns=[y_axis],
            query=request.GET.get("query"),
            params=params,
            rollup=self.get_rollup(request),
            reference_event=self.reference_event(request, organization),
            referrer="api.organization-event-stats",
        )
    except InvalidSearchQuery as err:
        raise ParseError(detail=six.text_type(err))
    ts_serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(ts_serializer.serialize(result), status=200)
def serialize(self, obj, attrs, user):
    """Serialize an incident, including its event time series and the
    precomputed aggregate counts.

    CONSISTENCY FIX: ``identifier`` is now stringified with
    ``six.text_type`` to match the sibling incident serializers in this
    file, which emit every id field as a string; previously it leaked
    through as a raw value while ``id``/``organizationId`` were strings.
    """
    serializer = SnubaTSResultSerializer(obj.organization, None, user)
    aggregates = attrs['aggregates']
    return {
        'id': six.text_type(obj.id),
        'identifier': six.text_type(obj.identifier),
        'organizationId': six.text_type(obj.organization_id),
        'projects': attrs['projects'],
        'status': obj.status,
        'title': obj.title,
        'query': obj.query,
        'dateStarted': obj.date_started,
        'dateDetected': obj.date_detected,
        'dateAdded': obj.date_added,
        'dateClosed': obj.date_closed,
        'eventStats': serializer.serialize(attrs['event_stats']),
        'totalEvents': aggregates['count'],
        'uniqueUsers': aggregates['unique_users'],
    }
def get(self, request, organization):
    """Return an event or user count time series for the organization.

    ``yAxis`` may be ``event_count`` (default) or ``user_count``; any
    other value yields a 400. Returns an empty data payload when the
    user has no projects.
    """
    try:
        snuba_args = self.get_snuba_query_args(request, organization)
    except OrganizationEventsError as exc:
        # BUG FIX: Exception.message is deprecated (removed in Python 3);
        # stringify the exception instead, matching sibling endpoints.
        return Response({'detail': six.text_type(exc)}, status=400)
    except NoProjects:
        return Response({'data': []})

    interval = parse_stats_period(request.GET.get('interval', '1h'))
    if interval is None:
        interval = timedelta(hours=1)
    rollup = int(interval.total_seconds())

    y_axis = request.GET.get('yAxis', None)
    if not y_axis or y_axis == 'event_count':
        aggregations = [('count()', '', 'count')]
    elif y_axis == 'user_count':
        aggregations = [
            ('uniq', 'tags[sentry:user]', 'count'),
        ]
        snuba_args['filter_keys']['tags_key'] = ['sentry:user']
    else:
        return Response(
            {'detail': 'Param yAxis value %s not recognized.' % y_axis},
            status=400)

    result = raw_query(
        aggregations=aggregations,
        orderby='time',
        groupby=['time'],
        rollup=rollup,
        referrer='api.organization-events-stats',
        limit=10000,
        **snuba_args
    )

    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            SnubaTSResult(result, snuba_args['start'], snuba_args['end'], rollup),
        ),
        status=200,
    )
def serialize(self, obj, attrs, user):
    """Serialize an incident along with its event stats and the
    total-event / unique-user aggregates."""
    ts_serializer = SnubaTSResultSerializer(obj.organization, None, user)
    event_stats = ts_serializer.serialize(attrs["event_stats"])
    return {
        "id": six.text_type(obj.id),
        "identifier": six.text_type(obj.identifier),
        "organizationId": six.text_type(obj.organization_id),
        "projects": attrs["projects"],
        "status": obj.status,
        "type": obj.type,
        "title": obj.title,
        "query": obj.query,
        "dateStarted": obj.date_started,
        "dateDetected": obj.date_detected,
        "dateAdded": obj.date_added,
        "dateClosed": obj.date_closed,
        "eventStats": event_stats,
        "totalEvents": attrs["total_events"],
        "uniqueUsers": attrs["unique_users"],
    }
def get(self, request, organization):
    """Events time-series endpoint straddling events-v1 and events-v2.

    Builds snuba query args via the v2 path when the feature flag is on,
    otherwise via the legacy helper, then runs a grouped-by-time query.
    """
    try:
        if features.has("organizations:events-v2", organization, actor=request.user):
            params = self.get_filter_params(request, organization)
            snuba_args = self.get_snuba_query_args(request, organization, params)
        else:
            snuba_args = self.get_snuba_query_args_legacy(
                request, organization)
    except (OrganizationEventsError, InvalidSearchQuery) as exc:
        raise ParseError(detail=six.text_type(exc))
    except NoProjects:
        # No accessible projects: an empty series, not an error.
        return Response({"data": []})
    interval = parse_stats_period(request.GET.get("interval", "1h"))
    if interval is None:
        # Unparseable interval falls back to hourly buckets.
        interval = timedelta(hours=1)
    rollup = int(interval.total_seconds())
    # Resolve the requested yAxis field into snuba aggregations.
    snuba_args = self.get_field(request, snuba_args)
    result = snuba.transform_aliases_and_query(
        skip_conditions=True,
        aggregations=snuba_args.get("aggregations"),
        conditions=snuba_args.get("conditions"),
        filter_keys=snuba_args.get("filter_keys"),
        start=snuba_args.get("start"),
        end=snuba_args.get("end"),
        orderby="time",
        groupby=["time"],
        rollup=rollup,
        referrer="api.organization-events-stats",
        limit=10000,
    )
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            snuba.SnubaTSResult(result, snuba_args["start"], snuba_args["end"], rollup)),
        status=200,
    )
def get_event_stats_data(self, request, organization, get_event_stats):
    """Shared driver for events-stats endpoints.

    Parses yAxis/query/interval parameters, invokes ``get_event_stats``
    and serializes the resulting time series — one entry per originally
    requested yAxis column when multiple are requested.

    :param get_event_stats: callable(query_columns, query, params, rollup,
        reference_event) returning the result to serialize.
    :raises ParseError: (400) for invalid search queries.
    """
    try:
        columns = request.GET.getlist("yAxis", ["count()"])
        query = request.GET.get("query")
        params = self.get_filter_params(request, organization)
        rollup = get_rollup_from_request(
            request,
            params,
            "1h",
            InvalidSearchQuery(
                "Your interval and date range would create too many results. "
                "Use a larger interval, or a smaller date range."
            ),
        )
        # Backwards compatibility for incidents which uses the old
        # column aliases as it straddles both versions of events/discover.
        # We will need these aliases until discover2 flags are enabled for all
        # users.
        column_map = {
            "user_count": "count_unique(user)",
            "event_count": "count()",
            "rpm()": "rpm(%d)" % rollup,
            "rps()": "rps(%d)" % rollup,
        }
        query_columns = [column_map.get(column, column) for column in columns]
        reference_event = self.reference_event(
            request, organization, params.get("start"), params.get("end")
        )
        result = get_event_stats(query_columns, query, params, rollup, reference_event)
    except InvalidSearchQuery as err:
        raise ParseError(detail=six.text_type(err))
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    if len(columns) > 1:
        # Return with requested yAxis as the key
        return {
            column: serializer.serialize(result, get_function_alias(query_column))
            for column, query_column in zip(columns, query_columns)
        }
    else:
        return serializer.serialize(result)
def get(self, request, organization):
    """Legacy events-stats endpoint supporting the ``event_count`` and
    ``user_count`` yAxis aliases.

    Returns 400 for invalid query args or an unrecognized yAxis, and an
    empty data payload when the user has no projects.
    """
    try:
        snuba_args = self.get_snuba_query_args_legacy(request, organization)
    except OrganizationEventsError as exc:
        # BUG FIX: Exception.message is deprecated (removed in Python 3);
        # stringify the exception instead, matching sibling endpoints.
        return Response({"detail": six.text_type(exc)}, status=400)
    except NoProjects:
        return Response({"data": []})

    interval = parse_stats_period(request.GET.get("interval", "1h"))
    if interval is None:
        interval = timedelta(hours=1)
    rollup = int(interval.total_seconds())

    y_axis = request.GET.get("yAxis", None)
    if not y_axis or y_axis == "event_count":
        aggregations = [("count()", "", "count")]
    elif y_axis == "user_count":
        aggregations = [("uniq", "tags[sentry:user]", "count")]
        snuba_args["filter_keys"]["tags_key"] = ["sentry:user"]
    else:
        return Response({"detail": "Param yAxis value %s not recognized." % y_axis}, status=400)

    result = raw_query(
        aggregations=aggregations,
        orderby="time",
        groupby=["time"],
        rollup=rollup,
        referrer="api.organization-events-stats",
        limit=10000,
        **snuba_args
    )
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            SnubaTSResult(result, snuba_args["start"], snuba_args["end"], rollup)
        ),
        status=200,
    )
def get_v1_results(self, request, organization):
    """Legacy (v1) events-stats implementation.

    Parses legacy snuba query args, resolves the yAxis field and rollup,
    runs a grouped-by-time query via ``transform_aliases_and_query``,
    and serializes the resulting time series.
    """
    try:
        snuba_args = self.get_snuba_query_args_legacy(
            request, organization)
    except InvalidSearchQuery as exc:
        raise ParseError(detail=str(exc))
    except NoProjects:
        # No accessible projects: an empty series, not an error.
        return Response({"data": []})
    # Resolve the requested yAxis field into snuba aggregations.
    snuba_args = self.get_field(request, snuba_args)
    rollup = get_rollup_from_request(
        request,
        snuba_args,
        default_interval=None,
        error=InvalidSearchQuery(
            "Your interval and date range would create too many results. "
            "Use a larger interval, or a smaller date range."),
    )
    result = transform_aliases_and_query(
        aggregations=snuba_args.get("aggregations"),
        conditions=snuba_args.get("conditions"),
        filter_keys=snuba_args.get("filter_keys"),
        start=snuba_args.get("start"),
        end=snuba_args.get("end"),
        orderby="time",
        groupby=["time"],
        rollup=rollup,
        referrer="api.organization-events-stats",
        limit=10000,
    )
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            snuba.SnubaTSResult(result, snuba_args["start"], snuba_args["end"], rollup)),
        status=200,
    )
def get_event_stats_data(
    self,
    request: Request,
    organization: Organization,
    get_event_stats: Callable[
        [Sequence[str], str, Dict[str, str], int, bool, Optional[timedelta]], SnubaTSResult
    ],
    top_events: int = 0,
    query_column: str = "count()",
    params: Optional[Dict[str, Any]] = None,
    query: Optional[str] = None,
    allow_partial_buckets: bool = False,
    zerofill_results: bool = True,
    comparison_delta: Optional[timedelta] = None,
) -> Dict[str, Any]:
    """Shared driver for events-stats style endpoints.

    Parses yAxis/query/interval parameters, invokes ``get_event_stats``
    and serializes the result.

    :param get_event_stats: callable(query_columns, query, params, rollup,
        zerofill_results, comparison_delta) returning a SnubaTSResult (or,
        for top-events requests, a mapping of key -> result).
    :param top_events: number of top events requested; > 0 switches to the
        per-key serialization path when the result is a mapping.
    :param comparison_delta: offset for a comparison series; validated
        against the organization's retention window.
    :raises ValidationError: when the comparison period falls outside
        retention.
    """
    with self.handle_query_errors():
        with sentry_sdk.start_span(
            op="discover.endpoint", description="base.stats_query_creation"
        ):
            columns = request.GET.getlist("yAxis", [query_column])
            if query is None:
                query = request.GET.get("query")
            if params is None:
                try:
                    # events-stats is still used by events v1 which doesn't require global views
                    params = self.get_snuba_params(
                        request, organization, check_global_views=False
                    )
                except NoProjects:
                    return {"data": []}
            try:
                rollup = get_rollup_from_request(
                    request,
                    params,
                    default_interval=None,
                    error=InvalidSearchQuery(),
                    top_events=top_events,
                )
            # If the user sends an invalid interval, use the default instead
            except InvalidSearchQuery:
                sentry_sdk.set_tag("user.invalid_interval", request.GET.get("interval"))
                date_range = params["end"] - params["start"]
                stats_period = parse_stats_period(get_interval_from_range(date_range, False))
                rollup = int(stats_period.total_seconds()) if stats_period is not None else 3600
            if comparison_delta is not None:
                retention = quotas.get_event_retention(organization=organization)
                comparison_start = params["start"] - comparison_delta
                if retention and comparison_start < timezone.now() - timedelta(days=retention):
                    raise ValidationError("Comparison period is outside your retention window")
            # Backwards compatibility for incidents which uses the old
            # column aliases as it straddles both versions of events/discover.
            # We will need these aliases until discover2 flags are enabled for all
            # users.
            # We need these rollup columns to generate correct events-stats results
            column_map = {
                "user_count": "count_unique(user)",
                "event_count": "count()",
                "epm()": "epm(%d)" % rollup,
                "eps()": "eps(%d)" % rollup,
                "tpm()": "tpm(%d)" % rollup,
                "tps()": "tps(%d)" % rollup,
            }
            query_columns = [column_map.get(column, column) for column in columns]
        with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_query"):
            result = get_event_stats(
                query_columns, query, params, rollup, zerofill_results, comparison_delta
            )
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_serialization"):
        # When the request is for top_events, result can be a SnubaTSResult in the event that
        # there were no top events found. In this case, result contains a zerofilled series
        # that acts as a placeholder.
        is_multiple_axis = len(query_columns) > 1
        if top_events > 0 and isinstance(result, dict):
            results = {}
            for key, event_result in result.items():
                if is_multiple_axis:
                    results[key] = self.serialize_multiple_axis(
                        serializer,
                        event_result,
                        columns,
                        query_columns,
                        allow_partial_buckets,
                        zerofill_results=zerofill_results,
                    )
                else:
                    # Need to get function alias if count is a field, but not the axis
                    results[key] = serializer.serialize(
                        event_result,
                        column=resolve_axis_column(query_columns[0]),
                        allow_partial_buckets=allow_partial_buckets,
                        zerofill_results=zerofill_results,
                    )
            serialized_result = results
        elif is_multiple_axis:
            serialized_result = self.serialize_multiple_axis(
                serializer,
                result,
                columns,
                query_columns,
                allow_partial_buckets,
                zerofill_results=zerofill_results,
            )
        else:
            extra_columns = None
            if comparison_delta:
                extra_columns = ["comparisonCount"]
            serialized_result = serializer.serialize(
                result,
                resolve_axis_column(query_columns[0]),
                allow_partial_buckets=allow_partial_buckets,
                zerofill_results=zerofill_results,
                extra_columns=extra_columns,
            )
        return serialized_result
def get_event_stats_data(
    self,
    request,
    organization,
    get_event_stats,
    top_events=False,
    query_column="count()",
    params=None,
    query=None,
):
    """Shared driver for events-stats style endpoints.

    :param get_event_stats: callable(query_columns, query, params, rollup)
        returning the result to serialize; for top-events requests it is
        expected to return a mapping of key -> result.
    :param top_events: when truthy, serialize each entry of the result
        mapping separately, keyed as returned.
    """
    with self.handle_query_errors():
        with sentry_sdk.start_span(
                op="discover.endpoint", description="base.stats_query_creation"):
            columns = request.GET.getlist("yAxis", [query_column])
            if query is None:
                query = request.GET.get("query")
            if params is None:
                try:
                    # events-stats is still used by events v1 which doesn't require global views
                    params = self.get_snuba_params(
                        request, organization, check_global_views=False)
                except NoProjects:
                    return {"data": []}
            rollup = get_rollup_from_request(
                request,
                params,
                "1h",
                InvalidSearchQuery(
                    "Your interval and date range would create too many results. "
                    "Use a larger interval, or a smaller date range."),
            )
            # Backwards compatibility for incidents which uses the old
            # column aliases as it straddles both versions of events/discover.
            # We will need these aliases until discover2 flags are enabled for all
            # users.
            column_map = {
                "user_count": "count_unique(user)",
                "event_count": "count()",
                "epm()": "epm(%d)" % rollup,
                "eps()": "eps(%d)" % rollup,
            }
            query_columns = [
                column_map.get(column, column) for column in columns
            ]
        with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_query"):
            result = get_event_stats(query_columns, query, params, rollup)
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_serialization"):
        if top_events:
            results = {}
            for key, event_result in six.iteritems(result):
                if len(query_columns) > 1:
                    results[key] = self.serialize_multiple_axis(
                        serializer, event_result, columns, query_columns)
                else:
                    # Need to get function alias if count is a field, but not the axis
                    results[key] = serializer.serialize(
                        event_result, column=get_function_alias(query_columns[0]))
            return results
        elif len(query_columns) > 1:
            return self.serialize_multiple_axis(serializer, result, columns,
                                                query_columns)
        else:
            return serializer.serialize(result)
def get_event_stats_data(
    self,
    request,
    organization,
    get_event_stats,
    top_events=0,
    query_column="count()",
    params=None,
    query=None,
    allow_partial_buckets=False,
):
    """Shared driver for events-stats style endpoints.

    :param get_event_stats: callable(query_columns, query, params, rollup)
        returning a SnubaTSResult, or a mapping of key -> result for
        top-events requests.
    :param top_events: number of top events requested; > 0 switches to the
        per-key serialization path when the result is a mapping.
    :param allow_partial_buckets: forwarded to serialization so edge
        buckets that only partially overlap the range can be kept.
    """
    with self.handle_query_errors():
        with sentry_sdk.start_span(
                op="discover.endpoint", description="base.stats_query_creation"):
            columns = request.GET.getlist("yAxis", [query_column])
            if query is None:
                query = request.GET.get("query")
            if params is None:
                try:
                    # events-stats is still used by events v1 which doesn't require global views
                    params = self.get_snuba_params(
                        request, organization, check_global_views=False)
                except NoProjects:
                    return {"data": []}
            rollup = get_rollup_from_request(
                request,
                params,
                default_interval=None,
                error=InvalidSearchQuery(
                    "Your interval and date range would create too many results. "
                    "Use a larger interval, or a smaller date range."),
                top_events=top_events,
            )
            # Backwards compatibility for incidents which uses the old
            # column aliases as it straddles both versions of events/discover.
            # We will need these aliases until discover2 flags are enabled for all
            # users.
            # We need these rollup columns to generate correct events-stats results
            column_map = {
                "user_count": "count_unique(user)",
                "event_count": "count()",
                "epm()": "epm(%d)" % rollup,
                "eps()": "eps(%d)" % rollup,
                "tpm()": "tpm(%d)" % rollup,
                "tps()": "tps(%d)" % rollup,
            }
            query_columns = [
                column_map.get(column, column) for column in columns
            ]
        with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_query"):
            result = get_event_stats(query_columns, query, params, rollup)
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_serialization"):
        # When the request is for top_events, result can be a SnubaTSResult in the event that
        # there were no top events found. In this case, result contains a zerofilled series
        # that acts as a placeholder.
        if top_events > 0 and isinstance(result, dict):
            results = {}
            for key, event_result in result.items():
                if len(query_columns) > 1:
                    results[key] = self.serialize_multiple_axis(
                        serializer, event_result, columns, query_columns,
                        allow_partial_buckets)
                else:
                    # Need to get function alias if count is a field, but not the axis
                    results[key] = serializer.serialize(
                        event_result,
                        column=resolve_axis_column(query_columns[0]),
                        allow_partial_buckets=allow_partial_buckets,
                    )
            return results
        elif len(query_columns) > 1:
            return self.serialize_multiple_axis(serializer, result, columns,
                                                query_columns, allow_partial_buckets)
        else:
            return serializer.serialize(
                result,
                resolve_axis_column(query_columns[0]),
                allow_partial_buckets=allow_partial_buckets,
            )