def get(self, request, organization):
    """
    Returns a top-N view based on queryset over time period,
    as well as previous period.
    """
    # Unknown tag names 404 rather than producing an empty result.
    try:
        tag_lookup = SnubaLookup.get(request.GET['tag'])
    except KeyError:
        raise ResourceDoesNotExist

    # Window must parse and fall inside [MIN_STATS_PERIOD, MAX_STATS_PERIOD).
    period = parse_stats_period(request.GET.get('statsPeriod', '24h'))
    if period is None or period < self.MIN_STATS_PERIOD or period >= self.MAX_STATS_PERIOD:
        return Response({'detail': 'Invalid statsPeriod'}, status=400)

    try:
        limit = int(request.GET.get('limit', '5'))
    except ValueError:
        return Response({'detail': 'Invalid limit'}, status=400)
    if limit > self.MAX_LIMIT:
        return Response({'detail': 'Invalid limit: max %d' % self.MAX_LIMIT}, status=400)
    # A non-positive limit is treated as "nothing requested", not an error.
    if limit <= 0:
        return self.empty()

    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)
    if not project_ids:
        return self.empty()

    environment = self.get_environment(request, organization)
    query_condition = self.get_query_condition(request, organization)

    aggregations = [('count()', '', 'count')]
    # If we pass `?topk` this means we also are layering on
    # top_projects and total_projects for each value.
    if 'topk' in request.GET:
        try:
            topk = int(request.GET['topk'])
        except ValueError:
            return Response({'detail': 'Invalid topk'}, status=400)
        aggregations += [
            ('topK(%d)' % topk, 'project_id', 'top_projects'),
            ('uniq', 'project_id', 'total_projects'),
        ]

    current_time = timezone.now()
    current = query(
        end=current_time,
        start=current_time - period,
        selected_columns=tag_lookup.selected_columns,
        aggregations=aggregations,
        filter_keys={
            'project_id': project_ids,
        },
        conditions=tag_lookup.conditions + query_condition + environment,
        groupby=tag_lookup.columns,
        orderby='-count',
        limit=limit,
    )
    if not current['data']:
        return self.empty()

    # Convert our results from current period into a condition
    # to be used in the next query for the previous period.
    # This way our values overlap to be able to deduce a delta.
    encoded = [
        tag_lookup.encoder(value_from_row(row, tag_lookup.columns))
        for row in current['data']
    ]
    seen_values = [value for value in encoded if value is not None]
    saw_null = any(value is None for value in encoded)

    prior = query(
        end=current_time - period,
        start=current_time - (period * 2),
        selected_columns=tag_lookup.selected_columns,
        aggregations=[
            ('count()', '', 'count'),
        ],
        filter_keys={
            'project_id': project_ids,
        },
        # NOTE(review): empty-list placeholders are appended when a clause
        # does not apply — presumably the query layer ignores them; confirm.
        conditions=tag_lookup.conditions + query_condition + environment + [
            [tag_lookup.filter_key, 'IN', seen_values] if seen_values else [],
            [tag_lookup.tagkey, 'IS NULL', None] if saw_null else [],
        ],
        groupby=tag_lookup.columns,
    )

    serializer = SnubaResultSerializer(organization, tag_lookup, request.user)
    return Response(
        serializer.serialize(
            SnubaResultSet(current, prior),
        ),
        status=200,
    )
def get(self, request, organization):
    """
    Returns a top-N view based on queryset over time period,
    as well as previous period.
    """
    # Resolve the requested tag; unknown tags are a 404.
    try:
        lookup = SnubaLookup.get(request.GET['tag'])
    except KeyError:
        raise ResourceDoesNotExist

    stats_period = parse_stats_period(request.GET.get('statsPeriod', '24h'))
    # Window must be within [MIN_STATS_PERIOD, MAX_STATS_PERIOD).
    if stats_period is None or stats_period < self.MIN_STATS_PERIOD or stats_period >= self.MAX_STATS_PERIOD:
        return Response({'detail': 'Invalid statsPeriod'}, status=400)

    try:
        limit = int(request.GET.get('limit', '5'))
    except ValueError:
        return Response({'detail': 'Invalid limit'}, status=400)
    if limit > self.MAX_LIMIT:
        return Response({'detail': 'Invalid limit: max %d' % self.MAX_LIMIT}, status=400)
    # Zero or negative limit short-circuits to an empty payload.
    if limit <= 0:
        return self.empty()

    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)
    if not project_ids:
        return self.empty()

    environment = self.get_environment(request, organization)
    query_condition = self.get_query_condition(request, organization)

    aggregations = [('count()', '', 'count')]
    # If we pass `?topk` this means we also are layering on
    # top_projects and total_projects for each value.
    if 'topk' in request.GET:
        try:
            topk = int(request.GET['topk'])
        except ValueError:
            return Response({'detail': 'Invalid topk'}, status=400)
        aggregations.append(('topK(%d)' % topk, 'project_id', 'top_projects'))
        aggregations.append(('uniq', 'project_id', 'total_projects'))

    now = timezone.now()
    data = query(
        end=now,
        start=now - stats_period,
        selected_columns=lookup.selected_columns,
        aggregations=aggregations,
        filter_keys={
            'project_id': project_ids,
        },
        conditions=lookup.conditions + query_condition + environment,
        groupby=lookup.columns,
        orderby='-count',
        limit=limit,
    )
    if not data['data']:
        return self.empty()

    # Convert our results from current period into a condition
    # to be used in the next query for the previous period.
    # This way our values overlap to be able to deduce a delta.
    seen = []
    null_seen = False
    for entry in data['data']:
        value = lookup.encoder(value_from_row(entry, lookup.columns))
        if value is None:
            null_seen = True
        else:
            seen.append(value)

    # NOTE(review): clauses that do not apply are passed as empty lists —
    # presumably ignored by the query layer; confirm against its contract.
    previous_conditions = lookup.conditions + query_condition + environment + [
        [lookup.filter_key, 'IN', seen] if seen else [],
        [lookup.tagkey, 'IS NULL', None] if null_seen else [],
    ]
    previous = query(
        end=now - stats_period,
        start=now - (stats_period * 2),
        selected_columns=lookup.selected_columns,
        aggregations=[
            ('count()', '', 'count'),
        ],
        filter_keys={
            'project_id': project_ids,
        },
        conditions=previous_conditions,
        groupby=lookup.columns,
    )

    serializer = SnubaResultSerializer(organization, lookup, request.user)
    result = serializer.serialize(
        SnubaResultSet(data, previous),
    )
    return Response(result, status=200)