Code example #1
    def _get_events_snuba(self, request, group, environments, query, tags,
                          start, end):
        """Paginate events for *group* via snuba.

        Falls back to a 90-day window ending now when ``start``/``end`` are
        omitted. If ``query`` is a direct event-ID hit, that response is
        returned immediately without paginating. ``tags`` is accepted for
        interface compatibility but is not used here.
        """
        now = timezone.now()
        params = {
            "group_ids": [group.id],
            "project_id": [group.project_id],
            "organization_id": group.project.organization_id,
            "start": start if start else now - timedelta(days=90),
            "end": end if end else now,
        }

        direct_hit_resp = get_direct_hit_response(request, query, params,
                                                  "api.group-events")
        if direct_hit_resp:
            return direct_hit_resp

        if environments:
            params["environment"] = [environment.name for environment in environments]

        full = request.GET.get("full", False)
        try:
            snuba_filter = get_filter(request.GET.get("query", None), params)
        except InvalidSearchQuery as e:
            raise ParseError(detail=six.text_type(e))

        # Transactions are excluded from the group event stream.
        snuba_filter.conditions.append(["event.type", "!=", "transaction"])

        data_fn = partial(eventstore.get_events,
                          referrer="api.group-events",
                          filter=snuba_filter)
        serializer = EventSerializer() if full else SimpleEventSerializer()

        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=lambda events: serialize(events, request.user,
                                                serializer),
        )
Code example #2
    def do_query(self, projects, request, **kwargs):
        """Run a discover query, rewriting the virtual ``project_name``
        column into ``project_id`` before hitting snuba.

        Queries without aggregations are paginated; aggregated queries run
        once and return a single response.
        """
        # Untouched copy of the requested query, used later so result
        # handling can map project ids back to the names the caller asked for.
        requested_query = deepcopy(kwargs)

        selected_columns = kwargs['selected_columns']
        groupby_columns = kwargs['groupby']

        def _swap_project_name(columns, requested):
            # One-line helper: replace project_name with project_id in-place.
            if 'project_name' in requested:
                columns.remove('project_name')
                if 'project_id' not in columns:
                    columns.append('project_id')

        _swap_project_name(selected_columns,
                           requested_query['selected_columns'])
        _swap_project_name(groupby_columns, requested_query['groupby'])

        for aggregation in kwargs['aggregations']:
            if aggregation[1] == 'project_name':
                aggregation[1] = 'project_id'

        if kwargs['aggregations']:
            snuba_results = snuba.raw_query(referrer='discover', **kwargs)
            return Response(
                self.handle_results(snuba_results, requested_query, projects),
                status=200,
            )

        data_fn = partial(snuba.raw_query, referrer='discover', **kwargs)
        return self.paginate(
            request=request,
            on_results=lambda results: self.handle_results(
                results, requested_query, projects),
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            max_per_page=1000)
Code example #3
    def _get_events_snuba(self, request, group, environments, query, tags,
                          start, end):
        """Paginate events for *group* from snuba.

        Defaults to a 90-day window ending now when ``start``/``end`` are
        not supplied. If ``query`` directly matches an event ID, the
        direct-hit response is returned without paginating. ``tags`` is
        accepted but not used in this implementation.
        """
        default_end = timezone.now()
        default_start = default_end - timedelta(days=90)
        params = {
            'issue.id': [group.id],
            'project_id': [group.project_id],
            'start': start if start else default_start,
            'end': end if end else default_end
        }
        # Short-circuit: an exact event-ID query gets its own response
        # instead of the paginated list below.
        direct_hit_resp = get_direct_hit_response(request, query, params,
                                                  'api.group-events')
        if direct_hit_resp:
            return direct_hit_resp

        if environments:
            params['environment'] = [env.name for env in environments]

        snuba_args = get_snuba_query_args(request.GET.get('query', None),
                                          params)

        # The paginator calls data_fn(offset=..., limit=...); the lambda
        # unwraps the raw_query payload down to its 'data' rows.
        data_fn = partial(
            # extract 'data' from raw_query result
            lambda *args, **kwargs: raw_query(*args, **kwargs)['data'],
            selected_columns=SnubaEvent.selected_columns,
            orderby='-timestamp',
            referrer='api.group-events',
            **snuba_args)

        serializer = SimpleEventSerializer()
        return self.paginate(
            request=request,
            on_results=lambda results: serialize(
                [SnubaEvent(row)
                 for row in results], request.user, serializer),
            paginator=GenericOffsetPaginator(data_fn=data_fn))
Code example #4
    def get(self, request, organization):
        """SCIM Users list: approved members whose user is active (or not
        yet linked), optionally narrowed by an exact-email ``filter``."""
        # note that SCIM doesn't care about changing results as they're queried

        query_params = self.get_query_parameters(request)

        queryset = (
            OrganizationMember.objects.filter(
                Q(invite_status=InviteStatus.APPROVED.value),
                Q(user__is_active=True) | Q(user__isnull=True),
                organization=organization,
            )
            .select_related("user")
            .order_by("email", "user__email")
        )

        email_filter = query_params["filter"]
        if email_filter:
            # not including secondary email vals (dups, etc.)
            queryset = queryset.filter(
                Q(email__iexact=email_filter)
                | Q(user__email__iexact=email_filter)
            )

        def data_fn(offset, limit):
            # Offset pagination over the filtered queryset.
            return list(queryset[offset:offset + limit])

        def on_results(members):
            serialized = serialize(
                members,
                None,
                _scim_member_serializer_with_expansion(organization),
            )
            return self.list_api_format(serialized, queryset.count(),
                                        query_params["start_index"])

        return self.paginate(
            request=request,
            on_results=on_results,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            default_per_page=query_params["count"],
            queryset=queryset,
            cursor_cls=SCIMCursor,
        )
Code example #5
    def get(self, request, organization):
        """Paginated trend results: the requested date range is split at
        ``middle`` and trend columns compare the two halves.

        Returns 404 when the feature is disabled and an empty list when
        the requester has no projects.
        """
        if not self.has_feature(organization, request):
            return Response(status=404)

        try:
            params = self.get_snuba_params(request, organization)
        except NoProjects:
            return Response([])

        with sentry_sdk.start_span(op="discover.endpoint", description="trend_dates"):
            middle_date = request.GET.get("middle")
            if middle_date:
                try:
                    middle = parse_datetime_string(middle_date)
                except InvalidQuery:
                    raise ParseError(detail="{} is not a valid date format".format(middle_date))
                # The split point must lie strictly inside the query window.
                if middle <= params["start"] or middle >= params["end"]:
                    raise ParseError(
                        detail="The middle date should be within the duration of the query"
                    )
            else:
                # Default split point: the midpoint of the query window.
                middle = params["start"] + timedelta(
                    seconds=(params["end"] - params["start"]).total_seconds() * 0.5
                )
            # Format all three boundaries the way the trend columns expect.
            start, middle, end = (
                datetime.strftime(params["start"], DateArg.date_format),
                datetime.strftime(middle, DateArg.date_format),
                datetime.strftime(params["end"], DateArg.date_format),
            )

        trend_type = request.GET.get("trendType", REGRESSION)
        if trend_type not in TREND_TYPES:
            raise ParseError(detail=u"{} is not a supported trend type".format(trend_type))

        params["aliases"] = self.get_function_aliases(trend_type)

        trend_function = request.GET.get("trendFunction", "p50()")
        function, columns = parse_function(trend_function)
        trend_columns = self.get_trend_columns(function, columns, start, middle, end)

        selected_columns = request.GET.getlist("field")[:]
        orderby = self.get_orderby(request)

        query = request.GET.get("query")

        def data_fn(offset, limit):
            # Pagination callback: one discover query per page, with the
            # trend columns appended to the user-selected fields.
            return discover.query(
                selected_columns=selected_columns + trend_columns,
                query=query,
                params=params,
                orderby=orderby,
                offset=offset,
                limit=limit,
                referrer="api.trends.get-percentage-change",
                auto_fields=True,
                auto_aggregations=True,
                use_aggregate_conditions=True,
            )

        with self.handle_query_errors():
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=self.build_result_handler(
                    request, organization, params, trend_function, selected_columns, orderby, query
                ),
                default_per_page=5,
                max_per_page=5,
            )
Code example #6
    def get(self, request, organization):
        """
        List saved queries for organization
        """
        if not self.has_feature(organization, request):
            return self.respond(status=404)

        queryset = (
            DiscoverSavedQuery.objects.filter(organization=organization)
            .select_related("created_by")
            .prefetch_related("projects")
            .extra(select={"lower_name": "lower(name)"})
        )

        query = request.query_params.get("query")
        if query:
            for key, value in six.iteritems(tokenize_query(query)):
                if key in ("name", "query"):
                    queryset = queryset.filter(
                        name__icontains=" ".join(value))
                elif key == "version":
                    queryset = queryset.filter(version=" ".join(value))
                else:
                    # Unknown search key: match nothing.
                    queryset = queryset.none()

        sort_by = request.query_params.get("sortBy")
        if sort_by in ("name", "-name"):
            order_by = [
                "-lower_name" if sort_by.startswith("-") else "lower_name",
                "-date_created",
            ]
        elif sort_by in ("dateCreated", "-dateCreated"):
            order_by = [
                "date_created" if sort_by == "dateCreated" else "-date_created"
            ]
        elif sort_by in ("dateUpdated", "-dateUpdated"):
            order_by = [
                "date_updated" if sort_by == "dateUpdated" else "-date_updated"
            ]
        elif sort_by == "myqueries":
            # The requester's own queries sort first, newest first after that.
            order_by = [
                Case(When(created_by_id=request.user.id, then=-1),
                     default="created_by_id"),
                "-date_created",
            ]
        else:
            order_by = ["lower_name"]
        queryset = queryset.order_by(*order_by)

        # Old discover expects all queries and uses this parameter.
        if request.query_params.get("all") == "1":
            return Response(serialize(list(queryset.all())), status=200)

        def data_fn(offset, limit):
            return list(queryset[offset:offset + limit])

        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=lambda queries: serialize(queries, request.user),
            default_per_page=25,
        )
Code example #7
    def get(self, request, organization):
        """Paginated tag facets ranked by performance impact.

        Runs a base tag-data query, then a facet-performance query per
        page, and normalizes tag keys/values for display.
        """
        try:
            params, aggregate_column, filter_query = self.setup(
                request, organization)
        except NoProjects:
            return Response([])

        all_tag_keys = None
        tag_key = None

        # Extra query-string knobs are only honored behind the tag-page
        # feature flag.
        if self.has_tag_page_feature(organization, request):
            all_tag_keys = request.GET.get("allTagKeys")
            tag_key = request.GET.get("tagKey")

        def data_fn(offset, limit):
            with sentry_sdk.start_span(op="discover.endpoint",
                                       description="discover_query"):
                referrer = "api.organization-events-facets-performance.top-tags"
                tag_data = query_tag_data(
                    filter_query=filter_query,
                    aggregate_column=aggregate_column,
                    referrer=referrer,
                    params=params,
                )

                # No base tag data means nothing to rank; return empty page.
                if not tag_data:
                    return {"data": []}

                results = query_facet_performance(
                    tag_data=tag_data,
                    filter_query=filter_query,
                    aggregate_column=aggregate_column,
                    referrer=referrer,
                    orderby=self.get_orderby(request),
                    limit=limit,
                    offset=offset,
                    params=params,
                    all_tag_keys=all_tag_keys,
                    tag_key=tag_key,
                )

                if not results:
                    return {"data": []}

                # Replace raw tag keys/values with their display forms.
                for row in results["data"]:
                    row["tags_value"] = tagstore.get_tag_value_label(
                        row["tags_key"], row["tags_value"])
                    row["tags_key"] = tagstore.get_standardized_key(
                        row["tags_key"])

                return results

        with self.handle_query_errors():
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=lambda results: self.handle_results_with_meta(
                    request, organization, params["project_id"], results),
                default_per_page=5,
                max_per_page=20,
            )
Code example #8
    def get(self, request, organization):
        """Discover events (v2): run a paginated discover query and map
        snuba failures onto user-facing API errors.

        Requires the ``organizations:discover-basic`` feature; without
        ``organizations:global-views`` only a single project may be queried.
        """
        if not features.has("organizations:discover-basic",
                            organization,
                            actor=request.user):
            return Response(status=404)

        with sentry_sdk.start_span(op="discover.endpoint",
                                   description="filter_params") as span:
            span.set_tag("organization", organization)
            try:
                params = self.get_filter_params(request, organization)
            except NoProjects:
                return Response([])
            params = self.quantize_date_params(request, params)

            has_global_views = features.has("organizations:global-views",
                                            organization,
                                            actor=request.user)
            if not has_global_views and len(params.get("project_id", [])) > 1:
                raise ParseError(
                    detail="You cannot view events from multiple projects.")

        def data_fn(offset, limit):
            # Pagination callback: one discover query per page.
            return discover.query(
                selected_columns=request.GET.getlist("field")[:],
                query=request.GET.get("query"),
                params=params,
                reference_event=self.reference_event(request, organization,
                                                     params.get("start"),
                                                     params.get("end")),
                orderby=self.get_orderby(request),
                offset=offset,
                limit=limit,
                referrer=request.GET.get("referrer",
                                         "api.organization-events-v2"),
                auto_fields=True,
                use_aggregate_conditions=True,
            )

        try:
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=lambda results: self.handle_results_with_meta(
                    request, organization, params["project_id"], results),
            )
        except (discover.InvalidSearchQuery,
                snuba.QueryOutsideRetentionError) as error:
            raise ParseError(detail=six.text_type(error))
        except snuba.QueryIllegalTypeOfArgument:
            raise ParseError(
                detail="Invalid query. Argument to function is wrong type.")
        except snuba.SnubaError as error:
            # Map broad snuba failures to friendlier messages; anything not
            # matched below falls through as a generic internal error.
            message = "Internal error. Please try again."
            if isinstance(
                    error,
                (
                    snuba.RateLimitExceeded,
                    snuba.QueryMemoryLimitExceeded,
                    snuba.QueryTooManySimultaneous,
                ),
            ):
                message = "Query timeout. Please try again. If the problem persists try a smaller date range or fewer projects."
            elif isinstance(
                    error,
                (
                    snuba.UnqualifiedQueryError,
                    snuba.QueryExecutionError,
                    snuba.SchemaValidationError,
                ),
            ):
                # These indicate a query-building bug on our side; report it.
                sentry_sdk.capture_exception(error)
                message = "Internal error. Your query failed to run."

            raise ParseError(detail=message)
Code example #9
    def get(self, request, organization):
        """Trend results for transactions: splits the date range in half,
        computes the chosen trend function per half plus change/correlation
        columns, and attaches timeseries stats for the top results.
        """
        if not self.has_feature(organization, request):
            return Response(status=404)

        with sentry_sdk.start_span(op="discover.endpoint",
                                   description="filter_params") as span:
            span.set_tag("organization", organization)
            try:
                params = self.get_filter_params(request, organization)
            except NoProjects:
                return Response([])
            params = self.quantize_date_params(request, params)

            has_global_views = features.has("organizations:global-views",
                                            organization,
                                            actor=request.user)
            if not has_global_views and len(params.get("project_id", [])) > 1:
                raise ParseError(
                    detail="You cannot view events from multiple projects.")

            # Midpoint of the query window: boundary between the two
            # compared halves.
            middle = params["start"] + timedelta(
                seconds=(params["end"] - params["start"]).total_seconds() *
                0.5)
            start, middle, end = (
                datetime.strftime(params["start"], DateArg.date_format),
                datetime.strftime(middle, DateArg.date_format),
                datetime.strftime(params["end"], DateArg.date_format),
            )

        trend_function = request.GET.get("trendFunction", "p50()")
        function, columns = parse_function(trend_function)
        trend_column = self.trend_columns.get(function)
        if trend_column is None:
            raise ParseError(detail=u"{} is not a supported trend function".
                             format(trend_function))

        count_column = self.trend_columns.get("count_range")
        percentage_column = self.trend_columns["percentage"]
        selected_columns = request.GET.getlist("field")[:]
        query = request.GET.get("query")
        orderby = self.get_orderby(request)

        def data_fn(offset, limit):
            # Builds the two windowed aggregates (index 1 = first half,
            # index 2 = second half) plus their ratio, difference, counts,
            # and correlation, on top of the user-selected columns.
            return discover.query(
                selected_columns=selected_columns + [
                    trend_column["format"].format(
                        *columns, start=start, end=middle, index="1"),
                    trend_column["format"].format(
                        *columns, start=middle, end=end, index="2"),
                    percentage_column["format"].format(
                        alias=trend_column["alias"]),
                    "minus({alias}2,{alias}1)".format(
                        alias=trend_column["alias"]),
                    count_column["format"].format(
                        start=start, end=middle, index="1"),
                    count_column["format"].format(
                        start=middle, end=end, index="2"),
                    percentage_column["format"].format(
                        alias=count_column["alias"]),
                    "absolute_correlation()",
                ],
                query=query,
                params=params,
                orderby=orderby,
                offset=offset,
                limit=limit,
                referrer="api.trends.get-percentage-change",
                auto_fields=True,
                use_aggregate_conditions=True,
            )

        def on_results(events_results):
            # Post-processing: fetch per-event timeseries stats for (at
            # most 5 of) the returned trends, then bundle both payloads.
            def get_event_stats(query_columns, query, params, rollup,
                                reference_event):
                return discover.top_events_timeseries(
                    query_columns,
                    selected_columns,
                    query,
                    params,
                    orderby,
                    rollup,
                    min(5, len(events_results["data"])),
                    organization,
                    top_events=events_results,
                    referrer="api.trends.get-event-stats",
                )

            stats_results = (self.get_event_stats_data(
                request,
                organization,
                get_event_stats,
                top_events=True,
                query_column=trend_function,
            ) if len(events_results["data"]) > 0 else {})

            return {
                "events":
                self.handle_results_with_meta(request, organization,
                                              params["project_id"],
                                              events_results),
                "stats":
                stats_results,
            }

        with self.handle_query_errors():
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=on_results,
                default_per_page=5,
                max_per_page=5,
            )
Code example #10
    def get(self, request, id: str, group: Group):
        """
        Retrieve information about a particular grouping level, including a
        list of issues it would create.

        ```
        GET /api/0/issues/<group_id>/grouping/levels/<level_id>/new-issues/

        [
            {"hash": "...", "latestEvent": ..., "eventCount": 132},
            ...
        ]
        ```

        Available level IDs can be fetched from `GroupingLevelsEndpoint`.

        Each row/array item corresponds to one *new issue* that selecting this
        level would create in place of the *affected issues*. The array items
        are not groups, but groups that will be created, therefore a lot of
        information normally available for groups is missing.

        - `latestEvent`: a sample event in the same format returned by the
          event details endpoint(s).

        - `hash`: The grouping hash, probably insignificant to the user but can
          be shown for diagnostic purposes.

        - `eventCount`: How many events this issue would contain. Note that
          like with any other event count, this number can change all the time
          because events keep coming in.

        The "would-be issues" are returned in-order such that the most recently
        seen "issue" is at the top, i.e. it is sorted in descending order of
        `latestEvent.dateCreated`.

        The *affected issue* (=to-be-deleted issue) is often just the current one,
        however if the previewed grouping level is reduced, this endpoint can
        return a list of entries which together have more events than the
        current issue (meaning issues will be merged together).

        In the future there will be an endpoint that allows you to fetch the
        list of affected issues. For now the UI should simply show a warning if
        the level is decreased (and possibly only if the summed up events of
        the new issues are more than what the current issue has).
        """

        check_feature(group.project.organization, request)

        level_id = int(id)

        def fetch_page(offset=None, limit=None):
            # Raw snuba rows for this grouping level, one page at a time.
            return _query_snuba(group, level_id, offset=offset, limit=limit)

        def render_page(rows):
            return _process_snuba_results(rows, group, level_id, request.user)

        return self.paginate(
            request=request,
            on_results=render_page,
            paginator=GenericOffsetPaginator(data_fn=fetch_page),
        )
Code example #11
    def get(self, request, organization):
        """Discover events (v2, older variant): run a paginated discover
        query and translate snuba failures into API errors, logging them.

        Requires the ``organizations:discover-basic`` feature; without
        ``organizations:global-views`` only a single project may be queried.
        """
        if not features.has("organizations:discover-basic",
                            organization,
                            actor=request.user):
            return Response(status=404)

        try:
            params = self.get_filter_params(request, organization)
        except OrganizationEventsError as exc:
            raise ParseError(detail=six.text_type(exc))
        except NoProjects:
            return Response([])

        params["organization_id"] = organization.id

        has_global_views = features.has("organizations:global-views",
                                        organization,
                                        actor=request.user)
        if not has_global_views and len(params.get("project_id", [])) > 1:
            raise ParseError(
                detail="You cannot view events from multiple projects.")

        def data_fn(offset, limit):
            # Pagination callback: one discover query per page.
            return discover.query(
                selected_columns=request.GET.getlist("field")[:],
                query=request.GET.get("query"),
                params=params,
                reference_event=self.reference_event(request, organization,
                                                     params.get("start"),
                                                     params.get("end")),
                orderby=self.get_orderby(request),
                offset=offset,
                limit=limit,
                referrer="api.organization-events-v2",
                auto_fields=True,
                use_aggregate_conditions=True,
            )

        try:
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=lambda results: self.handle_results_with_meta(
                    request, organization, params["project_id"], results),
            )
        except discover.InvalidSearchQuery as error:
            raise ParseError(detail=six.text_type(error))
        except (snuba.SnubaError, snuba.QueryOutsideRetentionError) as error:
            # Log every snuba failure with enough context to investigate,
            # then map the error class onto a user-facing message.
            logger.info(
                "organization.events.snuba-error",
                extra={
                    "organization_id": organization.id,
                    "user_id": request.user.id,
                    "error": six.text_type(error),
                },
            )
            message = "Internal error. Please try again."
            if isinstance(error, snuba.QueryIllegalTypeOfArgument):
                message = "Invalid query. Argument to function is wrong type."
            elif isinstance(error, snuba.QueryOutsideRetentionError):
                message = "Invalid date range. Please try a more recent date range."
            elif isinstance(
                    error,
                (
                    snuba.RateLimitExceeded,
                    snuba.QueryMemoryLimitExceeded,
                    snuba.QueryTooManySimultaneous,
                ),
            ):
                message = "Query timeout. Please try again. If the problem persists try a smaller date range or fewer projects."
            elif isinstance(
                    error,
                (
                    snuba.UnqualifiedQueryError,
                    snuba.QueryExecutionError,
                    snuba.SchemaValidationError,
                ),
            ):
                message = "Invalid query."

            raise ParseError(detail=message)
Code example #12
    def get(self, request: Request, organization: Organization) -> Response:
        """Paginated list of suspect span groups, each serialized together
        with example transactions."""
        if not self.has_feature(request, organization):
            return Response(status=404)

        try:
            params = self.get_snuba_params(request, organization)
        except NoProjects:
            return Response(status=404)

        serializer = SpansPerformanceSerializer(data=request.GET)
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)
        serialized = serializer.validated_data

        fields = serialized.get("field", [])
        query = serialized.get("query")
        span_ops = serialized.get("spanOp")
        span_groups = serialized.get("spanGroup")
        per_suspect = serialized.get("perSuspect")

        direction, orderby_column = self.get_orderby_column(request)

        def data_fn(offset: int, limit: int) -> Any:
            suspects = query_suspect_span_groups(
                params,
                fields,
                query,
                span_ops,
                span_groups,
                direction,
                orderby_column,
                limit,
                offset,
            )

            # Because we want to support pagination, the limit is 1 more than will be
            # returned and displayed. Since this extra result is only used for
            # pagination, we do not need to get any example transactions for it.
            spans_needing_examples = [
                Span(s.op, s.group) for s in suspects[:limit - 1]
            ]

            examples_by_span = query_example_transactions(
                params, query, direction, orderby_column,
                spans_needing_examples, per_suspect)

            def build_examples(suspect):
                events = examples_by_span.get(Span(suspect.op, suspect.group),
                                              [])
                return [
                    get_example_transaction(event, suspect.op, suspect.group)
                    for event in events
                ]

            return [
                SuspectSpanWithExamples(
                    examples=build_examples(suspect),
                    **dataclasses.asdict(suspect),
                ).serialize()
                for suspect in suspects
            ]

        with self.handle_query_errors():
            return self.paginate(
                request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                default_per_page=4,
                max_per_page=4,
            )
Code example #13
    def get(self, request: Request, organization) -> Response:
        """Return paginated transaction trend results for an organization.

        Splits the requested time range at a "middle" timestamp (client-supplied
        via ``?middle=`` or defaulting to the midpoint) and queries a trend
        function (e.g. ``p50()``) over each half so callers can compare the two
        periods. Supports two query paths: a SnQL query builder (feature-flagged)
        and the legacy ``discover.query`` path.

        Raises:
            ParseError: for an invalid middle date, a middle date outside the
                query window, an unsupported ``trendType``, or an invalid
                ``trendFunction``.
        """
        if not self.has_feature(organization, request):
            return Response(status=404)
        # Feature flag controlling whether the SnQL query path is used; tagged
        # on the SDK scope so the rollout can be monitored.
        use_snql = self.has_snql_feature(organization, request)
        sentry_sdk.set_tag("discover.use-snql", use_snql)

        try:
            params = self.get_snuba_params(request, organization)
        except NoProjects:
            # No accessible projects means there is nothing to trend over.
            return Response([])

        with sentry_sdk.start_span(op="discover.endpoint", description="trend_dates"):
            # Optional client-supplied split point; must fall strictly inside
            # the query window [start, end].
            middle_date = request.GET.get("middle")
            if middle_date:
                try:
                    middle = parse_datetime_string(middle_date)
                except InvalidQuery:
                    raise ParseError(detail=f"{middle_date} is not a valid date format")
                if middle <= params["start"] or middle >= params["end"]:
                    raise ParseError(
                        detail="The middle date should be within the duration of the query"
                    )
            else:
                # Default: exact midpoint of the requested time range.
                middle = params["start"] + timedelta(
                    seconds=(params["end"] - params["start"]).total_seconds() * 0.5
                )
            # Downstream column templates expect the split point as a string.
            middle = datetime.strftime(middle, DateArg.date_format)

        trend_type = request.GET.get("trendType", REGRESSION)
        if trend_type not in TREND_TYPES:
            raise ParseError(detail=f"{trend_type} is not a supported trend type")

        trend_function = request.GET.get("trendFunction", "p50()")
        try:
            function, columns, _ = parse_function(trend_function)
        except InvalidSearchQuery as error:
            raise ParseError(detail=error)
        if len(columns) == 0:
            # Default to duration
            column = "transaction.duration"
        else:
            column = columns[0]

        selected_columns = self.get_field_list(organization, request)
        orderby = self.get_orderby(request)
        query = request.GET.get("query")

        if use_snql:
            with self.handle_query_errors():
                # Build the SnQL query up front; data_fn below only sets
                # offset/limit per page.
                trend_query = TrendQueryBuilder(
                    dataset=Dataset.Discover,
                    params=params,
                    selected_columns=selected_columns,
                    auto_fields=False,
                    auto_aggregations=True,
                    use_aggregate_conditions=True,
                )
                snql_trend_columns = self.resolve_trend_columns(
                    trend_query, function, column, middle
                )
                # Trend columns are aggregates, so they are registered in both
                # lists on the builder.
                trend_query.columns.extend(snql_trend_columns.values())
                trend_query.aggregates.extend(snql_trend_columns.values())
                trend_query.params["aliases"] = self.get_snql_function_aliases(
                    snql_trend_columns, trend_type
                )
                # Both orderby and conditions need to be resolved after the columns because of aliasing
                trend_query.orderby = trend_query.resolve_orderby(orderby)
                where, having = trend_query.resolve_conditions(query, use_aggregate_conditions=True)
                trend_query.where += where
                trend_query.having += having
        else:
            # Legacy path: aliases and column strings are computed here and
            # passed to discover.query inside data_fn.
            params["aliases"] = self.get_function_aliases(trend_type)
            trend_columns = self.get_trend_columns(function, column, middle)

        def data_fn(offset, limit):
            # Page fetcher used by GenericOffsetPaginator; closes over the
            # prepared query state for whichever path was taken above.
            if use_snql:
                trend_query.offset = Offset(offset)
                trend_query.limit = Limit(limit)
                result = raw_snql_query(
                    trend_query.get_snql_query(),
                    referrer="api.trends.get-percentage-change.wip-snql",
                )
                # Normalize raw snuba rows into the discover result shape.
                result = discover.transform_results(
                    result, trend_query.function_alias_map, {}, None
                )
                return result
            else:
                return discover.query(
                    selected_columns=selected_columns + trend_columns,
                    query=query,
                    params=params,
                    orderby=orderby,
                    offset=offset,
                    limit=limit,
                    referrer="api.trends.get-percentage-change",
                    auto_fields=True,
                    auto_aggregations=True,
                    use_aggregate_conditions=True,
                )

        with self.handle_query_errors():
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=self.build_result_handler(
                    request,
                    organization,
                    params,
                    trend_function,
                    selected_columns,
                    orderby,
                    query,
                    use_snql,
                ),
                default_per_page=5,
                max_per_page=5,
            )
コード例 #14
0
    def get(self, request, organization):
        """Return paginated transaction trend results (legacy column-template path).

        Splits the query window at its midpoint and builds Snuba column
        expressions from ``self.trend_columns`` format templates — a trend
        function per half (suffixes ``1``/``2``), count and percentage columns,
        and the variance/average columns needed to compute a t-score comparing
        the two halves.

        Raises:
            ParseError: if ``trendFunction`` is not in ``self.trend_columns``.
        """
        if not self.has_feature(organization, request):
            return Response(status=404)

        try:
            params = self.get_snuba_params(request, organization)
        except NoProjects:
            # No accessible projects means there is nothing to trend over.
            return Response([])

        with sentry_sdk.start_span(op="discover.endpoint", description="trend_dates"):
            # Midpoint of the window; all three boundaries are formatted as
            # strings because the column templates below interpolate them.
            middle = params["start"] + timedelta(
                seconds=(params["end"] - params["start"]).total_seconds() * 0.5
            )
            start, middle, end = (
                datetime.strftime(params["start"], DateArg.date_format),
                datetime.strftime(middle, DateArg.date_format),
                datetime.strftime(params["end"], DateArg.date_format),
            )

        trend_function = request.GET.get("trendFunction", "p50()")
        # NOTE(review): unlike the newer endpoint, parse_function is not guarded
        # against InvalidSearchQuery here — an invalid function string would
        # propagate rather than produce a 400. Left as-is; confirm intent.
        function, columns = parse_function(trend_function)
        trend_column = self.trend_columns.get(function)
        if trend_column is None:
            raise ParseError(detail=u"{} is not a supported trend function".format(trend_function))

        count_column = self.trend_columns.get("count_range")
        percentage_column = self.trend_columns["percentage"]
        selected_columns = request.GET.getlist("field")[:]
        orderby = self.get_orderby(request)

        # t_score, and the columns required to calculate it
        variance_column = self.trend_columns["variance"]
        avg_column = self.trend_columns["avg"]
        t_score = self.trend_columns["t_score"]["format"].format(
            avg=avg_column["alias"], variance=variance_column["alias"], count=count_column["alias"],
        )
        # Variance for each half of the window (index 1 = first half,
        # index 2 = second half), plus the derived t-score expression.
        t_score_columns = [
            variance_column["format"].format(start=start, end=middle, index="1"),
            variance_column["format"].format(start=middle, end=end, index="2"),
            t_score,
        ]
        # Only add average when its not the baseline
        if function != "avg":
            t_score_columns.extend(
                [
                    avg_column["format"].format(start=start, end=middle, index="1"),
                    avg_column["format"].format(start=middle, end=end, index="2"),
                ]
            )

        trend_percentage = percentage_column["format"].format(alias=trend_column["alias"])
        query = self.get_query(request, trend_percentage, t_score)

        def data_fn(offset, limit):
            # Page fetcher for GenericOffsetPaginator: user-selected fields,
            # t-score inputs, and the per-half trend/count/percentage columns.
            return discover.query(
                selected_columns=selected_columns
                + t_score_columns
                + [
                    trend_column["format"].format(*columns, start=start, end=middle, index="1"),
                    trend_column["format"].format(*columns, start=middle, end=end, index="2"),
                    trend_percentage,
                    "minus({alias}2,{alias}1)".format(alias=trend_column["alias"]),
                    count_column["format"].format(start=start, end=middle, index="1"),
                    count_column["format"].format(start=middle, end=end, index="2"),
                    percentage_column["format"].format(alias=count_column["alias"]),
                ],
                query=query,
                params=params,
                orderby=orderby,
                offset=offset,
                limit=limit,
                referrer="api.trends.get-percentage-change",
                auto_fields=True,
                auto_aggregations=True,
                use_aggregate_conditions=True,
            )

        with self.handle_query_errors():
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=self.build_result_handler(
                    request, organization, params, trend_function, selected_columns, orderby, query
                ),
                default_per_page=5,
                max_per_page=5,
            )
コード例 #15
0
    def get(self, request, organization):
        """Return paginated facet-performance results for a single project.

        Validates that exactly one project and a supported ``aggregateColumn``
        were requested, then pages through tag facets ranked by their impact on
        the chosen aggregate.

        Raises:
            ParseError: when ``aggregateColumn`` is missing or unsupported, or
                when more than one project is in scope.
        """
        if not self.has_feature(organization, request):
            return Response(status=404)

        try:
            params = self.get_snuba_params(request, organization)
        except NoProjects:
            return Response([])

        filter_query = request.GET.get("query")
        aggregate_column = request.GET.get("aggregateColumn")

        # Columns that facet performance may aggregate over.
        allowed_aggregates = {
            "transaction.duration",
            "measurements.lcp",
            "spans.browser",
            "spans.http",
            "spans.db",
            "spans.resource",
        }

        # Guard clauses: required param, supported column, single project.
        if not aggregate_column:
            raise ParseError(detail="'aggregateColumn' must be provided.")
        if aggregate_column not in allowed_aggregates:
            raise ParseError(detail=f"'{aggregate_column}' is not a supported tags column.")
        if len(params.get("project_id", [])) > 1:
            raise ParseError(detail="You cannot view facet performance for multiple projects.")

        def data_fn(offset, limit):
            # Page fetcher for GenericOffsetPaginator.
            with sentry_sdk.start_span(
                op="discover.endpoint", description="discover_query"
            ):
                referrer = "api.organization-events-facets-performance.top-tags"
                # First pass: aggregate stats for the base query; an empty
                # result short-circuits the facet query entirely.
                tag_data = query_tag_data(
                    filter_query=filter_query,
                    aggregate_column=aggregate_column,
                    referrer=referrer,
                    params=params,
                )
                if not tag_data:
                    return {"data": []}

                facets = query_facet_performance(
                    tag_data=tag_data,
                    filter_query=filter_query,
                    aggregate_column=aggregate_column,
                    referrer=referrer,
                    orderby=self.get_orderby(request),
                    limit=limit,
                    offset=offset,
                    params=params,
                )
                if not facets:
                    return {"data": []}

                # Translate raw tag keys/values into their display forms.
                for row in facets["data"]:
                    row["tags_value"] = tagstore.get_tag_value_label(
                        row["tags_key"], row["tags_value"]
                    )
                    row["tags_key"] = tagstore.get_standardized_key(row["tags_key"])

                return facets

        with self.handle_query_errors():
            return self.paginate(
                request=request,
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                on_results=lambda results: self.handle_results_with_meta(
                    request, organization, params["project_id"], results
                ),
                default_per_page=5,
                max_per_page=20,
            )