Example #1
    def get_event_stats_data(self,
                             request,
                             organization,
                             get_event_stats,
                             top_events=False):
        try:
            columns = request.GET.getlist("yAxis", ["count()"])
            query = request.GET.get("query")
            params = self.get_filter_params(request, organization)
            rollup = get_rollup_from_request(
                request,
                params,
                "1h",
                InvalidSearchQuery(
                    "Your interval and date range would create too many results. "
                    "Use a larger interval, or a smaller date range."),
            )
            # Backwards compatibility for incidents which uses the old
            # column aliases as it straddles both versions of events/discover.
            # We will need these aliases until discover2 flags are enabled for all
            # users.
            column_map = {
                "user_count": "count_unique(user)",
                "event_count": "count()",
                "rpm()": "rpm(%d)" % rollup,
                "rps()": "rps(%d)" % rollup,
            }
            query_columns = [
                column_map.get(column, column) for column in columns
            ]
            reference_event = self.reference_event(request, organization,
                                                   params.get("start"),
                                                   params.get("end"))

            result = get_event_stats(query_columns, query, params, rollup,
                                     reference_event)
        except (discover.InvalidSearchQuery,
                snuba.QueryOutsideRetentionError) as error:
            raise ParseError(detail=six.text_type(error))
        serializer = SnubaTSResultSerializer(organization, None, request.user)

        if top_events:
            results = {}
            for key, event_result in six.iteritems(result):
                if len(query_columns) > 1:
                    results[key] = self.serialize_multiple_axis(
                        serializer, event_result, columns, query_columns)
                else:
                    # Need to get function alias if count is a field, but not the axis
                    results[key] = serializer.serialize(
                        event_result, get_function_alias(query_columns[0]))
            return results
        elif len(query_columns) > 1:
            return self.serialize_multiple_axis(serializer, result, columns,
                                                query_columns)
        else:
            return serializer.serialize(result)
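
The alias-mapping step in this example can be isolated as a small, self-contained sketch. It assumes the same legacy aliases and a rollup in seconds as shown above; the helper name map_query_columns is illustrative and not part of the Sentry API.

def map_query_columns(columns, rollup):
    # Translate legacy column aliases and interpolate the rollup (in seconds)
    # into the rate functions; unknown columns pass through unchanged.
    column_map = {
        "user_count": "count_unique(user)",
        "event_count": "count()",
        "rpm()": "rpm(%d)" % rollup,
        "rps()": "rps(%d)" % rollup,
    }
    return [column_map.get(column, column) for column in columns]


# map_query_columns(["user_count", "rpm()", "count()"], 3600)
# -> ["count_unique(user)", "rpm(3600)", "count()"]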
Example #2
    def get_event_stats_data(self, request, organization, get_event_stats):
        try:
            columns = request.GET.getlist("yAxis", ["count()"])
            query = request.GET.get("query")
            params = self.get_filter_params(request, organization)
            rollup = get_rollup_from_request(
                request,
                params,
                "1h",
                InvalidSearchQuery(
                    "Your interval and date range would create too many results. "
                    "Use a larger interval, or a smaller date range."
                ),
            )
            # Backwards compatibility for incidents which uses the old
            # column aliases as it straddles both versions of events/discover.
            # We will need these aliases until discover2 flags are enabled for all
            # users.
            column_map = {
                "user_count": "count_unique(user)",
                "event_count": "count()",
                "rpm()": "rpm(%d)" % rollup,
                "rps()": "rps(%d)" % rollup,
            }
            query_columns = [column_map.get(column, column) for column in columns]
            reference_event = self.reference_event(
                request, organization, params.get("start"), params.get("end")
            )

            result = get_event_stats(query_columns, query, params, rollup, reference_event)
        except InvalidSearchQuery as err:
            raise ParseError(detail=six.text_type(err))
        serializer = SnubaTSResultSerializer(organization, None, request.user)
        if len(columns) > 1:
            # Return with requested yAxis as the key
            return {
                column: serializer.serialize(result, get_function_alias(query_column))
                for column, query_column in zip(columns, query_columns)
            }
        else:
            return serializer.serialize(result)
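
The multi-axis branch above follows one pattern: each serialized series is keyed by the column the client originally requested, not by the mapped query column. A minimal sketch of that keying, where serialize is a stand-in callable for the serializer plus alias resolution used in the example:

def key_series_by_requested_axis(columns, query_columns, serialize, result):
    # Each requested yAxis column keys its own serialized series, while the
    # mapped query column is what the serializer actually reads from the result.
    return {
        column: serialize(result, query_column)
        for column, query_column in zip(columns, query_columns)
    }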
Example #3
    def get_v1_results(self, request, organization):
        try:
            snuba_args = self.get_snuba_query_args_legacy(
                request, organization)
        except InvalidSearchQuery as exc:
            raise ParseError(detail=str(exc))
        except NoProjects:
            return Response({"data": []})

        snuba_args = self.get_field(request, snuba_args)
        rollup = get_rollup_from_request(
            request,
            snuba_args,
            default_interval=None,
            error=InvalidSearchQuery(
                "Your interval and date range would create too many results. "
                "Use a larger interval, or a smaller date range."),
        )

        result = transform_aliases_and_query(
            aggregations=snuba_args.get("aggregations"),
            conditions=snuba_args.get("conditions"),
            filter_keys=snuba_args.get("filter_keys"),
            start=snuba_args.get("start"),
            end=snuba_args.get("end"),
            orderby="time",
            groupby=["time"],
            rollup=rollup,
            referrer="api.organization-events-stats",
            limit=10000,
        )
        serializer = SnubaTSResultSerializer(organization, None, request.user)
        return Response(
            serializer.serialize(
                snuba.SnubaTSResult(result, snuba_args["start"],
                                    snuba_args["end"], rollup)),
            status=200,
        )
Example #4
    def get_event_stats_data(
        self,
        request: Request,
        organization: Organization,
        get_event_stats: Callable[
            [Sequence[str], str, Dict[str, str], int, bool, Optional[timedelta]], SnubaTSResult
        ],
        top_events: int = 0,
        query_column: str = "count()",
        params: Optional[Dict[str, Any]] = None,
        query: Optional[str] = None,
        allow_partial_buckets: bool = False,
        zerofill_results: bool = True,
        comparison_delta: Optional[timedelta] = None,
    ) -> Dict[str, Any]:
        with self.handle_query_errors():
            with sentry_sdk.start_span(
                op="discover.endpoint", description="base.stats_query_creation"
            ):
                columns = request.GET.getlist("yAxis", [query_column])
                if query is None:
                    query = request.GET.get("query")
                if params is None:
                    try:
                        # events-stats is still used by events v1 which doesn't require global views
                        params = self.get_snuba_params(
                            request, organization, check_global_views=False
                        )
                    except NoProjects:
                        return {"data": []}

                try:
                    rollup = get_rollup_from_request(
                        request,
                        params,
                        default_interval=None,
                        error=InvalidSearchQuery(),
                        top_events=top_events,
                    )
                # If the user sends an invalid interval, use the default instead
                except InvalidSearchQuery:
                    sentry_sdk.set_tag("user.invalid_interval", request.GET.get("interval"))
                    date_range = params["end"] - params["start"]
                    stats_period = parse_stats_period(get_interval_from_range(date_range, False))
                    rollup = int(stats_period.total_seconds()) if stats_period is not None else 3600

                if comparison_delta is not None:
                    retention = quotas.get_event_retention(organization=organization)
                    comparison_start = params["start"] - comparison_delta
                    if retention and comparison_start < timezone.now() - timedelta(days=retention):
                        raise ValidationError("Comparison period is outside your retention window")

                # Backwards compatibility for incidents which uses the old
                # column aliases as it straddles both versions of events/discover.
                # We will need these aliases until discover2 flags are enabled for all
                # users.
                # We need these rollup columns to generate correct events-stats results
                column_map = {
                    "user_count": "count_unique(user)",
                    "event_count": "count()",
                    "epm()": "epm(%d)" % rollup,
                    "eps()": "eps(%d)" % rollup,
                    "tpm()": "tpm(%d)" % rollup,
                    "tps()": "tps(%d)" % rollup,
                }

                query_columns = [column_map.get(column, column) for column in columns]
            with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_query"):
                result = get_event_stats(
                    query_columns, query, params, rollup, zerofill_results, comparison_delta
                )

        serializer = SnubaTSResultSerializer(organization, None, request.user)

        with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_serialization"):
            # When the request is for top_events, result can be a SnubaTSResult in the event that
            # there were no top events found. In this case, result contains a zerofilled series
            # that acts as a placeholder.
            is_multiple_axis = len(query_columns) > 1
            if top_events > 0 and isinstance(result, dict):
                results = {}
                for key, event_result in result.items():
                    if is_multiple_axis:
                        results[key] = self.serialize_multiple_axis(
                            serializer,
                            event_result,
                            columns,
                            query_columns,
                            allow_partial_buckets,
                            zerofill_results=zerofill_results,
                        )
                    else:
                        # Need to get function alias if count is a field, but not the axis
                        results[key] = serializer.serialize(
                            event_result,
                            column=resolve_axis_column(query_columns[0]),
                            allow_partial_buckets=allow_partial_buckets,
                            zerofill_results=zerofill_results,
                        )
                serialized_result = results
            elif is_multiple_axis:
                serialized_result = self.serialize_multiple_axis(
                    serializer,
                    result,
                    columns,
                    query_columns,
                    allow_partial_buckets,
                    zerofill_results=zerofill_results,
                )
            else:
                extra_columns = None
                if comparison_delta:
                    extra_columns = ["comparisonCount"]
                serialized_result = serializer.serialize(
                    result,
                    resolve_axis_column(query_columns[0]),
                    allow_partial_buckets=allow_partial_buckets,
                    zerofill_results=zerofill_results,
                    extra_columns=extra_columns,
                )

            return serialized_result
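
The comparison-window guard in this example reduces to a small date computation: the comparison series starts comparison_delta before the requested start, and that point must not fall outside the retention window. A minimal sketch, assuming retention is expressed in days and timestamps are timezone-aware; comparison_outside_retention is an illustrative name, not a Sentry helper.

from datetime import datetime, timedelta, timezone


def comparison_outside_retention(start, comparison_delta, retention_days):
    # Reject the comparison if its start would land before the retention cutoff.
    comparison_start = start - comparison_delta
    cutoff = datetime.now(timezone.utc) - timedelta(days=retention_days)
    return comparison_start < cutoff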
Example #5
    def get(self, request, project, version):
        """
        Get a Project Release's Stats
        `````````````````````````````

        Returns the stats of a given release under a project.

        :pparam string organization_slug: the slug of the organization the
                                          release belongs to.
        :pparam string project_slug: the slug of the project to list the
                                     release files of.
        :pparam string version: the version identifier of the release.
        :auth: required
        """
        stats_type = request.GET.get("type") or "sessions"
        if stats_type not in ("users", "sessions"):
            return Response({"detail": "invalid stat"}, status=400)

        try:
            params = self.get_filter_params(request, project)
            rollup = get_rollup_from_request(
                request,
                params,
                "24h",
                ProjectEventsError(
                    "Your interval and date range would create too many results. "
                    "Use a larger interval, or a smaller date range."),
            )
            # The minimum interval is one hour on the server
            rollup = max(rollup, 3600)
        except ProjectEventsError as e:
            return Response({"detail": six.text_type(e)}, status=400)

        release = upsert_missing_release(project, version)
        if release is None:
            raise ResourceDoesNotExist

        stats, totals = get_project_release_stats(
            project_id=params["project_id"][0],
            release=version,
            stat=stats_type,
            rollup=rollup,
            start=params["start"],
            end=params["end"],
            environments=params.get("environment"),
        )

        users_breakdown = []
        for data in get_crash_free_breakdown(
                project_id=params["project_id"][0],
                release=version,
                environments=params.get("environment"),
                start=release.date_added,
        ):
            users_breakdown.append({
                "date": data["date"],
                "totalUsers": data["total_users"],
                "crashFreeUsers": data["crash_free_users"],
                "totalSessions": data["total_sessions"],
                "crashFreeSessions": data["crash_free_sessions"],
            })

        return Response(
            serialize({
                "stats": stats,
                "statTotals": totals,
                "usersBreakdown": users_breakdown
            }),
            status=200,
        )
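
The breakdown loop above only renames the snake_case fields coming back from the query to the camelCase keys exposed in the response. A standalone sketch of that reshaping, with the input shape taken directly from the example:

def format_breakdown_entry(data):
    # Map the snake_case keys returned by get_crash_free_breakdown to the
    # camelCase keys used in the API response payload.
    return {
        "date": data["date"],
        "totalUsers": data["total_users"],
        "crashFreeUsers": data["crash_free_users"],
        "totalSessions": data["total_sessions"],
        "crashFreeSessions": data["crash_free_sessions"],
    }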
Example #6
    def get_event_stats_data(
        self,
        request,
        organization,
        get_event_stats,
        top_events=False,
        query_column="count()",
        params=None,
        query=None,
    ):
        with self.handle_query_errors():
            with sentry_sdk.start_span(
                    op="discover.endpoint",
                    description="base.stats_query_creation"):
                columns = request.GET.getlist("yAxis", [query_column])
                if query is None:
                    query = request.GET.get("query")
                if params is None:
                    try:
                        # events-stats is still used by events v1 which doesn't require global views
                        params = self.get_snuba_params(
                            request, organization, check_global_views=False)
                    except NoProjects:
                        return {"data": []}

                rollup = get_rollup_from_request(
                    request,
                    params,
                    "1h",
                    InvalidSearchQuery(
                        "Your interval and date range would create too many results. "
                        "Use a larger interval, or a smaller date range."),
                )
                # Backwards compatibility for incidents which uses the old
                # column aliases as it straddles both versions of events/discover.
                # We will need these aliases until discover2 flags are enabled for all
                # users.
                column_map = {
                    "user_count": "count_unique(user)",
                    "event_count": "count()",
                    "epm()": "epm(%d)" % rollup,
                    "eps()": "eps(%d)" % rollup,
                }
                query_columns = [
                    column_map.get(column, column) for column in columns
                ]
            with sentry_sdk.start_span(op="discover.endpoint",
                                       description="base.stats_query"):
                result = get_event_stats(query_columns, query, params, rollup)

        serializer = SnubaTSResultSerializer(organization, None, request.user)

        with sentry_sdk.start_span(op="discover.endpoint",
                                   description="base.stats_serialization"):
            if top_events:
                results = {}
                for key, event_result in six.iteritems(result):
                    if len(query_columns) > 1:
                        results[key] = self.serialize_multiple_axis(
                            serializer, event_result, columns, query_columns)
                    else:
                        # Need to get function alias if count is a field, but not the axis
                        results[key] = serializer.serialize(
                            event_result,
                            column=get_function_alias(query_columns[0]))
                return results
            elif len(query_columns) > 1:
                return self.serialize_multiple_axis(serializer, result,
                                                    columns, query_columns)
            else:
                return serializer.serialize(result)
Example #7
    def get_event_stats_data(
        self,
        request,
        organization,
        get_event_stats,
        top_events=0,
        query_column="count()",
        params=None,
        query=None,
        allow_partial_buckets=False,
    ):
        with self.handle_query_errors():
            with sentry_sdk.start_span(
                    op="discover.endpoint",
                    description="base.stats_query_creation"):
                columns = request.GET.getlist("yAxis", [query_column])
                if query is None:
                    query = request.GET.get("query")
                if params is None:
                    try:
                        # events-stats is still used by events v1 which doesn't require global views
                        params = self.get_snuba_params(
                            request, organization, check_global_views=False)
                    except NoProjects:
                        return {"data": []}

                rollup = get_rollup_from_request(
                    request,
                    params,
                    default_interval=None,
                    error=InvalidSearchQuery(
                        "Your interval and date range would create too many results. "
                        "Use a larger interval, or a smaller date range."),
                    top_events=top_events,
                )
                # Backwards compatibility for incidents which uses the old
                # column aliases as it straddles both versions of events/discover.
                # We will need these aliases until discover2 flags are enabled for all
                # users.
                # We need these rollup columns to generate correct events-stats results
                column_map = {
                    "user_count": "count_unique(user)",
                    "event_count": "count()",
                    "epm()": "epm(%d)" % rollup,
                    "eps()": "eps(%d)" % rollup,
                    "tpm()": "tpm(%d)" % rollup,
                    "tps()": "tps(%d)" % rollup,
                }

                query_columns = [
                    column_map.get(column, column) for column in columns
                ]
            with sentry_sdk.start_span(op="discover.endpoint",
                                       description="base.stats_query"):
                result = get_event_stats(query_columns, query, params, rollup)

        serializer = SnubaTSResultSerializer(organization, None, request.user)

        with sentry_sdk.start_span(op="discover.endpoint",
                                   description="base.stats_serialization"):
            # When the request is for top_events, result can be a SnubaTSResult in the event that
            # there were no top events found. In this case, result contains a zerofilled series
            # that acts as a placeholder.
            if top_events > 0 and isinstance(result, dict):
                results = {}
                for key, event_result in result.items():
                    if len(query_columns) > 1:
                        results[key] = self.serialize_multiple_axis(
                            serializer, event_result, columns, query_columns,
                            allow_partial_buckets)
                    else:
                        # Need to get function alias if count is a field, but not the axis
                        results[key] = serializer.serialize(
                            event_result,
                            column=resolve_axis_column(query_columns[0]),
                            allow_partial_buckets=allow_partial_buckets,
                        )
                return results
            elif len(query_columns) > 1:
                return self.serialize_multiple_axis(serializer, result,
                                                    columns, query_columns,
                                                    allow_partial_buckets)
            else:
                return serializer.serialize(
                    result,
                    resolve_axis_column(query_columns[0]),
                    allow_partial_buckets=allow_partial_buckets,
                )