Example #1
    def test_aggregate_function(self):
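        # A two-hour window at an hourly rollup yields three buckets (empty
        # buckets included); only the bucket holding the stored events
        # reports a count.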
        result = discover.timeseries_query(
            selected_columns=["count()"],
            query="",
            params={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "project_id": [self.project.id],
            },
            rollup=3600,
        )
        assert len(result.data["data"]) == 3
        assert [2] == [
            val["count"] for val in result.data["data"] if "count" in val
        ]

        result = discover.timeseries_query(
            selected_columns=["count_unique(user)"],
            query="",
            params={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "project_id": [self.project.id],
            },
            rollup=3600,
        )
        assert len(result.data["data"]) == 3
        keys = []
        for row in result.data["data"]:
            keys.extend(list(row.keys()))
        assert "count" in keys
        assert "time" in keys
Example #2
 def test_invalid_field_in_function(self):
     with pytest.raises(InvalidSearchQuery):
         discover.timeseries_query(
             selected_columns=["min(transaction)"],
             query="transaction:api.issue.delete",
             params={"project_id": [self.project.id]},
             rollup=1800,
         )
Example #3
 def test_missing_start_and_end(self):
     with pytest.raises(InvalidSearchQuery) as err:
         discover.timeseries_query(
             selected_columns=["count()"],
             query="transaction:api.issue.delete",
             params={"project_id": [self.project.id]},
             rollup=1800,
         )
     assert "without a start and end" in six.text_type(err)
Example #4
 def test_no_aggregations(self):
     with pytest.raises(InvalidSearchQuery) as err:
         discover.timeseries_query(
             selected_columns=["transaction", "title"],
             query="transaction:api.issue.delete",
             params={
                 "start": self.day_ago,
                 "end": self.day_ago + timedelta(hours=2),
                 "project_id": [self.project.id],
             },
             rollup=1800,
         )
     assert "no aggregation" in six.text_type(err)
 def get_event_stats(
     query_columns: Sequence[str],
     query: str,
     params: Dict[str, str],
     rollup: int,
     zerofill_results: bool,
     comparison_delta: Optional[datetime],
 ) -> SnubaTSResult:
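     # This helper is a closure: top_events, organization, request, self, and
     # referrer are free variables resolved from the enclosing endpoint scope,
     # not parameters of this signature.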
     if top_events > 0:
         return discover.top_events_timeseries(
             timeseries_columns=query_columns,
             selected_columns=self.get_field_list(
                 organization, request),
             equations=self.get_equation_list(organization, request),
             user_query=query,
             params=params,
             orderby=self.get_orderby(request),
             rollup=rollup,
             limit=top_events,
             organization=organization,
             referrer=referrer + ".find-topn",
             allow_empty=False,
             zerofill_results=zerofill_results,
             include_other=self.has_top_events(organization, request),
         )
     return discover.timeseries_query(
         selected_columns=query_columns,
         query=query,
         params=params,
         rollup=rollup,
         referrer=referrer,
         zerofill_results=zerofill_results,
         comparison_delta=comparison_delta,
     )
Example #6
    def get(self, request: Request, organization) -> Response:
        if not self.has_feature(organization, request):
            return Response(status=404)

        start, end = get_date_range_from_params(request.GET)
        time_params = get_time_params(start, end)
        query_params = self.get_snuba_params(request, organization)
        query = request.GET.get("query")
        query = f"{query} event.type:transaction" if query else "event.type:transaction"

        datetime_format = "%Y-%m-%d %H:%M:%S"
        ads_request = {
            "query": query,
            "params": query_params,
            "start": start.strftime(datetime_format),
            "end": end.strftime(datetime_format),
            "granularity": time_params.granularity,
        }

        # overwrite relevant time params
        query_params["start"] = time_params.query_start
        query_params["end"] = time_params.query_end

        with self.handle_query_errors():
            snuba_response = timeseries_query(
                selected_columns=["count()"],
                query=query,
                params=query_params,
                rollup=time_params.granularity,
                referrer="transaction-anomaly-detection",
                zerofill_results=False,
            )
            ads_request["data"] = snuba_response.data["data"]

            return get_anomalies(ads_request)
Example #7
    def get(self, request, organization):
        if not features.has(
                "organizations:events-v2", organization, actor=request.user):
            return self.get_v1_results(request, organization)

        try:
            column = request.GET.get("yAxis", "count()")
            # Backwards compatibility for incidents which uses the old
            # column aliases as it straddles both versions of events/discover.
            # We will need these aliases until discover2 flags are enabled for all
            # users.
            if column == "user_count":
                column = "count_unique(user)"
            elif column == "event_count":
                column = "count()"

            params = self.get_filter_params(request, organization)
            result = discover.timeseries_query(
                selected_columns=[column],
                query=request.GET.get("query"),
                params=params,
                rollup=self.get_rollup(request),
                reference_event=self.reference_event(request, organization),
                referrer="api.organization-event-stats",
            )
        except InvalidSearchQuery as err:
            raise ParseError(detail=six.text_type(err))
        serializer = SnubaTSResultSerializer(organization, None, request.user)
        return Response(serializer.serialize(result), status=200)
Example #8
 def get_event_stats(query_columns, query, params, rollup,
                     zerofill_results):
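     # As in the typed variant above, top_events, organization, request, and
     # self come from the enclosing endpoint scope.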
     if top_events > 0:
         return discover.top_events_timeseries(
             timeseries_columns=query_columns,
             selected_columns=self.get_field_list(
                 organization, request),
             equations=self.get_equation_list(organization, request),
             user_query=query,
             params=params,
             orderby=self.get_orderby(request),
             rollup=rollup,
             limit=top_events,
             organization=organization,
             referrer="api.organization-event-stats.find-topn",
             allow_empty=False,
             zerofill_results=zerofill_results,
             include_other=self.has_top_events(organization, request),
         )
     return discover.timeseries_query(
         selected_columns=query_columns,
         query=query,
         params=params,
         rollup=rollup,
         referrer="api.organization-event-stats",
         zerofill_results=zerofill_results,
     )
Example #9
 def get_event_stats(query_columns, query, params, rollup,
                     reference_event):
     return discover.timeseries_query(
         selected_columns=query_columns,
         query=query,
         params=params,
         rollup=rollup,
         reference_event=reference_event,
         referrer="api.organization-event-stats",
     )
Example #10
 def test_field_alias(self):
     result = discover.timeseries_query(
         selected_columns=["p95"],
         query="event.type:transaction transaction:api.issue.delete",
         params={
             "start": self.day_ago,
             "end": self.day_ago + timedelta(hours=2),
             "project_id": [self.project.id],
         },
         rollup=3600,
     )
     assert len(result.data["data"]) == 3
Example #11
 def test_aggregate_function(self):
     result = discover.timeseries_query(
         selected_columns=["count()"],
         query="",
         params={
             "start": self.day_ago,
             "end": self.day_ago + timedelta(hours=2),
             "project_id": [self.project.id],
         },
         rollup=3600,
     )
     assert len(result.data) == 3
     assert [2] == [val["count"] for val in result.data if "count" in val]
Example #12
    def get(self, request, organization):
        if not features.has("organizations:discover-basic",
                            organization,
                            actor=request.user):
            return self.get_v1_results(request, organization)

        try:
            columns = request.GET.getlist("yAxis", ["count()"])
            params = self.get_filter_params(request, organization)
            rollup = self.get_rollup(request, params)
            # Backwards compatibility for incidents which uses the old
            # column aliases as it straddles both versions of events/discover.
            # We will need these aliases until discover2 flags are enabled for all
            # users.
            column_map = {
                "user_count": "count_unique(user)",
                "event_count": "count()",
                "rpm()": "rpm(%d)" % rollup,
                "rps()": "rps(%d)" % rollup,
            }
            query_columns = [
                column_map.get(column, column) for column in columns
            ]

            result = discover.timeseries_query(
                selected_columns=query_columns,
                query=request.GET.get("query"),
                params=params,
                rollup=rollup,
                reference_event=self.reference_event(request, organization,
                                                     params.get("start"),
                                                     params.get("end")),
                referrer="api.organization-event-stats",
            )
        except InvalidSearchQuery as err:
            raise ParseError(detail=six.text_type(err))
        serializer = SnubaTSResultSerializer(organization, None, request.user)
        if len(columns) > 1:
            # Return with requested yAxis as the key
            data = {
                column: serializer.serialize(
                    result,
                    get_aggregate_alias(
                        AGGREGATE_PATTERN.search(query_column)))
                for column, query_column in zip(columns, query_columns)
            }
        else:
            data = serializer.serialize(result)
        return Response(data, status=200)
Example #13
 def test_zerofilling(self):
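     # Zerofilling pads intervals that have no events, so a three-hour window
     # at an hourly rollup still produces four rows, only two of which carry
     # counts.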
     result = discover.timeseries_query(
         selected_columns=["count()"],
         query="",
         params={
             "start": self.day_ago,
             "end": self.day_ago + timedelta(hours=3),
             "project_id": [self.project.id],
         },
         rollup=3600,
     )
     assert len(result.data["data"]) == 4, "Should have empty results"
     assert [2, 1] == [
         val["count"] for val in result.data["data"] if "count" in val
     ], result.data["data"]
Example #14
    def get(self, request, organization):
        if not features.has(
                "organizations:events-v2", organization, actor=request.user):
            return self.get_v1_results(request, organization)

        try:
            params = self.get_filter_params(request, organization)
            result = discover.timeseries_query(
                selected_columns=[request.GET.get("yAxis", "count()")],
                query=request.GET.get("query"),
                params=params,
                rollup=self.get_rollup(request),
                reference_event=self.reference_event(request, organization),
                referrer="api.organization-event-stats",
            )
        except InvalidSearchQuery as err:
            raise ParseError(detail=six.text_type(err))
        serializer = SnubaTSResultSerializer(organization, None, request.user)
        return Response(serializer.serialize(result), status=200)
Example #15
 def get_event_stats(query_columns, query, params, rollup):
     if top_events:
         return discover.top_events_timeseries(
             timeseries_columns=query_columns,
             selected_columns=request.GET.getlist("field")[:],
             user_query=query,
             params=params,
             orderby=self.get_orderby(request),
             rollup=rollup,
             limit=limit,
             organization=organization,
             referrer="api.organization-event-stats.find-topn",
         )
     return discover.timeseries_query(
         selected_columns=query_columns,
         query=query,
         params=params,
         rollup=rollup,
         referrer="api.organization-event-stats",
     )
Example #16
 def test_reference_event(self):
     ref = discover.ReferenceEvent(
         self.organization,
         "{}:{}".format(self.project.slug, "a" * 32),
         ["message", "count()", "last_seen"],
     )
     result = discover.timeseries_query(
         selected_columns=["count()"],
         query="",
         params={
             "start": self.day_ago,
             "end": self.day_ago + timedelta(hours=3),
             "project_id": [self.project.id],
         },
         reference_event=ref,
         rollup=3600,
     )
     assert len(result.data) == 4
     assert [1, 1] == [val["count"] for val in result.data if "count" in val]
Example #17
def timeseries_query(
    selected_columns: Sequence[str],
    query: str,
    params: Dict[str, str],
    rollup: int,
    referrer: str,
    zerofill_results: bool = True,
    comparison_delta: Optional[timedelta] = None,
    functions_acl: Optional[List[str]] = None,
    use_snql: Optional[bool] = False,
) -> SnubaTSResult:
    """
    High-level API for doing arbitrary user timeseries queries against events.
    This API should match that of sentry.snuba.discover.timeseries_query.
    """
    metrics_compatible = False
    equations, columns = categorize_columns(selected_columns)
    if comparison_delta is None and not equations:
        metrics_compatible = True

    if metrics_compatible:
        try:
            metrics_query = TimeseriesMetricQueryBuilder(
                params,
                rollup,
                query=query,
                selected_columns=columns,
                functions_acl=functions_acl,
            )
            result = metrics_query.run_query(referrer + ".metrics-enhanced")
            result = discover.transform_results(result, metrics_query.function_alias_map, {}, None)
            result["data"] = (
                discover.zerofill(
                    result["data"],
                    params["start"],
                    params["end"],
                    rollup,
                    "time",
                )
                if zerofill_results
                else result["data"]
            )
            return SnubaTSResult(
                {"data": result["data"], "isMetricsData": True},
                params["start"],
                params["end"],
                rollup,
            )
        # raise Invalid Queries since the same thing will happen with discover
        except InvalidSearchQuery as error:
            raise error
        # any remaining errors mean we should try again with discover
        except IncompatibleMetricsQuery:
            metrics_compatible = False

    # This isn't a query we can enhance with metrics
    if not metrics_compatible:
        return discover.timeseries_query(
            selected_columns,
            query,
            params,
            rollup,
            referrer,
            zerofill_results,
            comparison_delta,
            functions_acl,
            use_snql,
        )
    return SnubaTSResult()