Example 1
0
    def test_conversion_of_environment_filter_key(self):
        """The ``environment`` filter key is converted correctly: an
        unrelated environment matches nothing, while the event's own
        environment matches exactly one row."""
        # A freshly created environment has no events attached.
        other_env = self.create_environment(self.project)
        empty_result = transform_aliases_and_query(
            selected_columns=["id", "message"],
            filter_keys={
                "environment": [other_env.id],
                "project_id": [self.project.id],
            },
        )
        assert len(empty_result["data"]) == 0

        # The environment the test event belongs to yields the single row.
        matched_result = transform_aliases_and_query(
            selected_columns=["id", "message"],
            filter_keys={
                "environment": [self.environment.id],
                "project_id": [self.project.id],
            },
        )
        assert len(matched_result["data"]) == 1
    def get_v1_results(self, request, organization):
        """Serve the legacy (v1) events time-series endpoint.

        Parses the request into legacy Snuba query arguments, runs a
        time-bucketed aliased query, and returns the serialized series.

        Raises:
            ParseError: when the search query in the request is invalid.
        """
        try:
            snuba_args = self.get_snuba_query_args_legacy(
                request, organization)
        except (OrganizationEventsError, InvalidSearchQuery) as exc:
            # str() replaces the legacy six.text_type py2/3 shim, matching
            # the newer implementation of this endpoint elsewhere in the
            # codebase.
            raise ParseError(detail=str(exc))
        except NoProjects:
            # No accessible projects is an empty series, not an error.
            return Response({"data": []})

        snuba_args = self.get_field(request, snuba_args)
        rollup = self.get_rollup(request, snuba_args)

        result = transform_aliases_and_query(
            aggregations=snuba_args.get("aggregations"),
            conditions=snuba_args.get("conditions"),
            filter_keys=snuba_args.get("filter_keys"),
            start=snuba_args.get("start"),
            end=snuba_args.get("end"),
            orderby="time",
            groupby=["time"],
            rollup=rollup,
            referrer="api.organization-events-stats",
            limit=10000,
        )
        serializer = SnubaTSResultSerializer(organization, None, request.user)
        return Response(
            serializer.serialize(
                snuba.SnubaTSResult(result, snuba_args["start"],
                                    snuba_args["end"], rollup)),
            status=200,
        )
Example 3
0
    def do_query(self, projects, request, **kwargs):
        """Execute a discover query, rewriting ``project.name`` references
        to ``project.id`` (the column Snuba understands) before querying."""
        # Snapshot the caller's query before mutating kwargs in place; the
        # snapshot is used when mapping results back for the response.
        requested_query = deepcopy(kwargs)

        selected_columns = kwargs["selected_columns"]
        groupby_columns = kwargs["groupby"]

        # Selections and groupings get the same project.name -> project.id
        # rewrite; membership is checked against the untouched snapshot.
        for columns, requested in (
            (selected_columns, requested_query["selected_columns"]),
            (groupby_columns, requested_query["groupby"]),
        ):
            if "project.name" in requested:
                columns.remove("project.name")
                if "project.id" not in columns:
                    columns.append("project.id")

        # Aggregations reference the column in position 1.
        for aggregation in kwargs["aggregations"]:
            if aggregation[1] == "project.name":
                aggregation[1] = "project.id"

        if kwargs["aggregations"]:
            # Aggregated queries come back as one result set.
            snuba_results = transform_aliases_and_query(referrer="discover", **kwargs)
            return Response(
                self.handle_results(snuba_results, requested_query, projects), status=200
            )

        # Non-aggregated queries are served through offset pagination.
        data_fn = partial(transform_aliases_and_query, referrer="discover", **kwargs)
        return self.paginate(
            request=request,
            on_results=lambda results: self.handle_results(results, requested_query, projects),
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            max_per_page=1000,
        )
Example 4
0
 def test_field_aliasing_in_conditions(self):
     """Aliased fields used inside query conditions resolve correctly."""
     query_result = transform_aliases_and_query(
         selected_columns=["project.id", "user.email"],
         conditions=[["user.email", "=", "*****@*****.**"]],
         filter_keys={"project_id": [self.project.id]},
     )
     rows = query_result["data"]
     assert len(rows) == 1
     row = rows[0]
     assert row["project.id"] == self.project.id
     assert row["user.email"] == "*****@*****.**"
Example 5
0
 def test_field_aliasing_in_aggregate_functions_and_groupby(self):
     """Aliased fields work inside aggregations and groupby clauses."""
     query_result = transform_aliases_and_query(
         selected_columns=["project.id"],
         aggregations=[["uniq", "user.email", "uniq_email"]],
         filter_keys={"project_id": [self.project.id]},
         groupby=["project.id"],
     )
     rows = query_result["data"]
     assert len(rows) == 1
     first = rows[0]
     assert first["project.id"] == self.project.id
     assert first["uniq_email"] == 1
Example 6
0
 def test_autoconversion_of_time_column(self):
     """Grouping by ``time`` yields integer epoch bucket values."""
     query_result = transform_aliases_and_query(
         aggregations=[["count", "", "count"]],
         filter_keys={"project_id": [self.project.id]},
         start=before_now(minutes=5),
         end=before_now(),
         groupby=["time"],
         orderby=["time"],
         rollup=3600,
     )
     rows = query_result["data"]
     # The final bucket holds the single stored event.
     last_bucket = rows[-1]
     assert isinstance(last_bucket["time"], int)
     assert last_bucket["count"] == 1
Example 7
0
    def test_autoconversion_of_time_column(self):
        """Time buckets are integers even when some buckets are empty."""
        query_result = transform_aliases_and_query(
            aggregations=[["count", None, "count"]],
            filter_keys={"project_id": [self.project.id]},
            start=before_now(minutes=10),
            end=before_now(minutes=-1),
            groupby=["time"],
            orderby=["time"],
            rollup=3600,
        )

        # If the date range spans across two hours, then one row will have results
        # and the other one won't.
        for bucket in query_result["data"]:
            assert isinstance(bucket["time"], int)
            if "count" in bucket:
                assert bucket["count"] == 1
Example 8
0
    def get_v1_results(self, request, organization):
        """Serve the legacy (v1) events time-series endpoint: parse the
        request into Snuba arguments, run a time-bucketed query, and
        return the serialized series."""
        try:
            snuba_args = self.get_snuba_query_args_legacy(
                request, organization)
        except InvalidSearchQuery as exc:
            raise ParseError(detail=str(exc))
        except NoProjects:
            # Nothing accessible to query: respond with an empty series.
            return Response({"data": []})

        snuba_args = self.get_field(request, snuba_args)

        # Raised by get_rollup_from_request if interval x range is too dense.
        too_many_results = InvalidSearchQuery(
            "Your interval and date range would create too many results. "
            "Use a larger interval, or a smaller date range.")
        rollup = get_rollup_from_request(
            request,
            snuba_args,
            default_interval=None,
            error=too_many_results,
        )

        result = transform_aliases_and_query(
            aggregations=snuba_args.get("aggregations"),
            conditions=snuba_args.get("conditions"),
            filter_keys=snuba_args.get("filter_keys"),
            start=snuba_args.get("start"),
            end=snuba_args.get("end"),
            orderby="time",
            groupby=["time"],
            rollup=rollup,
            referrer="api.organization-events-stats",
            limit=10000,
        )
        ts_result = snuba.SnubaTSResult(
            result, snuba_args["start"], snuba_args["end"], rollup)
        serializer = SnubaTSResultSerializer(organization, None, request.user)
        return Response(serializer.serialize(ts_result), status=200)