def test_conditions_order_and_groupby_aliasing(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "transaction_name"}, {"name": "duration"}],
        "data": [{"transaction_name": "api.do_things", "duration": 200}],
    }
    transform_aliases_and_query(
        selected_columns=["transaction", "transaction.duration"],
        conditions=[
            ["transaction.duration", "=", 200],
            ["time", ">", "2019-09-23"],
            ["http.method", "=", "GET"],
        ],
        aggregations=[["count", "", "count"]],
        groupby=["transaction.op"],
        orderby=["-timestamp", "-count"],
        filter_keys={"project_id": [self.project.id]},
    )
    mock_query.assert_called_with(
        selected_columns=["transaction_name", "duration"],
        conditions=[
            ["duration", "=", 200],
            ["bucketed_end", ">", "2019-09-23"],
            ["tags[http.method]", "=", "GET"],
        ],
        aggregations=[["count", "", "count"]],
        filter_keys={"project_id": [self.project.id]},
        groupby=["transaction_op"],
        orderby=["-finish_ts", "-count"],
        dataset=Dataset.Transactions,
        arrayjoin=None,
        end=None,
        start=None,
        having=None,
    )
def test_condition_reformat_nested_conditions(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "id"}, {"name": "duration"}],
        "data": [{"id": "a" * 32, "duration": 200}],
    }
    transform_aliases_and_query(
        skip_conditions=True,
        selected_columns=["id", "transaction.duration"],
        conditions=[[["timestamp", ">", "2019-09-26T12:13:14"], ["id", "=", "a" * 32]]],
        filter_keys={"project_id": [self.project.id]},
    )
    mock_query.assert_called_with(
        selected_columns=["event_id", "duration"],
        conditions=[
            [
                ["finish_ts", ">", "2019-09-26T12:13:14"],
                ["event_id", "=", "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"],
            ]
        ],
        filter_keys={"project_id": [self.project.id]},
        dataset=Dataset.Transactions,
        aggregations=None,
        arrayjoin=None,
        end=None,
        start=None,
        having=None,
        orderby=None,
        groupby=None,
    )
def test_condition_not_remove_type_csp(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "transaction_name"}, {"name": "duration"}],
        "data": [{"transaction_name": "api.do_things", "duration": 200}],
    }
    transform_aliases_and_query(
        selected_columns=["transaction", "transaction.duration"],
        conditions=[
            ["event.type", "=", "transaction"],
            ["type", "=", "csp"],
            ["duration", ">", 200],
        ],
        groupby=["transaction.op"],
        filter_keys={"project_id": [self.project.id]},
    )
    mock_query.assert_called_with(
        selected_columns=["transaction_name", "duration"],
        conditions=[["tags[type]", "=", "csp"], ["duration", ">", 200]],
        filter_keys={"project_id": [self.project.id]},
        groupby=["transaction_op"],
        dataset=Dataset.Transactions,
        aggregations=None,
        arrayjoin=None,
        end=None,
        start=None,
        having=None,
        orderby=None,
    )
def test_conditions_nested_function_aliasing(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "transaction_name"}],
        "data": [{"transaction_name": "api.do_things"}],
    }
    transform_aliases_and_query(
        selected_columns=["transaction"],
        conditions=[
            ["event.type", "=", "transaction"],
            ["match", [["ifNull", ["tags[user_email]", ""]], r"'(?i)^.*\@sentry\.io$'"]],
            [["positionCaseInsensitive", ["message", "'recent-searches'"]], "!=", 0],
        ],
        aggregations=[["count", "", "count"]],
        filter_keys={"project_id": [self.project.id]},
    )
    mock_query.assert_called_with(
        selected_columns=["transaction_name"],
        conditions=[
            ["match", [["ifNull", ["tags[user_email]", ""]], r"'(?i)^.*\@sentry\.io$'"]],
            [["positionCaseInsensitive", ["transaction_name", "'recent-searches'"]], "!=", 0],
        ],
        aggregations=[["count", "", "count"]],
        filter_keys={"project_id": [self.project.id]},
        dataset=Dataset.Transactions,
        groupby=None,
        orderby=None,
        arrayjoin=None,
        end=None,
        start=None,
        having=None,
    )
def test_orderby_aliasing(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "transaction_name"}, {"name": "duration"}],
        "data": [{"transaction_name": "api.do_things", "duration": 200}],
    }
    transform_aliases_and_query(
        selected_columns=["transaction", "transaction.duration"],
        filter_keys={"project_id": [self.project.id]},
        orderby=["timestamp"],
    )
    mock_query.assert_called_with(
        selected_columns=["transaction_name", "duration"],
        filter_keys={"project_id": [self.project.id]},
        dataset=Dataset.Transactions,
        orderby=["finish_ts"],
        aggregations=None,
        arrayjoin=None,
        end=None,
        start=None,
        conditions=None,
        groupby=None,
        having=None,
    )
def test_selected_columns_opaque_string(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "transaction"}, {"name": "p95"}],
        "data": [{"transaction": "api.do_things", "p95": 200}],
    }
    transform_aliases_and_query(
        selected_columns=["transaction"],
        aggregations=[
            ["quantile(0.95)(duration)", "", "p95"],
            ["uniq", "transaction", "uniq_transaction"],
        ],
        filter_keys={"project_id": [self.project.id]},
    )
    mock_query.assert_called_with(
        selected_columns=["transaction_name"],
        aggregations=[
            ["quantile(0.95)(duration)", "", "p95"],
            ["uniq", "transaction_name", "uniq_transaction"],
        ],
        filter_keys={"project_id": [self.project.id]},
        dataset=Dataset.Transactions,
        arrayjoin=None,
        end=None,
        start=None,
        conditions=None,
        groupby=None,
        having=None,
        orderby=None,
    )
def test_selected_columns_aliasing_in_function(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "transaction"}, {"name": "duration"}],
        "data": [{"transaction": "api.do_things", "duration": 200}],
    }
    transform_aliases_and_query(
        selected_columns=["transaction", "transaction.duration"],
        aggregations=[
            ["argMax", ["id", "transaction.duration"], "longest"],
            ["uniq", "transaction", "uniq_transaction"],
        ],
        filter_keys={"project_id": [self.project.id]},
    )
    mock_query.assert_called_with(
        selected_columns=["transaction_name", "duration"],
        aggregations=[
            ["argMax", ["event_id", "duration"], "longest"],
            ["uniq", "transaction_name", "uniq_transaction"],
        ],
        filter_keys={"project_id": [self.project.id]},
        dataset=Dataset.Transactions,
        arrayjoin=None,
        end=None,
        start=None,
        conditions=None,
        groupby=None,
        having=None,
        orderby=None,
    )
def test_condition_transform_skip_conditions(self, mock_query):
    mock_query.return_value = {
        "meta": [{"name": "transaction_name"}, {"name": "duration"}],
        "data": [{"transaction_name": "api.do_things", "duration": 200}],
    }
    transform_aliases_and_query(
        skip_conditions=True,
        selected_columns=["transaction", "transaction.duration"],
        conditions=[["http_method", "=", "GET"]],
        groupby=["transaction.op"],
        filter_keys={"project_id": [self.project.id]},
    )
    mock_query.assert_called_with(
        selected_columns=["transaction_name", "duration"],
        conditions=[["tags[http_method]", "=", "GET"]],
        filter_keys={"project_id": [self.project.id]},
        groupby=["transaction_op"],
        dataset=Dataset.Transactions,
        aggregations=None,
        arrayjoin=None,
        end=None,
        start=None,
        having=None,
        orderby=None,
    )
def test_conversion_of_environment_filter_key(self):
    result = transform_aliases_and_query(
        selected_columns=["id", "message"],
        filter_keys={
            "environment": [self.create_environment(self.project).id],
            "project_id": [self.project.id],
        },
    )
    assert len(result["data"]) == 0

    result = transform_aliases_and_query(
        selected_columns=["id", "message"],
        filter_keys={"environment": [self.environment.id], "project_id": [self.project.id]},
    )
    assert len(result["data"]) == 1
def get_v1_results(self, request, organization):
    try:
        snuba_args = self.get_snuba_query_args_legacy(request, organization)
    except (OrganizationEventsError, InvalidSearchQuery) as exc:
        raise ParseError(detail=six.text_type(exc))
    except NoProjects:
        return Response({"data": []})

    rollup = self.get_rollup(request)
    snuba_args = self.get_field(request, snuba_args)

    result = snuba.transform_aliases_and_query(
        aggregations=snuba_args.get("aggregations"),
        conditions=snuba_args.get("conditions"),
        filter_keys=snuba_args.get("filter_keys"),
        start=snuba_args.get("start"),
        end=snuba_args.get("end"),
        orderby="time",
        groupby=["time"],
        rollup=rollup,
        referrer="api.organization-events-stats",
        limit=10000,
    )
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            snuba.SnubaTSResult(result, snuba_args["start"], snuba_args["end"], rollup)
        ),
        status=200,
    )
def get_v2(self, request, organization):
    try:
        params = self.get_filter_params(request, organization)
        snuba_args = self.get_snuba_query_args_v2(request, organization, params)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        return Response([])

    filters = snuba_args.get('filter_keys', {})
    has_global_views = features.has(
        'organizations:global-views', organization, actor=request.user)
    if not has_global_views and len(filters.get('project_id', [])) > 1:
        return Response({
            'detail': 'You cannot view events from multiple projects.'
        }, status=400)

    data_fn = partial(
        lambda **kwargs: transform_aliases_and_query(skip_conditions=True, **kwargs)['data'],
        referrer='api.organization-events-v2',
        **snuba_args
    )

    return self.paginate(
        request=request,
        paginator=GenericOffsetPaginator(data_fn=data_fn),
        on_results=lambda results: self.handle_results(
            request, organization, params['project_id'], results),
    )
def do_query(self, projects, request, **kwargs): requested_query = deepcopy(kwargs) selected_columns = kwargs["selected_columns"] groupby_columns = kwargs["groupby"] if "project.name" in requested_query["selected_columns"]: selected_columns.remove("project.name") if "project.id" not in selected_columns: selected_columns.append("project.id") if "project.name" in requested_query["groupby"]: groupby_columns.remove("project.name") if "project.id" not in groupby_columns: groupby_columns.append("project.id") for aggregation in kwargs["aggregations"]: if aggregation[1] == "project.name": aggregation[1] = "project.id" if not kwargs["aggregations"]: data_fn = partial(snuba.transform_aliases_and_query, referrer="discover", **kwargs) return self.paginate( request=request, on_results=lambda results: self.handle_results(results, requested_query, projects), paginator=GenericOffsetPaginator(data_fn=data_fn), max_per_page=1000, ) else: snuba_results = snuba.transform_aliases_and_query(referrer="discover", **kwargs) return Response( self.handle_results(snuba_results, requested_query, projects), status=200 )
def get(self, request, organization):
    if not features.has('organizations:events-v2', organization, actor=request.user):
        return self.get_legacy(request, organization)

    try:
        params = self.get_filter_params(request, organization)
        snuba_args = self.get_snuba_query_args(request, organization, params)

        fields = snuba_args.get('selected_columns')
        groupby = snuba_args.get('groupby', [])

        if not fields and not groupby:
            return Response({'detail': 'No fields or groupings provided'}, status=400)

        if any(field for field in groupby if field not in ALLOWED_GROUPINGS):
            message = ('Invalid groupby value requested. Allowed values are ' +
                       ', '.join(ALLOWED_GROUPINGS))
            return Response({'detail': message}, status=400)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        return Response([])

    filters = snuba_args.get('filter_keys', {})
    has_global_views = features.has(
        'organizations:global-views', organization, actor=request.user)
    if not has_global_views and len(filters.get('project_id', [])) > 1:
        return Response({
            'detail': 'You cannot view events from multiple projects.'
        }, status=400)

    data_fn = partial(
        lambda **kwargs: transform_aliases_and_query(skip_conditions=True, **kwargs)['data'],
        referrer='api.organization-events-v2',
        **snuba_args
    )

    try:
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=lambda results: self.handle_results(
                request, organization, params['project_id'], results),
        )
    except SnubaError as error:
        logger.info(
            'organization.events.snuba-error',
            extra={
                'organization_id': organization.id,
                'user_id': request.user.id,
                'error': six.text_type(error),
            }
        )
        return Response({'detail': 'Invalid query.'}, status=400)
def test_field_aliasing_in_conditions(self):
    result = transform_aliases_and_query(
        selected_columns=["project.id", "user.email"],
        conditions=[["user.email", "=", "*****@*****.**"]],
        filter_keys={"project_id": [self.project.id]},
    )
    data = result["data"]
    assert len(data) == 1
    assert data[0]["project.id"] == self.project.id
    assert data[0]["user.email"] == "*****@*****.**"
def test_field_aliasing_in_aggregate_functions_and_groupby(self):
    result = transform_aliases_and_query(
        selected_columns=["project.id"],
        aggregations=[["uniq", "user.email", "uniq_email"]],
        filter_keys={"project_id": [self.project.id]},
        groupby=["project.id"],
    )
    data = result["data"]
    assert len(data) == 1
    assert data[0]["project.id"] == self.project.id
    assert data[0]["uniq_email"] == 1
def test_autoconversion_of_time_column(self):
    result = transform_aliases_and_query(
        aggregations=[["count", "", "count"]],
        filter_keys={"project_id": [self.project.id]},
        start=before_now(minutes=5),
        end=before_now(),
        groupby=["time"],
        orderby=["time"],
        rollup=3600,
    )
    data = result["data"]
    assert isinstance(data[-1]["time"], int)
    assert data[-1]["count"] == 1
def get(self, request, organization): if not features.has( "organizations:events-v2", organization, actor=request.user): return Response(status=404) try: params = self.get_filter_params(request, organization) snuba_args = self.get_snuba_query_args(request, organization, params) if not snuba_args.get("selected_columns") and not snuba_args.get( "aggregations"): return Response({"detail": "No fields provided"}, status=400) except OrganizationEventsError as exc: return Response({"detail": exc.message}, status=400) except NoProjects: return Response([]) filters = snuba_args.get("filter_keys", {}) has_global_views = features.has("organizations:global-views", organization, actor=request.user) if not has_global_views and len(filters.get("project_id", [])) > 1: return Response( {"detail": "You cannot view events from multiple projects."}, status=400) data_fn = partial( lambda **kwargs: snuba.transform_aliases_and_query(**kwargs), referrer="api.organization-events-v2", **snuba_args) try: return self.paginate( request=request, paginator=GenericOffsetPaginator(data_fn=data_fn), on_results=lambda results: self.handle_results_with_meta( request, organization, params["project_id"], results), ) except snuba.SnubaError as error: logger.info( "organization.events.snuba-error", extra={ "organization_id": organization.id, "user_id": request.user.id, "error": six.text_type(error), }, ) return Response({"detail": "Invalid query."}, status=400)
def get(self, request, organization): try: params = self.get_filter_params(request, organization) snuba_args = self.get_snuba_query_args(request, organization, params) except OrganizationEventsError as exc: return Response({"detail": exc.message}, status=400) except NoProjects: return Response({"count": 0}) data = snuba.transform_aliases_and_query( aggregations=[["count()", "", "count"]], referrer="api.organization-event-meta", **snuba_args)["data"][0] return Response({"count": data["count"]})
def get(self, request, organization):
    if not features.has('organizations:events-v2', organization, actor=request.user):
        return Response(status=404)

    try:
        params = self.get_filter_params(request, organization)
        snuba_args = self.get_snuba_query_args(request, organization, params)
        if not snuba_args.get('selected_columns') and not snuba_args.get('aggregations'):
            return Response({'detail': 'No fields provided'}, status=400)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        return Response([])

    filters = snuba_args.get('filter_keys', {})
    has_global_views = features.has(
        'organizations:global-views', organization, actor=request.user)
    if not has_global_views and len(filters.get('project_id', [])) > 1:
        return Response(
            {'detail': 'You cannot view events from multiple projects.'}, status=400)

    data_fn = partial(
        lambda **kwargs: snuba.transform_aliases_and_query(skip_conditions=True, **kwargs),
        referrer='api.organization-events-v2',
        **snuba_args
    )

    try:
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            on_results=lambda results: self.handle_results_with_meta(
                request, organization, params['project_id'], results),
        )
    except snuba.SnubaError as error:
        logger.info(
            'organization.events.snuba-error',
            extra={
                'organization_id': organization.id,
                'user_id': request.user.id,
                'error': six.text_type(error),
            })
        return Response({'detail': 'Invalid query.'}, status=400)
def get_v2(self, request, organization):
    try:
        snuba_args = self.get_snuba_query_args_v2(request, organization)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        return Response([])
    else:
        data_fn = partial(
            lambda *args, **kwargs: transform_aliases_and_query(*args, **kwargs)['data'],
            referrer='api.organization-events-v2',
            **snuba_args
        )
        return self.paginate(
            request=request,
            paginator=GenericOffsetPaginator(data_fn=data_fn),
        )
def do_query(self, projects, request, **kwargs):
    requested_query = deepcopy(kwargs)

    selected_columns = kwargs['selected_columns']
    groupby_columns = kwargs['groupby']

    # Snuba has no 'project.name' column, so query by 'project.id' instead;
    # handle_results() maps the ids back onto the columns the caller requested.
    if 'project.name' in requested_query['selected_columns']:
        selected_columns.remove('project.name')
        if 'project.id' not in selected_columns:
            selected_columns.append('project.id')

    if 'project.name' in requested_query['groupby']:
        groupby_columns.remove('project.name')
        if 'project.id' not in groupby_columns:
            groupby_columns.append('project.id')

    for aggregation in kwargs['aggregations']:
        if aggregation[1] == 'project.name':
            aggregation[1] = 'project.id'

    if not kwargs['aggregations']:
        data_fn = partial(
            snuba.transform_aliases_and_query,
            referrer='discover',
            **kwargs
        )
        return self.paginate(
            request=request,
            on_results=lambda results: self.handle_results(results, requested_query, projects),
            paginator=GenericOffsetPaginator(data_fn=data_fn),
            max_per_page=1000
        )
    else:
        snuba_results = snuba.transform_aliases_and_query(
            referrer='discover',
            **kwargs
        )
        return Response(self.handle_results(
            snuba_results,
            requested_query,
            projects,
        ), status=200)
def get(self, request, organization):
    try:
        if features.has("organizations:events-v2", organization, actor=request.user):
            params = self.get_filter_params(request, organization)
            snuba_args = self.get_snuba_query_args(request, organization, params)
        else:
            snuba_args = self.get_snuba_query_args_legacy(request, organization)
    except (OrganizationEventsError, InvalidSearchQuery) as exc:
        raise ParseError(detail=six.text_type(exc))
    except NoProjects:
        return Response({"data": []})

    interval = parse_stats_period(request.GET.get("interval", "1h"))
    if interval is None:
        interval = timedelta(hours=1)
    rollup = int(interval.total_seconds())

    snuba_args = self.get_field(request, snuba_args)

    result = snuba.transform_aliases_and_query(
        skip_conditions=True,
        aggregations=snuba_args.get("aggregations"),
        conditions=snuba_args.get("conditions"),
        filter_keys=snuba_args.get("filter_keys"),
        start=snuba_args.get("start"),
        end=snuba_args.get("end"),
        orderby="time",
        groupby=["time"],
        rollup=rollup,
        referrer="api.organization-events-stats",
        limit=10000,
    )
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        serializer.serialize(
            snuba.SnubaTSResult(result, snuba_args["start"], snuba_args["end"], rollup)
        ),
        status=200,
    )
def get(self, request, organization): if not features.has( "organizations:events-v2", organization, actor=request.user): return Response(status=404) try: params = self.get_filter_params(request, organization) snuba_args = self.get_snuba_query_args(request, organization, params) except OrganizationEventsError as exc: return Response({"detail": exc.message}, status=400) except NoProjects: return Response({"detail": "A valid project must be included."}, status=400) try: key = self._validate_key(request) self._validate_project_ids(request, organization, snuba_args) except OrganizationEventsError as error: return Response({"detail": six.text_type(error)}, status=400) if key == PROJECT_KEY: colname = "project_id" conditions = snuba_args["conditions"] else: colname = key additional_conditions = [] # the "no environment" environment is null in snuba if not ("environment" in params and "" in params["environment"]): additional_conditions = [[colname, "IS NOT NULL", None]] conditions = snuba_args["conditions"] + additional_conditions top_values = transform_aliases_and_query( start=snuba_args["start"], end=snuba_args["end"], conditions=conditions, filter_keys=snuba_args["filter_keys"], groupby=[colname], aggregations=[("count()", None, "count")], orderby="-count", limit=TOP_VALUES_DEFAULT_LIMIT, referrer="api.organization-events-distribution", )["data"] projects = { p.id: p.slug for p in self.get_projects(request, organization) } if key == PROJECT_KEY: resp = { "key": PROJECT_KEY, "topValues": [{ "value": projects[v["project_id"]], "name": projects[v["project_id"]], "count": v["count"], } for v in top_values], } else: resp = { "key": key, "topValues": [{ "value": v[colname], "name": tagstore.get_tag_value_label(colname, v[colname]), "count": v["count"], } for v in top_values], } return Response(resp)