def get_query_processors(self) -> Sequence[QueryProcessor]: return [ TimeSeriesProcessor({"time": "timestamp"}, ("timestamp", )), TagsExpanderProcessor(), BasicFunctionsProcessor(), ProjectRateLimiterProcessor(project_column="project_id"), ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    return [
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        TimeSeriesColumnProcessor(self.__time_group_columns),
        HandledFunctionsProcessor(
            "exception_stacks.mechanism_handled", self.get_data_model()
        ),
    ]

def get_query_processors(self) -> Sequence[QueryProcessor]: return [ TimeSeriesProcessor({"time": "finish_ts"}, ("start_ts", "finish_ts")), TagsExpanderProcessor(), BasicFunctionsProcessor(), apdex_processor(self.get_data_model()), failure_rate_processor(self.get_data_model()), ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    return [
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        apdex_processor(self.get_data_model()),
        failure_rate_processor(self.get_data_model()),
        TimeSeriesColumnProcessor(self.__time_group_columns),
    ]

def get_query_processors(self) -> Sequence[QueryProcessor]: return [ TimeSeriesProcessor( {"time": "timestamp", "rtime": "received"}, ("timestamp", "received") ), TagsExpanderProcessor(), BasicFunctionsProcessor(), HandledFunctionsProcessor( "exception_stacks.mechanism_handled", self.get_data_model() ), ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    columnset = self.get_data_model()
    return [
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        # Apdex and Impact look like very good candidates for being defined
        # by the Transaction entity once it exists, so they would run before
        # storage selection.
        apdex_processor(columnset),
        failure_rate_processor(columnset),
        HandledFunctionsProcessor("exception_stacks.mechanism_handled", columnset),
        TimeSeriesColumnProcessor({"time": "timestamp"}),
    ]

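# Illustrative sketch (not Snuba code): apdex_processor above expands an
# apdex(duration, threshold) call into the standard Apdex aggregate,
# (satisfied + tolerable / 2) / total, where satisfied samples are <= the
# threshold and tolerable samples are <= 4x the threshold. In plain Python:
from typing import Sequence

def apdex(durations: Sequence[float], threshold: float) -> float:
    satisfied = sum(1 for d in durations if d <= threshold)
    tolerable = sum(1 for d in durations if threshold < d <= 4 * threshold)
    return (satisfied + tolerable / 2) / len(durations)

# apdex([100, 300, 900], threshold=300) == (2 + 0.5) / 3 == ~0.83
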
def get_query_processors(self) -> Sequence[QueryProcessor]: return [ TimeSeriesProcessor({"time": "finish_ts"}, ("start_ts", "finish_ts", "timestamp")), TagsExpanderProcessor(), BasicFunctionsProcessor(), apdex_processor(), failure_rate_processor(), ReferrerRateLimiterProcessor(), ProjectReferrerRateLimiter("project_id"), ProjectRateLimiterProcessor(project_column="project_id"), ResourceQuotaProcessor("project_id"), ]
def get_query_processors(self) -> Sequence[LogicalProcessor]: return [ TagsExpanderProcessor(), TimeSeriesProcessor( {"events.time": "events.timestamp"}, [ "events.timestamp", "events.received", "groups.last_seen", "groups.first_seen", "groups.active_at", ], ), ]
def test_tags_expander() -> None: query_body = { "selected_columns": [ ["f1", ["tags_key", "column2"], "f1_alias"], ["f2", [], "f2_alias"], ], "aggregations": [ ["count", "platform", "platforms"], ["testF", ["platform", "tags_value"], "top_platforms"], ], "conditions": [["tags_key", "=", "tags_key"]], "having": [["tags_value", "IN", ["tag"]]], } events = get_dataset("events") query = parse_query(query_body, events) processor = TagsExpanderProcessor() request_settings = HTTPRequestSettings() processor.process_query(query, request_settings) assert query.get_selected_columns_from_ast() == [ SelectedExpression( "platforms", FunctionCall("platforms", "count", (Column("platform", None, "platform"), )), ), SelectedExpression( "top_platforms", FunctionCall( "top_platforms", "testF", ( Column("platform", None, "platform"), FunctionCall("tags_value", "arrayJoin", (Column(None, None, "tags.value"), )), ), ), ), SelectedExpression( "f1_alias", FunctionCall( "f1_alias", "f1", ( FunctionCall("tags_key", "arrayJoin", (Column(None, None, "tags.key"), )), Column("column2", None, "column2"), ), ), ), SelectedExpression("f2_alias", FunctionCall("f2_alias", "f2", tuple())), ] assert query.get_condition_from_ast() == binary_condition( None, OPERATOR_TO_FUNCTION["="], FunctionCall("tags_key", "arrayJoin", (Column(None, None, "tags.key"), )), Literal(None, "tags_key"), ) assert query.get_having_from_ast() == in_condition( None, FunctionCall("tags_value", "arrayJoin", (Column(None, None, "tags.value"), )), [Literal(None, "tag")], )
def get_query_processors(self) -> Sequence[QueryProcessor]: return [ TimeSeriesProcessor({"time": "timestamp"}, ("timestamp", )), TagsExpanderProcessor(), BasicFunctionsProcessor(), ]
def get_query_processors(self) -> Sequence[LogicalProcessor]:
    return [
        TagsExpanderProcessor(),
        TimeSeriesColumnProcessor(self.__time_group_columns),
    ]

def test_tags_expander() -> None: query_body = """ MATCH (events) SELECT count(platform) AS platforms, testF(platform, tags_value) AS top_platforms, f1(tags_key, column2) AS f1_alias, f2() AS f2_alias WHERE tags_key = 'tags_key' AND project_id = 1 AND timestamp >= toDateTime('2020-01-01 12:00:00') AND timestamp < toDateTime('2020-01-02 12:00:00') HAVING tags_value IN tuple('tag') """ events = get_dataset("events") query, _ = parse_snql_query(query_body, events) processor = TagsExpanderProcessor() query_settings = HTTPQuerySettings() processor.process_query(query, query_settings) assert query.get_selected_columns() == [ SelectedExpression( "platforms", FunctionCall( "_snuba_platforms", "count", (Column("_snuba_platform", None, "platform"), ), ), ), SelectedExpression( "top_platforms", FunctionCall( "_snuba_top_platforms", "testF", ( Column("_snuba_platform", None, "platform"), FunctionCall( "_snuba_tags_value", "arrayJoin", (Column(None, None, "tags.value"), ), ), ), ), ), SelectedExpression( "f1_alias", FunctionCall( "_snuba_f1_alias", "f1", ( FunctionCall( "_snuba_tags_key", "arrayJoin", (Column(None, None, "tags.key"), ), ), Column("_snuba_column2", None, "column2"), ), ), ), SelectedExpression("f2_alias", FunctionCall("_snuba_f2_alias", "f2", tuple())), ] condition = query.get_condition() assert condition is not None conds = get_first_level_and_conditions(condition) assert conds[0] == binary_condition( OPERATOR_TO_FUNCTION["="], FunctionCall("_snuba_tags_key", "arrayJoin", (Column(None, None, "tags.key"), )), Literal(None, "tags_key"), ) assert query.get_having() == in_condition( FunctionCall("_snuba_tags_value", "arrayJoin", (Column(None, None, "tags.value"), )), [Literal(None, "tag")], )