def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        # Alias mapping and time columns for time bucketing — semantics
        # defined by TimeSeriesProcessor.
        TimeSeriesProcessor({"bucketed_started": "started"}, ("started", "received")),
        ProjectRateLimiterProcessor(project_column="project_id"),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        ApdexProcessor(),
        ImpactProcessor(),
        TimeSeriesColumnProcessor(self.__time_group_columns),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    return [
        # "time" aliases the timestamp column; timestamp is the only
        # time column here.
        TimeSeriesProcessor({"time": "timestamp"}, ("timestamp",)),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        ProjectRateLimiterProcessor(project_column="project_id"),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"time": "timestamp"}, ("timestamp",)),
        # Rate limiting by referrer, then by organization.
        ReferrerRateLimiterProcessor(),
        OrganizationRateLimiterProcessor(org_column="org_id"),
    ]
    return processors
def test_format_expressions(pre_format: Query, expected_query: Query) -> None:
    """Apply BasicFunctionsProcessor to a copy of ``pre_format`` and check
    the processed AST (selected columns, groupby, condition) matches
    ``expected_query``."""
    processed = deepcopy(pre_format)
    BasicFunctionsProcessor().process_query(processed, HTTPRequestSettings())
    assert (
        processed.get_selected_columns_from_ast()
        == expected_query.get_selected_columns_from_ast()
    )
    assert processed.get_groupby_from_ast() == expected_query.get_groupby_from_ast()
    assert processed.get_condition_from_ast() == expected_query.get_condition_from_ast()
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        # "time" aliases finish_ts; both start_ts and finish_ts are
        # treated as time columns.
        TimeSeriesProcessor({"time": "finish_ts"}, ("start_ts", "finish_ts")),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        apdex_processor(self.get_data_model()),
        failure_rate_processor(self.get_data_model()),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    data_model = self.get_data_model()
    return [
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        apdex_processor(data_model),
        failure_rate_processor(data_model),
        TimeSeriesColumnProcessor(self.__time_group_columns),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        TimeSeriesColumnProcessor(self.__time_group_columns),
        HandledFunctionsProcessor(
            "exception_stacks.mechanism_handled", self.get_data_model()
        ),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    return [
        BasicFunctionsProcessor(),
        # bucketed_time aliases the timestamp column.
        TimeSeriesProcessor({"bucketed_time": "timestamp"}, ("timestamp",)),
        ProjectRateLimiterProcessor(project_column="project_id"),
        TagsTypeTransformer(),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        # NOTE: Apdex and Impact look like good candidates to be owned by
        # the Transaction entity once it exists, so they would run before
        # storage selection.
        ApdexProcessor(),
        ImpactProcessor(),
        TimeSeriesColumnProcessor({}),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    time_aliases = {"time": "timestamp", "rtime": "received"}
    return [
        TimeSeriesProcessor(time_aliases, ("timestamp", "received")),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        HandledFunctionsProcessor(
            "exception_stacks.mechanism_handled", self.get_data_model()
        ),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"bucketed_started": "started"}, ("started", "received")),
        # Rate limiting chain: referrer, organization, project+referrer,
        # project, then resource quota enforcement.
        ReferrerRateLimiterProcessor(),
        OrganizationRateLimiterProcessor(org_column="org_id"),
        ProjectReferrerRateLimiter("project_id"),
        ProjectRateLimiterProcessor(project_column="project_id"),
        ResourceQuotaProcessor("project_id"),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    schema = self.get_data_model()
    processors = [
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        # NOTE: Apdex and Impact look like good candidates to be owned by
        # the Transaction entity once it exists, so they would run before
        # storage selection.
        apdex_processor(schema),
        failure_rate_processor(schema),
        HandledFunctionsProcessor("exception_stacks.mechanism_handled", schema),
        TimeSeriesColumnProcessor({"time": "timestamp"}),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    return [
        TimeSeriesProcessor(
            {"time": "finish_ts"}, ("start_ts", "finish_ts", "timestamp")
        ),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        apdex_processor(),
        failure_rate_processor(),
        # Rate limiting chain, followed by resource quota enforcement.
        ReferrerRateLimiterProcessor(),
        ProjectReferrerRateLimiter("project_id"),
        ProjectRateLimiterProcessor(project_column="project_id"),
        ResourceQuotaProcessor("project_id"),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        ApdexProcessor(),
        ImpactProcessor(),
        PrewhereProcessor(),
        # Optimize conditions on the flattened nested columns; the time
        # window is bounded below by BEGINNING_OF_TIME.
        NestedFieldConditionOptimizer(
            "tags", "_tags_flattened", {"start_ts", "finish_ts"}, BEGINNING_OF_TIME
        ),
        NestedFieldConditionOptimizer(
            "contexts",
            "_contexts_flattened",
            {"start_ts", "finish_ts"},
            BEGINNING_OF_TIME,
        ),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage.

    ``project_column`` is passed by keyword for consistency with the
    other ``get_query_processors`` implementations in this codebase,
    which all use ``ProjectRateLimiterProcessor(project_column=...)``.
    """
    return [
        BasicFunctionsProcessor(),
        ProjectRateLimiterProcessor(project_column="project_id"),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"time": "timestamp"}, ("timestamp",)),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage.

    Only basic function expansion is needed here.
    """
    return [BasicFunctionsProcessor()]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the ordered query processors applied for this storage."""
    processors = [
        BasicFunctionsProcessor(),
        # bucketed_started aliases started; started and received are the
        # time columns.
        TimeSeriesProcessor({"bucketed_started": "started"}, ("started", "received")),
    ]
    return processors