def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    processors: Sequence[QueryProcessor] = [
        TimeSeriesProcessor({"time": "timestamp"}, ("timestamp",)),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        ProjectRateLimiterProcessor(project_column="project_id"),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    # Function rewrites first, then time bucketing, then rate limiting.
    pipeline: Sequence[QueryProcessor] = [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"time": "timestamp"}, ("timestamp",)),
        ReferrerRateLimiterProcessor(),
        OrganizationRateLimiterProcessor(org_column="org_id"),
    ]
    return pipeline
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    return [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"bucketed_started": "started"}, ("started", "received")),
        ProjectRateLimiterProcessor(project_column="project_id"),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    # The data model is needed by the apdex / failure-rate processors.
    data_model = self.get_data_model()
    return [
        TimeSeriesProcessor({"time": "finish_ts"}, ("start_ts", "finish_ts")),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        apdex_processor(data_model),
        failure_rate_processor(data_model),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    steps: Sequence[QueryProcessor] = [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"bucketed_time": "timestamp"}, ("timestamp",)),
        ProjectRateLimiterProcessor(project_column="project_id"),
        TagsTypeTransformer(),
    ]
    return steps
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    # Query rewrites come first; the various rate limiters and the quota
    # enforcement run afterwards.
    rewrites: Sequence[QueryProcessor] = [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"bucketed_started": "started"}, ("started", "received")),
    ]
    limiters: Sequence[QueryProcessor] = [
        ReferrerRateLimiterProcessor(),
        OrganizationRateLimiterProcessor(org_column="org_id"),
        ProjectReferrerRateLimiter("project_id"),
        ProjectRateLimiterProcessor(project_column="project_id"),
        ResourceQuotaProcessor("project_id"),
    ]
    return [*rewrites, *limiters]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    time_processor = TimeSeriesProcessor(
        {"time": "timestamp", "rtime": "received"},
        ("timestamp", "received"),
    )
    handled_processor = HandledFunctionsProcessor(
        "exception_stacks.mechanism_handled",
        self.get_data_model(),
    )
    return [
        time_processor,
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        handled_processor,
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    # Granularity and time bucketing first, then the rate limiters / quota
    # enforcement, and finally the tag-type transformation.
    result: Sequence[QueryProcessor] = [
        GranularityProcessor(),
        TimeSeriesProcessor({"bucketed_time": "timestamp"}, ("timestamp",)),
        ReferrerRateLimiterProcessor(),
        OrganizationRateLimiterProcessor(org_column="org_id"),
        ProjectReferrerRateLimiter("project_id"),
        ProjectRateLimiterProcessor(project_column="project_id"),
        ResourceQuotaProcessor("project_id"),
        TagsTypeTransformer(),
    ]
    return result
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    return [
        # Time bucketing and expression rewrites.
        TimeSeriesProcessor(
            {"time": "finish_ts"}, ("start_ts", "finish_ts", "timestamp")
        ),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        apdex_processor(),
        failure_rate_processor(),
        # Rate limiting and quota enforcement.
        ReferrerRateLimiterProcessor(),
        ProjectReferrerRateLimiter("project_id"),
        ProjectRateLimiterProcessor(project_column="project_id"),
        ResourceQuotaProcessor("project_id"),
    ]
def get_query_processors(self) -> Sequence[LogicalProcessor]:
    """Return the logical query processors for this entity, in application order."""
    # All the time-based columns of the joined events/groups model that the
    # time-series processor must be aware of.
    time_columns = [
        "events.timestamp",
        "events.received",
        "groups.last_seen",
        "groups.first_seen",
        "groups.active_at",
    ]
    return [
        TagsExpanderProcessor(),
        TimeSeriesProcessor({"events.time": "events.timestamp"}, time_columns),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    model = self.get_data_model()
    return [
        TimeSeriesProcessor({"time": "timestamp"}, ("timestamp",)),
        TagsExpanderProcessor(),
        BasicFunctionsProcessor(),
        # Apdex and Impact seem very good candidates for being defined by
        # the Transaction entity when it will exist, so it would run before
        # Storage selection.
        apdex_processor(model),
        failure_rate_processor(model),
        HandledFunctionsProcessor("exception_stacks.mechanism_handled", model),
    ]
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    processors: Sequence[QueryProcessor] = [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"bucketed_started": "started"}, ("started", "received")),
    ]
    return processors
def get_query_processors(self) -> Sequence[QueryProcessor]:
    """Return the logical query processors for this entity, in application order."""
    return [
        BasicFunctionsProcessor(),
        TimeSeriesProcessor({"time": "timestamp"}, ("timestamp",)),
    ]