def release_filter_converter(builder: QueryBuilder, search_filter: SearchFilter) -> WhereType:
    """Parse releases for potential aliases like `latest`"""
    if search_filter.value.is_wildcard():
        # Wildcard release filters are handled downstream; forward unchanged.
        operator = search_filter.operator
        value = search_filter.value
    else:
        # An alias may resolve to several concrete releases, so equality
        # operators are promoted to their set-membership counterparts.
        operator = {"=": "IN", "!=": "NOT IN"}.get(
            search_filter.operator, search_filter.operator
        )
        resolved = []
        for token in to_list(search_filter.value.value):
            resolved = resolved + parse_release(
                token,
                builder.params["project_id"],
                builder.params.get("environment_objects"),
                builder.params.get("organization_id"),
            )
        value = SearchValue(resolved)

    return builder._default_filter_converter(SearchFilter(search_filter.key, operator, value))
def filter_by_stage(
    self,
    organization_id: int,
    operator: str,
    value,
    project_ids: Sequence[int] = None,
    environments: List[str] = None,
) -> models.QuerySet:
    """Filter this release queryset by adoption stage.

    Requires exactly one environment, since adoption state is tracked per
    (release, project, environment).

    Raises:
        InvalidSearchQuery: if zero/multiple environments are given or an
            unknown stage value is requested.
    """
    from sentry.models import ReleaseProjectEnvironment, ReleaseStages
    from sentry.search.events.filter import to_list

    if not environments or len(environments) != 1:
        raise InvalidSearchQuery("Choose a single environment to filter by release stage.")

    # Stage is derived from the adopted/unadopted timestamps on the
    # ReleaseProjectEnvironment row.
    filters = {
        ReleaseStages.ADOPTED: Q(adopted__isnull=False, unadopted__isnull=True),
        ReleaseStages.REPLACED: Q(adopted__isnull=False, unadopted__isnull=False),
        ReleaseStages.LOW_ADOPTION: Q(adopted__isnull=True, unadopted__isnull=True),
    }
    value = to_list(value)
    # Normalize equality operators to set-membership; anything else passes
    # through unchanged. (Same convention as the release filter converter.)
    operator = {"=": "IN", "!=": "NOT IN"}.get(operator, operator)

    # Validate every requested stage before touching the database.
    for stage in value:
        if stage not in filters:
            raise InvalidSearchQuery("Unsupported release.stage value.")

    rpes = ReleaseProjectEnvironment.objects.filter(
        release__organization_id=organization_id,
    ).select_related("release")

    if project_ids:
        rpes = rpes.filter(project_id__in=project_ids)

    # OR the stage predicates together for IN; AND the negations for NOT IN.
    query = Q()
    if operator == "IN":
        for stage in value:
            query |= filters[stage]
    elif operator == "NOT IN":
        for stage in value:
            query &= ~filters[stage]

    qs = self.filter(
        id__in=Subquery(rpes.filter(query).values_list("release_id", flat=True))
    )
    return qs
def convert_search_filter(search_filter):
    """Run a registered value converter over the filter, if one exists.

    Converted filters get an IN / NOT IN operator depending on whether the
    original operator was an equality. Aggregate filters without a converter
    are rejected; anything else is returned untouched.
    """
    key_name = search_filter.key.name
    if key_name in value_converters:
        convert = value_converters[key_name]
        converted = convert(
            to_list(search_filter.value.raw_value), projects, user, environments
        )
        is_equality = search_filter.operator in EQUALITY_OPERATORS
        return search_filter._replace(
            value=SearchValue(converted),
            operator="IN" if is_equality else "NOT IN",
        )

    if isinstance(search_filter, AggregateFilter):
        raise InvalidSearchQuery(
            f"Aggregate filters ({key_name}) are not supported in issue searches."
        )

    return search_filter
def project_slug_converter(
    builder: QueryBuilder, search_filter: SearchFilter
) -> Optional[WhereType]:
    """Convert project slugs to ids and create a filter based on those.

    This is cause we only store project ids in clickhouse.

    Raises:
        InvalidSearchQuery: for `project:""` equality queries, or when an
            equality filter names a slug outside the selected projects.
    """
    value = search_filter.value.value
    if Op(search_filter.operator) == Op.EQ and value == "":
        raise InvalidSearchQuery(
            'Cannot query for has:project or project:"" as every event will have a project'
        )

    slugs = to_list(value)
    project_slugs: Mapping[str, int] = {
        slug: project_id
        for slug, project_id in builder.project_slugs.items()
        if slug in slugs
    }
    missing: List[str] = [slug for slug in slugs if slug not in project_slugs]
    # Only equality filters care about missing slugs; a NOT IN over unknown
    # projects is trivially satisfied.
    if missing and search_filter.operator in constants.EQUALITY_OPERATORS:
        raise InvalidSearchQuery(
            f"Invalid query. Project(s) {', '.join(missing)} do not exist or are not actively selected."
        )
    # Sorted for consistent query results (sorted() already returns a list;
    # the previous list(sorted(...)) wrapper was redundant)
    project_ids = sorted(project_slugs.values())

    if project_ids:
        # Create a new search filter with the correct values
        converted_filter = builder.convert_search_filter_to_condition(
            SearchFilter(
                SearchKey("project.id"),
                search_filter.operator,
                SearchValue(project_ids if search_filter.is_in_filter else project_ids[0]),
            )
        )
        if converted_filter:
            if search_filter.operator in constants.EQUALITY_OPERATORS:
                builder.projects_to_filter.update(project_ids)
            return converted_filter

    return None
def unfurl_discover(data, integration, links: List[UnfurlableUrl]) -> UnfurledUrl:
    """Build Slack unfurls (chart attachments) for shared Discover links.

    For each link, loads the referenced saved query (if any), merges its
    settings into the URL params, fetches events-stats, renders a chart, and
    maps link.url -> attachment. Links for orgs without the integration or
    the chart-unfurls feature are skipped.
    """
    orgs_by_slug = {org.slug: org for org in integration.organizations.all()}
    unfurls = {}

    for link in links:
        org_slug = link.args["org_slug"]
        org = orgs_by_slug.get(org_slug)

        # If the link shared is an org w/o the slack integration do not unfurl
        if not org:
            continue
        if not features.has("organizations:chart-unfurls", org):
            continue

        params = link.args["query"]
        query_id = params.get("id", None)

        saved_query = {}
        if query_id:
            try:
                response = client.get(
                    auth=ApiKey(organization=org, scope_list=["org:read"]),
                    path=f"/organizations/{org_slug}/discover/saved/{query_id}/",
                )
            except Exception as exc:
                # Best-effort: fall back to URL params only.
                logger.error(
                    "Failed to load saved query for unfurl: %s",
                    str(exc),
                    exc_info=True,
                )
            else:
                saved_query = response.data

        # Override params from Discover Saved Query if they aren't in the URL
        params.setlist(
            "order", params.getlist("sort") or to_list(saved_query.get("orderby"))
        )
        params.setlist(
            "name", params.getlist("name") or to_list(saved_query.get("name"))
        )
        params.setlist(
            "yAxis",
            params.getlist("yAxis") or to_list(saved_query.get("yAxis", "count()")),
        )
        params.setlist(
            "field", params.getlist("field") or to_list(saved_query.get("fields"))
        )

        # Only override if key doesn't exist since we want to account for
        # an intermediate state where the query could have been cleared
        if "query" not in params:
            params.setlist(
                "query", params.getlist("query") or to_list(saved_query.get("query"))
            )

        display_mode = str(params.get("display") or saved_query.get("display", "default"))

        if "daily" in display_mode:
            params.setlist("interval", ["1d"])
        if "top5" in display_mode:
            params.setlist("topEvents", [f"{TOP_N}"])

        try:
            resp = client.get(
                auth=ApiKey(organization=org, scope_list=["org:read"]),
                path=f"/organizations/{org_slug}/events-stats/",
                params=params,
            )
        except Exception as exc:
            logger.error(
                "Failed to load events-stats for unfurl: %s",
                str(exc),
                exc_info=True,
            )
            continue

        chart_data = {"seriesName": params.get("yAxis"), "stats": resp.data}

        style = display_modes.get(display_mode, display_modes["default"])

        try:
            url = generate_chart(style, chart_data)
        except RuntimeError as exc:
            # BUG FIX: message previously read "Failed to generate chat".
            logger.error(
                "Failed to generate chart for discover unfurl: %s",
                str(exc),
                exc_info=True,
            )
            continue

        unfurls[link.url] = build_discover_attachment(
            title=link.args["query"].get("name", "Dashboards query"),
            chart_url=url,
        )

    return unfurls
def unfurl_discover(
    data: HttpRequest,
    integration: Integration,
    links: List[UnfurlableUrl],
    user: Optional["User"],
) -> UnfurledUrl:
    """Build Slack unfurls (chart images) for shared Discover links.

    For each link: loads the referenced saved query (if any), merges its
    settings into the URL params, normalizes display-mode specific params,
    fetches stats (or geo data for worldmap), renders a chart, and maps
    link.url -> Slack message. Emits one analytics event for the batch.
    """
    orgs_by_slug = {org.slug: org for org in integration.organizations.all()}
    unfurls = {}

    for link in links:
        org_slug = link.args["org_slug"]
        org = orgs_by_slug.get(org_slug)

        # If the link shared is an org w/o the slack integration do not unfurl
        if not org:
            continue
        if not features.has("organizations:discover-basic", org):
            continue

        params = link.args["query"]
        query_id = params.get("id", None)

        saved_query = {}
        if query_id:
            try:
                response = client.get(
                    auth=ApiKey(organization=org, scope_list=["org:read"]),
                    path=f"/organizations/{org_slug}/discover/saved/{query_id}/",
                )
            except Exception as exc:
                # Best-effort: fall back to URL params only.
                logger.error(
                    f"Failed to load saved query for unfurl: {exc}",
                    exc_info=True,
                )
            else:
                saved_query = response.data

        # Override params from Discover Saved Query if they aren't in the URL
        params.setlist(
            "order",
            params.getlist("sort")
            or (to_list(saved_query.get("orderby")) if saved_query.get("orderby") else []),
        )
        params.setlist("name", params.getlist("name") or to_list(saved_query.get("name")))

        fields = params.getlist("field") or to_list(saved_query.get("fields"))
        # Mimic Discover to pick the first aggregate as the yAxis option if
        # one isn't specified.
        axis_options = [field for field in fields if is_aggregate(field)] + [DEFAULT_AXIS_OPTION]
        params.setlist(
            "yAxis", params.getlist("yAxis") or to_list(saved_query.get("yAxis", axis_options[0]))
        )
        params.setlist("field", params.getlist("field") or to_list(saved_query.get("fields")))

        params.setlist(
            "project",
            params.getlist("project")
            or (to_list(saved_query.get("project")) if saved_query.get("project") else []),
        )

        # Only override if key doesn't exist since we want to account for
        # an intermediate state where the query could have been cleared
        if "query" not in params:
            params.setlist(
                "query", params.getlist("query") or to_list(saved_query.get("query", ""))
            )

        display_mode = str(params.get("display") or saved_query.get("display", "default"))

        if "daily" in display_mode:
            params.setlist("interval", ["1d"])

        if "top5" in display_mode:
            params.setlist(
                "topEvents",
                params.getlist("topEvents") or to_list(saved_query.get("topEvents", f"{TOP_N}")),
            )

            y_axis = params.getlist("yAxis")[0]
            if display_mode != "dailytop5":
                display_mode = get_top5_display_mode(y_axis)
        else:
            # topEvents param persists in the URL in some cases, we want to discard
            # it if it's not a top n display type.
            params.pop("topEvents", None)

        if "previous" in display_mode:
            # Double the stats period so the previous-period series has data,
            # but only for periods short enough to stay within limits.
            stats_period = params.getlist("statsPeriod", [DEFAULT_PERIOD])[0]
            parsed_period = parse_stats_period(stats_period)
            if parsed_period and parsed_period <= timedelta(days=MAX_PERIOD_DAYS_INCLUDE_PREVIOUS):
                stats_period = get_double_period(stats_period)
                params.setlist("statsPeriod", [stats_period])

        # Worldmap charts come from the geo endpoint and plot the yAxis field.
        endpoint = "events-stats/"
        if "worldmap" in display_mode:
            endpoint = "events-geo/"
            params.setlist("field", params.getlist("yAxis"))
            params.pop("sort", None)

        try:
            resp = client.get(
                auth=ApiKey(organization=org, scope_list=["org:read"]),
                user=user,
                path=f"/organizations/{org_slug}/{endpoint}",
                params=params,
            )
        except Exception as exc:
            logger.error(
                f"Failed to load {endpoint} for unfurl: {exc}",
                exc_info=True,
            )
            continue

        chart_data = {"seriesName": params.get("yAxis"), "stats": resp.data}

        style = display_modes.get(display_mode, display_modes["default"])

        try:
            url = generate_chart(style, chart_data)
        except RuntimeError as exc:
            logger.error(
                f"Failed to generate chart for discover unfurl: {exc}",
                exc_info=True,
            )
            continue

        unfurls[link.url] = SlackDiscoverMessageBuilder(
            title=link.args["query"].get("name", "Dashboards query"),
            chart_url=url,
        ).build()

    # NOTE(review): records one event per batch, not per link; assumes the
    # integration has at least one organization — confirm upstream guarantees.
    analytics.record(
        "integrations.slack.chart_unfurl",
        organization_id=integration.organizations.all()[0].id,
        user_id=user.id if user else None,
        unfurls_count=len(unfurls),
    )
    return unfurls