def __init__(self, environment_ids=None, start=None, end=None, search_filters=None):
    """Derive Snuba query bounds and conditions from search filters.

    Args:
        environment_ids: Optional list of environment ids to scope the query to.
        start: Optional explicit lower time bound.
        end: Optional explicit upper time bound.
        search_filters: Optional sequence of SearchFilter objects; `date`
            filters tighten the bounds, all others (except those in
            `self.skip_snuba_fields`) become Snuba conditions.
    """
    from sentry.search.snuba.executors import get_search_filter

    self.environment_ids = environment_ids

    # XXX: We copy this logic from `PostgresSnubaQueryExecutor.query`. Ideally we
    # should try and encapsulate this logic, but if you're changing this, change it
    # there as well.
    # Effective start is the *latest* of the explicit start and any `date > x`
    # search filter (the most restrictive lower bound).
    self.start = None
    start_params = [
        _f for _f in [start, get_search_filter(search_filters, "date", ">")] if _f
    ]
    if start_params:
        # `start_params` is already filtered for falsy values above, so the
        # inner re-filter the original applied inside max() was redundant;
        # call max() directly, mirroring the `end` handling below.
        self.start = max(start_params)

    # Effective end is the *earliest* of the explicit end and any `date < x`
    # search filter (the most restrictive upper bound).
    self.end = None
    end_params = [
        _f for _f in [end, get_search_filter(search_filters, "date", "<")] if _f
    ]
    if end_params:
        self.end = min(end_params)

    # Translate the remaining filters to Snuba conditions; fields handled
    # outside Snuba (`self.skip_snuba_fields`) are excluded.
    self.conditions = (
        [
            convert_search_filter_to_snuba_query(search_filter)
            for search_filter in search_filters
            if search_filter.key.name not in self.skip_snuba_fields
        ]
        if search_filters is not None
        else []
    )
def inbox_search(
    projects: Sequence[Project],
    environments: Optional[Sequence[Environment]] = None,
    limit: int = 100,
    cursor: Optional[Cursor] = None,
    count_hits: bool = False,
    search_filters: Optional[Sequence[SearchFilter]] = None,
    date_from: Optional[datetime] = None,
    date_to: Optional[datetime] = None,
    max_hits: Optional[int] = None,
) -> CursorResult:
    """Paginate over GroupInbox rows and return the matching Groups.

    Args:
        projects: Projects to search within.
        environments: Optional environments to restrict the groups to.
        limit: Page size for the paginator.
        cursor: Pagination cursor (relates to GroupInbox rows, not Groups).
        count_hits: Whether the paginator should count total hits.
        search_filters: Must include `for_review` and `status:unresolved`
            filters; may also carry `date` bounds and an
            `assigned_or_suggested` filter.
        date_from / date_to: Explicit time bounds (clamped to the last week).
        max_hits: Cap on the hit count when `count_hits` is set.

    Returns:
        A CursorResult whose `results` are Group objects.

    Raises:
        InvalidSearchQuery: If the filters are not a valid inbox search.
    """
    now: datetime = timezone.now()

    # Upper bound: the tighter of `date_to` and any `date < x` filter;
    # default to a little past "now" when neither is given.
    end: Optional[datetime] = None
    end_params: List[datetime] = [
        _f for _f in [date_to, get_search_filter(search_filters, "date", "<")] if _f
    ]
    if end_params:
        end = min(end_params)
    end = end if end else now + ALLOWED_FUTURE_DELTA

    # We only want to search back a week at most, since that's the oldest inbox rows
    # can be.
    earliest_date = now - timedelta(days=7)
    start_params = [date_from, earliest_date, get_search_filter(search_filters, "date", ">")]
    start = max([_f for _f in start_params if _f])
    end = max([earliest_date, end])

    if start >= end:
        # Empty window — return an empty page rather than querying.
        return Paginator(Group.objects.none()).get_result()

    # Make sure search terms are valid. Guard against `search_filters` being
    # None (its default): iterating None would raise a TypeError here.
    invalid_search_terms = [
        str(sf)
        for sf in (search_filters or ())
        if sf.key.name not in allowed_inbox_search_terms
    ]
    if invalid_search_terms:
        raise InvalidSearchQuery(
            f"Invalid search terms for 'inbox' search: {invalid_search_terms}"
        )

    # Make sure this is an inbox search
    if not get_search_filter(search_filters, "for_review", "="):
        raise InvalidSearchQuery("Sort key 'inbox' only supported for inbox search")

    if get_search_filter(search_filters, "status", "=") != GroupStatus.UNRESOLVED:
        raise InvalidSearchQuery("Inbox search only works for 'unresolved' status")

    # We just filter on `GroupInbox.date_added` here, and don't filter by date
    # on the group. This keeps the query simpler and faster in some edge cases,
    # and date_added is a good enough proxy when we're using this sort.
    qs = GroupInbox.objects.filter(
        date_added__gte=start,
        date_added__lte=end,
        project__in=projects,
    )

    if environments is not None:
        environment_ids: List[int] = [environment.id for environment in environments]
        qs = qs.filter(
            group_id__in=GroupEnvironment.objects.filter(environment_id__in=environment_ids)
            .values_list("group_id", flat=True)
            .distinct()
        )

    owner_search = get_search_filter(search_filters, "assigned_or_suggested", "=")
    if owner_search:
        qs = qs.filter(
            assigned_or_suggested_filter(owner_search, projects, field_filter="group_id")
        )

    paginator = DateTimePaginator(qs.order_by("date_added"), "-date_added")
    results = paginator.get_result(limit, cursor, count_hits=count_hits, max_hits=max_hits)

    # We want to return groups from the endpoint, but have the cursor be related to the
    # GroupInbox rows. So we paginate on the GroupInbox results queryset, then fetch
    # the group_ids out and use them to get the actual groups.
    group_qs = Group.objects.filter(
        id__in=[r.group_id for r in results.results],
        project__in=projects,
        status=GroupStatus.UNRESOLVED,
    )
    groups: Mapping[int, Group] = {g.id: g for g in group_qs}
    # Preserve GroupInbox ordering; drop rows whose group no longer qualifies
    # (e.g. resolved between the two queries).
    results.results = [groups[r.group_id] for r in results.results if r.group_id in groups]
    return results