def _serialize_entity(self, entity: Entity, filter: Filter, team_id: int) -> List[Dict[str, Any]]:
    """Serialize one entity's stickiness numbers into the standard trend-series shape."""
    # Stickiness is bucketed per day, so default to a daily interval.
    if filter.interval is None:
        filter.interval = "day"

    base_payload: Dict[str, Any] = {
        "action": entity.to_dict(),
        "label": entity.name,
        "count": 0,
        "data": [],
        "labels": [],
        "days": [],
    }

    matched_events = process_entity_for_events(
        entity=entity,
        team_id=team_id,
        order_by=None,
    )
    matched_events = matched_events.filter(filter_events(team_id, filter, entity))

    entity_payload = copy.deepcopy(base_payload)
    entity_payload.update(
        self.stickiness(filtered_events=matched_events, entity=entity, filter=filter, team_id=team_id)
    )
    return [entity_payload]
def trends(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Compute a trend series for every entity in the request's filter."""
    actions = self.get_queryset().filter(deleted=False)
    team = request.user.team_set.get()
    filter = Filter(request=request)

    # With no explicit entities, chart every (non-deleted) action instead.
    if not filter.entities:
        filter.entities = [
            Entity({'id': action.id, 'name': action.name, 'type': TREND_FILTER_TYPE_ACTIONS})
            for action in actions
        ]

    entities_list = []
    for entity in filter.entities:
        if entity.type == TREND_FILTER_TYPE_ACTIONS:
            matching = [action for action in actions if action.id == entity.id]
            if not matching:
                # Entity references an unknown/deleted action; skip it.
                continue
            entity.name = matching[0].name
        entities_list.extend(
            self._serialize_entity(entity=entity, filter=filter, request=request, team=team)
        )
    return Response(entities_list)
def trends(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Return trend data for each entity in the filter, applying default
    entities (all actions) and a default date window when none are given."""
    actions = self.get_queryset()
    actions = actions.filter(deleted=False)
    team = request.user.team_set.get()
    entities_list = []
    filter = Filter(request=request)
    if len(filter.entities) == 0:
        # If no filters, automatically grab all actions and show those instead
        filter.entities = [Entity({'id': action.id, 'name': action.name, 'type': TREND_FILTER_TYPE_ACTIONS}) for action in actions]
    if not filter.date_from:
        # Default the window start to midnight of the team's earliest event.
        # NOTE(review): [0] raises IndexError if the team has no events — confirm callers guard this.
        filter._date_from = Event.objects.filter(team=team)\
            .order_by('timestamp')[0]\
            .timestamp\
            .replace(hour=0, minute=0, second=0, microsecond=0)\
            .isoformat()
    if not filter.date_to:
        filter._date_to = now().isoformat()
    for entity in filter.entities:
        if entity.type == TREND_FILTER_TYPE_ACTIONS:
            try:
                db_action = [action for action in actions if action.id == entity.id][0]
                entity.name = db_action.name
            except IndexError:
                # Entity references an action outside the queryset (e.g. deleted); skip it.
                continue
        trend_entity = self._serialize_entity(
            entity=entity, filter=filter, request=request, team=team
        )
        entities_list.extend(trend_entity)
    return Response(entities_list)
def calculate_trends(self, filter: Filter, team_id: int) -> List[Dict[str, Any]]:
    """Compute trend results for every entity in *filter*, honoring compare mode."""
    action_qs = Action.objects.filter(team_id=team_id).order_by("-id")
    if filter.actions:
        # Restrict to only the actions the filter references.
        action_qs = Action.objects.filter(
            pk__in=[entity.id for entity in filter.actions], team_id=team_id
        )
    action_qs = action_qs.prefetch_related(
        Prefetch("steps", queryset=ActionStep.objects.order_by("id"))
    )

    if not filter.entities:
        # No explicit entities: chart every action instead.
        filter.entities = [
            Entity({"id": action.id, "name": action.name, "type": TREND_FILTER_TYPE_ACTIONS,})
            for action in action_qs
        ]

    if not filter.date_from:
        # Default the window start to midnight of the team's earliest event.
        earliest = Event.objects.filter(team_id=team_id).order_by("timestamp")[0].timestamp
        filter._date_from = earliest.replace(hour=0, minute=0, second=0, microsecond=0).isoformat()
    if not filter.date_to:
        filter._date_to = now().isoformat()

    results: List[Dict[str, Any]] = []
    for entity in filter.entities:
        if entity.type == TREND_FILTER_TYPE_ACTIONS:
            matches = [action for action in action_qs if action.id == entity.id]
            if not matches:
                # Entity references an action outside the queryset; skip it.
                continue
            entity.name = matches[0].name
        results.extend(
            handle_compare(entity=entity, filter=filter, func=self._serialize_entity, team_id=team_id)
        )
    return results
def retention(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Run a retention calculation for the team, optionally anchored on a
    "start_entity" passed as JSON in the query string.

    Raises ValidationError when the "properties" query param is not valid JSON.
    """
    team = self.team
    properties = request.GET.get("properties", "{}")
    try:
        properties = json.loads(properties)
    except json.decoder.JSONDecodeError:
        raise ValidationError("Properties are unparsable!")
    data: Dict[str, Any] = {"properties": properties}
    start_entity_data = request.GET.get("start_entity", None)
    if start_entity_data:
        entity_data = json.loads(start_entity_data)
        # BUG FIX: the key was misspelled "entites", so RetentionFilter never
        # saw the start entity and it was silently ignored.
        data.update({
            "entities": [
                Entity({
                    "id": entity_data["id"],
                    "type": entity_data["type"]
                })
            ]
        })
    # Retention always looks back over an 11-day window.
    data.update({"date_from": "-11d"})
    filter = RetentionFilter(data=data)
    result = retention.Retention().run(filter, team)
    return Response({"data": result})
def update_cache(cache_type: str, payload: dict) -> Optional[Union[dict, List[Dict[str, Any]]]]:
    """Recalculate the cached result for a trends or funnel endpoint and mark
    the owning dashboard item as refreshed.

    Returns the freshly calculated result, or None when the referenced
    dashboard item has been deleted (no result is stored in that case).
    """
    result: Optional[Union[dict, List[Dict[str, Any]]]] = None
    if cache_type == TRENDS_ENDPOINT:
        # Rehydrate the serialized filter, converting raw entity dicts back
        # into Entity objects before building the Filter.
        filter_dict = json.loads(payload["filter"])
        entities = [
            Entity(entity_dict)
            for entity_dict in filter_dict.get("entities", [])
        ]
        filter_dict.update({"entities": entities})
        filter = Filter(data=filter_dict)
        result = _calculate_trends(filter, payload["params"], int(payload["team_id"]))
    elif cache_type == FUNNEL_ENDPOINT:
        result = _calculate_funnels(payload["pk"], payload["params"], int(payload["team_id"]))
    if payload["dashboard_id"]:
        dashboard_items = DashboardItem.objects.filter(pk=payload["dashboard_id"])
        # BUG FIX: the original indexed dashboard_item[0] unconditionally,
        # which raises IndexError when the item no longer exists; first()
        # returns None instead.
        item = dashboard_items.first()
        if item and item.deleted:
            return None
        dashboard_items.update(last_refresh=datetime.datetime.now(), refreshing=False)
    return result
def _build_step_query(self, entity: Entity, index: int, entity_name: str, step_prefix: str) -> str:
    """Build the SQL snippet matching one funnel step (an action or a single event).

    Returns "" when an action entity yields no usable query.
    Side effect: registers the step's query parameters on self.params.
    """
    filters = self._build_filters(entity, index)
    if entity.type == TREND_FILTER_TYPE_ACTIONS:
        action = entity.get_action()
        for action_step in action.steps.all():
            # BUG FIX: the original tested `entity_name not in self.params[entity_name]`,
            # comparing the params *key* against the event list — always true, so
            # duplicate events were appended. Deduplicate on the event itself,
            # mirroring the single-event branch below.
            if action_step.event not in self.params[entity_name]:
                self.params[entity_name].append(action_step.event)
        action_query, action_params = format_action_filter(
            team_id=self._team.pk,
            action=action,
            prepend=f"{entity_name}_{step_prefix}step_{index}")
        if action_query == "":
            return ""
        self.params.update(action_params)
        content_sql = "{actions_query} {filters}".format(
            actions_query=action_query,
            filters=filters,
        )
    else:
        if entity.id not in self.params[entity_name]:
            self.params[entity_name].append(entity.id)
        event_param_key = f"{entity_name}_{step_prefix}event_{index}"
        self.params[event_param_key] = entity.id
        content_sql = f"event = %({event_param_key})s {filters}"
    return content_sql
def _serialize_entity(self, entity: Entity, filter: Filter, team_id: int) -> List[Dict[str, Any]]:
    """Turn one entity's filtered events into a list of chartable series dicts."""
    if filter.interval is None:
        filter.interval = "day"

    template: Dict[str, Any] = {
        "action": entity.to_dict(),
        "label": entity.name,
        "count": 0,
        "data": [],
        "labels": [],
        "days": [],
    }

    events = process_entity_for_events(entity=entity, team_id=team_id, order_by="-timestamp",)
    events = events.filter(filter_events(team_id, filter, entity))

    breakdown = "properties__{}".format(filter.breakdown) if filter.breakdown else None
    items = aggregate_by_interval(
        filtered_events=events,
        team_id=team_id,
        entity=entity,
        filter=filter,
        breakdown=breakdown,
    )

    series: List[Dict[str, Any]] = []
    for value, item in items.items():
        entry = copy.deepcopy(template)
        if value != "Total":
            # Breakdown series carry an extra label describing the breakdown value.
            entry.update(breakdown_label(entity, value))
        entry.update(append_data(dates_filled=list(item.items()), interval=filter.interval))
        if filter.display == TRENDS_CUMULATIVE:
            entry["data"] = np.cumsum(entry["data"])
        series.append(entry)
    return series
def calculate_trends(filter: Filter, params: dict, team_id: int, actions: QuerySet) -> List[Dict[str, Any]]:
    """Compute trend series for every entity in *filter*, optionally paired
    with a comparison series for the preceding period (params["compare"])."""
    compare = params.get("compare")
    entities_list = []
    actions = actions.filter(deleted=False)
    if len(filter.entities) == 0:
        # If no filters, automatically grab all actions and show those instead
        filter.entities = [
            Entity({
                "id": action.id,
                "name": action.name,
                "type": TREND_FILTER_TYPE_ACTIONS,
            }) for action in actions
        ]
    if not filter.date_from:
        # Default the window start to midnight of the team's earliest event.
        # NOTE(review): [0] raises IndexError if the team has no events — confirm callers guard this.
        filter._date_from = (Event.objects.filter(
            team_id=team_id).order_by("timestamp")[0].timestamp.replace(
                hour=0, minute=0, second=0, microsecond=0).isoformat())
    if not filter.date_to:
        filter._date_to = now().isoformat()
    compared_filter = None
    if compare:
        compared_filter = determine_compared_filter(filter)
    for entity in filter.entities:
        if entity.type == TREND_FILTER_TYPE_ACTIONS:
            try:
                db_action = [
                    action for action in actions if action.id == entity.id
                ][0]
                entity.name = db_action.name
            except IndexError:
                # Entity references an action outside the queryset (e.g. deleted); skip it.
                continue
        trend_entity = serialize_entity(entity=entity,
                                        filter=filter,
                                        params=params,
                                        team_id=team_id)
        if compare and compared_filter:
            # Label the current period, then compute and append the previous period.
            trend_entity = convert_to_comparison(
                trend_entity, filter, "{} - {}".format(entity.name, "current"))
            entities_list.extend(trend_entity)
            compared_trend_entity = serialize_entity(entity=entity,
                                                     filter=compared_filter,
                                                     params=params,
                                                     team_id=team_id)
            compared_trend_entity = convert_to_comparison(
                compared_trend_entity,
                compared_filter,
                "{} - {}".format(entity.name, "previous"),
            )
            entities_list.extend(compared_trend_entity)
        else:
            entities_list.extend(trend_entity)
    return entities_list
def people(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Return up to 100 people behind a trend data point (volume or stickiness mode)."""
    team = request.user.team_set.get()
    filter = Filter(request=request)

    def _calculate_people(events: QuerySet):
        # Stickiness mode keeps only people seen on exactly `stickiness_days`
        # distinct days; otherwise just dedupe by person.
        shown_as = request.GET.get('shown_as')
        if shown_as is not None and shown_as == 'Stickiness':
            stickiness_days = int(request.GET['stickiness_days'])
            events = events\
                .values('person_id')\
                .annotate(day_count=Count(functions.TruncDay('timestamp'), distinct=True))\
                .filter(day_count=stickiness_days)
        else:
            events = events.values('person_id').distinct()
        if request.GET.get(
                'breakdown_type'
        ) == 'cohort' and request.GET.get('breakdown_value') != 'all':
            # Restrict to members of the selected cohort.
            events = events.filter(
                Exists(
                    CohortPeople.objects.filter(
                        cohort_id=int(request.GET['breakdown_value']),
                        person_id=OuterRef('person_id')).only('id')))
        # Cap at the first 100 matching people.
        people = Person.objects\
            .filter(team=team, id__in=[p['person_id'] for p in events[0:100]])
        people = people.prefetch_related(
            Prefetch('persondistinctid_set', to_attr='distinct_ids_cache'))
        return self._serialize_people(people=people, request=request)

    filtered_events: QuerySet = QuerySet()
    if request.GET.get('session'):
        # Session mode: all team events matching the filter, annotated with person ids.
        filtered_events = Event.objects.filter(team=team).filter(
            self._filter_events(filter)).add_person_id(team.pk)
    else:
        entity = Entity({
            'id': request.GET['entityId'],
            'type': request.GET['type']
        })
        if entity.type == TREND_FILTER_TYPE_EVENTS:
            filtered_events = self._process_entity_for_events(entity, team=team, order_by=None)\
                .filter(self._filter_events(filter, entity))
        elif entity.type == TREND_FILTER_TYPE_ACTIONS:
            actions = super().get_queryset()
            actions = actions.filter(deleted=False)
            try:
                action = actions.get(pk=entity.id)
            except Action.DoesNotExist:
                # Unknown or deleted action: no people to return.
                return Response([])
            filtered_events = self._process_entity_for_events(
                entity, team=team,
                order_by=None).filter(self._filter_events(filter, entity))
    people = _calculate_people(events=filtered_events)
    return Response([people])
def get_target_entity(request: request.Request) -> Entity:
    """Build the target Entity from the request's entity id/type query params.

    Raises ValueError when either parameter is missing.
    """
    entity_id = request.GET.get(ENTITY_ID)
    entity_type = request.GET.get(ENTITY_TYPE)
    # Guard clause: both parameters are required to identify an entity.
    if not (entity_id and entity_type):
        raise ValueError(
            "An entity must be provided for target entity to be determined")
    return Entity({"id": entity_id, "type": entity_type})
def serialize_entity(
    entity: Entity, filter: Filter, params: dict, team_id: int
) -> List[Dict[str, Any]]:
    """Serialize one entity's filtered events into chart series.

    Two modes, selected by params["shown_as"]: "Volume" (default, interval
    aggregation with an optional breakdown) and stickiness.
    """
    interval = params.get("interval")
    if interval is None:
        interval = "day"
    # Base payload each series starts from.
    serialized: Dict[str, Any] = {
        "action": entity.to_dict(),
        "label": entity.name,
        "count": 0,
        "data": [],
        "labels": [],
        "days": [],
    }
    response = []
    events = process_entity_for_events(
        entity=entity,
        team_id=team_id,
        # Stickiness does its own day bucketing; ordering is only needed for volume.
        order_by=None if params.get("shown_as") == "Stickiness" else "-timestamp",
    )
    events = events.filter(filter_events(team_id, filter, entity))
    if params.get("shown_as", "Volume") == "Volume":
        items = aggregate_by_interval(
            filtered_events=events,
            team_id=team_id,
            entity=entity,
            filter=filter,
            interval=interval,
            params=params,
            breakdown="properties__{}".format(params.get("breakdown"))
            if params.get("breakdown")
            else None,
        )
        for value, item in items.items():
            new_dict = copy.deepcopy(serialized)
            if value != "Total":
                # Breakdown series get a descriptive label.
                new_dict.update(breakdown_label(entity, value))
            new_dict.update(
                append_data(dates_filled=list(item.items()), interval=interval)
            )
            if filter.display == TRENDS_CUMULATIVE:
                # Running total over the period; np.cumsum returns an ndarray.
                new_dict["data"] = np.cumsum(new_dict["data"])
            response.append(new_dict)
    elif params.get("shown_as") == TRENDS_STICKINESS:
        new_dict = copy.deepcopy(serialized)
        new_dict.update(
            stickiness(
                filtered_events=events, entity=entity, filter=filter, team_id=team_id
            )
        )
        response.append(new_dict)
    return response
def _serialize_entity(self, entity: Entity, filter: Filter, request: request.Request, team: Team) -> List[Dict[str, Any]]:
    """Serialize one entity's filtered events into chart series (volume or stickiness)."""
    interval = request.GET.get('interval')
    if interval is None:
        interval = 'day'
    # Base payload each series starts from.
    serialized: Dict[str, Any] = {
        'action': entity.to_dict(),
        'label': entity.name,
        'count': 0,
        'data': [],
        'labels': [],
        'days': []
    }
    response = []
    events = self._process_entity_for_events(
        entity=entity,
        team=team,
        # Stickiness does its own day bucketing; ordering is only needed for volume.
        order_by=None if request.GET.get('shown_as') == 'Stickiness' else '-timestamp')
    events = events.filter(self._filter_events(filter, entity))
    if request.GET.get('shown_as', 'Volume') == 'Volume':
        items = self._aggregate_by_interval(
            filtered_events=events,
            team=team,
            entity=entity,
            filter=filter,
            interval=interval,
            request=request,
            breakdown='properties__{}'.format(request.GET['breakdown'])
            if request.GET.get('breakdown') else None,
        )
        for value, item in items.items():
            new_dict = copy.deepcopy(serialized)
            if value != 'Total':
                # Breakdown series get a descriptive label.
                new_dict.update(self._breakdown_label(entity, value))
            new_dict.update(
                append_data(dates_filled=list(item.items()), interval=interval))
            if filter.display == TRENDS_CUMULATIVE:
                # Running total over the period; np.cumsum returns an ndarray.
                new_dict['data'] = np.cumsum(new_dict['data'])
            response.append(new_dict)
    elif request.GET['shown_as'] == TRENDS_STICKINESS:
        # Only reachable when shown_as is present and not 'Volume', so the
        # direct key access cannot raise here.
        new_dict = copy.deepcopy(serialized)
        new_dict.update(
            self._stickiness(filtered_events=events, entity=entity, filter=filter))
        response.append(new_dict)
    return response
def format_entity_filter(entity: Entity, prepend: str = "action", filter_by_team=True) -> Tuple[str, Dict]:
    """Return a (SQL condition, params) pair matching the given entity's events."""
    if entity.type == TREND_FILTER_TYPE_ACTIONS:
        # Actions expand into their own (possibly multi-step) filter.
        action = entity.get_action()
        return format_action_filter(action, prepend=prepend, filter_by_team=filter_by_team)
    # Plain events match on the event name, bound via a prefixed parameter.
    key = f"{prepend}_event"
    return f"event = %({key})s", {key: entity.id}
def retention(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Run retention over the last 11 days, optionally anchored on a start entity."""
    team = request.user.team_set.get()
    raw_properties = request.GET.get("properties", "{}")
    raw_start_entity = request.GET.get("start_entity", None)

    start_entity: Optional[Entity] = None
    if raw_start_entity:
        parsed = json.loads(raw_start_entity)
        start_entity = Entity({"id": parsed["id"], "type": parsed["type"]})

    filter = Filter(data={"properties": json.loads(raw_properties)})
    # Retention always looks back over an 11-day window.
    filter._date_from = "-11d"
    result = calculate_retention(filter, team, start_entity=start_entity)
    return Response(result)
def test_retention_action_start_point(self):
    """Retention anchored on an action entity; aliased distinct IDs
    (person1/alias1) must be counted as a single person."""
    person1 = Person.objects.create(team=self.team, distinct_ids=["person1", "alias1"])
    person2 = Person.objects.create(team=self.team, distinct_ids=["person2"])
    # Signup events spread across a week; the alias1 event on day 5 belongs to person1.
    action = self._create_signup_actions([
        ("person1", self._date(0)),
        ("person1", self._date(1)),
        ("person1", self._date(2)),
        ("person1", self._date(5)),
        ("alias1", self._date(5, 9)),
        ("person1", self._date(6)),
        ("person2", self._date(1)),
        ("person2", self._date(2)),
        ("person2", self._date(3)),
        ("person2", self._date(6)),
    ])
    # Anchor the retention cohort on the created action.
    start_entity = Entity({
        "id": action.pk,
        "type": TREND_FILTER_TYPE_ACTIONS
    })
    result = Retention().run(
        Filter(data={
            "date_from": self._date(0, hour=0),
            "entities": [start_entity]
        }),
        self.team,
        total_days=7)
    self.assertEqual(len(result), 7)
    self.assertEqual(
        self.pluck(result, "label"),
        ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
    )
    self.assertEqual(result[0]["date"], "Wed. 10 June")
    # Row i is the cohort first seen on day i; its list holds the cohort size
    # followed by how many returned on each subsequent day.
    self.assertEqual(
        self.pluck(result, "values", "count"),
        [
            [1, 1, 1, 0, 0, 1, 1],
            [2, 2, 1, 0, 1, 2],
            [2, 1, 0, 1, 2],
            [1, 0, 0, 1],
            [0, 0, 0],
            [1, 1],
            [2],
        ],
    )
def _serialize_entity(self, entity: Entity, filter: StickinessFilter, team_id: int) -> List[Dict[str, Any]]:
    """Wrap a single entity's stickiness numbers in the standard series shape."""
    base: Dict[str, Any] = {
        "action": entity.to_dict(),
        "label": entity.name,
        "count": 0,
        "data": [],
        "labels": [],
        "days": [],
    }
    payload = copy.deepcopy(base)
    payload.update(self.stickiness(entity=entity, filter=filter, team_id=team_id))
    return [payload]
def retention(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Run the retention query for the team over the last 11 days."""
    team = self.team
    raw_properties = request.GET.get("properties", "{}")
    filter = RetentionFilter(data={"properties": json.loads(raw_properties)})

    start_entity_data = request.GET.get("start_entity", None)
    if start_entity_data:
        # Anchor the retention cohort on the requested entity.
        parsed = json.loads(start_entity_data)
        filter.entities = [Entity({"id": parsed["id"], "type": parsed["type"]})]

    filter._date_from = "-11d"
    return Response({"data": retention.Retention().run(filter, team)})
def _serialize_step(
        self, step: Entity, count: int, people: Optional[List[uuid.UUID]] = None) -> Dict[str, Any]:
    """Serialize one funnel step into the API response shape."""
    if step.type == TREND_FILTER_TYPE_ACTIONS:
        display_name = step.get_action().name
    else:
        # For raw events the entity id doubles as the event name.
        display_name = step.id
    return {
        "action_id": step.id,
        "name": display_name,
        "order": step.order,
        "people": people or [],
        "count": count,
        "type": step.type,
    }
def people(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Return up to 100 people matching the requested entity (volume or stickiness)."""
    team = request.user.team_set.get()
    entity = Entity({
        'id': request.GET['entityId'],
        'type': request.GET['type']
    })
    filter = Filter(request=request)

    def _calculate_people(entity: Entity, events: QuerySet):
        if request.GET.get('shown_as', 'Volume') == 'Volume':
            events = events.values('person_id').distinct()
        elif request.GET['shown_as'] == 'Stickiness':
            # Keep only people active on exactly `stickiness_days` distinct days.
            stickiness_days = int(request.GET['stickiness_days'])
            events = events\
                .values('person_id')\
                .annotate(day_count=Count(functions.TruncDay('timestamp'), distinct=True))\
                .filter(day_count=stickiness_days)
        # NOTE(review): if shown_as is present but neither 'Volume' nor
        # 'Stickiness', events is never grouped by person_id and the slice
        # below would break — presumably those are the only two values; confirm.
        people = Person.objects\
            .filter(team=team, id__in=[p['person_id'] for p in events[0:100]])
        return self._serialize_people(entity=entity, people=people, request=request)

    if entity.type == TREND_FILTER_TYPE_EVENTS:
        filtered_events = self._process_entity_for_events(entity, team=team, order_by=None)\
            .filter(self._filter_events(filter))
        people = _calculate_people(entity=entity, events=filtered_events)
        return Response([people])
    elif entity.type == TREND_FILTER_TYPE_ACTIONS:
        actions = super().get_queryset()
        actions = actions.filter(deleted=False)
        try:
            action = actions.get(pk=entity.id)
        except Action.DoesNotExist:
            # Unknown or deleted action: no people to return.
            return Response([])
        filtered_events = self._process_entity_for_events(
            entity, team=team,
            order_by=None).filter(self._filter_events(filter))
        people = _calculate_people(entity=entity, events=filtered_events)
        return Response([people])
    return Response([])
def _format_serialized(self, entity: Entity, result: List[Dict[str, Any]]):
    """Merge each queried metric into a fresh copy of the entity's base payload."""
    base: Dict[str, Any] = {
        "action": entity.to_dict(),
        "label": entity.name,
        "count": 0,
        "data": [],
        "labels": [],
        "days": [],
    }

    def _merge(queried_metric: Dict[str, Any]) -> Dict[str, Any]:
        # Deep-copy so the mutable defaults (lists) are never shared across series.
        merged = copy.deepcopy(base)
        merged.update(queried_metric)
        return merged

    return [_merge(queried_metric) for queried_metric in result]
def get_target_entity(filter: Union[Filter, StickinessFilter]) -> Entity:
    """Resolve the entity referenced by the filter's target_entity_id/type.

    Raises ValueError when the id is missing, or when no entity can be
    resolved and no type was supplied to build one.
    """
    if not filter.target_entity_id:
        raise ValueError(
            "An entity id and the entity type must be provided to determine an entity"
        )
    entity_math = filter.target_entity_math or "total"  # make math explicit
    possible_entity = retrieve_entity_from(
        filter.target_entity_id,
        filter.target_entity_type,
        entity_math,
        filter.events,
        filter.actions,
    )
    if possible_entity:
        return possible_entity
    if not filter.target_entity_type:
        raise ValueError(
            "An entity must be provided for target entity to be determined"
        )
    # Fall back to a synthetic entity built directly from the filter fields.
    return Entity({
        "id": filter.target_entity_id,
        "type": filter.target_entity_type,
        "math": entity_math
    })
def people(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Return one page (100) of people behind a trend data point, with
    next/previous pagination URLs in the payload."""
    team = request.user.team_set.get()
    filter = Filter(request=request)
    offset = int(request.GET.get("offset", 0))

    def _calculate_people(events: QuerySet, offset: int):
        # Stickiness keeps only people active on exactly `stickiness_days`
        # distinct days; otherwise just dedupe by person.
        shown_as = request.GET.get("shown_as")
        if shown_as is not None and shown_as == "Stickiness":
            stickiness_days = int(request.GET["stickiness_days"])
            events = (
                events.values("person_id")
                .annotate(
                    day_count=Count(functions.TruncDay("timestamp"), distinct=True)
                )
                .filter(day_count=stickiness_days)
            )
        else:
            events = events.values("person_id").distinct()
        if (
            request.GET.get("breakdown_type") == "cohort"
            and request.GET.get("breakdown_value") != "all"
        ):
            # Restrict to members of the selected cohort.
            events = events.filter(
                Exists(
                    CohortPeople.objects.filter(
                        cohort_id=int(request.GET["breakdown_value"]),
                        person_id=OuterRef("person_id"),
                    ).only("id")
                )
            )
        # Page of 100 people starting at `offset`.
        people = Person.objects.filter(
            team=team,
            id__in=[p["person_id"] for p in events[offset : offset + 100]],
        )
        people = people.prefetch_related(
            Prefetch("persondistinctid_set", to_attr="distinct_ids_cache")
        )
        return serialize_people(people=people, request=request)

    filtered_events: QuerySet = QuerySet()
    if request.GET.get("session"):
        # Session mode: all team events matching the filter, annotated with person ids.
        filtered_events = (
            Event.objects.filter(team=team)
            .filter(filter_events(team.pk, filter))
            .add_person_id(team.pk)
        )
    else:
        # Prefer the entity embedded in the filter; fall back to explicit query params.
        if len(filter.entities) >= 1:
            entity = filter.entities[0]
        else:
            entity = Entity(
                {"id": request.GET["entityId"], "type": request.GET["type"]}
            )
        if entity.type == TREND_FILTER_TYPE_EVENTS:
            filtered_events = process_entity_for_events(
                entity, team_id=team.pk, order_by=None
            ).filter(filter_events(team.pk, filter, entity))
        elif entity.type == TREND_FILTER_TYPE_ACTIONS:
            actions = super().get_queryset()
            actions = actions.filter(deleted=False)
            try:
                action = actions.get(pk=entity.id)
            except Action.DoesNotExist:
                # Unknown or deleted action: no people to return.
                return Response([])
            filtered_events = process_entity_for_events(
                entity, team_id=team.pk, order_by=None
            ).filter(filter_events(team.pk, filter, entity))
    people = _calculate_people(events=filtered_events, offset=offset)
    current_url = request.get_full_path()
    next_url: Optional[str] = request.get_full_path()
    # A full page (count > 99) implies there may be more results to page through.
    if people["count"] > 99 and next_url:
        if "offset" in next_url:
            # NOTE(review): [1:] strips the leading "/" — presumably to return a
            # relative path, matching `previous` below; confirm intent.
            next_url = next_url[1:]
            next_url = next_url.replace(
                "offset=" + str(offset), "offset=" + str(offset + 100)
            )
        else:
            next_url = request.build_absolute_uri(
                "{}{}offset={}".format(
                    next_url, "&" if "?" in next_url else "?", offset + 100
                )
            )
    else:
        next_url = None
    return Response(
        {"results": [people], "next": next_url, "previous": current_url[1:]}
    )
def get_people(self, request: request.Request) -> Union[Dict[str, Any], List]:
    """Return one page (100) of people behind a trend data point, with
    pagination URLs; returns [] when the requested action does not exist."""
    team = self.team
    filter = Filter(request=request)
    offset = int(request.GET.get("offset", 0))

    def _calculate_people(events: QuerySet, offset: int):
        events = events.values("person_id").distinct()
        if request.GET.get(
                "breakdown_type"
        ) == "cohort" and request.GET.get("breakdown_value") != "all":
            # Restrict to members of the selected cohort.
            events = events.filter(
                Exists(
                    CohortPeople.objects.filter(
                        cohort_id=int(request.GET["breakdown_value"]),
                        person_id=OuterRef("person_id"),
                    ).only("id")))
        if request.GET.get("breakdown_type") == "person":
            # Restrict to people whose property matches the breakdown value.
            events = events.filter(
                Exists(
                    Person.objects.filter(
                        **{
                            "id": OuterRef("person_id"),
                            "properties__{}".format(request.GET["breakdown"]): request.GET["breakdown_value"],
                        }).only("id")))
        # Page of 100 people starting at `offset`.
        people = Person.objects.filter(
            team=team,
            id__in=[p["person_id"] for p in events[offset:offset + 100]])
        people = people.prefetch_related(
            Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
        return PersonSerializer(people, context={
            "request": request
        }, many=True).data

    filtered_events: QuerySet = QuerySet()
    if request.GET.get("session"):
        # Session mode: all team events matching the filter, annotated with person ids.
        filtered_events = (Event.objects.filter(team=team).filter(
            base.filter_events(team.pk, filter)).add_person_id(team.pk))
    else:
        # Prefer the entity embedded in the filter; fall back to explicit query params.
        if len(filter.entities) >= 1:
            entity = filter.entities[0]
        else:
            entity = Entity({
                "id": request.GET["entityId"],
                "type": request.GET["type"]
            })
        if entity.type == TREND_FILTER_TYPE_EVENTS:
            filtered_events = base.process_entity_for_events(
                entity, team_id=team.pk, order_by=None).filter(
                    base.filter_events(team.pk, filter, entity))
        elif entity.type == TREND_FILTER_TYPE_ACTIONS:
            actions = super().get_queryset()
            actions = actions.filter(deleted=False)
            try:
                action = actions.get(pk=entity.id)
            except Action.DoesNotExist:
                # Unknown or deleted action: empty result list.
                return []
            filtered_events = base.process_entity_for_events(
                entity, team_id=team.pk, order_by=None).filter(
                    base.filter_events(team.pk, filter, entity))
    people = _calculate_people(events=filtered_events, offset=offset)
    current_url = request.get_full_path()
    next_url = paginated_result(people, request, offset)
    # NOTE(review): [1:] drops the leading "/" from the previous-page path,
    # presumably to yield a relative URL — confirm intent.
    return {
        "results": [{
            "people": people,
            "count": len(people)
        }],
        "next": next_url,
        "previous": current_url[1:]
    }
def people(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Return up to 100 people behind a trend data point (volume or stickiness mode)."""
    team = request.user.team_set.get()
    filter = Filter(request=request)

    def _calculate_people(events: QuerySet):
        # Stickiness keeps only people active on exactly `stickiness_days`
        # distinct days; otherwise just dedupe by person.
        shown_as = request.GET.get("shown_as")
        if shown_as is not None and shown_as == "Stickiness":
            stickiness_days = int(request.GET["stickiness_days"])
            events = (events.values("person_id").annotate(day_count=Count(
                functions.TruncDay("timestamp"), distinct=True)).filter(
                    day_count=stickiness_days))
        else:
            events = events.values("person_id").distinct()
        if (request.GET.get("breakdown_type") == "cohort"
                and request.GET.get("breakdown_value") != "all"):
            # Restrict to members of the selected cohort.
            events = events.filter(
                Exists(
                    CohortPeople.objects.filter(
                        cohort_id=int(request.GET["breakdown_value"]),
                        person_id=OuterRef("person_id"),
                    ).only("id")))
        # Cap at the first 100 matching people.
        people = Person.objects.filter(
            team=team, id__in=[p["person_id"] for p in events[0:100]])
        people = people.prefetch_related(
            Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
        return serialize_people(people=people, request=request)

    filtered_events: QuerySet = QuerySet()
    if request.GET.get("session"):
        # Session mode: all team events matching the filter, annotated with person ids.
        filtered_events = (Event.objects.filter(team=team).filter(
            filter_events(team.pk, filter)).add_person_id(team.pk))
    else:
        # Prefer the entity embedded in the filter; fall back to explicit query params.
        if len(filter.entities) >= 1:
            entity = filter.entities[0]
        else:
            entity = Entity({
                "id": request.GET["entityId"],
                "type": request.GET["type"]
            })
        if entity.type == TREND_FILTER_TYPE_EVENTS:
            filtered_events = process_entity_for_events(
                entity, team_id=team.pk, order_by=None).filter(
                    filter_events(team.pk, filter, entity))
        elif entity.type == TREND_FILTER_TYPE_ACTIONS:
            actions = super().get_queryset()
            actions = actions.filter(deleted=False)
            try:
                action = actions.get(pk=entity.id)
            except Action.DoesNotExist:
                # Unknown or deleted action: no people to return.
                return Response([])
            filtered_events = process_entity_for_events(
                entity, team_id=team.pk, order_by=None).filter(
                    filter_events(team.pk, filter, entity))
    people = _calculate_people(events=filtered_events)
    return Response([people])