def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response.Response:
    """List events with cursor-style pagination, defaulting to the current week.

    Up to 101 rows are fetched so the presence of a 101st row signals that a
    next page exists; a "next" URL is built from the timestamp of the 100th
    row. CSV requests skip pagination and fetch up to 100k rows at once.
    """
    queryset = self.get_queryset()
    is_csv_request = self.request.accepted_renderer.format == "csv"

    # Start of the current week (Monday 00:00). NOTE(review): microseconds are
    # not zeroed by this replace() — presumably acceptable slack; confirm.
    monday = now() + timedelta(days=-now().weekday())
    events = queryset.filter(timestamp__gte=monday.replace(hour=0, minute=0, second=0))[0:101]

    if not is_csv_request and len(events) < 101:
        # This week doesn't fill a full page — fall back to the unbounded queryset.
        events = queryset[0:101]
    elif is_csv_request:
        events = queryset[0:100000]

    # list(events) forces evaluation once; avoids the redundant comprehension copy.
    prefetched_events = self._prefetch_events(list(events))
    path = request.get_full_path()

    # Non-default ordering means the cursor moves forward ("after") in time.
    reverse = request.GET.get("orderBy", "-timestamp") != "-timestamp"
    if not is_csv_request and len(events) > 100:
        # Anchor the cursor on the last event actually shown (index 99 of 100).
        next_url: Optional[str] = request.build_absolute_uri(
            "{}{}{}={}".format(
                path,
                "&" if "?" in path else "?",
                "after" if reverse else "before",
                events[99].timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
            )
        )
    else:
        next_url = None

    return response.Response(
        {
            "next": next_url,
            "results": EventSerializer(
                prefetched_events, many=True, context={"format": self.request.accepted_renderer.format}
            ).data,
        }
    )
def get_people(self, request: request.Request) -> Union[Dict[str, Any], List]:
    """Return the distinct people behind the filtered events, paginated by offset.

    Returns a dict with "results" (people plus their count), a "next" URL from
    paginated_result, and "previous" (the current path without its leading
    slash) — or an empty list when the targeted action no longer exists.
    """
    team = self.team
    filter = Filter(request=request)
    offset = int(request.GET.get("offset", 0))

    def _calculate_people(events: QuerySet, offset: int):
        # One row per distinct person behind the filtered events.
        events = events.values("person_id").distinct()

        # Cohort breakdown narrows to members of the chosen cohort;
        # the "all" pseudo-cohort means no narrowing.
        if request.GET.get("breakdown_type") == "cohort" and request.GET.get("breakdown_value") != "all":
            events = events.filter(
                Exists(
                    CohortPeople.objects.filter(
                        cohort_id=int(request.GET["breakdown_value"]),
                        person_id=OuterRef("person_id"),
                    ).only("id")
                )
            )
        # Person-property breakdown narrows to people whose property matches.
        if request.GET.get("breakdown_type") == "person":
            events = events.filter(
                Exists(
                    Person.objects.filter(
                        **{
                            "id": OuterRef("person_id"),
                            "team_id": self.team.pk,
                            "properties__{}".format(request.GET["breakdown"]): request.GET["breakdown_value"],
                        }
                    ).only("id")
                )
            )

        # Page of 100 person ids, hydrated with distinct_ids prefetched.
        people = Person.objects.filter(
            team=team, id__in=[p["person_id"] for p in events[offset : offset + 100]]
        )
        people = people.prefetch_related(Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
        return PersonSerializer(people, context={"request": request}, many=True).data

    filtered_events: QuerySet = QuerySet()
    if request.GET.get("session"):
        filtered_events = (
            Event.objects.filter(team=team)
            .filter(base.filter_events(team.pk, filter))
            .add_person_id(team.pk)
        )
    else:
        entity = get_target_entity(request)
        if entity.type == TREND_FILTER_TYPE_EVENTS:
            filtered_events = base.process_entity_for_events(entity, team_id=team.pk, order_by=None).filter(
                base.filter_events(team.pk, filter, entity)
            )
        elif entity.type == TREND_FILTER_TYPE_ACTIONS:
            actions = super().get_queryset().filter(deleted=False)
            try:
                # Existence check only — bail out with an empty result when the
                # action has been deleted since the filter was built.
                actions.get(pk=entity.id)
            except Action.DoesNotExist:
                return []
            filtered_events = base.process_entity_for_events(entity, team_id=team.pk, order_by=None).filter(
                base.filter_events(team.pk, filter, entity)
            )

    people = _calculate_people(events=filtered_events, offset=offset)

    current_url = request.get_full_path()
    next_url = paginated_result(people, request, offset)

    return {
        "results": [{"people": people, "count": len(people)}],
        "next": next_url,
        "previous": current_url[1:],
    }
def people(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Return the people behind the filtered events for the user's team.

    Supports a "Stickiness" mode (people active on exactly N distinct days)
    and cohort breakdowns. Pages by offset in steps of 100; a "next" URL is
    emitted when a full page came back (count > 99).
    """
    team = request.user.team_set.get()
    filter = Filter(request=request)
    offset = int(request.GET.get("offset", 0))

    def _calculate_people(events: QuerySet, offset: int):
        if request.GET.get("shown_as") == "Stickiness":
            # Stickiness: keep only people active on exactly N distinct days.
            stickiness_days = int(request.GET["stickiness_days"])
            events = (
                events.values("person_id")
                .annotate(day_count=Count(functions.TruncDay("timestamp"), distinct=True))
                .filter(day_count=stickiness_days)
            )
        else:
            events = events.values("person_id").distinct()

        # Cohort breakdown narrows to members of the chosen cohort;
        # the "all" pseudo-cohort means no narrowing.
        if (
            request.GET.get("breakdown_type") == "cohort"
            and request.GET.get("breakdown_value") != "all"
        ):
            events = events.filter(
                Exists(
                    CohortPeople.objects.filter(
                        cohort_id=int(request.GET["breakdown_value"]),
                        person_id=OuterRef("person_id"),
                    ).only("id")
                )
            )

        # Page of 100 person ids, hydrated with distinct_ids prefetched.
        people = Person.objects.filter(
            team=team,
            id__in=[p["person_id"] for p in events[offset : offset + 100]],
        )
        people = people.prefetch_related(Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
        return serialize_people(people=people, request=request)

    filtered_events: QuerySet = QuerySet()
    if request.GET.get("session"):
        filtered_events = (
            Event.objects.filter(team=team)
            .filter(filter_events(team.pk, filter))
            .add_person_id(team.pk)
        )
    else:
        # Prefer the first entity in the filter; otherwise build one from
        # the explicit entityId/type query parameters.
        if len(filter.entities) >= 1:
            entity = filter.entities[0]
        else:
            entity = Entity({"id": request.GET["entityId"], "type": request.GET["type"]})

        if entity.type == TREND_FILTER_TYPE_EVENTS:
            filtered_events = process_entity_for_events(entity, team_id=team.pk, order_by=None).filter(
                filter_events(team.pk, filter, entity)
            )
        elif entity.type == TREND_FILTER_TYPE_ACTIONS:
            actions = super().get_queryset().filter(deleted=False)
            try:
                # Existence check only — a deleted action yields an empty response.
                actions.get(pk=entity.id)
            except Action.DoesNotExist:
                return Response([])
            filtered_events = process_entity_for_events(entity, team_id=team.pk, order_by=None).filter(
                filter_events(team.pk, filter, entity)
            )

    people = _calculate_people(events=filtered_events, offset=offset)

    current_url = request.get_full_path()
    next_url: Optional[str] = request.get_full_path()
    # A full page (count > 99) means more results may exist — build the next URL.
    if people["count"] > 99 and next_url:
        if "offset" in next_url:
            # Strip the leading slash and bump the existing offset by one page.
            next_url = next_url[1:]
            next_url = next_url.replace("offset=" + str(offset), "offset=" + str(offset + 100))
        else:
            next_url = request.build_absolute_uri(
                "{}{}offset={}".format(next_url, "&" if "?" in next_url else "?", offset + 100)
            )
    else:
        next_url = None

    return Response({"results": [people], "next": next_url, "previous": current_url[1:]})