def stats(self, request: request.Request) -> response.Response:
    """Return per-element-chain event counts for the current user's team.

    Builds property filters from the request, runs the GET_ELEMENTS
    ClickHouse query, and serializes each resulting element chain with
    its count. ``hash`` is always ``None`` in this payload.
    """
    element_filter = Filter(request=request)  # renamed to avoid shadowing the builtin `filter`
    team = request.user.team
    assert team is not None
    date_from, date_to = parse_timestamps(element_filter)
    prop_filters, prop_filter_params = parse_prop_clauses(element_filter.properties, team.pk)
    rows = sync_execute(
        GET_ELEMENTS.format(date_from=date_from, date_to=date_to, query=prop_filters),
        {"team_id": team.id, **prop_filter_params},
    )
    payload = []
    for row in rows:
        # row[0] is the serialized element chain, row[1] its event count.
        serialized_chain = [ElementSerializer(element).data for element in chain_to_elements(row[0])]
        payload.append({"count": row[1], "hash": None, "elements": serialized_chain})
    return response.Response(payload)
def _session_list(
    self,
    base_query: str,
    params: Tuple[Any, ...],
    team: Team,
    filter: Filter,
    offset: int,
    limit: int = 50,
) -> List[Dict[str, Any]]:
    """Fetch a page of sessions (with their events) for ``team``.

    Groups the events produced by ``base_query`` into sessions, joins
    person data, and attaches serialized elements to each event that
    carries an ``elements_hash``.

    Args:
        base_query: SQL producing the events to group (interpolated into
            the outer query; must come from trusted query-builder code).
        params: positional parameters for ``base_query``.
        team: team whose sessions are listed.
        filter: parsed request filter (currently unused here; kept for
            interface compatibility with callers).
        offset: pagination offset.
        limit: page size (defaults to the previous hard-coded 50).

    Returns:
        A list of session dicts, each with an ``events`` list whose
        entries gain an ``elements`` key.
    """
    # NOTE(fix): the persondistinctid join is now scoped by team_id —
    # without it, a distinct_id shared across teams could join another
    # team's person row (cross-team data leak).
    session_list = """
        SELECT * FROM (
            SELECT
                global_session_id, properties, start_time, length,
                sessions.distinct_id, event_count, events
            FROM (
                SELECT
                    global_session_id,
                    count(1) as event_count,
                    MAX(distinct_id) as distinct_id,
                    EXTRACT('EPOCH' FROM (MAX(timestamp) - MIN(timestamp))) AS length,
                    MIN(timestamp) as start_time,
                    array_agg(json_build_object(
                        'id', id,
                        'event', event,
                        'timestamp', timestamp,
                        'properties', properties,
                        'elements_hash', elements_hash) ORDER BY timestamp) as events
                FROM ({base_query}) as count GROUP BY 1
            ) as sessions
            LEFT OUTER JOIN posthog_persondistinctid
                ON posthog_persondistinctid.distinct_id = sessions.distinct_id
                AND posthog_persondistinctid.team_id = %s
            LEFT OUTER JOIN posthog_person
                ON posthog_person.id = posthog_persondistinctid.person_id
            ORDER BY start_time DESC
        ) as ordered_sessions
        OFFSET %s LIMIT %s
    """.format(base_query=base_query)
    with connection.cursor() as cursor:
        cursor.execute(session_list, params + (team.pk, offset, limit))
        sessions = dict_from_cursor_fetchall(cursor)

    hash_ids = [
        event["elements_hash"]
        for session in sessions
        for event in session["events"]
        if event.get("elements_hash")
    ]
    groups = self._prefetch_elements(hash_ids, team)
    # O(1) lookup instead of an O(groups) scan per event; setdefault keeps
    # the FIRST matching group, matching the old `[...][0]` behavior.
    groups_by_hash: Dict[str, Any] = {}
    for group in groups:
        groups_by_hash.setdefault(group.hash, group)

    for session in sessions:
        for event in session["events"]:
            group = groups_by_hash.get(event.get("elements_hash"))
            if group is not None:
                event["elements"] = ElementSerializer(
                    group.element_set.all().order_by("order"), many=True
                ).data
            else:
                event["elements"] = []
    return sessions
def stats(self, request: request.Request, **kwargs) -> response.Response:  # type: ignore
    """Return per-element-chain event counts for ``self.team``.

    Builds timestamp and property clauses from the request filter, runs
    the GET_ELEMENTS query, and serializes each element chain alongside
    its count. ``hash`` is always ``None`` in this payload.
    """
    team = self.team
    stats_filter = Filter(request=request, team=team)
    date_from, date_to, date_params = parse_timestamps(stats_filter, team_id=team.pk)
    prop_filters, prop_filter_params = parse_prop_clauses(stats_filter.properties, team.pk)
    query = GET_ELEMENTS.format(date_from=date_from, date_to=date_to, query=prop_filters)
    rows = sync_execute(query, {"team_id": team.pk, **prop_filter_params, **date_params})

    payload = []
    for row in rows:
        # row[0] is the serialized element chain, row[1] its event count.
        serialized_chain = [ElementSerializer(element).data for element in chain_to_elements(row[0])]
        payload.append({"count": row[1], "hash": None, "elements": serialized_chain})
    return response.Response(payload)
def _session_list(
    self,
    base_query: Query,
    params: QueryParams,
    team: Team,
    filter: SessionsFilter,
    limit: int,
    offset: int,
) -> List[Dict[str, Any]]:
    """Fetch a page of sessions (with their events) for ``team``.

    Groups the events from ``base_query`` into sessions, joins person
    data scoped to the team, attaches serialized elements to events that
    carry an ``elements_hash``, and finally filters the sessions by
    their recordings.
    """
    query = """
        SELECT * FROM (
            SELECT
                global_session_id, properties, start_time, end_time, length,
                sessions.distinct_id, event_count, events
            FROM (
                SELECT
                    global_session_id,
                    count(1) as event_count,
                    MAX(distinct_id) as distinct_id,
                    EXTRACT('EPOCH' FROM (MAX(timestamp) - MIN(timestamp))) AS length,
                    MIN(timestamp) as start_time,
                    MAX(timestamp) as end_time,
                    array_agg(json_build_object(
                        'id', id,
                        'event', event,
                        'timestamp', timestamp,
                        'properties', properties,
                        'elements_hash', elements_hash) ORDER BY timestamp) as events
                FROM ({base_query}) as count GROUP BY 1
            ) as sessions
            LEFT OUTER JOIN posthog_persondistinctid
                ON posthog_persondistinctid.distinct_id = sessions.distinct_id
                AND posthog_persondistinctid.team_id = %s
            LEFT OUTER JOIN posthog_person
                ON posthog_person.id = posthog_persondistinctid.person_id
            ORDER BY start_time DESC
        ) as ordered_sessions
        OFFSET %s LIMIT %s
    """.format(base_query=base_query)
    # Placeholder order: team scope for the person join, then pagination.
    query_params = params + (team.pk, offset, limit)
    with connection.cursor() as cursor:
        cursor.execute(query, query_params)
        sessions = dict_from_cursor_fetchall(cursor)

    hash_ids = [
        event["elements_hash"]
        for session in sessions
        for event in session["events"]
        if event.get("elements_hash")
    ]
    groups = self._prefetch_elements(hash_ids, team)

    for session in sessions:
        for event in session["events"]:
            group = groups.get(event["elements_hash"])
            event["elements"] = (
                ElementSerializer(group.element_set, many=True).data if group else []
            )
    return filter_sessions_by_recordings(team, sessions, filter)