def test_hour(self):
    """Relative hour offsets resolve to the expected UTC instants."""
    cases = [
        ("-24h", "2020-01-30T12:00:00+00:00"),
        ("-48h", "2020-01-29T12:00:00+00:00"),
    ]
    for query, expected in cases:
        self.assertEqual(relative_date_parse(query).isoformat(), expected)
def test_year(self):
    """Relative year offsets and yStart anchors resolve to the expected dates."""
    cases = [
        ('-1y', '2019-01-31'),
        ('-2y', '2018-01-31'),
        ('yStart', '2020-01-01'),
        ('-1yStart', '2019-01-01'),
    ]
    for query, expected in cases:
        self.assertEqual(relative_date_parse(query).strftime("%Y-%m-%d"), expected)
def test_year(self):
    """Relative year offsets and yStart anchors resolve to the expected dates."""
    for query, expected in (
        ("-1y", "2019-01-31"),
        ("-2y", "2018-01-31"),
        ("yStart", "2020-01-01"),
        ("-1yStart", "2019-01-01"),
    ):
        self.assertEqual(relative_date_parse(query).strftime("%Y-%m-%d"), expected)
def list(self, request):
    """Build a source->target URL transition table over 4 positions (sankey-style paths)."""
    team = request.user.team_set.get()
    resp = []
    aggregate = PersonDistinctId.objects.filter(team=team)
    # Resolve the date range from query params; defaults to the last 7 days.
    # 'all' is parsed first and then overridden to None (no lower bound).
    if request.GET.get('date_from'):
        date_from = relative_date_parse(request.GET['date_from'])
        if request.GET['date_from'] == 'all':
            date_from = None  # type: ignore
    else:
        date_from = datetime.date.today() - relativedelta(days=7)
    if request.GET.get('date_to'):
        date_to = relative_date_parse(request.GET['date_to'])
    else:
        date_to = datetime.date.today()
    aggregate = self._add_event_and_url_at_position(
        aggregate, team, 1, date_from, date_to)
    # `urls` starts as False (no restriction on the first pass), then becomes
    # the list of target URLs seen in the previous step.
    urls = False
    for index in range(1, 4):
        aggregate = self._add_event_and_url_at_position(
            aggregate, team, index + 1, date_from, date_to)
        first_url_key = 'url_{}'.format(index)
        second_url_key = 'url_{}'.format(index + 1)
        # Top 6 (first_url, second_url) transitions, restricted to URLs
        # reached in the previous step when there are any.
        rows = aggregate\
            .filter(
                **({'{}__in'.format(first_url_key): urls} if urls else {}),
                **{'{}__isnull'.format(second_url_key): False}
            )\
            .values(
                first_url_key,
                second_url_key
            )\
            .annotate(count=Count('pk'))\
            .order_by('-count')[0: 6]
        urls = []
        for row in rows:
            # Node ids are prefixed with the step index so the same URL at
            # different positions stays distinct.
            resp.append({
                'source': '{}_{}'.format(index, row[first_url_key]),
                'target': '{}_{}'.format(index + 1, row[second_url_key]),
                'value': row['count']
            })
            urls.append(row[second_url_key])
    resp = sorted(resp, key=lambda x: x['value'], reverse=True)
    return Response(resp)
def _get_dates_from_request(self, request: request.Request) -> Tuple[datetime.date, datetime.date]:
    """Resolve the (date_from, date_to) range from query params.

    Defaults to the last 7 days; 'all' maps date_from to None (unbounded).
    """
    params = request.GET
    if params.get('date_from'):
        date_from = relative_date_parse(params['date_from'])
        if params['date_from'] == 'all':
            date_from = None  # type: ignore
    else:
        date_from = datetime.date.today() - relativedelta(days=7)
    date_to = relative_date_parse(params['date_to']) if params.get('date_to') else datetime.date.today()
    return date_from, date_to
def date_to(self) -> datetime.datetime:
    """Upper bound of the date range; midnight of today when unset."""
    if not self._date_to:
        return timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
    return relative_date_parse(self._date_to)
def date_from(self) -> Optional[datetime.datetime]:
    """Lower bound of the date range.

    Returns None for 'all' (unbounded); defaults to midnight 7 days ago.
    """
    if not self._date_from:
        return timezone.now().replace(
            hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=7)
    if self._date_from == "all":
        return None
    return relative_date_parse(self._date_from)
def _exec_query(self) -> List[Tuple]: prop_filters, prop_filter_params = parse_prop_clauses( self._filter.properties, self._team.pk, prepend="global") # format default dates data = {} if not self._filter._date_from: data.update({"date_from": relative_date_parse("-7d")}) if not self._filter._date_to: data.update({"date_to": timezone.now()}) self._filter = Filter(data={**self._filter._data, **data}) parsed_date_from, parsed_date_to, _ = parse_timestamps( filter=self._filter, table="events.", team_id=self._team.pk) self.params: Dict = { "team_id": self._team.pk, "events": [], # purely a speed optimization, don't need this for filtering **prop_filter_params, } steps = [ self._build_steps_query(entity, index) for index, entity in enumerate(self._filter.entities) ] query = FUNNEL_SQL.format( team_id=self._team.id, steps=", ".join(steps), filters=prop_filters.replace("uuid IN", "events.uuid IN", 1), parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to, ) return sync_execute(query, self.params)
def _exec_query(self) -> List[Tuple]: prop_filters, prop_filter_params = parse_prop_clauses( self._filter.properties, self._team.pk, prepend="global", allow_denormalized_props=True) # format default dates data = {} if not self._filter._date_from: data.update({"date_from": relative_date_parse("-7d")}) if not self._filter._date_to: data.update({"date_to": timezone.now()}) self._filter = self._filter.with_data(data) parsed_date_from, parsed_date_to, _ = parse_timestamps( filter=self._filter, table="events.", team_id=self._team.pk) self.params.update(prop_filter_params) steps = [ self._build_steps_query(entity, index) for index, entity in enumerate(self._filter.entities) ] query = FUNNEL_SQL.format( team_id=self._team.id, steps=", ".join(steps), filters=prop_filters.replace("uuid IN", "events.uuid IN", 1), parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to, top_level_groupby="", extra_select="", extra_groupby="", within_time="6048000000000000", ) return sync_execute(query, self.params)
def date_to(self) -> datetime.datetime:
    """Upper bound of the date range; `now` when unset, parsed when given as a string."""
    if not self._date_to:
        return timezone.now()
    if isinstance(self._date_to, str):
        return relative_date_parse(self._date_to)
    return self._date_to
def get_property_values_for_key(key: str, team: Team, value: Optional[str] = None):
    """Fetch sample values for an event property key over the last 7 days.

    When `value` is given it is applied as a substring (LIKE) filter.
    """
    # The date bounds are interpolated as literal timestamps. Both strings are
    # generated internally (not user input), so str.format is safe here; the
    # user-supplied key/value go through bound query parameters below.
    parsed_date_from = "AND timestamp >= '{}'".format(
        relative_date_parse("-7d").strftime("%Y-%m-%d 00:00:00"))
    parsed_date_to = "AND timestamp <= '{}'".format(
        timezone.now().strftime("%Y-%m-%d 23:59:59"))
    if value:
        return sync_execute(
            SELECT_PROP_VALUES_SQL_WITH_FILTER.format(
                parsed_date_from=parsed_date_from,
                parsed_date_to=parsed_date_to),
            {
                "team_id": team.pk,
                "key": key,
                # Substring match on both sides.
                "value": "%{}%".format(value)
            },
        )
    return sync_execute(
        SELECT_PROP_VALUES_SQL.format(parsed_date_from=parsed_date_from,
                                      parsed_date_to=parsed_date_to),
        {
            "team_id": team.pk,
            "key": key
        },
    )
def _exec_query(self) -> List[Tuple]: prop_filters, prop_filter_params = parse_prop_clauses( "uuid", self._filter.properties, self._team, prepend="global") # format default dates if not self._filter._date_from: self._filter._date_from = relative_date_parse("-7d") if not self._filter._date_to: self._filter._date_to = timezone.now() parsed_date_from, parsed_date_to = parse_timestamps( filter=self._filter) self.params: Dict = {"team_id": self._team.pk, **prop_filter_params} steps = [ self._build_steps_query(entity, index) for index, entity in enumerate(self._filter.entities) ] query = FUNNEL_SQL.format( select_steps=",".join([ "step_{}".format(index) for index, _ in enumerate(self._filter.entities) ]), team_id=self._team.id, steps=", ".join(steps), filters=prop_filters.replace("uuid IN", "events.uuid IN", 1), parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to, ) return sync_execute(query, self.params)
def lifecycle(self, request: request.Request) -> response.Response:
    """People endpoint for a lifecycle cell (target date + lifecycle type).

    Returns 400 when the team cannot be resolved or a required query
    parameter (`target_date`, `lifecycle_type`) is missing.
    """
    team = cast(User, request.user).team
    if not team:
        return response.Response(
            {"message": "Could not retrieve team", "detail": "Could not validate team associated with user"},
            status=400,
        )
    filter = Filter(request=request)
    target_date = request.GET.get("target_date", None)
    if target_date is None:
        return response.Response(
            {"message": "Missing parameter", "detail": "Must include specified date"}, status=400
        )
    target_date_parsed = relative_date_parse(target_date)
    lifecycle_type = request.GET.get("lifecycle_type", None)
    if lifecycle_type is None:
        return response.Response(
            {"message": "Missing parameter", "detail": "Must include lifecycle type"}, status=400
        )
    limit = int(request.GET.get("limit", 100))
    people = self.lifecycle_class().get_people(
        target_date=target_date_parsed,
        filter=filter,
        team_id=team.pk,
        lifecycle_type=lifecycle_type,
        request=request,
        limit=limit,
    )
    # Fix: the original pre-assigned next_url = request.get_full_path() and then
    # unconditionally overwrote it here - the dead store has been removed.
    next_url: Optional[str] = paginated_result(people, request, filter.offset)
    return response.Response({"results": [{"people": people, "count": len(people)}], "next": next_url})
def date_to(self) -> Optional[datetime.datetime]:
    """Upper bound of the date range, or None when unset; strings are parsed."""
    if not self._date_to:
        return None
    if isinstance(self._date_to, str):
        return relative_date_parse(self._date_to)
    return self._date_to
def _calculate_trends(self, filter: Filter, team: Team) -> List[Dict[str, Any]]:
    """Serialize trend results for each entity, optionally with a comparison period.

    Mutates the filter in place to fill default date bounds (-7d .. now).
    """
    # format default dates
    if not filter._date_from:
        filter._date_from = relative_date_parse("-7d")
    if not filter._date_to:
        filter._date_to = timezone.now()
    result = []
    for entity in filter.entities:
        if filter.compare:
            # Shifted copy of the filter covering the preceding period.
            compare_filter = determine_compared_filter(filter=filter)
            entity_result = self._serialize_entity(entity, filter, team)
            entity_result = convert_to_comparison(
                entity_result, filter, "{} - {}".format(entity.name, "current"))
            result.extend(entity_result)
            previous_entity_result = self._serialize_entity(
                entity, compare_filter, team)
            previous_entity_result = convert_to_comparison(
                previous_entity_result, filter,
                "{} - {}".format(entity.name, "previous"))
            result.extend(previous_entity_result)
        else:
            entity_result = self._serialize_entity(entity, filter, team)
            result.extend(entity_result)
    return result
def calculate_paths(self, filter: Filter, team: Team):
    """Compute source->target path transitions for the team over the filter's date range.

    Mutates the filter in place to fill default date bounds (-7d .. now).
    Returns a list of {source, source_id, target, target_id, value} dicts
    sorted by descending value.
    """
    # format default dates
    if not filter._date_from:
        filter._date_from = relative_date_parse("-7d")
    if not filter._date_to:
        filter._date_to = timezone.now()
    parsed_date_from, parsed_date_to = parse_timestamps(filter=filter)
    event, path_type, start_comparator = self._determine_path_type(filter.path_type if filter else None)
    prop_filters, prop_filter_params = parse_prop_clauses("uuid", filter.properties, team)

    # Step 0. Event culling subexpression for step 1.
    # Make an expression that removes events in a session that are definitely unused.
    # For example the 4th, 5th, etc row after a "new_session = 1" or "marked_session_start = 1" row gets removed
    excess_row_filter = "("
    for i in range(4):
        if i > 0:
            excess_row_filter += " or "
        # neighbor() looks back i rows for a session-start marker.
        excess_row_filter += "neighbor(new_session, {}, 0) = 1".format(-i)
        if filter and filter.start_point:
            excess_row_filter += " or neighbor(marked_session_start, {}, 0) = 1".format(-i)
    excess_row_filter += ")"

    paths_query = PATHS_QUERY_FINAL.format(
        # Either a single tracked event or everything except the built-ins.
        event_query="event = %(event)s"
        if event
        else "event NOT IN ('$autocapture', '$pageview', '$identify', '$pageleave', '$screen')",
        path_type=path_type,
        parsed_date_from=parsed_date_from,
        parsed_date_to=parsed_date_to,
        filters=prop_filters if filter.properties else "",
        # When a start point is set, sessions are marked from it instead of
        # from natural session starts.
        marked_session_start="{} = %(start_point)s".format(start_comparator)
        if filter and filter.start_point
        else "new_session",
        excess_row_filter=excess_row_filter,
        # Autocapture paths need the elements chain to label nodes.
        select_elements_chain=", events.elements_chain as elements_chain" if event == AUTOCAPTURE_EVENT else "",
        group_by_elements_chain=", events.elements_chain" if event == AUTOCAPTURE_EVENT else "",
    )
    params: Dict = {
        "team_id": team.pk,
        "property": "$current_url",
        "event": event,
        "start_point": filter.start_point,
    }
    params = {**params, **prop_filter_params}
    rows = sync_execute(paths_query, params)
    resp: List[Dict[str, str]] = []
    for row in rows:
        resp.append(
            {"source": row[0], "source_id": row[1], "target": row[2], "target_id": row[3], "value": row[4],}
        )
    resp = sorted(resp, key=lambda x: x["value"], reverse=True)
    return resp
def test_events_sessions_basic(self):
    """Sessions list: counts per date range and default-limit pagination."""
    # Two distinct users produce sessions across two days.
    with freeze_time("2012-01-14T03:21:34.000Z"):
        event_factory(team=self.team, event="1st action", distinct_id="1")
        event_factory(team=self.team, event="1st action", distinct_id="2")
    with freeze_time("2012-01-14T03:25:34.000Z"):
        event_factory(team=self.team, event="2nd action", distinct_id="1")
        event_factory(team=self.team, event="2nd action", distinct_id="2")
    with freeze_time("2012-01-15T03:59:34.000Z"):
        event_factory(team=self.team, event="3rd action", distinct_id="2")
    with freeze_time("2012-01-15T03:59:35.000Z"):
        event_factory(team=self.team, event="3rd action", distinct_id="1")
    with freeze_time("2012-01-15T04:01:34.000Z"):
        event_factory(team=self.team, event="4th action", distinct_id="1", properties={"$os": "Mac OS X"})
        event_factory(team=self.team, event="4th action", distinct_id="2", properties={"$os": "Windows 95"})
    with freeze_time("2012-01-15T04:01:34.000Z"):
        # Default range: only the two ongoing sessions.
        response = self.client.get(f"/api/projects/{self.team.id}/events/sessions/",).json()
        self.assertEqual(len(response["result"]), 2)
        # Explicit range covering both days: all four sessions.
        response = self.client.get(
            f"/api/projects/{self.team.id}/events/sessions/?date_from=2012-01-14&date_to=2012-01-15",
        ).json()
        self.assertEqual(len(response["result"]), 4)
    # 4 sessions were already created above
    # Fill up to exactly the default page size; no pagination cursor expected.
    for i in range(SESSIONS_LIST_DEFAULT_LIMIT - 4):
        with freeze_time(relative_date_parse("2012-01-15T04:01:34.000Z") + relativedelta(hours=i)):
            event_factory(team=self.team, event="action {}".format(i), distinct_id=str(i + 3))
    response = self.client.get(
        f"/api/projects/{self.team.id}/events/sessions/?date_from=2012-01-14&date_to=2012-01-17",
    ).json()
    self.assertEqual(len(response["result"]), SESSIONS_LIST_DEFAULT_LIMIT)
    self.assertIsNone(response.get("pagination"))
    # Two more sessions push past the page size; a pagination cursor appears.
    for i in range(2):
        with freeze_time(relative_date_parse("2012-01-15T04:01:34.000Z") + relativedelta(hours=i + 46)):
            event_factory(team=self.team, event="action {}".format(i), distinct_id=str(i + 49))
    response = self.client.get(
        f"/api/projects/{self.team.id}/events/sessions/?date_from=2012-01-14&date_to=2012-01-17",
    ).json()
    self.assertEqual(len(response["result"]), SESSIONS_LIST_DEFAULT_LIMIT)
    self.assertIsNotNone(response["pagination"])
def _set_default_dates(self, filter: Filter, team_id: int) -> Filter:
    """Return a filter with default date bounds (-7d .. now) filled in.

    The original filter is returned unchanged when both bounds are present.
    """
    overrides = {}
    if not filter._date_from:
        overrides["date_from"] = relative_date_parse("-7d")
    if not filter._date_to:
        overrides["date_to"] = timezone.now()
    if not overrides:
        return filter
    return Filter(data={**filter._data, **overrides})
def _get_dates_from_request(self, request: request.Request) -> Tuple[datetime.date, datetime.date]:
    """Resolve the (date_from, date_to) range from query params as UTC timestamps.

    Defaults to the last 7 days; 'all' maps date_from to None (unbounded).
    """
    if request.GET.get('date_from'):
        date_from = relative_date_parse(request.GET['date_from'])
        if request.GET['date_from'] == 'all':
            date_from = None  # type: ignore
    else:
        date_from = datetime.date.today() - relativedelta(days=7)
    if request.GET.get('date_to'):
        date_to = relative_date_parse(request.GET['date_to'])
    else:
        date_to = datetime.date.today()
    # UTC is what is set in setting.py
    if date_from is not None:
        date_from = pd.Timestamp(date_from, tz='UTC')
    # NOTE(review): date_to is localized unconditionally here; the original
    # formatting was ambiguous about whether this belonged inside the
    # `date_from is not None` branch - confirm against callers.
    date_to = pd.Timestamp(date_to, tz='UTC')
    return date_from, date_to
def test_insight_session_basic(self):
    """Session insight: counts per date range and offset-based pagination."""
    # Two distinct users produce sessions across two days.
    with freeze_time("2012-01-14T03:21:34.000Z"):
        event_factory(team=self.team, event="1st action", distinct_id="1")
        event_factory(team=self.team, event="1st action", distinct_id="2")
    with freeze_time("2012-01-14T03:25:34.000Z"):
        event_factory(team=self.team, event="2nd action", distinct_id="1")
        event_factory(team=self.team, event="2nd action", distinct_id="2")
    with freeze_time("2012-01-15T03:59:34.000Z"):
        event_factory(team=self.team, event="3rd action", distinct_id="2")
    with freeze_time("2012-01-15T03:59:35.000Z"):
        event_factory(team=self.team, event="3rd action", distinct_id="1")
    with freeze_time("2012-01-15T04:01:34.000Z"):
        event_factory(team=self.team, event="4th action", distinct_id="1", properties={"$os": "Mac OS X"})
        event_factory(team=self.team, event="4th action", distinct_id="2", properties={"$os": "Windows 95"})
    with freeze_time("2012-01-15T04:01:34.000Z"):
        # Default range: only the two ongoing sessions.
        response = self.client.get("/api/insight/session/",).json()
        self.assertEqual(len(response["result"]), 2)
        # Explicit range covering both days: all four sessions.
        response = self.client.get("/api/insight/session/?date_from=2012-01-14&date_to=2012-01-15",).json()
        self.assertEqual(len(response["result"]), 4)
    # Fill up to exactly the 50-row page; no offset cursor expected.
    for i in range(46):
        with freeze_time(relative_date_parse("2012-01-15T04:01:34.000Z") + relativedelta(hours=i)):
            event_factory(team=self.team, event="action {}".format(i), distinct_id=str(i + 3))
    response = self.client.get("/api/insight/session/?date_from=2012-01-14&date_to=2012-01-17",).json()
    self.assertEqual(len(response["result"]), 50)
    self.assertEqual(response.get("offset", None), None)
    # Two more sessions push past the page size; an offset cursor appears.
    for i in range(2):
        with freeze_time(relative_date_parse("2012-01-15T04:01:34.000Z") + relativedelta(hours=i + 46)):
            event_factory(team=self.team, event="action {}".format(i), distinct_id=str(i + 49))
    response = self.client.get("/api/insight/session/?date_from=2012-01-14&date_to=2012-01-17",).json()
    self.assertEqual(len(response["result"]), 50)
    self.assertEqual(response["offset"], 50)
    # Second page holds the remaining two sessions and no further cursor.
    response = self.client.get(
        "/api/insight/session/?date_from=2012-01-14&date_to=2012-01-17&offset=50",
    ).json()
    self.assertEqual(len(response["result"]), 2)
    self.assertEqual(response.get("offset", None), None)
def calculate_avg(self, filter: Filter, team: Team):
    """Average session length per interval plus an overall average.

    Mutates the filter in place to fill default date bounds (-7d .. now).
    Returns [] when there were no sessions, otherwise a one-element list of
    time-series data merged with the formatted overall average.
    """
    # format default dates
    if not filter._date_from:
        filter._date_from = relative_date_parse("-7d")
    if not filter._date_to:
        filter._date_to = timezone.now()
    parsed_date_from, parsed_date_to = parse_timestamps(filter)
    filters, params = parse_prop_clauses("uuid", filter.properties, team)
    interval_notation = get_interval_annotation_ch(filter.interval)
    num_intervals, seconds_in_interval = get_time_diff(
        filter.interval or "day", filter.date_from, filter.date_to)
    # Inner query: session durations (no per-event rows needed).
    avg_query = SESSIONS_NO_EVENTS_SQL.format(
        team_id=team.pk,
        date_from=parsed_date_from,
        date_to=parsed_date_to,
        filters="{}".format(filters) if filter.properties else "",
        sessions_limit="",
    )
    per_period_query = AVERAGE_PER_PERIOD_SQL.format(
        sessions=avg_query, interval=interval_notation)
    # NULL_SQL pads intervals with no sessions so the series is continuous.
    null_sql = NULL_SQL.format(
        date_to=(filter.date_to or timezone.now()).strftime("%Y-%m-%d 00:00:00"),
        interval=interval_notation,
        num_intervals=num_intervals,
        seconds_in_interval=seconds_in_interval,
    )
    final_query = AVERAGE_SQL.format(sessions=per_period_query, null_sql=null_sql)
    params = {**params, "team_id": team.pk}
    response = sync_execute(final_query, params)
    values = self.clean_values(filter, response)
    time_series_data = append_data(values, interval=filter.interval, math=None)
    # calculate average
    total = sum(val[1] for val in values)
    if total == 0:
        return []
    # Only periods that actually had sessions count towards the average.
    valid_days = sum(1 if val[1] else 0 for val in values)
    overall_average = (total / valid_days) if valid_days else 0
    result = self._format_avg(overall_average)
    time_series_data.update(result)
    return [time_series_data]
def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[HttpRequest] = None, **kwargs) -> None:
    """Stickiness filter: resolves the date range and derives interval counts.

    Requires `team` in kwargs; `get_earliest_timestamp` is required only when
    date_from is "all". Raises ValueError when neither data nor request is
    given, when team is missing, or on an unsupported interval.
    """
    super().__init__(data, request)
    if request:
        # Query params take precedence over the explicit data dict.
        data = {
            **(data if data else {}),
            **request.GET.dict(),
        }
    elif not data:
        raise ValueError(
            "You need to define either a data dict or a request")
    team: Optional[Team] = kwargs.get("team", None)
    if not team:
        raise ValueError("Team must be provided to stickiness filter")
    if self._date_from == "all":
        # "all time" needs the team's earliest event as the lower bound.
        get_earliest_timestamp: Optional[Callable] = kwargs.get(
            "get_earliest_timestamp", None)
        if not get_earliest_timestamp:
            raise ValueError(
                "Callable must be provided when date filtering is all time"
            )
        self._date_from = get_earliest_timestamp(team_id=team.pk)
    if not self._date_from:
        self._date_from = relative_date_parse("-7d")
    if not self._date_to:
        self._date_to = timezone.now().isoformat()
    self.stickiness_days = int(data.get(STICKINESS_DAYS, "0"))
    self.interval = data.get(INTERVAL, "day").lower()
    self.entityId = data.get("entityId", None)
    self.type = data.get("type", None)
    # Number of buckets between date_from and date_to for the chosen interval.
    total_seconds = (self.date_to - self.date_from).total_seconds()
    if self.interval == "minute":
        self.num_intervals = int(divmod(total_seconds, 60)[0])
    elif self.interval == "hour":
        self.num_intervals = int(divmod(total_seconds, 3600)[0])
    elif self.interval == "day":
        self.num_intervals = int(divmod(total_seconds, 86400)[0])
    elif self.interval == "week":
        self.num_intervals = (self.date_to - self.date_from).days // 7
    elif self.interval == "month":
        self.num_intervals = (self.date_to.year - self.date_from.year) + (
            self.date_to.month - self.date_from.month)
    else:
        raise ValueError(f"{self.interval} not supported")
    # NOTE(review): presumably pads for partial leading/trailing periods -
    # confirm against the consumers of num_intervals.
    self.num_intervals += 2
def _date_from(self) -> Optional[Union[str, datetime]]: if not self.team or not self.get_earliest_timestamp: raise AttributeError("StickinessDateMixin requires team and get_earliest_timestamp to be provided") _date_from = self._data.get(DATE_FROM, None) if _date_from == "all": return self.get_earliest_timestamp(team_id=self.team.pk) elif _date_from: return _date_from else: return relative_date_parse("-7d")
def get_values(self, request: request.Request) -> List[Dict[str, Any]]:
    """Sample popular values for a property key over the last 7 days.

    For the pseudo-key "custom_event", returns distinct non-builtin event
    names instead.
    """
    key = request.GET.get("key")
    # Positional params must match the %s placeholders in the raw SQL below:
    # [key, key, (key, value)?, team_id, date_from, date_to].
    params: List[Optional[Union[str, int]]] = [key, key]
    if key == "custom_event":
        event_names = (
            Event.objects.filter(team_id=self.team_id)
            .filter(~Q(event__in=["$autocapture", "$pageview", "$identify", "$pageleave", "$screen"]))
            .values("event")
            .distinct()
        )
        return [{"name": value["event"]} for value in event_names]
    if request.GET.get("value"):
        # Substring match on the property value; the user input is bound as a
        # query parameter, only the static clause is interpolated.
        where = " AND properties ->> %s LIKE %s"
        params.append(key)
        params.append("%{}%".format(request.GET["value"]))
    else:
        where = ""
    params.append(self.team_id)
    params.append(relative_date_parse("-7d").strftime("%Y-%m-%d 00:00:00"))
    params.append(timezone.now().strftime("%Y-%m-%d 23:59:59"))
    # This samples a bunch of events with that property, and then orders them by most popular in that sample
    # This is much quicker than trying to do this over the entire table
    values = Event.objects.raw(
        """
        SELECT value, COUNT(1) as id
        FROM (
            SELECT ("posthog_event"."properties" -> %s) as "value"
            FROM "posthog_event"
            WHERE ("posthog_event"."properties" -> %s) IS NOT NULL {}
            AND ("posthog_event"."team_id" = %s)
            AND ("posthog_event"."timestamp" >= %s)
            AND ("posthog_event"."timestamp" <= %s)
            LIMIT 10000
        ) as "value"
        GROUP BY value
        ORDER BY id DESC
        LIMIT 50;
        """.format(
            where
        ),
        params,
    )
    # Property values may be lists; flatten before serializing.
    flattened = flatten([value.value for value in values])
    return [{"name": convert_property_value(value)} for value in flattened]
def _set_default_dates(self, filter: Filter) -> None:
    """Fill missing date bounds on the filter in place.

    Session AVG/DIST views default to -7d .. now; every other session view
    defaults to midnight today .. one day after date_from.
    """
    # format default dates
    if filter.session_type not in (SESSION_AVG, SESSION_DIST):
        if not filter._date_from:
            filter._date_from = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
        if not filter._date_to and filter.date_from:
            filter._date_to = filter.date_from + relativedelta(days=1)
        return
    if not filter._date_from:
        filter._date_from = relative_date_parse("-7d")
    if not filter._date_to:
        filter._date_to = timezone.now()
def _exec_query(self) -> List[Tuple]:
    """Fill default date bounds (-7d .. now) on the filter, then build and run the query."""
    # format default dates
    defaults = {}
    if not self._filter._date_from:
        defaults["date_from"] = relative_date_parse("-7d")
    if not self._filter._date_to:
        defaults["date_to"] = timezone.now()
    self._filter = self._filter.with_data(defaults)
    return sync_execute(self.get_query(), self.params)
def date_to(self) -> datetime:
    """Upper bound of the range, advanced by one period.

    Hourly granularity keeps the time of day; everything else is truncated
    to midnight.
    """
    if not self._date_to:
        bound = timezone.now()
    elif isinstance(self._date_to, str):
        bound = relative_date_parse(self._date_to)
    else:
        bound = self._date_to
    bound = bound + self.period_increment
    if self.period == "Hour":
        return bound
    return bound.replace(hour=0, minute=0, second=0, microsecond=0)
def trends(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
    """Serialize trend data for every non-deleted action over the requested range."""
    actions = self.get_queryset()
    actions = actions.filter(deleted=False)
    actions_list = []
    # Resolve the date range from query params; defaults to the last 7 days.
    # 'all' is parsed first and then overridden to None (no lower bound).
    if request.GET.get('date_from'):
        date_from = relative_date_parse(request.GET['date_from'])
        if request.GET['date_from'] == 'all':
            date_from = None  # type: ignore
    else:
        date_from = datetime.date.today() - relativedelta(days=7)
    if request.GET.get('date_to'):
        date_to = relative_date_parse(request.GET['date_to'])
    else:
        date_to = datetime.date.today()
    # steps = (date_to - date_from).days
    parsed_actions = self._parse_actions(request)
    if parsed_actions:
        for filters in parsed_actions:
            # Match each requested filter back to its Action by id.
            db_action = [a for a in actions if a.id == filters['id']][0]
            actions_list.append(
                self._serialize_action(action=db_action,
                                       filters=filters,
                                       request=request,
                                       date_from=date_from,
                                       date_to=date_to))
    else:
        for action in actions:
            actions_list.append(
                self._serialize_action(action=action,
                                       filters={},
                                       request=request,
                                       date_from=date_from,
                                       date_to=date_to))
    return Response(actions_list)
def _filter_request(self, request: request.Request, queryset: QuerySet) -> QuerySet:
    """Apply the supported query-string filters to the insight queryset."""
    params = request.GET.dict()
    for key in params:
        if key == "saved":
            if str_to_bool(request.GET["saved"]):
                queryset = queryset.filter(Q(saved=True) | Q(dashboard__isnull=False))
            else:
                queryset = queryset.filter(Q(saved=False))
            continue
        if key == "user":
            queryset = queryset.filter(created_by=request.user)
            continue
        if key == "favorited":
            queryset = queryset.filter(Q(favorited=True))
            continue
        if key == "date_from":
            queryset = queryset.filter(last_modified_at__gt=relative_date_parse(request.GET["date_from"]))
            continue
        if key == "date_to":
            queryset = queryset.filter(last_modified_at__lt=relative_date_parse(request.GET["date_to"]))
            continue
        if key == INSIGHT:
            queryset = queryset.filter(filters__insight=request.GET[INSIGHT])
            continue
        if key == "search":
            # Matches either the user-given or the derived name.
            queryset = queryset.filter(
                Q(name__icontains=request.GET["search"]) | Q(derived_name__icontains=request.GET["search"])
            )
    return queryset
def _calculate_stickiness(self, filter: Filter, team: Team) -> List[Dict[str, Any]]:
    """Serialize stickiness results for every entity on the filter.

    Mutates the filter in place to fill default date bounds (-7d .. now);
    action entities get their display name resolved from the database.
    """
    if not filter._date_from:
        filter._date_from = relative_date_parse("-7d")
    if not filter._date_to:
        filter._date_to = timezone.now()
    results: List[Dict[str, Any]] = []
    for entity in filter.entities:
        if entity.type == TREND_FILTER_TYPE_ACTIONS:
            entity.name = Action.objects.only("name").get(team=team, pk=entity.id).name
        results.extend(self._serialize_entity(entity, filter, team))
    return results