def test_parse_prop_clauses_defaults(snapshot):
    """Snapshot the SQL emitted by parse_prop_clauses for the default and explicit person-property modes."""
    prop_filter = Filter(
        data={
            "properties": [
                {"key": "event_prop", "value": "value"},
                {"key": "email", "type": "person", "value": "posthog", "operator": "icontains"},
            ],
        }
    )
    # Default person-properties handling.
    assert parse_prop_clauses(prop_filter.properties, None) == snapshot
    # Person props resolved via the denormalized person-properties column.
    assert (
        parse_prop_clauses(
            prop_filter.properties,
            None,
            person_properties_mode=PersonPropertiesMode.USING_PERSON_PROPERTIES_COLUMN,
        )
        == snapshot
    )
    # Person props excluded entirely.
    assert (
        parse_prop_clauses(prop_filter.properties, None, person_properties_mode=PersonPropertiesMode.EXCLUDE)
        == snapshot
    )
def _breakdown_person_params(self, aggregate_operation: str, entity: Entity, filter: Filter, team_id: int):
    """Build the params and SQL fragments for a breakdown by person property.

    Returns (params, join SQL, join-SQL format params, breakdown key expression,
    and an alternative join SQL for the first page that also includes "none").
    """
    parsed_date_from, parsed_date_to, _ = parse_timestamps(filter=filter, team_id=team_id)
    # Event-level property filters, aliased to the events table "e".
    prop_filters, prop_filter_params = parse_prop_clauses(
        filter.properties, team_id, table_name="e", filter_test_accounts=filter.filter_test_accounts)
    # Person-type properties are additionally rendered for the person subquery.
    person_prop_filters, person_prop_params = parse_prop_clauses(
        [prop for prop in filter.properties if prop.type == "person"],
        team_id,
        table_name="e",
        filter_test_accounts=filter.filter_test_accounts,
        is_person_query=True,
    )
    entity_params, entity_format_params = populate_entity_params(entity)
    # Query that ranks the top person-property values for this entity and date range.
    elements_query = TOP_PERSON_PROPS_ARRAY_OF_KEY_SQL.format(
        parsed_date_from=parsed_date_from,
        parsed_date_to=parsed_date_to,
        latest_person_sql=GET_LATEST_PERSON_SQL.format(query=""),
        prop_filters=prop_filters,
        person_prop_filters=person_prop_filters,
        aggregate_operation=aggregate_operation,
        latest_distinct_id_sql=GET_LATEST_PERSON_DISTINCT_ID_SQL,
        **entity_format_params)
    top_elements_array = self._get_top_elements(elements_query, filter, team_id,
                                                params={
                                                    **prop_filter_params,
                                                    **person_prop_params,
                                                    **entity_params
                                                })
    # "none" is appended so people without the property still get bucketed.
    params = {
        "values": [*top_elements_array, "none"],
    }
    breakdown_filter_params = {
        "latest_person_sql": GET_LATEST_PERSON_SQL.format(query=""),
    }
    return (
        params,
        BREAKDOWN_PERSON_PROP_JOIN_SQL,
        breakdown_filter_params,
        "value",
        # The "none"-inclusive join is only used on the first page (offset 0).
        None if filter.offset else NONE_BREAKDOWN_PERSON_PROP_JOIN_SQL,
    )
def _build_filters(self, entity: Entity, index: int) -> str:
    """Render the property-filter SQL for one funnel entity.

    The parsed params are always merged into self.params, even when the
    entity has no properties and the returned clause is empty.
    """
    clauses, clause_params = parse_prop_clauses(entity.properties, self._team.pk, prepend=str(index))
    self.params.update(clause_params)
    if not entity.properties:
        return ""
    return clauses
def _exec_query(self) -> List[Tuple]: prop_filters, prop_filter_params = parse_prop_clauses( "uuid", self._filter.properties, self._team, prepend="global") # format default dates if not self._filter._date_from: self._filter._date_from = relative_date_parse("-7d") if not self._filter._date_to: self._filter._date_to = timezone.now() parsed_date_from, parsed_date_to = parse_timestamps( filter=self._filter) self.params: Dict = {"team_id": self._team.pk, **prop_filter_params} steps = [ self._build_steps_query(entity, index) for index, entity in enumerate(self._filter.entities) ] query = FUNNEL_SQL.format( select_steps=",".join([ "step_{}".format(index) for index, _ in enumerate(self._filter.entities) ]), team_id=self._team.id, steps=", ".join(steps), filters=prop_filters.replace("uuid IN", "events.uuid IN", 1), parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to, ) return sync_execute(query, self.params)
def test_prop_cohort_basic_action(self):
    """An action-based cohort filter matches only the person who performed the action."""
    _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"})
    _create_person(
        distinct_ids=["some_id"],
        team_id=self.team.pk,
        properties={"$some_prop": "something", "$another_prop": "something"},
    )
    _create_person(distinct_ids=["no_match"], team_id=self.team.pk)
    action = _create_action(team=self.team, name="$pageview")
    # Only "some_id" performs the $pageview action the cohort is based on.
    _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"})
    _create_event(event="$not_pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "some_val"})
    cohort1 = Cohort.objects.create(team=self.team, groups=[{"action_id": action.pk}], name="cohort1")
    cohort_filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]})
    clauses, clause_params = parse_prop_clauses(cohort_filter.properties, self.team.pk)
    sql = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(clauses)
    rows = sync_execute(sql, {**clause_params, "team_id": self.team.pk})
    self.assertEqual(len(rows), 1)
def test_prop_event(self):
    """An event-property filter matches exactly one of two otherwise-identical events."""
    _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"attr": "some_other_val"})
    _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"attr": "some_val"})
    event_filter = Filter(data={"properties": [{"key": "attr", "value": "some_val"}]})
    clauses, clause_params = parse_prop_clauses(event_filter.properties, self.team.pk)
    sql = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(clauses)
    rows = sync_execute(sql, {**clause_params, "team_id": self.team.pk})
    self.assertEqual(len(rows), 1)
def _process_content_sql(team: Team, entity: Entity, filter: Filter):
    """Build the person-trend SQL plus its params for one entity and breakdown value.

    Returns (content_sql, params). Cohort breakdowns are applied as a
    distinct_id subselect; person breakdowns are applied as an extra property
    filter appended to the filter's properties.
    """
    filter = _handle_date_interval(filter)
    parsed_date_from, parsed_date_to, _ = parse_timestamps(filter=filter, team_id=team.pk)
    entity_sql, entity_params = format_entity_filter(entity=entity)
    person_filter = ""
    person_filter_params: Dict[str, Any] = {}
    if filter.breakdown_type == "cohort" and filter.breakdown_value != "all":
        # "all" means no cohort restriction; otherwise filter to cohort members.
        cohort = Cohort.objects.get(pk=filter.breakdown_value)
        person_filter, person_filter_params = format_filter_query(cohort)
        person_filter = "AND distinct_id IN ({})".format(person_filter)
    elif (
        filter.breakdown_type == "person"
        and isinstance(filter.breakdown, str)
        and isinstance(filter.breakdown_value, str)
    ):
        # NOTE(review): this mutates filter.properties in place — the caller's
        # filter object gains an extra property. Presumably intended; verify.
        person_prop = Property(**{"key": filter.breakdown, "value": filter.breakdown_value, "type": "person"})
        filter.properties.append(person_prop)
    prop_filters, prop_filter_params = parse_prop_clauses(filter.properties, team.pk)
    params: Dict = {"team_id": team.pk, **prop_filter_params, **entity_params, "offset": filter.offset}
    content_sql = PERSON_TREND_SQL.format(
        entity_filter=f"AND {entity_sql}",
        parsed_date_from=parsed_date_from,
        parsed_date_to=parsed_date_to,
        filters=prop_filters,
        breakdown_filter="",
        person_filter=person_filter,
    )
    return content_sql, {**params, **person_filter_params}
def calculate_list(self, filter: Filter, team: Team, limit: int, offset: int):
    """Return one page of parsed sessions, enriched with person properties.

    Defaults the date range to "today" (midnight to +1 day) when unset.
    """
    clauses, clause_params = parse_prop_clauses("uuid", filter.properties, team)
    if not filter._date_from:
        filter._date_from = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
    if not filter._date_to and filter.date_from:
        filter._date_to = filter.date_from + relativedelta(days=1)
    date_from, date_to = parse_timestamps(filter)
    query_params = {
        **clause_params,
        "team_id": team.pk,
        "limit": limit,
        "offset": offset,
    }
    query = SESSION_SQL.format(
        date_from=date_from,
        date_to=date_to,
        filters=clauses if filter.properties else "",
        sessions_limit="LIMIT %(offset)s, %(limit)s",
    )
    sessions = self._parse_list_results(sync_execute(query, query_params))
    self._add_person_properties(team, sessions)
    return sessions
def _format_lifecycle_query(self, entity: Entity, filter: Filter, team_id: int) -> Tuple[str, Dict, Callable]:
    """Build the lifecycle SQL, its query params, and the result-parsing callable.

    Returns ("", {}, parser) when the entity refers to an action that cannot
    be resolved, so the caller can skip execution gracefully.
    """
    date_from = filter.date_from
    if not date_from:
        date_from = get_earliest_timestamp(team_id)
    interval = filter.interval or "day"
    num_intervals, seconds_in_interval, _ = get_time_diff(
        interval, filter.date_from, filter.date_to, team_id)
    interval_increment, interval_string, sub_interval_string = self.get_interval(
        interval)
    trunc_func = get_trunc_func_ch(interval)
    event_query = ""
    event_params: Dict[str, Any] = {}
    # Both global filter properties and entity-level properties apply.
    props_to_filter = [*filter.properties, *entity.properties]
    prop_filters, prop_filter_params = parse_prop_clauses(
        props_to_filter, team_id, filter_test_accounts=filter.filter_test_accounts)
    _, _, date_params = parse_timestamps(filter=filter, team_id=team_id)
    if entity.type == TREND_FILTER_TYPE_ACTIONS:
        try:
            action = entity.get_action()
            event_query, event_params = format_action_filter(action)
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrow to Exception so only real errors
        # (e.g. a deleted action) trigger the empty-query fallback.
        except Exception:
            return "", {}, self._parse_result(filter, entity)
    else:
        event_query = "event = %(event)s"
        event_params = {"event": entity.id}
    return (
        LIFECYCLE_SQL.format(
            interval=interval_string,
            trunc_func=trunc_func,
            event_query=event_query,
            filters=prop_filters,
            sub_interval=sub_interval_string,
            GET_TEAM_PERSON_DISTINCT_IDS=GET_TEAM_PERSON_DISTINCT_IDS,
        ),
        {
            "team_id": team_id,
            # One interval before the window start; hour/minute intervals keep
            # the time component, coarser intervals are pinned to midnight.
            "prev_date_from": (date_from - interval_increment).strftime("%Y-%m-%d{}".format(
                " %H:%M:%S" if filter.interval == "hour" or filter.interval == "minute" else " 00:00:00")),
            "num_intervals": num_intervals,
            "seconds_in_interval": seconds_in_interval,
            **event_params,
            **date_params,
            **prop_filter_params,
        },
        self._parse_result(filter, entity),
    )
def _exec_query(self) -> List[Tuple]: prop_filters, prop_filter_params = parse_prop_clauses( self._filter.properties, self._team.pk, prepend="global") # format default dates data = {} if not self._filter._date_from: data.update({"date_from": relative_date_parse("-7d")}) if not self._filter._date_to: data.update({"date_to": timezone.now()}) self._filter = Filter(data={**self._filter._data, **data}) parsed_date_from, parsed_date_to, _ = parse_timestamps( filter=self._filter, table="events.", team_id=self._team.pk) self.params: Dict = { "team_id": self._team.pk, "events": [], # purely a speed optimization, don't need this for filtering **prop_filter_params, } steps = [ self._build_steps_query(entity, index) for index, entity in enumerate(self._filter.entities) ] query = FUNNEL_SQL.format( team_id=self._team.id, steps=", ".join(steps), filters=prop_filters.replace("uuid IN", "events.uuid IN", 1), parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to, ) return sync_execute(query, self.params)
def _filter_events(
    filter: Filter,
    team: Team,
    person_query: Optional[bool] = False,
    order_by: Optional[str] = None,
):
    """Fetch events matching the filter's properties and serialize them.

    "id" ordering is translated to the ClickHouse "uuid" column.
    """
    clauses, clause_params = parse_prop_clauses(filter.properties, team.pk)
    query_params = {"team_id": team.pk, **clause_params}
    if order_by == "id":
        order_by = "uuid"
    order_clause = "ORDER BY {}".format(order_by) if order_by else ""
    rows = sync_execute(
        GET_EVENTS_WITH_PROPERTIES.format(filters=clauses, order_by=order_clause),
        query_params,
    )
    serializer = ClickhouseEventSerializer(rows, many=True, context={"elements": None, "people": None})
    return serializer.data
def stats(self, request: request.Request) -> response.Response:
    """Return element-chain counts matching the request's filter, newest-style serialization."""
    element_filter = Filter(request=request)
    team = request.user.team
    assert team is not None
    date_from, date_to = parse_timestamps(element_filter)
    clauses, clause_params = parse_prop_clauses(element_filter.properties, team.pk)
    rows = sync_execute(
        GET_ELEMENTS.format(date_from=date_from, date_to=date_to, query=clauses),
        {"team_id": team.id, **clause_params},
    )
    payload = []
    for chain, count in rows:
        payload.append(
            {
                "count": count,
                "hash": None,
                "elements": [ElementSerializer(element).data for element in chain_to_elements(chain)],
            }
        )
    return response.Response(payload)
def _build_filters(self, entity: Entity, index: int) -> str:
    """Render the property-filter SQL for one entity, rewritten against random_event_id.

    Params are always merged into self.params, even when no clause is returned.
    """
    clauses, clause_params = parse_prop_clauses("uuid", entity.properties, self._team, prepend=str(index))
    self.params.update(clause_params)
    if not entity.properties:
        return ""
    # Only the first "uuid IN" belongs to this query's aliased column.
    return clauses.replace("uuid IN", "random_event_id IN", 1)
def fetch_distinct_ids(
    self,
    action_filters: ActionFiltersSQL,
    date_from: str,
    date_to: str,
    date_params: Dict[str, Any],
    limit: int,
    distinct_id_offset: int,
) -> List[str]:
    """Return the distinct_ids of people whose sessions should be listed.

    When the filter pins a specific distinct_id, returns all of that person's
    distinct_ids instead of querying ClickHouse.
    """
    if self.filter.distinct_id:
        persons = get_persons_by_distinct_ids(self.team.pk, [self.filter.distinct_id])
        # Empty list when the distinct_id doesn't resolve to a person.
        return persons[0].distinct_ids if len(persons) > 0 else []
    # Denormalized props are disallowed because these clauses run against the person table.
    person_filters, person_filter_params = parse_prop_clauses(
        self.filter.person_filter_properties, self.team.pk, allow_denormalized_props=False)
    return sync_execute(
        SESSIONS_DISTINCT_ID_SQL.format(
            date_from=date_from,
            date_to=date_to,
            person_filters=person_filters,
            action_filters=action_filters.matches_any_clause,
        ),
        {
            **person_filter_params,
            **action_filters.params,
            "team_id": self.team.pk,
            # Fetch enough ids to cover the requested page window.
            "distinct_id_limit": distinct_id_offset + limit,
            **date_params,
        },
    )
def test_prop_cohort_basic(self):
    """A property-based cohort filter matches only the fully-matching person,
    both via raw SQL and via feature-flag distinct_id matching."""
    _create_person(distinct_ids=["some_other_id"],
                   team_id=self.team.pk,
                   properties={"$some_prop": "something"})
    # Only this person has BOTH properties the cohort group requires.
    _create_person(
        distinct_ids=["some_id"],
        team_id=self.team.pk,
        properties={
            "$some_prop": "something",
            "$another_prop": "something"
        },
    )
    _create_person(distinct_ids=["no_match"], team_id=self.team.pk)
    _create_event(
        event="$pageview",
        team=self.team,
        distinct_id="some_id",
        properties={"attr": "some_val"},
    )
    _create_event(
        event="$pageview",
        team=self.team,
        distinct_id="some_other_id",
        properties={"attr": "some_val"},
    )
    cohort1 = Cohort.objects.create(
        team=self.team,
        groups=[{
            "properties": {
                "$some_prop": "something",
                "$another_prop": "something"
            }
        }],
        name="cohort1",
    )
    filter = Filter(data={
        "properties": [{
            "key": "id",
            "value": cohort1.pk,
            "type": "cohort"
        }],
    })
    # NOTE(review): passes self.team here where sibling tests pass self.team.pk —
    # presumably this parse_prop_clauses version accepts a Team; verify.
    query, params = parse_prop_clauses(filter.properties, self.team)
    final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(
        query)
    result = sync_execute(final_query, {**params, "team_id": self.team.pk})
    self.assertEqual(len(result), 1)
    # The same cohort filter should drive feature-flag rollout matching.
    feature_flag = FeatureFlag.objects.create(filters=filter.to_dict(),
                                              created_by=self.user,
                                              name="test",
                                              key="test",
                                              team=self.team)
    self.assertTrue(feature_flag.distinct_id_matches("some_id"))
    self.assertFalse(feature_flag.distinct_id_matches("no_match"))
def _process_content_sql(target_entity: Entity, filter: StickinessFilter, team: Team) -> Tuple[str, Dict[str, Any]]:
    """Build the stickiness-people SQL and its params for one target entity."""
    date_from_sql, date_to_sql, _ = parse_timestamps(filter=filter, team_id=team.pk)
    clauses, clause_params = parse_prop_clauses(
        filter.properties, team.pk, filter_test_accounts=filter.filter_test_accounts
    )
    entity_sql, entity_params = _format_entity_filter(entity=target_entity)
    interval_trunc = get_trunc_func_ch(filter.interval)
    query_params: Dict = {
        "team_id": team.pk,
        **clause_params,
        "stickiness_day": filter.selected_interval,
        **entity_params,
        "offset": filter.offset,
    }
    content_sql = STICKINESS_PEOPLE_SQL.format(
        entity_filter=entity_sql,
        parsed_date_from=date_from_sql,
        parsed_date_to=date_to_sql,
        filters=clauses,
        trunc_func=interval_trunc,
        GET_TEAM_PERSON_DISTINCT_IDS=GET_TEAM_PERSON_DISTINCT_IDS,
    )
    return content_sql, query_params
def _query_events_list(self, filter: Filter, team: Team, request: Request, long_date_from: bool = False) -> List:
    """Fetch up to 101 raw event rows matching the request's conditions,
    property filters, and optional action filter."""
    limit = "LIMIT 101"
    # Default window: last 24h through 5 seconds into the future; the
    # request's GET params can override either bound.
    conditions, condition_params = determine_event_conditions(
        {
            "after": (now() - timedelta(days=1)).isoformat(),
            "before": (now() + timedelta(seconds=5)).isoformat(),
            **request.GET.dict(),
        },
        long_date_from,
    )
    prop_filters, prop_filter_params = parse_prop_clauses(filter.properties, team.pk)
    if request.GET.get("action_id"):
        action = Action.objects.get(pk=request.GET["action_id"])
        # An action with no steps can never match anything.
        if action.steps.count() == 0:
            return []
        action_query, params = format_action_filter(action)
        prop_filters += " AND {}".format(action_query)
        prop_filter_params = {**prop_filter_params, **params}
    # Use the property-aware SQL only when there are clauses to apply.
    if prop_filters != "":
        return sync_execute(
            SELECT_EVENT_WITH_PROP_SQL.format(conditions=conditions, limit=limit, filters=prop_filters),
            {"team_id": team.pk, **condition_params, **prop_filter_params},
        )
    else:
        return sync_execute(
            SELECT_EVENT_WITH_ARRAY_PROPS_SQL.format(conditions=conditions, limit=limit),
            {"team_id": team.pk, **condition_params},
        )
def stickiness(self, entity: Entity, filter: StickinessFilter, team_id: int) -> Dict[str, Any]:
    """Run the stickiness query for one entity and return the processed result.

    Returns {} when the entity is an action whose filter renders empty.
    """
    parsed_date_from, parsed_date_to, _ = parse_timestamps(filter=filter, team_id=team_id)
    prop_filters, prop_filter_params = parse_prop_clauses(filter.properties, team_id)
    trunc_func = get_trunc_func_ch(filter.interval)
    params: Dict = {"team_id": team_id}
    params = {**params, **prop_filter_params, "num_intervals": filter.num_intervals}
    if entity.type == TREND_FILTER_TYPE_ACTIONS:
        action = Action.objects.get(pk=entity.id)
        action_query, action_params = format_action_filter(action)
        # An action that renders no clause can't match; bail out early.
        if action_query == "":
            return {}
        params = {**params, **action_params}
        content_sql = STICKINESS_ACTIONS_SQL.format(
            team_id=team_id,
            actions_query=action_query,
            parsed_date_from=parsed_date_from,
            parsed_date_to=parsed_date_to,
            filters=prop_filters,
            trunc_func=trunc_func,
        )
    else:
        # Plain event entity: match by event name.
        content_sql = STICKINESS_SQL.format(
            team_id=team_id,
            event=entity.id,
            parsed_date_from=parsed_date_from,
            parsed_date_to=parsed_date_to,
            filters=prop_filters,
            trunc_func=trunc_func,
        )
    counts = sync_execute(content_sql, params)
    return self.process_result(counts, filter)
def _calculate_stickiness_entity_people(self, team: Team, entity: Entity, filter: Filter, stickiness_day: int):
    """Return serialized people active on exactly `stickiness_day` intervals for the entity."""
    date_from_sql, date_to_sql = parse_timestamps(filter=filter)
    clauses, clause_params = parse_prop_clauses(filter.properties, team.pk)
    entity_sql, entity_params = self._format_entity_filter(entity=entity)
    query_params: Dict = {
        "team_id": team.pk,
        **clause_params,
        "stickiness_day": stickiness_day,
        **entity_params,
        "offset": filter.offset,
    }
    content_sql = STICKINESS_PEOPLE_SQL.format(
        entity_filter=entity_sql,
        parsed_date_from=date_from_sql or "",
        parsed_date_to=date_to_sql or "",
        filters=clauses if filter.properties else "",
    )
    rows = sync_execute(
        PEOPLE_SQL.format(
            content_sql=content_sql,
            query="",
            latest_person_sql=GET_LATEST_PERSON_SQL.format(query=""),
        ),
        query_params,
    )
    return ClickhousePersonSerializer(rows, many=True).data
def run(self, filter: SessionsFilter, team: Team, *args, **kwargs) -> List[Dict[str, Any]]:
    """Return one page of sessions, enriched with person properties and
    filtered down to those with recordings where applicable."""
    page_limit = kwargs.get("limit", SESSIONS_LIST_DEFAULT_LIMIT)
    page_offset = kwargs.get("offset", 0)
    filter = set_default_dates(filter)
    clauses, clause_params = parse_prop_clauses(filter.properties, team.pk)
    action_sql, action_params = format_action_filter_aggregate(filter, team.pk)
    date_from, date_to, _ = parse_timestamps(filter, team.pk)
    query_params = {
        **clause_params,
        **action_params,
        "team_id": team.pk,
        "limit": page_limit,
        "offset": page_offset,
        # Enough distinct_ids to cover the whole page window.
        "distinct_id_limit": page_limit + page_offset,
    }
    query = SESSION_SQL.format(
        date_from=date_from,
        date_to=date_to,
        filters=clauses,
        action_filter_timestamp=action_sql,
        sessions_limit="LIMIT %(offset)s, %(limit)s",
    )
    sessions = self._parse_list_results(sync_execute(query, query_params))
    self._add_person_properties(team, sessions)
    return filter_sessions_by_recordings(team, sessions, filter)
def stickiness(self, entity: Entity, filter: Filter, team_id: int) -> Dict[str, Any]:
    """Run the stickiness query for one entity over an explicit date range.

    Raises ValueError when the filter lacks date bounds; returns {} when the
    entity is an action whose filter renders empty.
    """
    if not filter.date_to or not filter.date_from:
        raise ValueError("_stickiness needs date_to and date_from set")
    # +2: inclusive range plus the trailing partial day.
    range_days = (filter.date_to - filter.date_from).days + 2
    parsed_date_from, parsed_date_to = parse_timestamps(filter=filter)
    prop_filters, prop_filter_params = parse_prop_clauses(filter.properties, team_id)
    params: Dict = {"team_id": team_id}
    params = {**params, **prop_filter_params}
    if entity.type == TREND_FILTER_TYPE_ACTIONS:
        action = Action.objects.get(pk=entity.id)
        action_query, action_params = format_action_filter(action)
        # An action that renders no clause can't match; bail out early.
        if action_query == "":
            return {}
        params = {**params, **action_params}
        content_sql = STICKINESS_ACTIONS_SQL.format(
            team_id=team_id,
            actions_query=action_query,
            parsed_date_from=parsed_date_from,
            parsed_date_to=parsed_date_to,
            filters=prop_filters,
        )
    else:
        # Plain event entity: match by event name.
        content_sql = STICKINESS_SQL.format(
            team_id=team_id,
            event=entity.id,
            parsed_date_from=parsed_date_from,
            parsed_date_to=parsed_date_to,
            filters=prop_filters,
        )
    counts = sync_execute(content_sql, params)
    return self.process_result(counts, range_days)
def _format_all_query(team_id: int, filter: Filter, **kwargs) -> Tuple[str, Dict]:
    """Build the "all users" breakdown query: every distinct_id with value 0."""
    entity = kwargs.pop("entity", None)
    parsed_date_from, parsed_date_to, date_params = parse_timestamps(
        filter=filter, team_id=team_id, table="all_events."
    )
    # Entity-level properties are folded in alongside the global filter's.
    props_to_filter = list(filter.properties)
    if entity and isinstance(entity, Entity):
        props_to_filter.extend(entity.properties)
    clauses, clause_params = parse_prop_clauses(
        props_to_filter, team_id, prepend="all_cohort_", table_name="all_events"
    )
    query = f"""
    SELECT DISTINCT distinct_id, 0 as value
    FROM events all_events
    WHERE team_id = {team_id} {parsed_date_from} {parsed_date_to} {clauses}
    """
    return query, {**date_params, **clause_params}
def _run_query(self, filter: Filter) -> List:
    """Execute the filter's property clauses (denormalized props allowed) and return matching uuids."""
    clauses, clause_params = parse_prop_clauses(filter.properties, self.team.pk, allow_denormalized_props=True)
    sql = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(clauses)
    return sync_execute(sql, {**clause_params, "team_id": self.team.pk})
def get_breakdown_event_prop_values(filter: Filter, entity: Entity, aggregate_operation: str, team_id: int,
                                    limit: int = 25):
    """Return the top event-property values for a breakdown, capped at `limit`."""
    date_from_sql, date_to_sql, _ = parse_timestamps(filter=filter, team_id=team_id)
    clauses, clause_params = parse_prop_clauses(
        filter.properties,
        team_id,
        table_name="e",
        filter_test_accounts=filter.filter_test_accounts,
    )
    entity_params, entity_format_params = populate_entity_params(entity)
    elements_query = TOP_ELEMENTS_ARRAY_OF_KEY_SQL.format(
        parsed_date_from=date_from_sql,
        parsed_date_to=date_to_sql,
        prop_filters=clauses,
        aggregate_operation=aggregate_operation,
        **entity_format_params,
    )
    return _get_top_elements(
        filter=filter,
        team_id=team_id,
        query=elements_query,
        params={**clause_params, **entity_params},
        limit=limit,
    )
def test_prop_person(self):
    """A person-property filter matches only events from the person with that email."""
    _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"email": "*****@*****.**"})
    _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"email": "*****@*****.**"})
    _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"})
    person_filter = Filter(
        data={"properties": [{"key": "email", "value": "*****@*****.**", "type": "person"}]}
    )
    clauses, clause_params = parse_prop_clauses(person_filter.properties, self.team.pk)
    sql = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(clauses)
    rows = sync_execute(sql, {**clause_params, "team_id": self.team.pk})
    self.assertEqual(len(rows), 1)
def _exec_query(self) -> List[Tuple]: prop_filters, prop_filter_params = parse_prop_clauses( self._filter.properties, self._team.pk, prepend="global", allow_denormalized_props=True) # format default dates data = {} if not self._filter._date_from: data.update({"date_from": relative_date_parse("-7d")}) if not self._filter._date_to: data.update({"date_to": timezone.now()}) self._filter = self._filter.with_data(data) parsed_date_from, parsed_date_to, _ = parse_timestamps( filter=self._filter, table="events.", team_id=self._team.pk) self.params.update(prop_filter_params) steps = [ self._build_steps_query(entity, index) for index, entity in enumerate(self._filter.entities) ] query = FUNNEL_SQL.format( team_id=self._team.id, steps=", ".join(steps), filters=prop_filters.replace("uuid IN", "events.uuid IN", 1), parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to, top_level_groupby="", extra_select="", extra_groupby="", within_time="6048000000000000", ) return sync_execute(query, self.params)
def calculate_dist(self, filter: Filter, team: Team):
    """Return the session-duration distribution as labeled bucket counts."""
    date_from_sql, date_to_sql = parse_timestamps(filter)
    clauses, clause_params = parse_prop_clauses("uuid", filter.properties, team)
    dist_query = DIST_SQL.format(
        team_id=team.pk,
        date_from=date_from_sql,
        date_to=date_to_sql,
        filters=clauses if filter.properties else "",
        sessions_limit="",
    )
    rows = sync_execute(dist_query, {**clause_params, "team_id": team.pk})
    # One column per bucket, in this fixed order.
    dist_labels = [
        "0 seconds (1 event)",
        "0-3 seconds",
        "3-10 seconds",
        "10-30 seconds",
        "30-60 seconds",
        "1-3 minutes",
        "3-10 minutes",
        "10-30 minutes",
        "30-60 minutes",
        "1+ hours",
    ]
    return [{"label": label, "count": rows[0][i]} for i, label in enumerate(dist_labels)]
def _breakdown_person_params(self, aggregate_operation: str, filter: Filter, team_id: int):
    """Build params and SQL fragments for a person-property breakdown.

    Returns (params, join SQL, join-SQL format params, breakdown key expression).
    """
    date_from_sql, date_to_sql, _ = parse_timestamps(filter=filter, team_id=team_id)
    clauses, clause_params = parse_prop_clauses(
        filter.properties, team_id, table_name="e", filter_test_accounts=filter.filter_test_accounts
    )
    # Rank the top person-property values for this date range.
    elements_query = TOP_PERSON_PROPS_ARRAY_OF_KEY_SQL.format(
        parsed_date_from=date_from_sql,
        parsed_date_to=date_to_sql,
        latest_person_sql=GET_LATEST_PERSON_SQL.format(query=""),
        prop_filters=clauses,
        aggregate_operation=aggregate_operation,
    )
    top_values = self._get_top_elements(elements_query, filter, team_id, params=clause_params)
    breakdown_filter_params = {
        "latest_person_sql": GET_LATEST_PERSON_SQL.format(query=""),
    }
    return {"values": top_values}, BREAKDOWN_PERSON_PROP_JOIN_SQL, breakdown_filter_params, "value"
def _retrieve_people(self, target_entity: Entity, filter: StickinessFilter, team: Team) -> ReturnDict:
    """Return serialized people who were active on the filter's selected stickiness interval."""
    date_from_sql, date_to_sql, _ = parse_timestamps(filter=filter, team_id=team.pk)
    clauses, clause_params = parse_prop_clauses(filter.properties, team.pk)
    entity_sql, entity_params = self._format_entity_filter(entity=target_entity)
    interval_trunc = get_trunc_func_ch(filter.interval)
    query_params: Dict = {
        "team_id": team.pk,
        **clause_params,
        "stickiness_day": filter.selected_interval,
        **entity_params,
        "offset": filter.offset,
    }
    content_sql = STICKINESS_PEOPLE_SQL.format(
        entity_filter=entity_sql,
        parsed_date_from=date_from_sql,
        parsed_date_to=date_to_sql,
        filters=clauses,
        trunc_func=interval_trunc,
    )
    rows = sync_execute(
        PEOPLE_SQL.format(
            content_sql=content_sql,
            query="",
            latest_person_sql=GET_LATEST_PERSON_SQL.format(query=""),
        ),
        query_params,
    )
    return ClickhousePersonSerializer(rows, many=True).data
def _breakdown_prop_params(self, aggregate_operation: str, filter: Filter, team_id: int):
    """Build params and SQL fragments for an event-property breakdown.

    Returns (params, join SQL, join-SQL format params, breakdown key expression).
    """
    date_from_sql, date_to_sql, _ = parse_timestamps(filter=filter, team_id=team_id)
    clauses, clause_params = parse_prop_clauses(
        filter.properties, team_id, table_name="e", filter_test_accounts=filter.filter_test_accounts
    )
    # Rank the top event-property values for this date range.
    elements_query = TOP_ELEMENTS_ARRAY_OF_KEY_SQL.format(
        parsed_date_from=date_from_sql,
        parsed_date_to=date_to_sql,
        prop_filters=clauses,
        aggregate_operation=aggregate_operation,
    )
    top_values = self._get_top_elements(elements_query, filter, team_id, params=clause_params)
    return {"values": top_values}, BREAKDOWN_PROP_JOIN_SQL, {}, "JSONExtractRaw(properties, %(key)s)"