def test_aggregate_by_groups(self):
    """Weekly stickiness with `unique_group` math counts distinct groups, not persons.

    People are mapped pairwise onto three orgs via `$group_0` (i // 2), so the
    group-level stickiness buckets are 2 / 0 / 1 and the actor URLs return the
    group keys themselves rather than person UUIDs.
    """
    self._create_multiple_people(
        period=timedelta(weeks=1),
        event_properties=lambda i: {"$group_0": f"org:{i // 2}"},
    )
    # Plain string keys: the originals were f-strings with no placeholders (F541).
    for group_key, industry in (("org:0", "technology"), ("org:1", "agriculture"), ("org:2", "technology")):
        create_group(team_id=self.team.pk, group_type_index=0, group_key=group_key, properties={"industry": industry})

    with freeze_time("2020-02-15T13:01:01Z"):
        data = get_stickiness_time_series_ok(
            client=self.client,
            team=self.team,
            request={
                "shown_as": "Stickiness",
                "date_from": "2020-01-01",
                "date_to": "2020-02-15",
                "events": [{"id": "watched movie", "math": "unique_group", "math_group_type_index": 0}],
                "interval": "week",
            },
        )

    assert data["watched movie"][1].value == 2
    assert data["watched movie"][2].value == 0
    assert data["watched movie"][3].value == 1

    with freeze_time("2020-02-15T13:01:01Z"):
        week1_actors = get_people_from_url_ok(self.client, data["watched movie"][1].person_url)
        week2_actors = get_people_from_url_ok(self.client, data["watched movie"][2].person_url)
        week3_actors = get_people_from_url_ok(self.client, data["watched movie"][3].person_url)

    # Group-aggregated series return group keys as actor ids.
    assert sorted([p["id"] for p in week1_actors]) == sorted(["org:0", "org:2"])
    assert sorted([p["id"] for p in week2_actors]) == sorted([])
    assert sorted([p["id"] for p in week3_actors]) == sorted(["org:1"])
def test_groups_list(self):
    """Listing groups filters by group_type_index and orders newest-first."""
    with freeze_time("2021-05-01"):
        create_group(
            team_id=self.team.pk,
            group_type_index=0,
            group_key="org:5",
            properties={"industry": "finance", "name": "Mr. Krabs"},
        )
    with freeze_time("2021-05-02"):
        create_group(
            team_id=self.team.pk,
            group_type_index=0,
            group_key="org:6",
            properties={"industry": "technology"},
        )
        # Different group_type_index — must be excluded from the index-0 listing.
        create_group(
            team_id=self.team.pk,
            group_type_index=1,
            group_key="company:1",
            properties={"name": "Plankton"},
        )
    response = self.client.get(f"/api/projects/{self.team.id}/groups?group_type_index=0").json()
    # org:6 (created later) comes first; company:1 is absent entirely.
    self.assertEqual(
        response,
        {
            "next": None,
            "previous": None,
            "results": [
                {
                    "created_at": "2021-05-02T00:00:00Z",
                    "group_key": "org:6",
                    "group_properties": {"industry": "technology"},
                    "group_type_index": 0,
                },
                {
                    "created_at": "2021-05-01T00:00:00Z",
                    "group_key": "org:5",
                    "group_properties": {"industry": "finance", "name": "Mr. Krabs"},
                    "group_type_index": 0,
                },
            ],
        },
    )
def test_test_account_filters_with_groups(self):
    """Lifecycle respects team test-account filters defined on group properties.

    Only events whose $group_0 resolves to a group with key=value (group "in")
    survive FILTER_TEST_ACCOUNTS; person1's events (group "out") are excluded,
    so the buckets reflect person2's activity alone.
    """
    self.team.test_account_filters = [
        {"key": "key", "type": "group", "value": "value", "group_type_index": 0},
    ]
    self.team.save()
    GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0)
    create_group(self.team.pk, group_type_index=0, group_key="in", properties={"key": "value"})
    create_group(self.team.pk, group_type_index=0, group_key="out", properties={"key": "othervalue"})

    with freeze_time("2020-01-11T12:00:00Z"):
        Person.objects.create(distinct_ids=["person1"], team_id=self.team.pk)
    with freeze_time("2020-01-09T12:00:00Z"):
        Person.objects.create(distinct_ids=["person2"], team_id=self.team.pk)
    journeys_for(
        {
            "person1": [
                {"event": "$pageview", "timestamp": datetime(2020, 1, 11, 12), "properties": {"$group_0": "out"},},
            ],
            "person2": [
                {"event": "$pageview", "timestamp": datetime(2020, 1, 9, 12), "properties": {"$group_0": "in"},},
                {"event": "$pageview", "timestamp": datetime(2020, 1, 12, 12), "properties": {"$group_0": "in"},},
                {"event": "$pageview", "timestamp": datetime(2020, 1, 15, 12), "properties": {"$group_0": "in"},},
            ],
        },
        self.team,
    )

    result = ClickhouseTrends().run(
        Filter(
            data={
                "date_from": "2020-01-12T00:00:00Z",
                "date_to": "2020-01-19T00:00:00Z",
                "events": [{"id": "$pageview", "type": "events", "order": 0}],
                "shown_as": TRENDS_LIFECYCLE,
                FILTER_TEST_ACCOUNTS: True,
            },
            team=self.team,
        ),
        self.team,
    )
    # person2 resurrects on the 12th and 15th, going dormant in between.
    self.assertLifecycleResults(
        result,
        [
            {"status": "dormant", "data": [0, -1, 0, 0, -1, 0, 0, 0]},
            {"status": "new", "data": [0, 0, 0, 0, 0, 0, 0, 0]},
            {"status": "resurrecting", "data": [1, 0, 0, 1, 0, 0, 0, 0]},
            {"status": "returning", "data": [0, 0, 0, 0, 0, 0, 0, 0]},
        ],
    )
def test_insight_trends_basic(self):
    """DAU trend over two persons: both are counted on 2012-01-14, the
    neighbouring days are zero, and the day's actor URL resolves both."""
    journeys = {
        distinct_id: [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)}]
        for distinct_id in ("1", "2")
    }
    persons = journeys_for(journeys, self.team)

    with freeze_time("2012-01-15T04:01:34.000Z"):
        trends_request = TrendsRequest(
            date_from="-14d",
            display="ActionsLineGraph",
            events=[
                {
                    "id": "$pageview",
                    "math": "dau",
                    "name": "$pageview",
                    "custom_name": None,
                    "type": "events",
                    "order": 0,
                    "properties": [],
                    "math_property": None,
                }
            ],
        )
        series = get_trends_time_series_ok(self.client, trends_request, self.team)

    # Only the event day carries a count.
    assert series["$pageview"]["2012-01-13"].value == 0
    assert series["$pageview"]["2012-01-14"].value == 2
    assert series["$pageview"]["2012-01-14"].label == "14-Jan-2012"
    assert series["$pageview"]["2012-01-15"].value == 0

    with freeze_time("2012-01-15T04:01:34.000Z"):
        actors = get_people_from_url_ok(self.client, series["$pageview"]["2012-01-14"].person_url)

    expected_ids = sorted([str(persons["1"].uuid), str(persons["2"].uuid)])
    assert sorted([p["id"] for p in actors]) == expected_ids
def test_interval_dates_days(self):
    """Daily lifecycle over a -7d window yields 8 day buckets, all 'returning'."""
    with freeze_time("2021-05-05T12:00:00Z"):
        self._setup_returning_lifecycle_data(20)
        result = self._run_lifecycle({"date_from": "-7d", "interval": "day"})

    # Person is active every day, so only the 'returning' series is non-zero.
    self.assertLifecycleResults(
        result,
        [
            {"status": "dormant", "data": [0] * 8},
            {"status": "new", "data": [0] * 8},
            {"status": "resurrecting", "data": [0] * 8},
            {"status": "returning", "data": [1] * 8},
        ],
    )

    expected_days = [
        "2021-04-28",
        "2021-04-29",
        "2021-04-30",
        "2021-05-01",
        "2021-05-02",
        "2021-05-03",
        "2021-05-04",
        "2021-05-05",
    ]
    self.assertEqual(result[0]["days"], expected_days)
def test_filter_by_group_properties(self):
    """Stickiness filtered by a group property (industry=technology on index 0)
    only keeps events whose $group_0 org matches — orgs 1 and 3, i.e. p1 and p3.
    """
    # Plain string keys: the originals were f-strings with no placeholders (F541).
    # Insertion order matters only for readability; each org gets its industry.
    for group_key, industry in (("org:1", "technology"), ("org:2", "agriculture"), ("org:3", "technology")):
        create_group(team_id=self.team.pk, group_type_index=0, group_key=group_key, properties={"industry": industry})
    create_group(team_id=self.team.pk, group_type_index=0, group_key="org:4", properties={})
    # Groups of another type (and one with no properties) must not interfere.
    create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={"industry": "technology"})
    create_group(team_id=self.team.pk, group_type_index=1, group_key="instance:1", properties={})
    p1, p2, p3, p4 = self._create_multiple_people(
        period=timedelta(weeks=1),
        event_properties=lambda i: {"$group_0": f"org:{i}", "$group_1": "instance:1"},
    )

    with freeze_time("2020-02-15T13:01:01Z"):
        data = get_stickiness_time_series_ok(
            client=self.client,
            team=self.team,
            request={
                "shown_as": "Stickiness",
                "date_from": "2020-01-01",
                "date_to": "2020-02-15",
                "events": [{"id": "watched movie"}],
                "properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 0}],
                "interval": "week",
            },
        )

    assert data["watched movie"][1].value == 1
    assert data["watched movie"][2].value == 0
    assert data["watched movie"][3].value == 1

    with freeze_time("2020-02-15T13:01:01Z"):
        week1_actors = get_people_from_url_ok(self.client, data["watched movie"][1].person_url)
        week2_actors = get_people_from_url_ok(self.client, data["watched movie"][2].person_url)
        week3_actors = get_people_from_url_ok(self.client, data["watched movie"][3].person_url)

    assert sorted([p["id"] for p in week1_actors]) == sorted([str(p1.uuid)])
    assert sorted([p["id"] for p in week2_actors]) == sorted([])
    assert sorted([p["id"] for p in week3_actors]) == sorted([str(p3.uuid)])
def setUp(self): super().setUp() # type: ignore Person.objects.create( team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} ) with freeze_time("2020-01-02T13:01:01Z"): _create_event( team=self.team, event="session start", distinct_id="blabla", properties={"session duration": 200, "location": "Paris", "$current_url": "http://example.org"}, ) _create_event( team=self.team, event="session start", distinct_id="blabla", properties={"session duration": 300, "location": "Paris"}, ) _create_event( team=self.team, event="session start", distinct_id="blabla", properties={"session duration": 400, "location": "London"}, ) with freeze_time("2020-01-03T13:01:01Z"): _create_event( team=self.team, event="session start", distinct_id="blabla", properties={"session duration": 400, "location": "London"}, ) with freeze_time("2020-01-03T13:04:01Z"): _create_event( team=self.team, event="session start", distinct_id="blabla", properties={"session duration": 500, "location": "London"}, ) _create_event( team=self.team, event="session end", distinct_id="blabla", properties={"session duration": 500, "location": "London"}, )
def test_feature_available_self_hosted_license_expired(self):
    """Once the license has expired, no feature is reported as available.

    The whole check runs under a frozen clock far past the license expiry so
    the availability check sees the license as expired.
    """
    with freeze_time("2070-01-01T12:00:00.000Z"):  # LicensedTestMixin enterprise license expires in 2038
        sync_all_organization_available_features()  # This is normally ran every hour
        self.organization.refresh_from_db()
        self.assertFalse(self.organization.is_feature_available("whatever"))
def test_is_blocked(manager: ModuleManager):
    """A counter at 50 created 'now' blocks its user; one created two hours
    earlier does not and restarts counting at 1."""
    user = User('a!0@c')
    another_user = User('b!1@c')
    counters = manager.executed_counters

    # Seed both users with 50 executions, recorded at different wall-clock times.
    for prefix, moment in ((user.prefix, '2017-01-01 12:00:00'),
                           (another_user.prefix, '2017-01-01 10:00:00')):
        with freeze_time(moment):
            seeded = ExecutedCounter()
            seeded.counter = 50
            counters[prefix] = seeded

    with freeze_time('2017-01-01 12:00:00'):
        assert manager.is_blocked(user) is True
        assert manager.is_blocked(another_user) is False
        counters = manager.executed_counters
        # The fresh counter is untouched; the stale one was reset and re-counted once.
        assert counters[user.prefix].counter == 50
        assert counters[another_user.prefix].counter == 1
def test_it_works_for_users_with_all_info(self, db):
    """A fully filled-out hardship declaration yields complete template vars."""
    user = create_user_with_filled_out_hardship_details()
    with freeze_time("2021-01-25"):
        hd_vars = get_vars_for_user(user)
    assert hd_vars is not None
    assert hd_vars.name == "Boop Jones"
    assert hd_vars.index_number == "myindex"
    assert hd_vars.county_and_court == "Bipbop Court, Funkypants County"
    assert hd_vars.address == "150 court street, Apartment 2, Brooklyn, NY"
    assert hd_vars.has_financial_hardship is True
    assert hd_vars.has_health_risk is False
    # The frozen date is rendered in US MM/DD/YYYY format.
    assert hd_vars.date == "01/25/2021"
def test_invoice_item_is_terminated_when_resource_state_is_changed(self):
    """Terminating a resource closes its invoice item at the termination date
    and prices the invoice for only the days it was active.
    """
    with freeze_time(self.start_date):
        self.resource.set_state_ok()
        self.resource.save()
    self.assertEqual(models.Invoice.objects.count(), 1)
    self.invoice = models.Invoice.objects.first()

    termination_date = self.start_date + timezone.timedelta(days=2)
    # Both boundary days are billed, hence the +1.
    usage_days = (termination_date - self.start_date).days + 1
    factor = self.get_factor(self.start_date, usage_days)
    expected_price = self.plan_component.price * factor

    with freeze_time(termination_date):
        self.resource.set_state_terminating()
        self.resource.save()
        self.resource.set_state_terminated()
        self.resource.save()

    # The original asserted the item end twice in a row; once is sufficient.
    self.assertEqual(self.invoice.items.first().end, termination_date)
    self.assertEqual(self.invoice.price, Decimal(expected_price))
def test_is_blocked(manager: ModuleManager):
    """A user with a fresh counter at 50 is blocked; a user whose counter is
    two hours old is not blocked and their counter restarts at 1.

    NOTE(review): this appears byte-identical to another test_is_blocked
    definition in this file; at module scope the later def shadows the earlier
    one — consider deduplicating.
    """
    user = User('a!0@c')
    another_user = User('b!1@c')
    counters = manager.executed_counters
    with freeze_time('2017-01-01 12:00:00'):
        counter = ExecutedCounter()
        counter.counter = 50
        counters[user.prefix] = counter
    # Same count, but recorded two hours earlier — should be treated as stale.
    with freeze_time('2017-01-01 10:00:00'):
        counter = ExecutedCounter()
        counter.counter = 50
        counters[another_user.prefix] = counter
    with freeze_time('2017-01-01 12:00:00'):
        assert manager.is_blocked(user) is True
        assert manager.is_blocked(another_user) is False
        counters = manager.executed_counters
        assert counters[user.prefix].counter == 50
        assert counters[another_user.prefix].counter == 1
def test_invoice_price_is_not_changed_after_a_while_if_resource_is_deleted(self):
    """Terminating at month end keeps the invoice priced for actual usage days."""
    start_date = timezone.datetime(2014, 2, 27, tzinfo=pytz.UTC)
    end_date = core_utils.month_end(start_date)
    usage_days = utils.get_full_days(start_date, end_date)
    month_days = monthrange(start_date.year, start_date.month)[1]
    # Pro-rate the monthly price by the fraction of the month actually used.
    factor = quantize_price(decimal.Decimal(usage_days) / month_days)

    with freeze_time(start_date):
        self.resource.set_state_ok()
        self.resource.save()
    self.assertEqual(models.Invoice.objects.count(), 1)
    invoice = models.Invoice.objects.first()

    with freeze_time(end_date):
        self.resource.set_state_terminating()
        self.resource.save()
        self.resource.set_state_terminated()
        self.resource.save()

    expected_price = self.plan_component.price * factor
    self.assertEqual(invoice.price, Decimal(expected_price))
def test_insight_trends_clean_arg(self):
    """Regression test: a None 'math' argument must be dropped from the actor
    URL instead of being serialized as the string 'None' (see #8103 below)."""
    events_by_actor = {
        "1": [
            {
                "event": "$pageview",
                "timestamp": datetime(2012, 1, 14, 3),
                "properties": {"key": "val"},
            },
        ],
        "2": [
            # No 'key' property — excluded by the event property filter below.
            {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)},
        ],
    }
    created_actors = journeys_for(events_by_actor, self.team)

    with freeze_time("2012-01-15T04:01:34.000Z"):
        request = TrendsRequest(
            date_from="-14d",
            display="ActionsLineGraph",
            events=[{
                "id": "$pageview",
                "math": None,  # this argument will now be removed from the request instead of becoming a string
                "name": "$pageview",
                "custom_name": None,
                "type": "events",
                "order": 0,
                "properties": [{"key": "key", "value": "val"}],
                "math_property": None,
            }],
        )
        data = get_trends_time_series_ok(self.client, request, self.team)
        actors = get_people_from_url_ok(self.client, data["$pageview"]["2012-01-14"].person_url)

    # this would return 2 people prior to #8103 fix
    # 'None' values have to be purged before formatting into the actor url
    assert sorted([p["id"] for p in actors]) == sorted([str(created_actors["1"].uuid)])
def test_invoice_is_created_in_new_month_when_single_item_is_terminated(self):
    """Terminating in the following month produces a new invoice whose single
    item ends at the termination date."""
    start_date = timezone.datetime(2014, 2, 27, tzinfo=pytz.UTC)
    next_month = timezone.datetime(2014, 3, 2, tzinfo=pytz.UTC)

    with freeze_time(start_date):
        self.resource.set_state_ok()
        self.resource.save()
    self.assertEqual(models.Invoice.objects.count(), 1)
    invoice = models.Invoice.objects.first()
    self.assertEqual(models.Invoice.objects.count(), 1)
    self.assertEqual(self.get_invoice_items(invoice).count(), 1)

    with freeze_time(next_month):
        # A March invoice is created (or fetched) before the termination happens.
        new_invoice, _ = registrators.RegistrationManager.get_or_create_invoice(
            self.resource.project.customer, next_month
        )
        self.resource.set_state_terminating()
        self.resource.save()
        self.resource.set_state_terminated()
        self.resource.save()
        self.assertEqual(self.get_invoice_items(new_invoice).count(), 1)
        self.assertEqual(self.get_invoice_items(new_invoice).first().end, next_month)
def _setup_returning_lifecycle_data(self, days):
    """Create one person with a $pageview on each of the last `days` days,
    relative to the frozen 2019-01-01 clock, so every interval is 'returning'."""
    with freeze_time("2019-01-01T12:00:00Z"):
        Person.objects.create(distinct_ids=["person1"], team_id=self.team.pk)
        daily_events = []
        for offset in range(days):
            stamp = (now() - timedelta(days=offset)).strftime("%Y-%m-%d %H:%M:%S.%f")
            daily_events.append({"event": "$pageview", "timestamp": stamp})
        journeys_for({"person1": daily_events}, self.team, create_people=False)
def test_fill_date_from_and_date_to(self):
    """RetentionFilter derives date_from/date_to from 'now' (or a given date_to),
    but to_dict() must not serialize the derived dates, so rolling retention
    keeps recomputing them."""
    with freeze_time("2020-10-01T12:00:00Z"):
        filter = RetentionFilter(data={})
        # 11 daily intervals back from "now"; date_to extends one period ahead.
        self.assertEqual(filter.date_from.isoformat(), "2020-09-21T00:00:00+00:00")
        self.assertEqual(filter.date_to.isoformat(), "2020-10-02T00:00:00+00:00")
        # Make sure these dates aren't present in final filter to ensure rolling retention
        self.assertEqual(
            filter.to_dict(),
            {
                "display": "RetentionTable",
                "insight": "RETENTION",
                "period": "Day",
                "retention_type": "retention_recurring",
                "total_intervals": 11,
            },
        )
    with freeze_time("2020-10-01T12:00:00Z"):
        filter = RetentionFilter(data={"date_to": "2020-08-01"})
        self.assertEqual(filter.date_from.isoformat(), "2020-07-22T00:00:00+00:00")
        self.assertEqual(filter.date_to.isoformat(), "2020-08-02T00:00:00+00:00")
        # Make sure these dates aren't present in final filter to ensure rolling retention
        self.assertEqual(
            filter.to_dict(),
            {
                "date_to": "2020-08-02T00:00:00+00:00",
                "display": "RetentionTable",
                "insight": "RETENTION",
                "period": "Day",
                "retention_type": "retention_recurring",
                "total_intervals": 11,
            },
        )
def _run(self, extra: Optional[Dict] = None, run_at: Optional[str] = None):
    """Run an "A + B" formula trend (sum and avg of 'session duration').

    :param extra: additional Filter data merged over the defaults. The default
        was a mutable ``{}`` argument — replaced with ``None`` to avoid the
        shared-mutable-default pitfall; callers passing a dict are unaffected.
    :param run_at: ISO timestamp to freeze the clock at (defaults to 2020-01-04).
    :return: the trends response from ClickhouseTrends.
    """
    with freeze_time(run_at or "2020-01-04T13:01:01Z"):
        action_response = ClickhouseTrends().run(
            Filter(
                data={
                    "events": [
                        {"id": "session start", "math": "sum", "math_property": "session duration"},
                        {"id": "session start", "math": "avg", "math_property": "session duration"},
                    ],
                    "formula": "A + B",
                    **(extra or {}),
                }
            ),
            self.team,
        )
    return action_response
def test_existing_invoice_is_updated_on_resource_creation(self):
    """Activating a resource while an invoice already exists adds a pro-rated
    item to that invoice instead of creating a second one."""
    start_date = timezone.datetime(2014, 2, 27, tzinfo=pytz.UTC)
    end_date = core_utils.month_end(start_date)
    usage_days = utils.get_full_days(start_date, end_date)
    month_days = monthrange(start_date.year, start_date.month)[1]
    # Pro-rate the monthly price by the remaining fraction of the month.
    factor = quantize_price(decimal.Decimal(usage_days) / month_days)

    with freeze_time(start_date):
        invoice = factories.InvoiceFactory(customer=self.fixture.customer)
        self.resource.set_state_ok()
        self.resource.save()

    self.assertEqual(models.Invoice.objects.count(), 1)
    self.assertTrue(invoice.items.filter(resource=self.resource).exists())
    expected_price = self.plan_component.price * factor
    self.assertEqual(invoice.price, Decimal(expected_price))
def test_get(self):
    """The billing-cycle list view shows only 3 of the 5 cycles around the
    current (frozen) date.

    The original bound the first two cycles to unused names and rebound
    ``cycle3`` three times — all five are created in a loop instead, since no
    assertion references the created objects.
    """
    # Five consecutive one-month cycles: Apr–May, May–Jun, Jun–Jul, Jul–Aug, Aug–Sep 2016.
    month_starts = [date(2016, month, 1) for month in range(4, 10)]
    for begin, end in zip(month_starts, month_starts[1:]):
        BillingCycle.objects.create(date_range=(begin, end))

    with freeze_time('2016-06-15'):
        response = self.client.get(reverse('billing_cycles:list'))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(response.context['billing_cycles']), 3)
def test_default_filter_by_date_from(self):
    """With no explicit dates, Filter defaults to the last 7 days
    (midnight-aligned lower bound, 'now' upper bound)."""
    with freeze_time("2020-01-01T00:00:00Z"):
        filter = Filter(
            data={
                "properties": [
                    {
                        "key": "name",
                        "value": json.dumps({"first_name": "Mary", "last_name": "Smith"}),
                        "type": "person",
                    }
                ],
            }
        )
        # Computed inside the freeze so it matches the filter's own default window.
        one_week_ago = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=7)
        date_filter_query = filter.date_filter_Q
        self.assertEqual(date_filter_query, Q(timestamp__gte=one_week_ago, timestamp__lte=timezone.now()))
def test_event_filter_with_cohort_properties(self):
    """Session recordings filtered by a precalculated cohort return only user2,
    the person matching $some_prop=some_val."""
    with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True):
        with freeze_time("2021-08-21T20:00:00.000Z"):
            Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"})
            Person.objects.create(
                team=self.team,
                distinct_ids=["user2"],
                properties={"email": "bla2", "$some_prop": "some_val"},
            )
            cohort = Cohort.objects.create(
                team=self.team,
                name="cohort1",
                groups=[{"properties": {"$some_prop": "some_val"}}],
            )
            # Materialize membership so the precalculated-cohort path is exercised.
            cohort.calculate_people_ch(pending_version=0)

            # Each user gets a recording (two snapshots) plus an event.
            self.create_snapshot("user", "1", self.base_time)
            self.create_event("user", self.base_time, team=self.team)
            self.create_snapshot("user", "1", self.base_time + relativedelta(seconds=30))
            self.create_snapshot("user2", "2", self.base_time)
            self.create_event("user2", self.base_time, team=self.team)
            self.create_snapshot("user2", "2", self.base_time + relativedelta(seconds=30))

            filter = SessionRecordingsFilter(
                team=self.team,
                data={
                    "properties": [{"key": "id", "value": cohort.pk, "operator": None, "type": "cohort"}],
                },
            )
            session_recording_list_instance = ClickhouseSessionRecordingList(filter=filter, team_id=self.team.pk)
            (session_recordings, _) = session_recording_list_instance.run()
            # Only user2 is in the cohort, so only session "2" is returned.
            self.assertEqual(len(session_recordings), 1)
            self.assertEqual(session_recordings[0]["session_id"], "2")
def test_interval_dates_weeks(self):
    """Weekly lifecycle over a -30d window yields 5 week buckets, all 'returning'."""
    with freeze_time("2021-05-06T12:00:00Z"):
        self._setup_returning_lifecycle_data(50)
        result = self._run_lifecycle({"date_from": "-30d", "interval": "week"})

    # Daily activity for the whole window — only 'returning' is non-zero.
    self.assertLifecycleResults(
        result,
        [
            {"status": "dormant", "data": [0] * 5},
            {"status": "new", "data": [0] * 5},
            {"status": "resurrecting", "data": [0] * 5},
            {"status": "returning", "data": [1] * 5},
        ],
    )

    expected_week_starts = [
        "2021-04-05",
        "2021-04-12",
        "2021-04-19",
        "2021-04-26",
        "2021-05-03",
    ]
    self.assertEqual(result[0]["days"], expected_week_starts)
def test_lifecycle_edge_cases(self):
    # This test tests behavior when created_at is different from first matching event and dormant/resurrecting/returning logic
    """Person created Jan 11 but first $pageview on Jan 12: the daily buckets
    must classify activity by the event gaps (resurrecting after a pause,
    returning on consecutive days, dormant the day after activity stops)."""
    with freeze_time("2020-01-11T12:00:00Z"):
        Person.objects.create(distinct_ids=["person1"], team_id=self.team.pk)
    journeys_for(
        {
            "person1": [
                {"event": "$pageview", "timestamp": datetime(2020, 1, 12, 12),},
                {"event": "$pageview", "timestamp": datetime(2020, 1, 13, 12),},
                {"event": "$pageview", "timestamp": datetime(2020, 1, 15, 12),},
                {"event": "$pageview", "timestamp": datetime(2020, 1, 16, 12),},
            ],
        },
        self.team,
    )
    result = ClickhouseTrends().run(
        Filter(
            data={
                "date_from": "2020-01-11T00:00:00Z",
                "date_to": "2020-01-18T00:00:00Z",
                "events": [{"id": "$pageview", "type": "events", "order": 0}],
                "shown_as": TRENDS_LIFECYCLE,
            },
            team=self.team,
        ),
        self.team,
    )
    self.assertLifecycleResults(
        result,
        [
            {"status": "dormant", "data": [0, 0, 0, -1, 0, 0, -1, 0]},
            {"status": "new", "data": [0, 0, 0, 0, 0, 0, 0, 0]},
            {"status": "resurrecting", "data": [0, 1, 0, 0, 1, 0, 0, 0]},
            {"status": "returning", "data": [0, 0, 1, 0, 0, 1, 0, 0]},
        ],
    )
def test_insight_trends_cumulative(self):
    """Cumulative line graph in four variants — total volume, DAU, breakdown,
    and breakdown + DAU — checking running totals and the actor URLs for the
    peak day of each series."""
    events_by_person = {
        "p1": [
            {"event": "$pageview", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "val"}},
            {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}},
        ],
        "p2": [
            {"event": "$pageview", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "notval"}},
        ],
        "p3": [
            {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}},
        ],
    }
    created_people = journeys_for(events_by_person, self.team)

    # Total Volume
    with freeze_time("2012-01-15T04:01:34.000Z"):
        request = TrendsRequest(
            date_from="-14d",
            display="ActionsLineGraphCumulative",
            events=[{
                "id": "$pageview",
                "math": None,
                "name": "$pageview",
                "custom_name": None,
                "type": "events",
                "order": 0,
                "properties": [],
                "math_property": None,
            }],
        )
        data_response = get_trends_time_series_ok(self.client, request, self.team)
        person_response = get_people_from_url_ok(
            self.client, data_response["$pageview"]["2012-01-14"].person_url)

    # Running event total: 2 on the 13th, 2 + 2 = 4 from the 14th onward.
    assert data_response["$pageview"]["2012-01-13"].value == 2
    assert data_response["$pageview"]["2012-01-14"].value == 4
    assert data_response["$pageview"]["2012-01-15"].value == 4
    assert data_response["$pageview"]["2012-01-14"].label == "14-Jan-2012"
    assert sorted([p["id"] for p in person_response]) == sorted([
        str(created_people["p1"].uuid),
        str(created_people["p2"].uuid),
        str(created_people["p3"].uuid)
    ])

    # DAU
    with freeze_time("2012-01-15T04:01:34.000Z"):
        request = TrendsRequest(
            date_from="-14d",
            display="ActionsLineGraphCumulative",
            events=[{
                "id": "$pageview",
                "math": "dau",
                "name": "$pageview",
                "custom_name": None,
                "type": "events",
                "order": 0,
                "properties": [],
                "math_property": None,
            }],
        )
        data_response = get_trends_time_series_ok(self.client, request, self.team)
        person_response = get_people_from_url_ok(
            self.client, data_response["$pageview"]["2012-01-14"].person_url)

    # Cumulative unique persons: p1 + p2 by the 13th, plus p3 on the 14th.
    assert data_response["$pageview"]["2012-01-13"].value == 2
    assert data_response["$pageview"]["2012-01-14"].value == 3
    assert data_response["$pageview"]["2012-01-15"].value == 3
    assert data_response["$pageview"]["2012-01-14"].label == "14-Jan-2012"
    assert sorted([p["id"] for p in person_response]) == sorted([
        str(created_people["p1"].uuid),
        str(created_people["p2"].uuid),
        str(created_people["p3"].uuid)
    ])

    # breakdown
    with freeze_time("2012-01-15T04:01:34.000Z"):
        request = TrendsRequestBreakdown(
            date_from="-14d",
            display="ActionsLineGraphCumulative",
            breakdown="key",
            breakdown_type="event",
            events=[{
                "id": "$pageview",
                "math": None,
                "name": "$pageview",
                "custom_name": None,
                "type": "events",
                "order": 0,
                "properties": [],
                "math_property": None,
            }],
        )
        data_response = get_trends_time_series_ok(self.client, request, self.team)
        person_response = get_people_from_url_ok(
            self.client, data_response["$pageview - val"]["2012-01-14"].person_url)

    # The "val" series only: 1 event on the 13th, cumulative 3 by the 14th.
    assert data_response["$pageview - val"]["2012-01-13"].value == 1
    assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val"
    assert data_response["$pageview - val"]["2012-01-14"].value == 3
    assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012"
    assert sorted([p["id"] for p in person_response]) == sorted(
        [str(created_people["p1"].uuid), str(created_people["p3"].uuid)])

    # breakdown dau
    with freeze_time("2012-01-15T04:01:34.000Z"):
        request = TrendsRequestBreakdown(
            date_from="-14d",
            display="ActionsLineGraphCumulative",
            breakdown="key",
            breakdown_type="event",
            events=[{
                "id": "$pageview",
                "math": "dau",
                "name": "$pageview",
                "custom_name": None,
                "type": "events",
                "order": 0,
                "properties": [],
                "math_property": None,
            }],
        )
        data_response = get_trends_time_series_ok(self.client, request, self.team)
        people = get_people_from_url_ok(
            self.client, data_response["$pageview - val"]["2012-01-14"].person_url)

    # Unique persons within the "val" series: p1 on the 13th, p1 + p3 by the 14th.
    assert data_response["$pageview - val"]["2012-01-13"].value == 1
    assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val"
    assert data_response["$pageview - val"]["2012-01-14"].value == 2
    assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012"
    assert sorted([p["id"] for p in people]) == sorted(
        [str(created_people["p1"].uuid), str(created_people["p3"].uuid)])
def test_breakdown_with_filter(self):
    """Breakdown by 'key': a not_icontains filter removes the 'oh' series, and
    ActionsPie aggregate actor URLs resolve correctly for a breakdown value."""
    events_by_person = {
        "person1": [
            {"event": "sign up", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "val"}},
        ],
        "person2": [
            {"event": "sign up", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "oh"}},
        ],
    }
    created_people = journeys_for(events_by_person, self.team)

    with freeze_time("2012-01-15T04:01:34.000Z"):
        params = TrendsRequestBreakdown(
            date_from="-14d",
            breakdown="key",
            events=[{
                "id": "sign up",
                "name": "sign up",
                "type": "events",
                "order": 0,
            }],
            # Excludes person2's event (key="oh").
            properties=[{"key": "key", "value": "oh", "operator": "not_icontains"}],
        )
        data_response = get_trends_time_series_ok(self.client, params, self.team)
        person_response = get_people_from_url_ok(
            self.client, data_response["sign up - val"]["2012-01-13"].person_url)

    assert data_response["sign up - val"]["2012-01-13"].value == 1
    assert data_response["sign up - val"]["2012-01-13"].breakdown_value == "val"
    assert sorted([p["id"] for p in person_response]) == sorted([str(created_people["person1"].uuid)])

    with freeze_time("2012-01-15T04:01:34.000Z"):
        params = TrendsRequestBreakdown(
            date_from="-14d",
            breakdown="key",
            display="ActionsPie",
            events=[{
                "id": "sign up",
                "name": "sign up",
                "type": "events",
                "order": 0,
            }],
        )
        aggregate_response = get_trends_aggregate_ok(self.client, params, self.team)
        aggregate_person_response = get_people_from_url_ok(
            self.client, aggregate_response["sign up - val"].person_url)

    assert aggregate_response["sign up - val"].value == 1
    assert sorted([p["id"] for p in aggregate_person_response]) == sorted([str(created_people["person1"].uuid)])
def test_max_age(self):
    """Request events older than max_age are ignored by handle(): no second
    PageVisit is created and no durations/last-page/conversions are backfilled."""
    # All fixtures are created 15 days ago — outside the 10-day max_age window.
    freezer = freeze_time(datetime.now() - timedelta(days=15))
    freezer.start()
    device = models.Device.objects.create(
        os='OS', os_version='1.0', browser='Browser', browser_version='1.0', device=''
    )
    visitor = models.Visitor.objects.create(
        uuid=models.generate_uuid(),
    )
    visit = models.Visit.objects.create(
        uuid=models.generate_uuid(),
        visitor=visitor,
        device=device,
        web_property=self.dja_property,
        first_page=self.page,
        visit_date=date.today(),
    )
    re = models.RequestEvent.objects.create(
        ip_address='127.0.0.1',
        user_agent='Mozilla/5.0 (Windows NT 5.1; rv:30.0) Gecko/20100101 Firefox/30.0',
        protocol='http',
        domain='invalid.com',
        path='/foo',
        query_string='',
        method='GET',
        client=self.dja_client,
        tracking_user_id=visitor.uuid,
        tracking_key=visit.uuid,
        created=(datetime.now() - timedelta(seconds=10))
    )
    # Second request event for the same visit — should NOT get a PageVisit.
    models.RequestEvent.objects.create(
        ip_address='127.0.0.1',
        user_agent='Mozilla/5.0 (Windows NT 5.1; rv:30.0) Gecko/20100101 Firefox/30.0',
        protocol='http',
        domain='invalid.com',
        path='/foo',
        query_string='',
        method='GET',
        client=self.dja_client,
        tracking_user_id=visitor.uuid,
        tracking_key=visit.uuid,
    )
    models.PageVisit.objects.create(
        page=self.page,
        visit=visit,
        request_event=re
    )
    freezer.stop()

    # Run the command at real "now"; the fixtures are older than max_age.
    command = Command()
    command.stdout = Mock()
    command.handle(
        start=(datetime.now() - timedelta(days=10)).strftime('%Y-%m-%d'),
        max_age=10
    )
    self.assertEqual(
        1,
        models.PageVisit.objects.count(),
        'should not have created a page visit for the second request event'
    )
    self.assertIsNone(
        models.PageVisit.objects.get().duration,
        'should not have set a duration on page visit'
    )
    visit = models.Visit.objects.get()
    self.assertIsNone(
        visit.last_page,
        'should not have set a last page on visit'
    )
    self.assertIsNone(
        visit.duration,
        'should not have set a duration on visit'
    )
    self.assertEqual(
        0,
        visit.conversion_count,
        'conversion_count should 0, not %s' % visit.conversion_count
    )
def test_fill_date_from_and_date_to(self):
    """RetentionFilter derives date_from/date_to; to_dict() omits the derived
    dates entirely (first case) or echoes only the user-supplied base date_to
    (second case), while always serializing the default target/returning
    entities."""
    with freeze_time("2020-10-01T12:00:00Z"):
        filter = RetentionFilter(data={})
        # 11 daily intervals back from "now"; date_to extends one period ahead.
        self.assertEqual(filter.date_from.isoformat(), "2020-09-21T00:00:00+00:00")
        self.assertEqual(filter.date_to.isoformat(), "2020-10-02T00:00:00+00:00")
        # Make sure these dates aren't present in final filter to ensure rolling retention
        self.assertEqual(
            filter.to_dict(),
            {
                "display": "RetentionTable",
                "insight": "RETENTION",
                "period": "Day",
                "retention_type": "retention_recurring",
                "total_intervals": 11,
                "returning_entity": {
                    "id": "$pageview",
                    "math": None,
                    "math_property": None,
                    "math_group_type_index": None,
                    "name": "$pageview",
                    "custom_name": None,
                    "order": None,
                    "properties": {},
                    "type": "events",
                },
                "target_entity": {
                    "id": "$pageview",
                    "math": None,
                    "math_property": None,
                    "math_group_type_index": None,
                    "name": "$pageview",
                    "custom_name": None,
                    "order": None,
                    "properties": {},
                    "type": "events",
                },
            },
        )
    with freeze_time("2020-10-01T12:00:00Z"):
        filter = RetentionFilter(data={"date_to": "2020-08-01"})
        self.assertEqual(filter.date_from.isoformat(), "2020-07-22T00:00:00+00:00")
        self.assertEqual(filter.date_to.isoformat(), "2020-08-02T00:00:00+00:00")
        # Make sure these dates aren't present in final filter to ensure rolling retention
        # The date_to below is the base value that's provided when the object was created (_date_to).
        # It doesn't match the date_to above because the retention filter will transform date_to to include one period ahead
        self.assertEqual(
            filter.to_dict(),
            {
                "date_to": "2020-08-01",
                "display": "RetentionTable",
                "insight": "RETENTION",
                "period": "Day",
                "retention_type": "retention_recurring",
                "total_intervals": 11,
                "returning_entity": {
                    "id": "$pageview",
                    "math": None,
                    "math_property": None,
                    "math_group_type_index": None,
                    "name": "$pageview",
                    "custom_name": None,
                    "order": None,
                    "properties": {},
                    "type": "events",
                },
                "target_entity": {
                    "id": "$pageview",
                    "math": None,
                    "math_property": None,
                    "math_group_type_index": None,
                    "name": "$pageview",
                    "custom_name": None,
                    "order": None,
                    "properties": {},
                    "type": "events",
                },
            },
        )
def test_breakdown_counts_of_different_events_one_without_events(self):
    """Formula B / A with a breakdown: 'session error' never occurs, so each
    breakdown series (London, Paris) is present but all-zero, not missing."""
    with freeze_time("2020-01-04T13:01:01Z"):
        response = ClickhouseTrends().run(
            Filter(
                data={
                    "insight": "TRENDS",
                    "display": "ActionsLineGraph",
                    "formula": "B / A",
                    "breakdown": "location",
                    "breakdown_type": "event",
                    "events": [
                        {"id": "session start", "name": "session start", "type": "events", "order": 0},
                        {"id": "session error", "name": "session error", "type": "events", "order": 1},
                    ],
                }),
            self.team,
        )
        self.assertEqual(
            response,
            [
                {
                    "data": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    "count": 0.0,
                    "labels": [
                        "28-Dec-2019",
                        "29-Dec-2019",
                        "30-Dec-2019",
                        "31-Dec-2019",
                        "1-Jan-2020",
                        "2-Jan-2020",
                        "3-Jan-2020",
                        "4-Jan-2020",
                    ],
                    "days": [
                        "2019-12-28",
                        "2019-12-29",
                        "2019-12-30",
                        "2019-12-31",
                        "2020-01-01",
                        "2020-01-02",
                        "2020-01-03",
                        "2020-01-04",
                    ],
                    "label": "London",
                },
                {
                    "data": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    "count": 0.0,
                    "labels": [
                        "28-Dec-2019",
                        "29-Dec-2019",
                        "30-Dec-2019",
                        "31-Dec-2019",
                        "1-Jan-2020",
                        "2-Jan-2020",
                        "3-Jan-2020",
                        "4-Jan-2020",
                    ],
                    "days": [
                        "2019-12-28",
                        "2019-12-29",
                        "2019-12-30",
                        "2019-12-31",
                        "2020-01-01",
                        "2020-01-02",
                        "2020-01-03",
                        "2020-01-04",
                    ],
                    "label": "Paris",
                },
            ],
        )
def test_max_age(self):
    """Request events older than max_age are ignored by handle(): no second
    PageVisit is created and no durations/last-page/conversions are backfilled.

    NOTE(review): another test_max_age with apparently identical behavior
    exists in this file — consider deduplicating.
    """
    # All fixtures are created 15 days ago — outside the 10-day max_age window.
    freezer = freeze_time(datetime.now() - timedelta(days=15))
    freezer.start()
    device = models.Device.objects.create(os='OS',
                                          os_version='1.0',
                                          browser='Browser',
                                          browser_version='1.0',
                                          device='')
    visitor = models.Visitor.objects.create(uuid=models.generate_uuid(), )
    visit = models.Visit.objects.create(
        uuid=models.generate_uuid(),
        visitor=visitor,
        device=device,
        web_property=self.dja_property,
        first_page=self.page,
        visit_date=date.today(),
    )
    re = models.RequestEvent.objects.create(
        ip_address='127.0.0.1',
        user_agent=
        'Mozilla/5.0 (Windows NT 5.1; rv:30.0) Gecko/20100101 Firefox/30.0',
        protocol='http',
        domain='invalid.com',
        path='/foo',
        query_string='',
        method='GET',
        client=self.dja_client,
        tracking_user_id=visitor.uuid,
        tracking_key=visit.uuid,
        created=(datetime.now() - timedelta(seconds=10)))
    # Second request event for the same visit — should NOT get a PageVisit.
    models.RequestEvent.objects.create(
        ip_address='127.0.0.1',
        user_agent=
        'Mozilla/5.0 (Windows NT 5.1; rv:30.0) Gecko/20100101 Firefox/30.0',
        protocol='http',
        domain='invalid.com',
        path='/foo',
        query_string='',
        method='GET',
        client=self.dja_client,
        tracking_user_id=visitor.uuid,
        tracking_key=visit.uuid,
    )
    models.PageVisit.objects.create(page=self.page,
                                    visit=visit,
                                    request_event=re)
    freezer.stop()

    # Run the command at real "now"; the fixtures are older than max_age.
    command = Command()
    command.stdout = Mock()
    command.handle(start=(datetime.now() - timedelta(days=10)).strftime('%Y-%m-%d'),
                   max_age=10)
    self.assertEqual(
        1,
        models.PageVisit.objects.count(),
        'should not have created a page visit for the second request event'
    )
    self.assertIsNone(models.PageVisit.objects.get().duration,
                      'should not have set a duration on page visit')
    visit = models.Visit.objects.get()
    self.assertIsNone(visit.last_page,
                      'should not have set a last page on visit')
    self.assertIsNone(visit.duration,
                      'should not have set a duration on visit')
    self.assertEqual(
        0, visit.conversion_count,
        'conversion_count should 0, not %s' % visit.conversion_count)
def test_insight_trends_compare(self):
    """compare=True returns parallel 'current' and 'previous' series whose
    matching days resolve to the same two persons via the actor URLs."""
    events_by_person = {
        "p1": [
            {"event": "$pageview", "timestamp": datetime(2012, 1, 5, 3), "properties": {"key": "val"}},
            {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}},
        ],
        "p2": [
            {"event": "$pageview", "timestamp": datetime(2012, 1, 5, 3), "properties": {"key": "notval"}},
            {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "notval"}},
        ],
    }
    created_people = journeys_for(events_by_person, self.team)

    with freeze_time("2012-01-15T04:01:34.000Z"):
        request = TrendsRequest(
            date_from="-7d",
            compare=True,
            events=[{
                "id": "$pageview",
                "name": "$pageview",
                "type": "events",
                "order": 0,
            }],
        )
        data_response = get_trends_time_series_ok(self.client, request, self.team)

    # Current window peaks on the 14th; previous window on the 5th.
    assert data_response["$pageview - current"]["2012-01-13"].value == 0
    assert data_response["$pageview - current"]["2012-01-14"].value == 2
    assert data_response["$pageview - previous"]["2012-01-04"].value == 0
    assert data_response["$pageview - previous"]["2012-01-05"].value == 2

    with freeze_time("2012-01-15T04:01:34.000Z"):
        curr_people = get_people_from_url_ok(
            self.client, data_response["$pageview - current"]["2012-01-14"].person_url)
        prev_people = get_people_from_url_ok(
            self.client, data_response["$pageview - previous"]["2012-01-05"].person_url)

    assert sorted([p["id"] for p in curr_people]) == sorted(
        [str(created_people["p1"].uuid), str(created_people["p2"].uuid)])
    assert sorted([p["id"] for p in prev_people]) == sorted(
        [str(created_people["p1"].uuid), str(created_people["p2"].uuid)])