Example #1
    def _test_refresh_dashboard_cache_types(
        self,
        filter: FilterType,
        patch_update_cache_item: MagicMock,
        patch_apply_async: MagicMock,
    ) -> None:

        dashboard_to_cache = Dashboard.objects.create(team=self.team,
                                                      is_shared=True,
                                                      last_accessed_at=now())

        DashboardItem.objects.create(
            dashboard=dashboard_to_cache,
            filters=filter.to_dict(),
            team=self.team,
            last_refresh=now() - timedelta(days=30),
        )
        update_cached_items()

        # Pass the captured calls straight to the function to skip the task queue
        for call_item in patch_update_cache_item.call_args_list:
            update_cache_item(*call_item[0])

        item_key = generate_cache_key("{}_{}".format(filter.toJSON(),
                                                     self.team.pk))
        self.assertIsNotNone(cache.get(item_key))
Example #2
    def _test_refresh_dashboard_cache_types(
        self,
        filter: FilterType,
        cache_type: CacheType,
        patch_update_cache_item: MagicMock,
    ) -> None:
        self._create_dashboard(filter)

        update_cached_items()

        expected_args = [
            generate_cache_key("{}_{}".format(filter.toJSON(), self.team.pk)),
            cache_type,
            {
                "filter": filter.toJSON(),
                "team_id": self.team.pk,
            },
        ]

        patch_update_cache_item.assert_any_call(*expected_args)

        update_cache_item(*expected_args)  # type: ignore

        item_key = generate_cache_key("{}_{}".format(filter.toJSON(),
                                                     self.team.pk))
        self.assertIsNotNone(get_safe_cache(item_key))
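The two helpers above are not tests themselves: the MagicMock parameters imply they are driven by test methods decorated with unittest.mock.patch. A minimal sketch of such a caller, assuming hypothetical patch targets and a stand-in base class (none of these names are confirmed by the snippets):

from unittest.mock import MagicMock, patch


class TestUpdateCacheCaller(BaseTest):  # BaseTest stands in for the suite's real base TestCase
    # The patch targets below are assumptions; the real decorator targets are not shown in these snippets
    @patch("posthog.tasks.update_cache.group.apply_async")
    @patch("posthog.tasks.update_cache.update_cache_item_task.s")
    def test_refresh_trend_dashboard_cache(
        self, patch_update_cache_item: MagicMock, patch_apply_async: MagicMock
    ) -> None:
        filter = Filter(data={"events": [{"id": "$pageview"}]})
        self._test_refresh_dashboard_cache_types(filter, patch_update_cache_item, patch_apply_async)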
Example #3
    def test_broken_insights(
            self, dashboard_item_update_task_params: MagicMock) -> None:
        # Sometimes we end up with broken insights (here: empty filters); make sure they don't schedule cache updates
        dashboard = Dashboard.objects.create(team=self.team, is_shared=True)
        item = Insight.objects.create(dashboard=dashboard,
                                      filters={},
                                      team=self.team)

        update_cached_items()

        self.assertEqual(dashboard_item_update_task_params.call_count, 0)
Example #4
    def test_broken_exception_insights(
            self, dashboard_item_update_task_params: MagicMock) -> None:
        # Make the captured task call raise so the insight's refresh_attempt counter gets bumped
        dashboard_item_update_task_params.side_effect = Exception()
        dashboard = Dashboard.objects.create(team=self.team, is_shared=True)
        filter = {"events": [{"id": "$pageview"}]}
        item = Insight.objects.create(dashboard=dashboard,
                                      filters=filter,
                                      team=self.team)

        update_cached_items()

        self.assertEqual(Insight.objects.get().refresh_attempt, 1)
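The assertion above implies that the scheduler catches errors from the per-insight task call and records a retry counter. A rough sketch of that pattern under those assumptions (the helper name and field handling are hypothetical, not the actual implementation):

def _schedule_cache_update(insight, schedule_task) -> None:
    # Hypothetical sketch: call the (patched) task factory and count failures per insight
    try:
        schedule_task(insight.filters_hash, insight.filters, insight.team_id)
    except Exception:
        insight.refresh_attempt = (insight.refresh_attempt or 0) + 1
        insight.save(update_fields=["refresh_attempt"])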
Example #5
    def test_stickiness_regression(self, patch_update_cache_item: MagicMock, patch_apply_async: MagicMock) -> None:
        # We moved Stickiness from being a "shown_as" item to its own insight.
        # That move caused issues, hence this regression test.
        filter_stickiness = StickinessFilter(
            data={
                "events": [{"id": "$pageview"}],
                "properties": [{"key": "$browser", "value": "Mac OS X"}],
                "date_from": "2012-01-10",
                "date_to": "2012-01-15",
                "insight": INSIGHT_STICKINESS,
                "shown_as": "Stickiness",
            },
            team=self.team,
            get_earliest_timestamp=Event.objects.earliest_timestamp,
        )
        filter = Filter(
            data={
                "events": [{"id": "$pageview"}],
                "properties": [{"key": "$browser", "value": "Mac OS X"}],
                "date_from": "2012-01-10",
                "date_to": "2012-01-15",
            }
        )
        shared_dashboard = Dashboard.objects.create(team=self.team, is_shared=True)

        DashboardItem.objects.create(dashboard=shared_dashboard, filters=filter_stickiness.to_dict(), team=self.team)
        DashboardItem.objects.create(dashboard=shared_dashboard, filters=filter.to_dict(), team=self.team)

        item_stickiness_key = generate_cache_key(filter_stickiness.toJSON() + "_" + str(self.team.pk))
        item_key = generate_cache_key(filter.toJSON() + "_" + str(self.team.pk))

        update_cached_items()

        for call_item in patch_update_cache_item.call_args_list:
            update_cache_item(*call_item[0])

        self.assertEqual(
            get_safe_cache(item_stickiness_key)["result"][0]["labels"],
            ["1 day", "2 days", "3 days", "4 days", "5 days", "6 days"],
        )
        self.assertEqual(
            get_safe_cache(item_key)["result"][0]["labels"],
            [
                "Tue. 10 January",
                "Wed. 11 January",
                "Thu. 12 January",
                "Fri. 13 January",
                "Sat. 14 January",
                "Sun. 15 January",
            ],
        )
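The assertions in these tests index into the cached value as result[0]["labels"], and elsewhere as result[0]["count"] and result[0]["data"], which pins down the nesting the cache is assumed to hold. An illustrative example of that shape (a single entry would not necessarily carry all three keys at once; this only shows the nesting):

# Illustrative shape only -- the values here are made up, not taken from a real run
cached_value = {
    "result": [
        {
            "labels": ["1 day", "2 days", "3 days", "4 days", "5 days", "6 days"],
            "count": 0,
            "data": [0, 0, 0, 0, 0, 0],
        }
    ]
}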
Example #6
    def test_refresh_dashboard_cache(self, patch_update_cache_item: MagicMock, patch_apply_async: MagicMock) -> None:
        # There are two kinds of dashboards we want to refresh:
        # - any shared dashboard, as we only use cached items to show those
        # - any dashboard accessed in the last 7 days
        filter_dict = {
            "events": [{"id": "$pageview"}],
            "properties": [{"key": "$browser", "value": "Mac OS X"}],
        }
        filter = Filter(data=filter_dict)
        shared_dashboard = Dashboard.objects.create(team=self.team, is_shared=True)
        funnel_filter = Filter(data={"events": [{"id": "user signed up", "type": "events", "order": 0}]})

        item = DashboardItem.objects.create(dashboard=shared_dashboard, filters=filter.to_dict(), team=self.team)
        funnel_item = DashboardItem.objects.create(
            dashboard=shared_dashboard, filters=funnel_filter.to_dict(), team=self.team
        )

        dashboard_to_cache = Dashboard.objects.create(team=self.team, is_shared=True, last_accessed_at=now())
        item_to_cache = DashboardItem.objects.create(
            dashboard=dashboard_to_cache,
            filters=Filter(data={"events": [{"id": "cache this"}]}).to_dict(),
            team=self.team,
        )

        dashboard_do_not_cache = Dashboard.objects.create(
            team=self.team, is_shared=True, last_accessed_at="2020-01-01T12:00:00Z"
        )
        item_do_not_cache = DashboardItem.objects.create(
            dashboard=dashboard_do_not_cache,
            filters=Filter(data={"events": [{"id": "do not cache this"}]}).to_dict(),
            team=self.team,
        )

        item_key = generate_cache_key(filter.toJSON() + "_" + str(self.team.pk))
        funnel_key = generate_cache_key(funnel_filter.toJSON() + "_" + str(self.team.pk))
        update_cached_items()

        # pass the caught calls straight to the function
        # we do this to skip Redis
        for call_item in patch_update_cache_item.call_args_list:
            update_cache_item(*call_item[0])

        self.assertIsNotNone(DashboardItem.objects.get(pk=item.pk).last_refresh)
        self.assertIsNotNone(DashboardItem.objects.get(pk=item_to_cache.pk).last_refresh)
        self.assertIsNotNone(DashboardItem.objects.get(pk=item_do_not_cache.pk).last_refresh)
        self.assertEqual(get_safe_cache(item_key)["result"][0]["count"], 0)
        self.assertEqual(get_safe_cache(funnel_key)["result"][0]["count"], 0)
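The comment at the top of this test spells out the refresh policy: shared dashboards, plus anything accessed in the last 7 days. A minimal sketch of the queryset such a policy implies, assuming the Django ORM models used in these tests (an illustration with a hypothetical helper name, not the actual update_cached_items implementation):

from datetime import timedelta

from django.db.models import Q
from django.utils.timezone import now


def _dashboards_to_refresh():
    # Illustrative only: shared dashboards, plus anything accessed in the last 7 days
    return Dashboard.objects.filter(
        Q(is_shared=True) | Q(last_accessed_at__gt=now() - timedelta(days=7))
    )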
Example #7
    def test_filters_multiple_dashboard(self) -> None:
        # Regression test. Previously, if we had insights with the same filter but different dashboard filters, we would only update one of them.
        dashboard1 = Dashboard.objects.create(filters={"date_from": "-14d"},
                                              team=self.team,
                                              is_shared=True)
        dashboard2 = Dashboard.objects.create(filters={"date_from": "-30d"},
                                              team=self.team,
                                              is_shared=True)
        dashboard3 = Dashboard.objects.create(team=self.team, is_shared=True)

        filter = {"events": [{"id": "$pageview"}]}

        item1 = Insight.objects.create(dashboard=dashboard1,
                                       filters=filter,
                                       team=self.team)
        item2 = Insight.objects.create(dashboard=dashboard2,
                                       filters=filter,
                                       team=self.team)
        item3 = Insight.objects.create(dashboard=dashboard3,
                                       filters=filter,
                                       team=self.team)

        update_cached_items()

        insights = Insight.objects.all().order_by("id")

        self.assertEqual(
            len(get_safe_cache(insights[0].filters_hash)["result"][0]["data"]),
            15)
        self.assertEqual(
            len(get_safe_cache(insights[1].filters_hash)["result"][0]["data"]),
            31)
        self.assertEqual(
            len(get_safe_cache(insights[2].filters_hash)["result"][0]["data"]),
            8)
        self.assertEqual(insights[0].last_refresh.isoformat(),
                         "2021-08-25T22:09:14.252000+00:00")
        self.assertEqual(insights[1].last_refresh.isoformat(),
                         "2021-08-25T22:09:14.252000+00:00")
        self.assertEqual(insights[2].last_refresh.isoformat(),
                         "2021-08-25T22:09:14.252000+00:00")
Example #8
    def test_insights_old_filter(self) -> None:
        # Some filter hashes are wrong (likely due to changes in our filter models); previously we would not save changes to those insights and would constantly retry them.
        dashboard = Dashboard.objects.create(team=self.team, is_shared=True)
        filter = {"events": [{"id": "$pageview"}]}
        item = Insight.objects.create(dashboard=dashboard,
                                      filters=filter,
                                      filters_hash="cache_thisiswrong",
                                      team=self.team)
        Insight.objects.all().update(filters_hash="cache_thisiswrong")
        self.assertEqual(Insight.objects.get().filters_hash,
                         "cache_thisiswrong")

        update_cached_items()

        self.assertEqual(
            Insight.objects.get().filters_hash,
            generate_cache_key("{}_{}".format(
                Filter(data=filter).toJSON(), self.team.pk)),
        )
        self.assertEqual(Insight.objects.get().last_refresh.isoformat(),
                         "2021-08-25T22:09:14.252000+00:00")
Example #9
def check_cached_items():
    # Imported inside the function so the task module is only loaded when this actually runs
    from posthog.tasks.update_cache import update_cached_items

    update_cached_items()
Example #10
def _update_cached_items() -> None:
    # update_cached_items will throw an exception every time here, which is what we want in
    # production; swallow it in this helper so execution can continue
    try:
        update_cached_items()
    except Exception:
        pass
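The try/except here is presumably deliberate: it lets a caller (a scheduled job or a test, for instance) trigger the refresh without a failure inside update_cached_items propagating and aborting the caller, at the cost of the error being silent unless update_cached_items reports it itself.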