Пример #1
0
def update_cached_items() -> None:
    """Queue Celery tasks that refresh cached results for dashboard items.

    Considers items whose dashboard is shared or was accessed within the
    last 7 days, skipping items already refreshing or deleted. Enqueues one
    task per distinct non-empty trends filter and one per distinct funnel,
    then dispatches them all as a single Celery group.
    """
    tasks = []
    # Items on shared dashboards, or on dashboards viewed in the last week.
    items = (
        DashboardItem.objects.filter(
            Q(
                Q(dashboard__is_shared=True)
                | Q(dashboard__last_accessed_at__gt=timezone.now() - relativedelta(days=7))
            )
        )
        .exclude(refreshing=True)
        .exclude(deleted=True)
    )

    # One refresh task per distinct non-empty filter (trends insights).
    for item in items.filter(filters__isnull=False).exclude(filters={}).distinct("filters"):
        item_filter = Filter(data=item.filters)  # renamed: avoid shadowing builtin `filter`
        cache_key = generate_cache_key(f"{item_filter.toJSON()}_{item.team_id}")
        payload = {"filter": item_filter.toJSON(), "team_id": item.team_id}
        tasks.append(update_cache_item_task.s(cache_key, TRENDS_ENDPOINT, payload))

    # One refresh task per distinct funnel.
    for item in items.filter(funnel_id__isnull=False).distinct("funnel_id"):
        cache_key = generate_cache_key(f"funnel_{item.funnel_id}_{item.team_id}")
        payload = {"funnel_id": item.funnel_id, "team_id": item.team_id}
        tasks.append(update_cache_item_task.s(cache_key, FUNNEL_ENDPOINT, payload))

    # Lazy %-style args so formatting is skipped when INFO is disabled.
    logger.info("Found %s items to refresh", len(tasks))
    taskset = group(tasks)
    taskset.apply_async()
Пример #2
0
def update_cached_items() -> None:
    """Queue Celery tasks that refresh cached results for dashboard items.

    Considers items on shared or recently viewed (last 7 days) dashboards,
    deduplicated by filters_hash. Refreshes the least recently refreshed
    items first, capped at PARALLEL_DASHBOARD_ITEM_CACHE per run, and
    dispatches all tasks as a single Celery group.
    """
    tasks = []
    items = (
        DashboardItem.objects.filter(
            Q(Q(dashboard__is_shared=True) | Q(dashboard__last_accessed_at__gt=timezone.now() - relativedelta(days=7)))
        )
        .exclude(dashboard__deleted=True)
        .exclude(refreshing=True)
        .exclude(deleted=True)
        .distinct("filters_hash")
    )

    # Re-select by pk so ordering and the limit can be applied after DISTINCT ON.
    for item in DashboardItem.objects.filter(
        pk__in=Subquery(items.filter(filters__isnull=False).exclude(filters={}).distinct("filters").values("pk"))
    ).order_by(F("last_refresh").asc(nulls_first=True))[0:PARALLEL_DASHBOARD_ITEM_CACHE]:
        # `item_filter` rather than `filter`: avoid shadowing the builtin.
        item_filter = get_filter(data=item.dashboard_filters(), team=item.team)
        cache_key = generate_cache_key(f"{item_filter.toJSON()}_{item.team_id}")

        cache_type = get_cache_type(item_filter)
        payload = {"filter": item_filter.toJSON(), "team_id": item.team_id}
        tasks.append(update_cache_item_task.s(cache_key, cache_type, payload))

    # Lazy %-style args so formatting is skipped when INFO is disabled.
    logger.info("Found %s items to refresh", len(tasks))
    taskset = group(tasks)
    taskset.apply_async()
Пример #3
0
def update_cached_items() -> None:
    """Queue Celery tasks that refresh cached results for dashboard items.

    Items on shared or recently viewed (last 7 days) dashboards are
    refreshed, one task per distinct non-empty filter. Items whose cache
    entry already records an in-flight task (has a "task_id") are skipped.
    """
    tasks = []
    items = (
        DashboardItem.objects.filter(
            Q(
                Q(dashboard__is_shared=True)
                | Q(dashboard__last_accessed_at__gt=timezone.now() - relativedelta(days=7))
            )
        )
        .exclude(dashboard__deleted=True)
        .exclude(refreshing=True)
        .exclude(deleted=True)
    )

    for item in items.filter(filters__isnull=False).exclude(filters={}).distinct("filters"):
        item_filter = Filter(data=item.filters)  # renamed: avoid shadowing builtin `filter`
        cache_key = generate_cache_key(f"{item_filter.toJSON()}_{item.team_id}")
        curr_data = cache.get(cache_key)

        # If a refresh task is already logged and loading, leave it alone.
        if curr_data and curr_data.get("task_id"):
            continue

        cache_type = CacheType.FUNNEL if item_filter.insight == "FUNNELS" else CacheType.TRENDS
        payload = {"filter": item_filter.toJSON(), "team_id": item.team_id}
        tasks.append(update_cache_item_task.s(cache_key, cache_type, payload))

    # Lazy %-style args so formatting is skipped when INFO is disabled.
    logger.info("Found %s items to refresh", len(tasks))
    taskset = group(tasks)
    taskset.apply_async()
Пример #4
0
def update_cached_items() -> None:
    """Enqueue cache-refresh tasks for insights on active dashboards.

    Selects insights whose dashboard is shared or was viewed in the last
    7 days, least-recently-refreshed first, skipping deleted/refreshing
    insights and those that failed more than twice. Failures bump the
    insight's refresh_attempt counter and are reported, not raised.
    """
    queued = []
    recently_viewed = Q(dashboard__last_accessed_at__gt=timezone.now() - relativedelta(days=7))
    candidates = (
        Insight.objects.filter(Q(Q(dashboard__is_shared=True) | recently_viewed))
        .exclude(dashboard__deleted=True)
        .exclude(refreshing=True)
        .exclude(deleted=True)
        .exclude(refresh_attempt__gt=2)
        .exclude(filters={})
        .order_by(F("last_refresh").asc(nulls_first=True))
    )

    for insight in candidates[0:PARALLEL_INSIGHT_CACHE]:
        try:
            cache_key, cache_type, payload = dashboard_item_update_task_params(insight)
            # Force an update when the stored hash differs from the cache key.
            if insight.filters_hash != cache_key:
                insight.save()
            queued.append(update_cache_item_task.s(cache_key, cache_type, payload))
        except Exception as err:
            insight.refresh_attempt = (insight.refresh_attempt or 0) + 1
            insight.save()
            capture_exception(err)

    logger.info("Found {} items to refresh".format(len(queued)))
    group(queued).apply_async()
    statsd.gauge("update_cache_queue_depth", candidates.count())
Пример #5
0
def update_cached_items() -> None:
    """Dispatch cache-refresh tasks for insights on active dashboards.

    Considers insights on shared or recently viewed (last 7 days)
    dashboards, deduplicated by filters_hash, and refreshes up to
    PARALLEL_DASHBOARD_ITEM_CACHE of them, oldest refresh first.
    """
    refresh_tasks = []
    candidates = (
        Insight.objects.filter(
            Q(
                Q(dashboard__is_shared=True)
                | Q(dashboard__last_accessed_at__gt=timezone.now() - relativedelta(days=7))
            )
        )
        .exclude(dashboard__deleted=True)
        .exclude(refreshing=True)
        .exclude(deleted=True)
        .distinct("filters_hash")
    )

    # Re-select by pk so ordering and the limit apply after the DISTINCT ON.
    distinct_pks = Subquery(
        candidates.filter(filters__isnull=False).exclude(filters={}).distinct("filters").values("pk")
    )
    ordered = Insight.objects.filter(pk__in=distinct_pks).order_by(F("last_refresh").asc(nulls_first=True))

    for insight in ordered[0:PARALLEL_DASHBOARD_ITEM_CACHE]:
        cache_key, cache_type, payload = dashboard_item_update_task_params(insight)
        refresh_tasks.append(update_cache_item_task.s(cache_key, cache_type, payload))

    logger.info("Found {} items to refresh".format(len(refresh_tasks)))
    group(refresh_tasks).apply_async()