Example #1
def cached_as(*samples, **kwargs):
    """
    Caches the results of a function and invalidates them the same way as the given queryset(s).
    NOTE: Ignores the querysets' cacheops settings, always caches.
    """
    timeout = kwargs.pop('timeout', None)
    extra = kwargs.pop('extra', None)
    key_func = kwargs.pop('key_func', func_cache_key)
    lock = kwargs.pop('lock', None)
    if kwargs:
        raise TypeError('Unexpected keyword arguments %s' % ', '.join(kwargs))

    # If we unexpectedly get list instead of queryset return identity decorator.
    # Paginator could do this when page.object_list is empty.
    if len(samples) == 1 and isinstance(samples[0], list):
        return lambda func: func

    def _get_queryset(sample):
        if isinstance(sample, Model):
            queryset = sample.__class__.objects.filter(pk=sample.pk)
        elif isinstance(sample, type) and issubclass(sample, Model):
            queryset = sample.objects.all()
        else:
            queryset = sample

        queryset._require_cacheprofile()

        return queryset

    querysets = list(map(_get_queryset, samples))  # materialize: iterated more than once below
    cond_dnfs = mapcat(dnfs, querysets)
    key_extra = [qs._cache_key() for qs in querysets]
    key_extra.append(extra)
    if not timeout:  # TODO: switch to is None on major release
        timeout = min(qs._cacheprofile['timeout'] for qs in querysets)
    if lock is None:
        lock = any(qs._cacheprofile['lock'] for qs in querysets)

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if transaction_state.is_dirty() or not settings.CACHEOPS_ENABLED:
                return func(*args, **kwargs)

            cache_key = 'as:' + key_func(func, args, kwargs, key_extra)

            with redis_client.getting(cache_key, lock=lock) as cache_data:
                cache_read.send(sender=None,
                                func=func,
                                hit=cache_data is not None)
                if cache_data is not None:
                    return pickle.loads(cache_data)
                else:
                    result = func(*args, **kwargs)
                    cache_thing(cache_key, result, cond_dnfs, timeout)
                    return result

        return wrapper

    return decorator
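
For reference, a minimal usage sketch of the decorator above (Article is a hypothetical model, and cacheops must be installed and configured). The sample queryset is used only to derive invalidation conditions; it is never executed against the database:

from cacheops import cached_as
from myapp.models import Article  # hypothetical model

@cached_as(Article.objects.filter(published=True), timeout=60)
def published_count():
    # Cached under an 'as:...' key; invalidated whenever a published Article changes.
    return Article.objects.filter(published=True).count()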
Example #2
def cached_as(*samples, **kwargs):
    """
    Caches the results of a function and invalidates them the same way as the given queryset(s).
    NOTE: Ignores the querysets' cacheops settings, always caches.
    """
    timeout = kwargs.pop('timeout', None)
    extra = kwargs.pop('extra', None)
    key_func = kwargs.pop('key_func', func_cache_key)
    lock = kwargs.pop('lock', None)
    if not samples:
        raise TypeError('Pass a queryset, a model or an object to cache like')
    if kwargs:
        raise TypeError('Unexpected keyword arguments %s' % ', '.join(kwargs))

    # If we unexpectedly get list instead of queryset return identity decorator.
    # Paginator could do this when page.object_list is empty.
    if len(samples) == 1 and isinstance(samples[0], list):
        return lambda func: func

    def _get_queryset(sample):
        if isinstance(sample, Model):
            queryset = sample.__class__.objects.filter(pk=sample.pk)
        elif isinstance(sample, type) and issubclass(sample, Model):
            queryset = sample.objects.all()
        else:
            queryset = sample

        queryset._require_cacheprofile()

        return queryset

    querysets = list(map(_get_queryset, samples))  # materialize: iterated more than once below
    cond_dnfs = mapcat(dnfs, querysets)
    key_extra = [qs._cache_key() for qs in querysets]
    key_extra.append(extra)
    if not timeout:  # TODO: switch to is None on major release
        timeout = min(qs._cacheprofile['timeout'] for qs in querysets)
    if lock is None:
        lock = any(qs._cacheprofile['lock'] for qs in querysets)

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if transaction_state.is_dirty() or not settings.CACHEOPS_ENABLED:
                return func(*args, **kwargs)

            cache_key = 'as:' + key_func(func, args, kwargs, key_extra)

            with redis_client.getting(cache_key, lock=lock) as cache_data:
                cache_read.send(sender=None, func=func, hit=cache_data is not None)
                if cache_data is not None:
                    return pickle.loads(cache_data)
                else:
                    result = func(*args, **kwargs)
                    cache_thing(cache_key, result, cond_dnfs, timeout)
                    return result

        return wrapper
    return decorator
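
Samples do not have to be querysets: a model class invalidates on any change to that model, while a model instance invalidates only on changes to that row. The extra value is folded into the cache key, and lock=True makes concurrent cold-cache callers wait for a single computation. A sketch, assuming a hypothetical Profile model:

from cacheops import cached_as
from myapp.models import Profile  # hypothetical model

@cached_as(Profile, extra='report-v2', lock=True)
def profile_report():
    return list(Profile.objects.values('id', 'plan'))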
Example #3
def _cached_as(*samples, **kwargs):
    """
    Caches the results of a function and invalidates them the same way as the given queryset.
    NOTE: Ignores the queryset's cacheops settings, just caches.
    """
    timeout = kwargs.get('timeout')
    extra = kwargs.get('extra')
    _get_key = kwargs.get('_get_key')

    # If we unexpectedly get list instead of queryset return identity decorator.
    # Paginator could do this when page.object_list is empty.
    # TODO: think of better way doing this.
    if len(samples) == 1 and isinstance(samples[0], list):
        return lambda func: func

    def _get_queryset(sample):
        if isinstance(sample, Model):
            queryset = sample.__class__.objects.inplace().filter(pk=sample.pk)
        elif isinstance(sample, type) and issubclass(sample, Model):
            queryset = sample.objects.all()
        else:
            queryset = sample

        queryset._require_cacheprofile()

        return queryset

    querysets = list(map(_get_queryset, samples))  # materialize: iterated more than once below
    cond_dnfs = mapcat(dnfs, querysets)
    key_extra = [qs._cache_key() for qs in querysets]
    key_extra.append(extra)
    if not timeout:
        timeout = min(qs._cacheconf['timeout'] for qs in querysets)

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            cache_key = 'as:' + _get_key(func, args, kwargs, key_extra)

            cache_data = redis_client.get(cache_key)
            if cache_data is not None:
                return pickle.loads(cache_data)

            if USE_LOCK:
                with redis_lock.Lock(redis_client, cache_key):
                    cache_data = redis_client.get(cache_key)
                    if cache_data is not None:
                        return pickle.loads(cache_data)
                    result = func(*args, **kwargs)
                    cache_thing(cache_key, result, cond_dnfs, timeout)
            else:
                result = func(*args, **kwargs)
                cache_thing(cache_key, result, cond_dnfs, timeout)

            return result

        return wrapper

    return decorator
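
This older, private variant has no default key function, so callers must supply _get_key themselves. A hypothetical key function matching the (func, args, kwargs, key_extra) signature it expects; this is only a sketch, not the library's real func_cache_key:

import hashlib

def simple_key(func, args, kwargs, key_extra):
    # Reduce the call plus the queryset-derived extras to a fixed-size key component.
    raw = '%s.%s|%r|%r|%r' % (func.__module__, func.__qualname__,
                              args, sorted(kwargs.items()), key_extra)
    return hashlib.md5(raw.encode()).hexdigest()

It would then be passed alongside the samples, e.g. _cached_as(SomeModel, _get_key=simple_key) for a hypothetical SomeModel.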
Example #4
def cached_as(*samples, **kwargs):
    """
    Caches the results of a function and invalidates them the same way as the given queryset.
    NOTE: Ignores the queryset's cacheops settings, just caches.
    """
    timeout = kwargs.get('timeout')
    extra = kwargs.get('extra')
    key_func = kwargs.get('key_func', func_cache_key)

    # If we unexpectedly get list instead of queryset return identity decorator.
    # Paginator could do this when page.object_list is empty.
    if len(samples) == 1 and isinstance(samples[0], list):
        return lambda func: func

    def _get_queryset(sample):
        if isinstance(sample, Model):
            queryset = sample.__class__.objects.filter(pk=sample.pk)
        elif isinstance(sample, type) and issubclass(sample, Model):
            queryset = sample.objects.all()
        else:
            queryset = sample

        queryset._require_cacheprofile()

        return queryset

    querysets = list(map(_get_queryset, samples))  # materialize: iterated more than once below
    cond_dnfs = mapcat(dnfs, querysets)
    key_extra = [qs._cache_key() for qs in querysets]
    key_extra.append(extra)
    if not timeout:
        timeout = min(qs._cacheconf['timeout'] for qs in querysets)

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if in_transaction():
                return func(*args, **kwargs)

            default_key = 'as:' + key_func(func, args, kwargs, key_extra)
            cache_key = get_user_defined_key(default_key=default_key) or default_key

            cache_data = redis_client.get(cache_key)
            cache_read.send(sender=None, func=func, hit=cache_data is not None)
            if cache_data is not None:
                return pickle.loads(cache_data)

            result = func(*args, **kwargs)
            cache_thing(cache_key, result, cond_dnfs, timeout)
            return result

        return wrapper

    return decorator
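
The in_transaction() guard means calls made inside a database transaction bypass the cache entirely: changes made in the transaction might still be rolled back, so neither reading nor writing the cache is safe there. A sketch, assuming report is a function decorated with this cached_as:

from django.db import transaction

report()                 # normal call: result read from or written to Redis
with transaction.atomic():
    report()             # inside a transaction: runs the function, cache is skipped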
Example #5
def class_tree(cls):
    # list() keeps the concatenation working when mapcat() yields an iterator.
    return [cls] + list(mapcat(class_tree, cls.__subclasses__()))
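
A quick self-contained check of what class_tree returns, using toy classes (not part of the original source; assumes funcy is installed for mapcat):

class Base: pass
class Left(Base): pass
class Right(Base): pass
class Leaf(Left): pass

# Depth-first: the class itself, then the full tree of each direct subclass.
print(class_tree(Base))  # [Base, Left, Leaf, Right]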