def decorator(func):
    """Wrap `func` so that its results are memoized per instance via `lru_cache`.

    The memoized callable is lazily created and stashed in the instance's
    `__dict__` under `wrapper.attrname`, so each instance owns its own cache.
    """

    @wraps(func)
    def wrapper(instance, *args, **kwargs):
        def partial_func(*args, **kwargs):
            # Bind `instance` here so the lru_cache key ignores it
            # (the instance itself may be non-hashable)
            return func(instance, *args, **kwargs)

        # Caching can be switched off globally or per-decorator
        if not defaults.caching or wrapper.disabled:
            return func(instance, *args, **kwargs)

        inst_dict = instance.__dict__
        cached_func = inst_dict.get(wrapper.attrname, _NOT_FOUND)
        if cached_func is _NOT_FOUND:
            with wrapper.lock:
                # Another thread may have filled the slot while we waited on the lock
                cached_func = inst_dict.get(wrapper.attrname, _NOT_FOUND)
                if cached_func is _NOT_FOUND:
                    cached_func = lru_cache(
                        maxsize=wrapper.maxsize, typed=wrapper.typed)(partial_func)
                    # Store the memoized function itself, not its output
                    inst_dict[wrapper.attrname] = cached_func

        # lru_cache needs hashable arguments; fall back to a direct call otherwise
        args_hashable = all(checks.is_hashable(a) for a in args)
        kwargs_hashable = all(checks.is_hashable(v) for v in kwargs.values())
        if not (args_hashable and kwargs_hashable):
            return func(instance, *args, **kwargs)
        return cached_func(*args, **kwargs)

    wrapper.func = func
    wrapper.maxsize = maxsize
    wrapper.typed = typed
    wrapper.attrname = '__cached_' + func.__name__
    wrapper.lock = RLock()
    wrapper.disabled = disabled
    wrapper.kwargs = kwargs

    def clear_cache(instance):
        """Clear the cache for this method belonging to `instance`."""
        if hasattr(instance, wrapper.attrname):
            delattr(instance, wrapper.attrname)

    wrapper.clear_cache = clear_cache
    return wrapper
def __init__(self, func, maxsize=128, typed=False, disabled=False, **kwargs):
    """Store `lru_cache` settings alongside the wrapped callable.

    Extra keyword arguments are forwarded to the parent initializer.
    """
    super().__init__(func, **kwargs)
    # Manual off-switch and thread-safety guard
    self.disabled = disabled
    self.lock = RLock()
    # lru_cache configuration
    self.maxsize = maxsize
    self.typed = typed
    # Cache attribute name is resolved later — presumably when the
    # descriptor is bound; confirm against the parent class.
    self.attrname = None
def __init__(self, func, **kwargs):
    """Initialize the parent wrapper and attach a re-entrant lock."""
    super().__init__(func, **kwargs)
    self.lock = RLock()  # re-entrant lock for thread-safe access
def decorating_function(user_function):
    """Build an LRU-cache wrapper around `user_function`.

    Derived from CPython's `functools.lru_cache` internals.  One of four
    wrapper variants is produced depending on the enclosing settings:
    memory-bounded (`use_memory_up_to`), no caching (`maxsize == 0`),
    unbounded (`maxsize is None`), or entry-count-bounded (default).
    """
    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get  # bound method to lookup a key or return None
    lock = RLock()  # because linkedlist updates aren't threadsafe
    root = []  # root of the circular doubly linked list
    root[:] = [root, root, None, None]  # initialize by pointing to self

    if use_memory_up_to:

        def wrapper(*args, **kwds):
            # Memory-limited caching that tracks accesses by recency:
            # `full` flips once available system memory drops below the threshold
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
            # Miss: compute outside the lock so other threads can proceed
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                    # NOTE(review): `full` is never re-evaluated in this branch,
                    # so once memory pressure is hit the cache stays pinned at
                    # its current size even if memory frees up later — confirm
                    # this is intended.
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # "Full" is driven by available memory, not by entry count
                    full = (psutil.virtual_memory().available < use_memory_up_to)
                misses += 1
            return result

    elif maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update after a successful call
            nonlocal misses
            result = user_function(*args, **kwds)
            misses += 1
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            result = user_function(*args, **kwds)
            cache[key] = result
            misses += 1
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
            # Miss: compute outside the lock so other threads can proceed
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    full = (len(cache) >= maxsize)
                misses += 1
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, len(cache))

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return update_wrapper(wrapper, user_function)
def __init__(self, func, disabled=False, **kwargs):
    """Initialize the parent wrapper and set up per-instance caching state.

    Extra keyword arguments are forwarded to the parent initializer.
    """
    super().__init__(func, **kwargs)
    # Manual off-switch and thread-safety guard
    self.disabled = disabled
    self.lock = RLock()
    # Name of the instance attribute that will hold the cached callable
    self.attrname = '__cached_' + func.__name__
def __init__(self, func, disabled=False, **kwargs):
    """Initialize the parent wrapper with a lock and a manual off-switch.

    Extra keyword arguments are forwarded to the parent initializer.
    """
    super().__init__(func, **kwargs)
    # Manual off-switch and thread-safety guard
    self.disabled = disabled
    self.lock = RLock()
    # Cache attribute name is resolved later — presumably when the
    # descriptor is bound; confirm against the parent class.
    self.attrname = None
def __init__(self, func, **kwargs):
    """Initialize the parent wrapper and attach caching bookkeeping."""
    super().__init__(func, **kwargs)
    self.lock = RLock()  # re-entrant lock for thread-safe access
    # Cache attribute name is resolved later — presumably when the
    # descriptor is bound; confirm against the parent class.
    self.attrname = None