def __init__(self, obj, lock=None):
    """The initializer."""
    if lock is None:
        lock = Lock()
    super(TObjectProxy, self).__init__(obj, lock)
    # I'm paranoid => attributes are private.
    super(TObjectProxy, self).__setattr__('_TObjectProxy__obj', obj)
    super(TObjectProxy, self).__setattr__('_TObjectProxy__lock', lock)
def __init__(self, handler, lock=None):
    """The initializer."""
    if lock is None:
        lock = Lock()
    if callable(handler):
        super(TCallableProxy, self).__init__(handler, lock)
    else:
        raise TypeError("Object not callable.", handler)
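# A minimal usage sketch (not part of the original module; the class names and
# constructor signatures are taken from the initializers above, everything else
# is an assumption). It shows both proxies sharing one threading.Lock so that
# access to the wrapped object and the wrapped callable is serialized together.
#
#     from threading import Lock
#
#     shared_lock = Lock()
#     data_proxy = TObjectProxy([0], shared_lock)        # wrap a plain object
#     print_proxy = TCallableProxy(print, shared_lock)   # wrap a callable
#     TCallableProxy(42)   # raises TypeError("Object not callable.", 42)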
def decorating_function(user_function, **kwargs):
    tuple = kwargs.get('tuple', builtins.tuple)
    sorted = kwargs.get('sorted', builtins.sorted)
    map = kwargs.get('map', builtins.map)
    len = kwargs.get('len', builtins.len)
    type = kwargs.get('type', builtins.type)
    KeyError = kwargs.get('KeyError', builtins.KeyError)

    hits = [0]
    misses = [0]
    kwd_mark = (object(),)  # separates positional and keyword args
    lock = Lock()           # needed because OrderedDict isn't threadsafe

    if maxsize is None:
        cache = dict()      # simple cache without ordering or size limit

        @wraps(user_function)
        def wrapper(*args, **kwds):
            # nonlocal hits, misses -- emulated with one-element lists
            key = args
            if kwds:
                sorted_items = tuple(sorted(kwds.items()))
                key += kwd_mark + sorted_items
            if typed:
                key += tuple(map(type, args))
                if kwds:
                    key += tuple(type(v) for k, v in sorted_items)
            try:
                result = cache[key]
                hits[0] += 1
                return result
            except KeyError:
                pass
            result = user_function(*args, **kwds)
            cache[key] = result
            misses[0] += 1
            return result
    else:
        cache = OrderedDict()   # ordered least recent to most recent
        cache_popitem = cache.popitem
        # use the move_to_end method if available, otherwise fall back to
        # the module-level function.
        cache_renew = getattr(cache, 'move_to_end',
                              functools.partial(_move_to_end, cache))

        @wraps(user_function)
        def wrapper(*args, **kwds):
            # nonlocal hits, misses -- emulated with one-element lists
            key = args
            if kwds:
                sorted_items = tuple(sorted(kwds.items()))
                key += kwd_mark + sorted_items
            if typed:
                key += tuple(map(type, args))
                if kwds:
                    key += tuple(type(v) for k, v in sorted_items)
            with lock:
                try:
                    result = cache[key]
                    cache_renew(key)    # record recent use of this key
                    hits[0] += 1
                    return result
                except KeyError:
                    pass
            result = user_function(*args, **kwds)
            with lock:
                cache[key] = result     # record recent use of this key
                misses[0] += 1
                if len(cache) > maxsize:
                    cache_popitem(0)    # purge least recently used cache entry
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits[0], misses[0], maxsize, len(cache))

    def cache_clear():
        """Clear the cache and cache statistics"""
        # nonlocal hits, misses -- emulated with one-element lists
        with lock:
            cache.clear()
            hits[0] = misses[0] = 0

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
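# A minimal usage sketch (an assumption, not part of the module): it presumes
# decorating_function above is returned by an outer lru_cache-style factory
# that binds the free variables `maxsize` and `typed`, e.g.
# `lru_cache(maxsize=128, typed=False)`. The decorated wrapper then exposes
# the cache_info() and cache_clear() attached above.
#
#     @lru_cache(maxsize=2)
#     def square(x):
#         return x * x
#
#     square(2); square(2); square(3)
#     print(square.cache_info())   # hits=1, misses=2, maxsize=2, currsize=2
#     square(4)                    # evicts the least recently used key (2,)
#     square.cache_clear()         # resets the cache and the hit/miss counters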
def decorating_function(user_function, len=len, iter=iter, tuple=tuple,
                        sorted=sorted, KeyError=KeyError):
    cache = {}              # mapping of args to results
    queue = deque()         # order that keys have been used
    refcount = Counter()    # times each key is in the queue
    sentinel = object()     # marker for looping around the queue
    kwd_mark = object()     # separates positional and keyword args
    lock = Lock()

    # lookup optimizations (ugly but fast)
    queue_append, queue_popleft = queue.append, queue.popleft
    queue_appendleft, queue_pop = queue.appendleft, queue.pop

    @wraps(user_function)
    def wrapper(*args, **kwds):
        # cache key records both positional and keyword args
        key = args
        if kwds:
            key += (kwd_mark,) + tuple(sorted(kwds.items()))

        # record recent use of this key
        queue_append(key)
        refcount[key] += 1

        # get cache entry or compute if not found
        try:
            with lock:
                result = cache[key]
                wrapper.hits += 1
        except KeyError:
            result = user_function(*args, **kwds)
            with lock:
                cache[key] = result
                wrapper.misses += 1

                # purge least recently used cache entry
                if len(cache) > maxsize:
                    key = queue_popleft()
                    refcount[key] -= 1
                    while refcount[key]:
                        key = queue_popleft()
                        refcount[key] -= 1
                    del cache[key], refcount[key]

        # periodically compact the queue by eliminating duplicate keys
        # while preserving order of most recent access
        if len(queue) > maxqueue:
            with lock:
                refcount.clear()
                queue_appendleft(sentinel)
                for key in ifilterfalse(refcount.__contains__,
                                        iter(queue_pop, sentinel)):
                    queue_appendleft(key)
                    refcount[key] = 1
        return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(wrapper.hits, wrapper.misses, maxsize, len(cache))

    def cache_clear():
        """Clear the cache and cache statistics"""
        with lock:
            cache.clear()
            queue.clear()
            refcount.clear()
            wrapper.hits = wrapper.misses = 0

    wrapper.hits = wrapper.misses = 0
    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
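# A minimal usage sketch for the deque/Counter variant above (an assumption:
# as with the OrderedDict version, decorating_function is presumed to be
# returned by an outer factory that binds `maxsize` and `maxqueue`, e.g.
# `maxqueue = maxsize * 10`). Unlike the first variant, the hit/miss counters
# live directly on the wrapper as plain attributes.
#
#     @lru_cache(maxsize=32)
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)
#
#     fib(20)
#     print(fib.hits, fib.misses)   # counters are plain wrapper attributes
#     print(fib.cache_info())       # same _CacheInfo tuple as the other variant
#     fib.cache_clear()             # also empties the queue and refcount table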