Example #1
 async def run_and_cache(func, args, kwargs):
     result = await func(*args, **kwargs)
     key = functools._make_key(args, kwargs, False)
     cache[key] = result
     if len(cache) > maxsize:
         cache.popitem(False)    # evict the least recently used entry
     cache.move_to_end(key)      # mark this key as most recently used
     return result
Example #2
        def wrapper(*args, **kwds):
            args_key = str(hash(functools._make_key(args, kwds, typed=False)))
            func_key = '.'.join([user_function.__module__, user_function.__name__])
            unique_key = func_key + args_key
            try:
                return _check_cache(_handle_dict[filename], unique_key,
                                    user_function, args, kwds)
            except KeyError:

                with shelve.open(filename, writeback=True) as cache:
                    _handle_dict[filename] = cache
                    return _check_cache(cache, unique_key, user_function, args, kwds)
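The wrapper above assumes a module-level _handle_dict of open shelve handles and a _check_cache helper. A minimal sketch of _check_cache that is consistent with how it is called here (an assumption, not the project's actual code): return the cached value for unique_key, or compute, store and return it on a miss.

def _check_cache(cache, unique_key, user_function, args, kwds):
    try:
        return cache[unique_key]
    except KeyError:
        result = user_function(*args, **kwds)
        cache[unique_key] = result
        return result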
Example #3
 async def wrapper(*args, **kwargs):
     key = functools._make_key(args, kwargs, False)
     if key in cache:
         return cache[key]
     if key in awaiting:
         # another caller is already computing this key; share its task
         task = awaiting[key]
         return await asyncio.wait_for(task, timeout=None)
     task = asyncio.ensure_future(run_and_cache(func, args, kwargs))
     awaiting[key] = task
     result = await asyncio.wait_for(task, timeout=None)
     del awaiting[key]
     return result
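Examples #1 and #3 are two halves of one pattern: an LRU cache for coroutine functions that also coalesces concurrent calls for the same key. A self-contained sketch of how the pieces might be wired together; the decorator name async_lru_cache and the exact closure layout are assumptions for illustration, not the original project's code.

import asyncio
import functools
from collections import OrderedDict

def async_lru_cache(maxsize=128):
    """Sketch: memoize a coroutine function with LRU eviction."""
    def decorator(func):
        cache = OrderedDict()   # key -> result, kept in recency order
        awaiting = {}           # key -> in-flight task shared by callers

        async def run_and_cache(func, args, kwargs):
            result = await func(*args, **kwargs)
            key = functools._make_key(args, kwargs, False)
            cache[key] = result
            if len(cache) > maxsize:
                cache.popitem(False)
            cache.move_to_end(key)
            return result

        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            key = functools._make_key(args, kwargs, False)
            if key in cache:
                return cache[key]
            if key in awaiting:
                return await asyncio.wait_for(awaiting[key], timeout=None)
            task = asyncio.ensure_future(run_and_cache(func, args, kwargs))
            awaiting[key] = task
            # if this await raises, the key stays in `awaiting` (mirrors the snippets above)
            result = await asyncio.wait_for(task, timeout=None)
            del awaiting[key]
            return result

        return wrapper
    return decorator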
Example #4
        def wrapper(*args, **kwargs):

            nonlocal p, hits, misses
            key = _make_key(args, kwargs, typed)

            # ARC hit: Case I
            if key in t1:
                hits += 1
                result = t1[key]
                del t1[key]
                t2[key] = result  # MRU in t2
                return result
            elif key in t2:
                hits += 1
                t2.move_to_end(key)  # MRU in t2
                return t2[key]

            # ARC miss
            misses += 1
            result = func(*args, **kwargs)
            if key in b1:  # Case II: hit in l1
                p = adapt_plus(p)
                # by invariant, the cache must be full, so evict from t1 or t2
                evict_t1_t2()
                t2[key] = result
            elif key in b2:  # Case III: hit in l2
                # by invariant, the cache must be full, so evict from t1 or t2
                p = adapt_minus(p)
                evict_t1_t2()
                t2[key] = result
            else:  # Case IV: cache miss in DBL(2c)
                len_l1 = len(t1) + len(b1)
                if len_l1 == max_size:
                    evict_l1()
                elif len_l1 < max_size:
                    evict_l2()

                # if cache is not full, add it to t1 even if we exceed p
                t1[key] = result  # MRU in t1

            return result
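The ARC wrapper above relies on four lists and several helpers from its enclosing scope: t1/t2 hold cached pages (recency/frequency), b1/b2 are ghost lists of recently evicted keys, and p is the adaptive target size of t1. A rough, self-contained sketch of what adapt_plus, adapt_minus, evict_t1_t2, evict_l1 and evict_l2 might look like, loosely following the REPLACE routine of the ARC paper; module-level globals stand in for the decorator's closure, and all details are assumptions for illustration.

from collections import OrderedDict

max_size = 128
p = 0                                    # adaptive target size for t1
t1, t2 = OrderedDict(), OrderedDict()    # cached pages: recency / frequency
b1, b2 = OrderedDict(), OrderedDict()    # ghost lists (keys only, no values)

def adapt_plus(p):
    # hit in b1: recency is winning, grow t1's target size
    return min(p + max(1, len(b2) // max(1, len(b1))), max_size)

def adapt_minus(p):
    # hit in b2: frequency is winning, shrink t1's target size
    return max(p - max(1, len(b1) // max(1, len(b2))), 0)

def evict_t1_t2():
    # REPLACE: demote the LRU page of t1 or t2 into its ghost list
    if t1 and (len(t1) > p or not t2):
        key, _ = t1.popitem(last=False)
        b1[key] = None
    elif t2:
        key, _ = t2.popitem(last=False)
        b2[key] = None

def evict_l1():
    # l1 (= t1 + b1) already holds max_size keys: make room on the recency side
    if b1:
        b1.popitem(last=False)
        evict_t1_t2()
    elif t1:
        t1.popitem(last=False)   # b1 empty, so drop the LRU cached page directly

def evict_l2():
    # the whole directory may hold at most 2 * max_size keys
    total = len(t1) + len(t2) + len(b1) + len(b2)
    if total >= max_size:
        if total >= 2 * max_size and b2:
            b2.popitem(last=False)
        evict_t1_t2()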
Example #5
        def wrapper(*args, **kwargs) -> Any:
            """
            The wrapper

            IN:
                args - function position arguments
                kwargs - function keyword arguments

            OUT:
                value returned by the function
            """
            cache = funcs_cache_map[func]
            key = _make_key(args, kwargs, typed)

            if cache.has_cache(key):
                return cache.get(key)

            value = func(*args, **kwargs)
            cache.add(key, value)

            return value
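The wrapper gets its cache from funcs_cache_map, keyed by the decorated function; the cache object only needs has_cache, get and add. A hypothetical minimal implementation with LRU eviction (the class name and the maxsize handling are assumptions for illustration).

from collections import OrderedDict

class LRUCache:
    """Sketch of a cache object exposing the interface used above."""

    def __init__(self, maxsize=128):
        self.maxsize = maxsize
        self._data = OrderedDict()

    def has_cache(self, key):
        return key in self._data

    def get(self, key):
        value = self._data[key]
        self._data.move_to_end(key)   # refresh recency on a hit
        return value

    def add(self, key, value):
        self._data[key] = value
        if len(self._data) > self.maxsize:
            self._data.popitem(last=False)   # drop the least recently used entry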
Example #6
        async def wrapped(*fn_args, **fn_kwargs):
            if wrapped.closed:
                raise RuntimeError(
                    "alru_cache is closed for {}".format(wrapped))

            loop = asyncio.get_event_loop()

            key = _make_key(fn_args, fn_kwargs, typed)

            fut = wrapped._cache.get(key)

            if fut is not None:
                if not fut.done():
                    _cache_hit(wrapped, key)
                    return await asyncio.shield(fut)

                exc = fut._exception

                if exc is None or cache_exceptions:
                    _cache_hit(wrapped, key)
                    return fut.result()

                # exception here and cache_exceptions == False
                wrapped._cache.pop(key)

            fut = loop.create_future()
            task = loop.create_task(fn(*fn_args, **fn_kwargs))
            task.add_done_callback(partial(_done_callback, fut))

            wrapped.tasks.add(task)
            task.add_done_callback(wrapped.tasks.remove)

            wrapped._cache[key] = fut

            if maxsize is not None and len(wrapped._cache) > maxsize:
                wrapped._cache.popitem(last=False)

            _cache_miss(wrapped, key)
            return await asyncio.shield(fut)
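The wrapped coroutine above hands result propagation off to a _done_callback helper (bound to the cached future via functools.partial). A plausible shape for it, assumed here for illustration: copy the finished task's outcome into the future stored in the cache so later hits can read it with fut.result().

def _done_callback(fut, task):
    if task.cancelled():
        fut.cancel()
        return
    exc = task.exception()
    if exc is not None:
        fut.set_exception(exc)
    else:
        fut.set_result(task.result())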
Example #7
        def wrapper(*args, **kwargs):
            # I have to ignore self i.e. commands.Cog instance
            _args = tuple([arg for arg in args if 'cogs' not in repr(arg)])
            _key = _make_key(_args, kwargs, typed_cache)

            try:
                value = _internal_cache[_key]
                _stats.hits += 1
                if inspect.iscoroutinefunction(func):
                    return _wrap_new_coroutine(value)

                return value

            except KeyError:
                _stats.missed += 1
                value = func(*args, **kwargs)
                if inspect.isawaitable(value):
                    return _wrap_and_store_key(_internal_cache, _key, value)

                _internal_cache[_key] = value
                return value
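_wrap_new_coroutine and _wrap_and_store_key are not shown in this example. One plausible shape, assumed purely for illustration: re-wrap an already computed value so callers of a coroutine function can still await it, and store an awaitable's result in the cache once it resolves.

async def _wrap_new_coroutine(value):
    # hand a cached value back inside a fresh coroutine so it can be awaited
    return value

async def _wrap_and_store_key(cache, key, coro):
    # await the original coroutine, then cache and return its result
    value = await coro
    cache[key] = value
    return value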
Example #8
async def test_async_ttl_cache_dont_overwrite_new_cache_entry():
    """Make sure that we don't overwrite a new cache entry that was placed
    while we were waiting to handle the result of a previously cached future
    """
    range_continue_event = asyncio.Event()
    update_cache_event = asyncio.Event()
    return_values = iter(range(10))

    # Wait until awaiter has had a chance to get the in-flight future out of
    # the cache, then signal to the cache_updater to replace the cached future
    # before returning.  Because cache_updater is signalled first, it will
    # replace the previously cached future before async_ttl_cache decides
    # whether to save the result of that future in the cache
    async def range_coroutine():
        await range_continue_event.wait()
        update_cache_event.set()
        return next(return_values)

    range_coroutine_future = asyncio.ensure_future(range_coroutine())
    cache_key = functools._make_key((), {}, typed=False)
    cache = {cache_key: (range_coroutine_future, float("Inf"))}

    cached_range_coroutine = async_ttl_cache(cache=cache,
                                             ttl=0)(range_coroutine)

    new_range_coroutine_future = asyncio.ensure_future(range_coroutine())

    async def awaiter():
        range_continue_event.set()
        await cached_range_coroutine()

    async def cache_updater():
        await update_cache_event.wait()
        cache[cache_key] = (new_range_coroutine_future, float("Inf"))

    await asyncio.gather(awaiter(), cache_updater())
    assert cache[cache_key] == (new_range_coroutine_future, float("Inf"))
Example #9
 def inner(*args, **kwargs):
     key = _make_key(args, kwargs, typed=typed)
     with func._main_lock:
         key_lock = func._keyed_locks[key]
     with key_lock:
         return caching_func(*args, **kwargs)
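This snippet assumes a _main_lock guarding a mapping of per-key locks and a caching_func built from the decorated function. A self-contained sketch of a surrounding decorator under those assumptions (the name locked_lru_cache is made up for illustration).

import threading
from collections import defaultdict
from functools import lru_cache, wraps, _make_key

def locked_lru_cache(maxsize=128, typed=False):
    """Sketch: serialise concurrent calls per key on top of lru_cache."""
    def decorator(func):
        caching_func = lru_cache(maxsize=maxsize, typed=typed)(func)
        func._main_lock = threading.Lock()
        func._keyed_locks = defaultdict(threading.Lock)

        @wraps(func)
        def inner(*args, **kwargs):
            key = _make_key(args, kwargs, typed=typed)
            with func._main_lock:        # protect the lock registry itself
                key_lock = func._keyed_locks[key]
            with key_lock:               # one computation per key at a time
                return caching_func(*args, **kwargs)
        return inner
    return decorator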
Example #10
def peek_cache(cached_func, *fn_args, **fn_kwargs):
    from functools import _make_key
    if getattr(cached_func, "__self__", None):
        fn_args = (cached_func.__self__, ) + fn_args
    key = _make_key(fn_args, fn_kwargs, False)
    return cached_func._cache.get(key)
Example #11
        async def wrapped(*fn_args, **fn_kwargs):
            # If notOlderThan is not provided or None, use default timestamp delta:
            # utcnow() - DEFAULT_NOT_OLDER_THAN_SECONDS_DELTA
            notOlderThan = fn_kwargs.get('notOlderThan', None)
            notOlderThan = int(notOlderThan) if notOlderThan else None
            if not notOlderThan:
                notOlderThan = int(datetime.now(timezone.utc).timestamp()
                                   ) - DEFAULT_NOT_OLDER_THAN_SECONDS_DELTA

            key = _make_key(fn_args, fn_kwargs, typed)

            # We store (mutable) lists in the cache, so that even if an item
            # is evicted before its task finished, other waiting tasks can get
            # the result. We need to keep a reference to the list here, rather
            # than unpacking.
            event_result_ex_time = wrapped._cache.get(key)

            # Check if value was cached after notOlderThan. If it was, it's a hit,
            # otherwise invalidate and call the function to get a new value
            if (event_result_ex_time is not None and notOlderThan is not None
                    and event_result_ex_time[3] < notOlderThan):
                _cache_invalidate(wrapped, typed, *fn_args, **fn_kwargs)
                event_result_ex_time = None

            if event_result_ex_time is None:
                # List items:
                # Future to await, already awaited result, exception, UTC timestamp when cached
                event_result_ex_time = [
                    asyncio.Event(), None, None,
                    int(datetime.now(timezone.utc).timestamp())
                ]
                # logically there is a possible race between get above and
                # insert here. Make sure there is no `await` in between.
                wrapped._cache[key] = event_result_ex_time
                _cache_miss(wrapped, key)

                if maxsize is not None and len(wrapped._cache) > maxsize:
                    wrapped._cache.popitem(last=False)

                try:
                    res = await fn(*fn_args, **fn_kwargs)
                    event_result_ex_time[1] = res
                    return res
                except Exception as e:
                    event_result_ex_time[2] = e
                    # Even with cache_exceptions=False, we don't retry for
                    # requests that happened *in parallel* to the first one.
                    # Only once the initial task has failed, the next request
                    # will not hit the cache.
                    if not cache_exceptions:
                        _cache_invalidate(wrapped, typed, *fn_args,
                                          **fn_kwargs)
                    # make sure to pass on the exception to get a proper traceback.
                    raise
                finally:
                    # now at least result or exc is set, and we can release others.
                    event_result_ex_time[0].set()
            else:
                _cache_hit(wrapped, key)
                # this will return immediately if the task is done, no need to
                # check manually.
                await event_result_ex_time[0].wait()
                if event_result_ex_time[2]:
                    raise event_result_ex_time[2]
                else:
                    return event_result_ex_time[1]
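The wrapped coroutine above calls _cache_hit, _cache_miss and _cache_invalidate from its enclosing module. Plausible definitions, assumed purely for illustration (the hit/miss counters in particular are a guess, not the project's actual code).

def _cache_invalidate(wrapped, typed, *fn_args, **fn_kwargs):
    # rebuild the key exactly as the wrapper does and drop the entry, if any
    key = _make_key(fn_args, fn_kwargs, typed)
    return wrapped._cache.pop(key, None) is not None

def _cache_hit(wrapped, key):
    wrapped.hits += 1

def _cache_miss(wrapped, key):
    wrapped.misses += 1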
Example #12
 def _invalidate(*args, **kwargs):
     try:
         del _internal_cache[_make_key(args, kwargs, typed_cache)]
         return True
     except KeyError:
         return False
Example #13
 def _thread_lru(*args, **kwargs):
     key = _make_key(args, kwargs, typed=False)
     with lock_dict[key]:
         return func(*args, **kwargs)
Example #14
 def wrapped(*args, **kwargs):
     key = _make_key(args, kwargs, typed)
     return instance[key]
Example #15
def _make_io_key(args, *_args, **kwargs):
    self, obj, field, ctx = args
    # Ignore self because we have a self-specific cache
    return _make_key((obj.id, field), *_args, **kwargs)
Example #16
# functools.partial fixes some of a function's arguments, effectively giving those
# parameters fixed default values, and returns a new callable
# the new function produced by partial is a wrapper around the original function
import functools

def add(x, y) -> int:
    return x + y

newadd = functools.partial(add, y=5)
print(newadd(7))
print(newadd(7, y=6))
print(newadd(y=7, x=10))

import inspect

print(inspect.signature(newadd))

def add(x, y, *args) -> int:
    print(args)
    return x + y

newadd = functools.partial(add, 1, 3, 5, 6, 7)

print(newadd())
print(newadd(9))
# print(newadd(9, 10, x=26, y=20))

print(inspect.signature(newadd))

# _make_key flattens positional and keyword arguments into a single hashable key
print(functools._make_key((4, 6), {'z': 3}, False))
Example #17
        def wrapped(*fn_args, **fn_kwargs):
            if wrapped.closed:
                raise RuntimeError(
                    'alru_cache is closed for {}'.format(wrapped))

            _loop = _get_loop(cls,
                              kwargs,
                              wrapped._origin,
                              fn_args,
                              fn_kwargs,
                              loop=loop)

            key = _make_key(fn_args, fn_kwargs, typed)

            if wrapped._cache.get(key) is not None:
                if expiration_time is None:
                    fut = wrapped._cache.get(key)

                    if not fut.done():
                        _cache_hit(wrapped, key)
                        return (yield from asyncio.shield(fut, loop=_loop))

                    exc = fut._exception

                    if exc is None or cache_exceptions:
                        _cache_hit(wrapped, key)
                        return fut.result()

                    # exception here and cache_exceptions == False
                    wrapped._cache.pop(key)

                else:
                    fut = wrapped._cache.get(key).get("fut")
                    fut_time = wrapped._cache.get(key).get("time")

                    if not fut.done():
                        _cache_hit(wrapped, key)
                        return (yield from asyncio.shield(fut, loop=_loop))

                    exc = fut._exception

                    if ((exc is None or cache_exceptions)
                            and time() - fut_time < expiration_time):
                        _cache_hit(wrapped, key)
                        return fut.result()

                    # exception here and cache_exceptions == False
                    # or result expires
                    wrapped._cache.pop(key)

            fut = create_future(loop=_loop)
            coro = fn(*fn_args, **fn_kwargs)
            task = ensure_future(coro, loop=_loop)
            task.add_done_callback(partial(_done_callback, fut))

            wrapped.tasks.add(task)
            task.add_done_callback(wrapped.tasks.remove)

            if expiration_time is None:
                wrapped._cache[key] = task
            else:
                wrapped._cache[key] = {"fut": task, "time": time()}

            if maxsize is not None and len(wrapped._cache) > maxsize:
                wrapped._cache.popitem(last=False)

            _cache_miss(wrapped, key)
            return (yield from asyncio.shield(fut, loop=_loop))
Example #18
 def update_event(self, inp=-1):
     self.set_output_val(0, functools._make_key(
         self.input(0), self.input(1), self.input(2), self.input(3),
         self.input(4), self.input(5), self.input(6), self.input(7)))
Example #19
    def __call__(self, *args, **kwds):
        if self.kwds:
            prev_kwds = self.kwds.copy()
            prev_kwds.update(kwds)
            kwds = prev_kwds

        if self.args:
            args = self.args + args

        try:
            key = hash(functools._make_key(args, kwds, False)) ^ hash(self.func)
        except TypeError:
            key = None

        if key in self._cache:
            return self._cache[key](*args, **kwds)

        alternatives = [self.func] + getattr(self.func, 'alternatives', [])
        self.errors = [None] * len(alternatives)

        for i, func in enumerate(alternatives):
            try:
                kwd_intfs, kwd_params = extract_arg_kwds(kwds, func)
                args_comb = combine_arg_kwds(args, kwd_intfs, func)

                if all_args_specified(args_comb, func):
                    ret = func(*args_comb, **kwd_params)

                    if key is not None:
                        self._cache[key] = func

                    return ret
                else:
                    # TODO: Can happen if user forgets '*' for separation, warn about this
                    # TODO: Think about disabling keyword arguments that are not keyword-only for gears
                    msg = f"not enough arguments specified for '{self.func.__name__}'"
                    if hasattr(func, 'alternative_to'):
                        arg_signature = ', '.join(
                            f'{name}: {repr(dtype)}'
                            for name, dtype in func.__annotations__.items())

                        alt_arg_names, *_ = inspect.getfullargspec(func.alternative_to)

                        msg = (
                            f"not enough arguments specified for '{self.func.__name__}' for an"
                            f" alternative with argument '{alt_arg_names[0]}' unpacked to: ({arg_signature})"
                        )

                    self.errors[i] = (func, TypeError, TypeError(msg), [])
            except Exception as e:
                # If no alternatives, just re-raise an error
                if len(alternatives) == 1:
                    raise e
                else:
                    # errors.append((func, e, sys.exc_info()))
                    self.errors[i] = (func, *sys.exc_info())
        else:
            if self._all_alternative_error():
                raise MultiAlternativeError(self.errors)
            else:
                # If some alternative can handle more arguments, try to wait
                # for it
                p = Partial(self.func, *args, **kwds)
                p.errors = self.errors[:]
                return p
Example #20
def make_key(function, *args, **kwargs):
    name_and_args = (function.__qualname__, ) + tuple(a for a in args)
    return functools._make_key(name_and_args, kwargs, False)
Example #21
def hash_key(function, *args, **kwargs):
    function_hash = hashlib.md5(bytes(function.__qualname__, 'utf-8'))
    params = str(functools._make_key(args, kwargs, False))
    params_hash = hashlib.md5(bytes(params, 'utf-8'))
    return '{}_{}'.format(function_hash.hexdigest(), params_hash.hexdigest())
Example #22
 def make_key(args, kwargs):
     return _make_key(args, kwargs, typed=self.typed)
Example #23
 def __init__(self, caching_object: object, args: Any,
              kwargs: Any) -> None:
     self.caching_object = caching_object
     self.cache = getattr(caching_object, self._cache_name(), {})
     # noinspection PyUnresolvedReferences
     self.cache_key = functools._make_key(args, kwargs, typed=False)
Example #24
from functools import _make_key
from cachetools.keys import typedkey

if __name__ == '__main__':
    print(_make_key((), {}, False))
    print(typedkey((), {}))