def methdispatch(func):
    dispatcher = singledispatch(func)

    def wrapper(*args, **kw):
        return dispatcher.dispatch(args[1].__class__)(*args, **kw)

    wrapper.register = dispatcher.register
    update_wrapper(wrapper, func)
    return wrapper
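# A minimal usage sketch for methdispatch above. The Negotiator class and
# its handlers are illustrative, not from the source. Because the wrapper
# dispatches on args[1].__class__, a decorated method dispatches on the type
# of its first argument after self. The snippet assumes these imports:
from functools import singledispatch, update_wrapper


class Negotiator:
    @methdispatch
    def handle(self, arg):
        return 'default'

    @handle.register(int)
    def _(self, arg):
        return 'int'

    @handle.register(list)
    def _(self, arg):
        return 'list'


# Negotiator().handle(42) -> 'int'; Negotiator().handle([]) -> 'list'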
def version_select(*args, **kwargs):
    """Look for the method which matches the name supplied and version
    constraints and call it with the supplied arguments.

    @return: Returns the result of the method called
    @raises: VersionNotFoundForAPIMethod if there is no method which
             matches the name and version constraints
    """
    # The first arg to all versioned methods is always the request
    # object. The version for the request is attached to the
    # request object.
    if len(args) == 0:
        ver = kwargs['req'].api_version_request
    else:
        ver = args[0].api_version_request

    # self and key are free variables from the enclosing scope.
    func_list = self.versioned_methods[key]
    for func in func_list:
        if ver.matches(func.start_version, func.end_version):
            # Update the version_select wrapper function so
            # other decorator attributes like wsgi.response
            # are still respected.
            functools.update_wrapper(version_select, func.func)
            return func.func(self, *args, **kwargs)

    # No version match
    raise exception.VersionNotFoundForAPIMethod(version=ver)
def wrapper(*args, **kwargs):
    """When called, returns an instance of AttributiveGenerator instead
    of a generator.
    """
    def __iter__(self):
        return self

    def send(self):
        raise NotImplementedError

    def throw(self):
        raise NotImplementedError

    tdict = {
        '__iter__': __iter__,
        'send': send,
        'throw': throw,
    }

    # use type to create dynamic instance of class AttributiveGenerator
    AG = type("AttributiveGenerator", (Generator,), tdict)
    ag = AG()  # create instance so we can inject it into genfunc

    fargs = inspect.getfullargspec(genfunc).args
    if fargs and fargs[0] == 'self':
        gen = genfunc(args[0], ag, *args[1:], **kwargs)
    else:
        gen = genfunc(ag, *args, **kwargs)  # create generator, insert ag ref

    # now add references to gen attributes to the class to "duckify" it
    for attr in ('__next__', 'close', 'send', 'throw',
                 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom'):
        setattr(AG, attr, getattr(gen, attr))
    functools.update_wrapper(wrapper=ag, wrapped=gen)
    return ag
def basictag_func(tag_func):
    def _setup_tag(parser, token):
        bits = token.split_contents()
        tag_name = bits[0]
        del bits[0]
        params, xx, xxx, defaults = getargspec(tag_func)
        max_args = len(params)
        if takes_context:
            if params[0] == 'context':
                max_args -= 1  # Ignore context
            else:
                raise TemplateSyntaxError(
                    "Any tag function decorated with takes_context=True "
                    "must have a first argument of 'context'")
        min_args = max_args - len(defaults or [])
        if not min_args <= len(bits) <= max_args:
            if min_args == max_args:
                raise TemplateSyntaxError(
                    "%r tag takes %d arguments." % (tag_name, min_args))
            else:
                raise TemplateSyntaxError(
                    "%r tag takes %d to %d arguments, got %d." %
                    (tag_name, min_args, max_args, len(bits)))
        return BasicTagNode(takes_context, tag_name, tag_func, bits)

    update_wrapper(_setup_tag, tag_func)
    return _setup_tag
def method_decorator(decorator):
    """Convert a function decorator into a method decorator.

    Works properly both for decorators with arguments and without them;
    Django's version of this function only supports decorators with no
    arguments.
    """
    # For simple decorators, like @login_required, without arguments
    def _dec(func):
        def _wrapper(self, *args, **kwargs):
            def bound_func(*args2, **kwargs2):
                return func(self, *args2, **kwargs2)
            return decorator(bound_func)(*args, **kwargs)
        return wraps(func)(_wrapper)

    # Called every time
    def _args(*argsx, **kwargsx):
        # Detect a simple decorator and call _dec for it
        if len(argsx) == 1 and callable(argsx[0]) and not kwargsx:
            return _dec(argsx[0])

        # Used for decorators with arguments, like @permission_required('something')
        def _dec2(func):
            def _wrapper(self, *args, **kwargs):
                def bound_func(*args2, **kwargs2):
                    return func(self, *args2, **kwargs2)
                return decorator(*argsx, **kwargsx)(bound_func)(*args, **kwargs)
            return wraps(func)(_wrapper)
        return _dec2

    update_wrapper(_args, decorator)
    # Change the name to aid debugging.
    _args.__name__ = 'method_decorator(%s)' % decorator.__name__
    return _args
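# A minimal usage sketch for this method_decorator variant. The toy
# decorators below are illustrative stand-ins for @login_required (no
# arguments) and @permission_required('something') (with arguments).
def shout(func):                      # simple decorator, no arguments
    def inner(*args, **kwargs):
        return func(*args, **kwargs).upper()
    return inner


def prefix(text):                     # decorator with arguments
    def deco(func):
        def inner(*args, **kwargs):
            return text + func(*args, **kwargs)
        return inner
    return deco


with_prefix = method_decorator(prefix)


class Greeter:
    @method_decorator(shout)          # simple form
    def hello(self):
        return 'hello'

    @with_prefix('>> ')               # parametrized form
    def hi(self):
        return 'hi'


# Greeter().hello() -> 'HELLO'; Greeter().hi() -> '>> hi'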
def __init__(self, py_func, locals={}, targetoptions={}, impl_kind='direct'):
    """
    Parameters
    ----------
    py_func: function object to be compiled
    locals: dict, optional
        Mapping of local variable names to Numba types.  Used to
        override the types deduced by the type inference engine.
    targetoptions: dict, optional
        Target-specific config options.
    """
    self.typingctx = self.targetdescr.typing_context
    self.targetctx = self.targetdescr.target_context

    pysig = utils.pysignature(py_func)
    arg_count = len(pysig.parameters)
    can_fallback = not targetoptions.get('nopython', False)
    _DispatcherBase.__init__(self, arg_count, py_func, pysig, can_fallback)

    functools.update_wrapper(self, py_func)

    self.targetoptions = targetoptions
    self.locals = locals
    self._cache = NullCache()
    compiler_class = self._impl_kinds[impl_kind]
    self._impl_kind = impl_kind
    self._compiler = compiler_class(py_func, self.targetdescr,
                                    targetoptions, locals)
    self._cache_hits = collections.Counter()
    self._cache_misses = collections.Counter()

    self._type = types.Dispatcher(self)
    self.typingctx.insert_global(self, self._type)
def __init__(self, callable_obj, manager):
    """
    :type callable_obj: callable
    :type manager: TaskManager
    """
    self.callable = callable_obj
    functools.update_wrapper(self, self.callable)
    self.callable_is_object = not isinstance(self.callable, types.FunctionType)
    self.name = self.callable.__name__.replace('_', '-').lower()
    self.arguments = {}
    self.manual_arguments = OrderedDict()
    self.pass_argparse_namespace = False
    self.cleanup_function = lambda x: None
    self.manager = manager
    self.parser = None
    """:type: argparse.ArgumentParser"""
    self.argument_groups = None
    self.aliases = []
    self.help_text = None
    self.args = ()
    self.kwargs = {}
    self.varargs = None
    """:type: str"""
def __init__(self, fn, delegate_names, attribute_name):
    self.fn = fn
    self.delegate_names = delegate_names
    self.attribute_name = attribute_name

    # update the docstring of the descriptor
    update_wrapper(self, fn)
def __init__(self, py_func, locals={}, targetoptions={}):
    """
    Parameters
    ----------
    py_func: function object to be compiled
    locals: dict, optional
        Mapping of local variable names to Numba types.  Used to
        override the types deduced by the type inference engine.
    targetoptions: dict, optional
        Target-specific config options.
    """
    self.tm = default_type_manager

    argspec = inspect.getargspec(py_func)
    argct = len(argspec.args)

    super(Overloaded, self).__init__(self.tm.get_pointer(), argct)

    self.py_func = py_func
    functools.update_wrapper(self, py_func)
    # other parts of Numba assume the old Python 2 name for code object
    self.func_code = get_code_object(py_func)
    # but newer Python uses a different name
    self.__code__ = self.func_code

    self.overloads = {}
    self.targetoptions = targetoptions
    self.locals = locals
    self._compiling = False

    self.targetdescr.typing_context.insert_overloaded(self)
def test_builtin_update(self):
    # Test for bug #1576241
    def wrapper():
        pass
    functools.update_wrapper(wrapper, max)
    self.assertEqual(wrapper.__name__, 'max')
    self.assertTrue(wrapper.__doc__.startswith('max('))
def as_view(cls, **initkwargs):
    """
    Main entry point for a request-response process.
    """
    # sanitize keyword arguments
    for key in initkwargs:
        if key in cls.http_method_names:
            raise TypeError(u"You tried to pass in the %s method name as a "
                            u"keyword argument to %s(). Don't do that."
                            % (key, cls.__name__))
        if not hasattr(cls, key):
            raise TypeError(u"%s() received an invalid keyword %r" % (
                cls.__name__, key))

    def view(request, *args, **kwargs):
        self = cls(**initkwargs)
        if hasattr(self, 'get') and not hasattr(self, 'head'):
            self.head = self.get
        return self.dispatch(request, *args, **kwargs)

    # take name and docstring from class
    update_wrapper(view, cls, updated=())
    # and possible attributes set by decorators
    # like csrf_exempt from dispatch
    update_wrapper(view, cls.dispatch, assigned=())
    return view
def singledispatch(func):
    registry = Map()

    def dispatch(typ):
        """generic_func.dispatch(type) -> <function implementation>

        Runs the dispatch algorithm to return the best available
        implementation for the given `type` registered on `generic_func`.
        """
        return registry[ClassMapKey(typ)]

    def register(typ, func=None):
        """generic_func.register(type, func) -> func

        Registers a new implementation for the given `type` on a
        `generic_func`.
        """
        if func is None:
            return lambda f: register(typ, f)
        registry[ClassMapKey(typ)] = func
        return func

    def wrapper(*args, **kw):
        return dispatch(args[0].__class__)(*args, **kw)

    registry[ClassMapKey(object)] = func
    wrapper.register = register
    wrapper.dispatch = dispatch
    update_wrapper(wrapper, func)
    return wrapper
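# A hedged usage sketch for the singledispatch variant above. Map and
# ClassMapKey come from the surrounding module (not shown); the sketch
# assumes their registry lookup resolves a type to its registered
# implementation, falling back to the `object` registration for
# unregistered types, as functools.singledispatch does.
@singledispatch
def describe(obj):
    return 'object'


@describe.register(int)
def _(obj):
    return 'int: %d' % obj


# describe(3) -> 'int: 3'; describe('x') -> 'object' (object fallback)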
def wrap_unwrap_wrapper(fun):
    def wrapped(self, value, *args, **kwds):
        if self._allow_none and value is None:
            return None
        return fun(self, value, *args, **kwds)
    functools.update_wrapper(wrapped, fun, ('__name__', '__doc__'))
    return wrapped
def __init__(self, generator):
    self.candidates = []
    self.generator = generator
    try:
        functools.update_wrapper(self, generator)
    except Exception:
        pass
def __init__(self, py_func, locals={}, targetoptions={}):
    """
    Parameters
    ----------
    py_func: function object to be compiled
    locals: dict, optional
        Mapping of local variable names to Numba types.  Used to
        override the types deduced by the type inference engine.
    targetoptions: dict, optional
        Target-specific config options.
    """
    self.typingctx = self.targetdescr.typing_context
    self.targetctx = self.targetdescr.target_context

    argspec = inspect.getargspec(py_func)
    argct = len(argspec.args)

    _OverloadedBase.__init__(self, argct, py_func)

    functools.update_wrapper(self, py_func)

    self.targetoptions = targetoptions
    self.locals = locals

    self.typingctx.insert_overloaded(self)
def decorate_callable(self, func):
    def wrapper(*args, **kwargs):
        with self:
            result = func(*args, **kwargs)
        return result
    functools.update_wrapper(wrapper, func)
    return wrapper
def oai_view(wrapped):
    """Augment the return value of a function with common template
    parameters and add a time property to the request parameter."""
    def wrapper(context, request=None):
        if request is None:
            request = context
            context = None

        # Get the datestamp before any database queries.
        setattr(request, 'time', datestamp_now())

        if wrapped.func_code.co_argcount == 1:
            result = wrapped(request)
        else:
            result = wrapped(context, request)

        # time of the response
        result['time'] = request.time
        # function for formatting datestamps
        result['format_date'] = format_datestamp

        request.response.content_type = 'text/xml'
        return result
    functools.update_wrapper(wrapper, wrapped)
    return wrapper
def partial_with_wrapper(func, **kwargs):
    """Return a partial of ``func`` with its wrapper attributes updated."""
    func_partial = partial(func, **kwargs)
    update_wrapper(func_partial, func)
    return func_partial
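# Usage sketch: update_wrapper works on functools.partial objects because
# they accept attribute assignment, so the partial inherits __name__,
# __doc__, and __wrapped__ from the original function.
from functools import partial, update_wrapper


def power(base, exponent):
    """Raise base to exponent."""
    return base ** exponent


square = partial_with_wrapper(power, exponent=2)
assert square.__name__ == 'power'
assert square.__doc__ == 'Raise base to exponent.'
assert square(3) == 9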
def __init__(self, f):
    """
    If there are no decorator arguments, the function to be decorated
    is passed to the constructor.
    """
    self.f = f
    update_wrapper(self, f)
def gobject_exec(function):
    def wrapper(*args, **kwargs):
        caller = _Caller(function, args, kwargs)
        gobject.idle_add(caller)
        return caller
    functools.update_wrapper(wrapper, function)
    return wrapper
def decorator(function):
    hasArg, hasType, type = processWithOneArg(function)

    if isinstance(setup, SetupConfig):
        if hasArg:
            raise SetupError("No argument expected for function %s, "
                             "when replacing a configuration" % function)
        if hasType:
            raise SetupError("No return type expected for function %s, "
                             "when replacing a configuration" % function)
        return update_wrapper(register(SetupConfigReplace(function, setup),
                                       callerLocals()), function)

    if isinstance(setup, SetupEvent):
        if hasArg:
            raise SetupError("No argument expected for function %s, "
                             "when replacing an event" % function)
        if hasType:
            raise SetupError("No return type expected for function %s, "
                             "when replacing an event" % function)
        return update_wrapper(register(SetupEventReplace(function, setup),
                                       callerLocals()), function)

    if hasType:
        if not isclass(type):
            raise SetupError("Expected a class as the return annotation "
                             "for function %s" % function)
        else:
            types = (type,)
    else:
        types = ()
    return update_wrapper(register(SetupSourceReplace(function, setup,
                                                      hasArg, types),
                                   callerLocals()), function)
def __get__(self, instance, instancetype):
    # Bind the self reference and use update_wrapper to propagate the
    # function's metadata (e.g. name and docstring).
    wrapper = functools.partial(self.__call__, instance)
    functools.update_wrapper(wrapper, self.func)
    wrapper.is_perception_method = True
    return wrapper
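# A minimal sketch of the descriptor pattern above; the class names are
# illustrative, not from the source. Accessing the attribute on an instance
# returns a partial with self pre-bound, carrying the wrapped function's
# metadata.
import functools


class PerceptionMethod:
    def __init__(self, func):
        self.func = func

    def __call__(self, instance, *args, **kwargs):
        return self.func(instance, *args, **kwargs)

    def __get__(self, instance, instancetype):
        wrapper = functools.partial(self.__call__, instance)
        functools.update_wrapper(wrapper, self.func)
        wrapper.is_perception_method = True
        return wrapper


class Agent:
    @PerceptionMethod
    def see(self, thing):
        """Observe a thing."""
        return 'saw %s' % thing


a = Agent()
assert a.see('cat') == 'saw cat'
assert a.see.__doc__ == 'Observe a thing.'
assert a.see.is_perception_method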
def func_wrapper(func):
    cfunc = lru_cache(maxsize, typed=True)(func)

    # wraps here does not propagate all the necessary info
    # for py2.7, use update_wrapper below
    def wrapper(*args, **kwargs):
        try:
            retval = cfunc(*args, **kwargs)
        except TypeError:
            retval = func(*args, **kwargs)
        return retval

    wrapper.cache_info = cfunc.cache_info
    wrapper.cache_clear = cfunc.cache_clear

    # Some versions of update_wrapper erroneously assign the final
    # function of the wrapper chain to __wrapped__, see
    # https://bugs.python.org/issue17482 .
    # To work around this, we need to call update_wrapper first, then
    # assign to wrapper.__wrapped__.
    update_wrapper(wrapper, func)
    wrapper.__wrapped__ = cfunc.__wrapped__

    CACHE.append(wrapper)
    return wrapper
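# Usage sketch for the fallback pattern above: lru_cache raises TypeError
# on unhashable arguments, and the wrapper then calls the uncached function
# directly instead of failing. @memoize is a hypothetical name for the
# enclosing decorator that supplies maxsize and CACHE from its closure.
@memoize
def total(values):
    return sum(values)


total((1, 2, 3))   # tuple is hashable: served via lru_cache
total([1, 2, 3])   # list is unhashable: TypeError caught, uncached call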
def __getattr__(self, name):
    """Send all method calls to redis, while serializing arguments and
    results.  Uses pickle for (de)serialization.

    For argument serialization, the caller must provide the data in a
    dictionary named `data`.
    """
    attr = getattr(self._r, name)
    if name in self.set_methods:
        def new_attr(*args, **kwargs):
            if kwargs:
                # argument serialization
                data = pickle.dumps(kwargs.pop('data'))
                args = list(args)
                # value data almost always goes to the end;
                # override the other methods manually
                args.append(data)
            return attr(*args, **kwargs)
        return functools.update_wrapper(new_attr, attr)
    elif name in self.get_methods:
        def new_attr(*args, **kwargs):
            res = attr(*args, **kwargs)
            if isinstance(res, basestring):
                return pickle.loads(res)
            elif isinstance(res, list):
                new_res = []
                for r in res:
                    new_res.append(pickle.loads(r))
                return new_res
            else:
                return res
        return functools.update_wrapper(new_attr, attr)
    else:
        return super(TxRedisMapper, self).__getattr__(name)
def action_method(wrapped):
    """Wrapper to provide the right conflict info report data when a
    method that calls Configurator.action calls another that does the
    same.  Not a documented API but used by some external systems."""
    def wrapper(self, *arg, **kw):
        if self._ainfo is None:
            self._ainfo = []
        info = kw.pop('_info', None)
        # backframes for outer decorators to actionmethods
        backframes = kw.pop('_backframes', 0) + 2
        if is_nonstr_iter(info) and len(info) == 4:
            # _info permitted as extract_stack tuple
            info = ActionInfo(*info)
        if info is None:
            try:
                f = traceback.extract_stack(limit=3)
                info = ActionInfo(*f[-backframes])
            except Exception:  # pragma: no cover
                info = ActionInfo(None, 0, '', '')
        self._ainfo.append(info)
        try:
            result = wrapped(self, *arg, **kw)
        finally:
            self._ainfo.pop()
        return result

    if hasattr(wrapped, '__name__'):
        functools.update_wrapper(wrapper, wrapped)
    wrapper.__docobj__ = wrapped
    return wrapper
def test_builtin_update(self):
    # Test for bug #1576241
    def wrapper():
        pass
    functools.update_wrapper(wrapper, max)
    self.assertEqual(wrapper.__name__, 'max')
    self.assert_(wrapper.__doc__ == max.__doc__)
def __init__(self, func):
    self._write_lock = threading.Lock()
    self.func = func
    self._dispatch_table = {}
    self._prefer_table = {}
    self.implementations = []
    functools.update_wrapper(self, func)
def version_select(*args, **kwargs):
    """Look for the method and invoke the versioned one.

    This method looks for the method that matches the name provided
    and version constraints, then calls it with the supplied arguments.

    :returns: The result of the method called.
    :raises: MethodVersionNotFound if there is no method matching the
             name and the version constraints.
    """
    # The first argument is always the request object. The version
    # request is attached to the request object.
    req = kwargs['req'] if len(args) == 0 else args[0]
    ver = req.version_request

    # self and key are free variables from the enclosing scope.
    func_list = self.versioned_methods[key]
    for func in func_list:
        if ver.matches(func.min_version, func.max_version):
            # update version_select wrapper so other decorator
            # attributes are still respected
            functools.update_wrapper(version_select, func.func)
            return func.func(self, *args, **kwargs)

    # no version match
    raise exception.MethodVersionNotFound(version=ver)
def _blocktag_func(tag_func):
    def _setup_tag(parser, token):
        bits = token.split_contents()
        tag_name = bits[0]
        del bits[0]
        params, xx, xxx, defaults = getargspec(tag_func)
        max_args = len(params) - 2  # Ignore context and nodelist
        min_args = max_args - len(defaults or [])
        if not min_args <= len(bits) <= max_args:
            if min_args == max_args:
                raise TemplateSyntaxError(
                    "%r tag takes %d arguments." % (tag_name, min_args))
            else:
                raise TemplateSyntaxError(
                    "%r tag takes %d to %d arguments, got %d." %
                    (tag_name, min_args, max_args, len(bits)))
        nodelist = parser.parse(('%s%s' % (end_prefix, tag_name),))
        parser.delete_first_token()
        return BlockTagNode(tag_name, tag_func, nodelist, bits)

    update_wrapper(_setup_tag, tag_func)
    return _setup_tag
def thread_exec(function):
    def wrapper(*args, **kwargs):
        caller = _Caller(function, args, kwargs)
        threading.Thread(target=caller).start()
        return caller
    functools.update_wrapper(wrapper, function)
    return wrapper
def wrapper(klass):
    wrapped = _Nested(klass, **kwargs)
    functools.update_wrapper(wrapped, klass)
    return wrapped
def stream_with_context(generator_or_function):
    """Request contexts disappear when the response is started on the
    server.  This is done for efficiency reasons and to make it less likely
    to encounter memory leaks with badly written WSGI middlewares.  The
    downside is that if you are using streamed responses, the generator
    cannot access request bound information any more.

    This function however can help you keep the context around for
    longer::

        from flask import stream_with_context, request, Response

        @app.route('/stream')
        def streamed_response():
            @stream_with_context
            def generate():
                yield 'Hello '
                yield request.args['name']
                yield '!'
            return Response(generate())

    Alternatively it can also be used around a specific generator::

        from flask import stream_with_context, request, Response

        @app.route('/stream')
        def streamed_response():
            def generate():
                yield 'Hello '
                yield request.args['name']
                yield '!'
            return Response(stream_with_context(generate()))

    .. versionadded:: 0.9
    """
    try:
        gen = iter(generator_or_function)
    except TypeError:

        def decorator(*args, **kwargs):
            gen = generator_or_function(*args, **kwargs)
            return stream_with_context(gen)

        return update_wrapper(decorator, generator_or_function)

    def generator():
        ctx = _request_ctx_stack.top
        if ctx is None:
            raise RuntimeError(
                "Attempted to stream with context but "
                "there was no context in the first place to keep around.")
        with ctx:
            # Dummy sentinel.  Has to be inside the context block or we're
            # not actually keeping the context around.
            yield None

            # The try/finally is here so that if someone passes a WSGI level
            # iterator in we're still running the cleanup logic.  Generators
            # don't need that because they are closed on their destruction
            # automatically.
            try:
                for item in gen:
                    yield item
            finally:
                if hasattr(gen, "close"):
                    gen.close()

    # The trick is to start the generator.  Then the code execution runs until
    # the first dummy None is yielded at which point the context was already
    # pushed.  This item is discarded.  Then when the iteration continues the
    # real generator is executed.
    wrapped_g = generator()
    next(wrapped_g)
    return wrapped_g
def __get__(self, instance, cls):
    return functools.update_wrapper(
        functools.partial(self.method_call, instance), self.func)
def __init__(self, func):
    self.func = func
    functools.update_wrapper(self, func)
def wrap(view):
    def wrapper(*args, **kwargs):
        return self.admin_site.admin_view(view)(*args, **kwargs)
    return update_wrapper(wrapper, view)
def wsgify(func, *args, **kwargs):
    result = webob.dec.wsgify(func, *args, **kwargs)
    update_wrapper(result, func)
    return result
def __init__(self, fn):
    update_wrapper(self, fn)
    self._fn = fn
def decorating_function(user_function):
    cache = dict()
    stats = [0, 0]                  # make statistics updateable non-locally
    HITS, MISSES = 0, 1             # names for the stats fields
    make_key = _make_key
    cache_get = cache.get           # bound method to lookup key or return None
    _len = len                      # localize the global len() function
    lock = RLock()                  # because linkedlist updates aren't threadsafe
    root = []                       # root of the circular doubly linked list
    root[:] = [root, root, None, None]      # initialize by pointing to self
    nonlocal_root = [root]                  # make updateable non-locally
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3    # names for the link fields

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # no caching, just do a statistics update after a successful call
            result = user_function(*args, **kwds)
            stats[MISSES] += 1
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # simple caching without ordering or size limit
            key = make_key(args, kwds, typed)
            result = cache_get(
                key, root)  # root used here as a unique not-found sentinel
            if result is not root:
                stats[HITS] += 1
                return result
            result = user_function(*args, **kwds)
            cache[key] = result
            stats[MISSES] += 1
            return result

    else:

        def wrapper(*args, **kwds):
            # size limited caching that tracks accesses by recency
            try:
                key = make_key(args, kwds, typed) if kwds or typed else args
            except TypeError:
                stats[MISSES] += 1
                return user_function(*args, **kwds)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # record recent use of the key by moving it to the
                    # front of the list
                    root, = nonlocal_root
                    link_prev, link_next, key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    stats[HITS] += 1
                    return result
            result = user_function(*args, **kwds)
            with lock:
                root, = nonlocal_root
                if key in cache:
                    # getting here means that this same key was added to the
                    # cache while the lock was released.  since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif _len(cache) >= maxsize:
                    # use the old root to store the new key and result
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # empty the oldest link and make it the new root
                    root = nonlocal_root[0] = oldroot[NEXT]
                    oldkey = root[KEY]
                    root[KEY] = root[RESULT] = None
                    # now update the cache dictionary for the new links
                    del cache[oldkey]
                    cache[key] = oldroot
                else:
                    # put result in a new link at the front of the list
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                stats[MISSES] += 1
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))

    def cache_clear():
        """Clear the cache and cache statistics"""
        with lock:
            cache.clear()
            root = nonlocal_root[0]
            root[:] = [root, root, None, None]
            stats[:] = [0, 0]

    wrapper.__wrapped__ = user_function
    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return update_wrapper(wrapper, user_function)
def wrapper(func: Serializer):
    update_wrapper(wrapper, func)
    register_serializer(type, func)
    return func
def __call__(self, fn):
    def _preparer_wrapper(test_class_instance, **kwargs):
        _logger.debug("Entering preparer wrapper for %s and test %s",
                      self.__class__.__name__, str(test_class_instance))

        # If a child is cached we must use the same cached resource their
        # equivalent parent did so all the deps line up
        child_is_cached = getattr(fn, '__use_cache', False)

        # Note: If it is ever desired to make caching inferred, remove this
        # if/throw.  This ensures that a user must _very specifically say
        # they want caching_ on an item and all parents.
        if not self._use_cache and child_is_cached:
            raise Exception(
                """Preparer exception for test {}:\n
Child preparers are cached, but parent {} is not.
You must specify use_cache=True in the preparer decorator""".format(
                    test_class_instance, self.__class__.__name__))
        self._use_cache |= child_is_cached
        _logger.debug("Child cache status for %s: %s",
                      self.__class__.__name__, child_is_cached)

        # We must use a cache_key that includes our parents, so that we get
        # a cached stack matching the desired resource stack.  (e.g. if
        # parent resource has specific settings)
        try:
            aggregate_cache_key = (self._cache_key,
                                   kwargs['__aggregate_cache_key'])
        except KeyError:
            # If we're at the root of the cache stack, start with our own key.
            aggregate_cache_key = self._cache_key
        kwargs['__aggregate_cache_key'] = aggregate_cache_key
        self._aggregate_cache_key = aggregate_cache_key
        _logger.debug("Aggregate cache key: %s", aggregate_cache_key)

        # If cache is enabled, and the cached resource exists, use it,
        # otherwise create and store.
        if self._use_cache and aggregate_cache_key in AbstractPreparer._resource_cache:
            _logger.debug("Using cached resource for %s",
                          self.__class__.__name__)
            with self._cache_lock:
                resource_name, kwargs, _ = AbstractPreparer._resource_cache[
                    aggregate_cache_key]
        else:
            resource_name, kwargs = self._prepare_create_resource(
                test_class_instance, **kwargs)
            if self._use_cache:
                with self._cache_lock:
                    if aggregate_cache_key not in AbstractPreparer._resource_cache:
                        _logger.debug("Storing cached resource for %s",
                                      self.__class__.__name__)
                        AbstractPreparer._resource_cache[
                            aggregate_cache_key] = AbstractPreparer.ResourceCacheEntry(
                                resource_name, kwargs, self)

        if test_class_instance.is_live:
            test_class_instance.scrubber.register_name_pair(
                resource_name, self.moniker)

        # We shouldn't trim the same kwargs that we use for deletion,
        # we may remove some of the variables we needed to do the delete.
        trimmed_kwargs = {k: v for k, v in kwargs.items()}
        trim_kwargs_from_test_function(fn, trimmed_kwargs)

        try:
            try:
                import asyncio
            except ImportError:
                fn(test_class_instance, **trimmed_kwargs)
            else:
                if asyncio.iscoroutinefunction(fn):
                    loop = asyncio.get_event_loop()
                    loop.run_until_complete(
                        fn(test_class_instance, **trimmed_kwargs))
                else:
                    fn(test_class_instance, **trimmed_kwargs)
        finally:
            # If we use cache we delay deletion for the end.
            # This won't guarantee deletion order, but it will guarantee
            # everything delayed does get deleted, in the worst case by
            # getting rid of the RG at the top.
            if not (self._use_cache or child_is_cached):
                # Russian Doll - the last declared resource to be deleted first.
                self.remove_resource_with_record_override(
                    resource_name, **kwargs)

    # _logger.debug("Setting up preparer stack for {}".format(self.__class__.__name__))
    setattr(_preparer_wrapper, '__is_preparer', True)
    # Inform the next step in the chain (our parent) that we're cached.
    if self._use_cache or getattr(fn, '__use_cache', False):
        setattr(_preparer_wrapper, '__use_cache', True)
    functools.update_wrapper(_preparer_wrapper, fn)
    return _preparer_wrapper
def decorating_function(user_fnc):
    return functools.update_wrapper(
        _login_required_wrapper(user_fnc, role, token), user_fnc)
def __call__(self, caller):
    """
    :param caller: A METHOD THAT IS EXPECTED TO CALL func
    :return: caller, BUT WITH SIGNATURE OF self.func
    """
    return update_wrapper(caller, self.func)
def __init__(self, fget):
    self.fget = fget
    # copy the getter function's docstring and other attributes
    functools.update_wrapper(self, fget)
def wrap(view, cacheable=False):
    def wrapper(*args, **kwargs):
        return self.admin_view(view, cacheable)(*args, **kwargs)
    wrapper.admin_site = self
    return update_wrapper(wrapper, view)
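# Hedged sketch of where such a wrap() helper typically lives: inside a
# Django AdminSite.get_urls(), wrapping each view with admin_view() while
# update_wrapper keeps the view's name for URL resolution. `my_view` is
# illustrative; `update_wrapper` is assumed imported from functools.
def get_urls(self):
    from django.urls import path

    def wrap(view, cacheable=False):
        def wrapper(*args, **kwargs):
            return self.admin_view(view, cacheable)(*args, **kwargs)
        wrapper.admin_site = self
        return update_wrapper(wrapper, view)

    return [
        path('my-view/', wrap(self.my_view), name='my_view'),
    ]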
def _wrap_w_kw(self, fn):
    def wrap(*arg, **kw):
        return fn(*arg)
    return update_wrapper(wrap, fn)
def __init__(self, obj):
    self.obj = obj
    update_wrapper(self, obj)
def wrapped_partial(func, *args, **kwargs):
    partial_func = functools.partial(func, *args, **kwargs)
    functools.update_wrapper(partial_func, func)
    return partial_func
def __init__(self, func):
    self.func = func
    functools.update_wrapper(self, func)
    self._first_call = True
    self._not_cached = object()
def __new__(cls, func):
    namespace = inspect.currentframe().f_back.f_locals
    self = functools.update_wrapper(super().__new__(cls), func)
    return namespace.get(func.__name__, self)
def __init__(self, method):
    self.method = method
    self.procmem_type = ProcessMemory
    self.weak = False
    functools.update_wrapper(self, method)
def _d(fn):
    return update_wrapper(d(fn), fn)
for name in dir(AbstractPickleTests):
    if name.startswith('test_recursive_'):
        func = getattr(AbstractPickleTests, name)
        if (test_support.check_impl_detail(pypy=True) or
                '_subclass' in name and '_and_inst' not in name):
            # PyPy's cPickle matches pure python pickle's behavior here
            assert_args = RuntimeError, 'maximum recursion depth exceeded'
        else:
            assert_args = ValueError, "can't pickle cyclic objects"

        def wrapper(self, func=func, assert_args=assert_args):
            with self.assertRaisesRegexp(*assert_args):
                func(self)
        functools.update_wrapper(wrapper, func)
        setattr(cPickleFastPicklerTests, name, wrapper)


class cStringIOCPicklerFastTests(cStringIOMixin, cPickleFastPicklerTests):
    pass


class BytesIOCPicklerFastTests(BytesIOMixin, cPickleFastPicklerTests):
    pass


class FileIOCPicklerFastTests(FileIOMixin, cPickleFastPicklerTests):
    pass
def partial(fun, *args, **kwargs):
    wrapped = functools.partial(fun, *args, **kwargs)
    functools.update_wrapper(wrapped, fun)
    wrapped._bound_args = args
    return wrapped
def __new__(cls, func):
    namespace = inspect.currentframe().f_back.f_locals
    self = functools.update_wrapper(dict.__new__(cls), func)
    self.pending = set()
    self.get_type = type  # default type checker
    return namespace.get(func.__name__, self)
def wrapper(func):
    @pass_context
    def new_func(ctx, *args, **kwargs):
        return ctx.invoke(func, ctx, *args, **kwargs)
    return click_command(*args, **kwargs)(update_wrapper(new_func, func))
def service_route(service, validation_pred=None, coerce_data=True, rule=None,
                  input_schema=None, output_schema=None, fn=None):
    """Function decorator that registers a ``webservice_fn`` version of the
    function on the provided service.

    Note that this function is used primarily to register functions en masse
    with the ``service_registry`` interface.  However, it can be used as a
    traditional decorator if desired, e.g.:

    .. code-block:: python

        my_service_route = partial(service_route, my_service)

        @my_service_route(rule="/this-route", input_schema=[int],
                          output_schema=int)
        def my_sum(*ints):
            return sum(ints)

    I find there to be 2 important pitfalls to bear in mind if using the
    decorator this way:

    1. This makes it seem like the function is being somehow modified,
       which it is not.  It can confuse people reading your code into
       thinking they have to make separate, dedicated webservice versions
       of the function in order to register them.

    2. It becomes unsafe to use positional arguments when using the
       decorator like this.  If I had defined that decorator using the
       canonical flask pattern, e.g. ``@my_service_route("/this-route")``,
       it would have caused everything to explode.  To get consistent
       behavior, you MUST specify the arguments by keyword.

    :param service: The service or app which is to have the rule added to
        it.  Must support the ``add_url_rule`` interface as described in the
        `flask documentation <http://flask.pocoo.org/docs/0.11/api/>`_.
    :param validation_pred: see description of the same param in the
        ``validator`` fn: a function that returns true or false.  The
        default for validation on webservice routes is to use the value of
        ``__debug__`` as a guide.
    :param coerce_data: see description of the same param in the
        ``validator`` fn: a boolean flag for coercing data.  The default is
        to coerce data, which is often very helpful in parsing json from a
        web request.
    :param rule: the url route to use when accessing this function
    :param input_schema: a data definition as described in the
        ``validate_against_schema`` fn documentation.  This value is not
        required.  If none is given, no validation will be done on the
        input.
    :param output_schema: a data definition as described in the
        ``validate_against_schema`` fn documentation.  This value is not
        required.  If none is given, no validation will be done on the
        output.
    :param fn: The function intended to implement the request.
    :return: the original function, unmodified.
    """
    if not rule:
        return update_wrapper(
            partial(service_route, service, validation_pred, coerce_data),
            service_route)
    if fn is None:
        return update_wrapper(
            partial(service_route, service, validation_pred, coerce_data,
                    rule, input_schema, output_schema),
            service_route)

    validation_pred = validation_pred or _WHEN_DEBUGGING
    input_validator = validator(input_schema or _IDENTITY,
                                "input to endpoint {0}".format(rule),
                                validation_predicate=validation_pred,
                                coerce_data=coerce_data)
    output_validator = validator(output_schema or _IDENTITY,
                                 "output from endpoint {0}".format(rule),
                                 validation_predicate=validation_pred,
                                 coerce_data=coerce_data)
    service.add_url_rule(
        rule=rule,
        endpoint=fn.__name__ if hasattr(fn, "__name__") else rule,
        view_func=update_wrapper(
            lambda: webservice_fn(fn, input_validator, output_validator),
            fn),
        methods=['POST'])
    return fn
def __init__(self, f):
    self.f = f
    functools.update_wrapper(self, f)
def __get__(self, parent, objtype):
    """Include self for methods."""
    func = functools.partial(self.__call__, parent)
    return functools.update_wrapper(func, self.awaitable)
def __init__(self, func, activation=None):
    self.func = func
    self.activation = activation
    functools.update_wrapper(self, func)
def decorating_function(user_function):
    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get    # bound method to lookup a key or return None
    lock = RLock()           # because linkedlist updates aren't threadsafe
    root = []                # root of the circular doubly linked list
    root[:] = [root, root, None, None]     # initialize by pointing to self

    if use_memory_up_to:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update.  That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    full = (psutil.virtual_memory().available < use_memory_up_to)
                misses += 1
            return result

    elif maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update after a successful call
            nonlocal misses
            result = user_function(*args, **kwds)
            misses += 1
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            result = user_function(*args, **kwds)
            cache[key] = result
            misses += 1
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update.  That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    full = (len(cache) >= maxsize)
                misses += 1
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, len(cache))

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return update_wrapper(wrapper, user_function)