Example #1
    def _bind_executors(self):
        callable_wrapper = self._get_callable_wrapper()

        __call__ = decorator.decorator(
            callable_wrapper,
            DropFirstParameter.from_function(self._callable))
        __call__.__name__ = __call__.__qualname__ = '__call__'
        __call__.__module__ = self._get_import_path()
        del __call__.__annotations__
        del __call__.__wrapped__
        self._dynamic_call = __call__

        # TODO `async` execution has some problems with garbage-collection in
        # the subprocess given certain inputs. Needs further investigation.
        def async_wrapper(*args, **kwargs):
            args = args[1:]
            pool = concurrent.futures.ProcessPoolExecutor(max_workers=1)
            future = pool.submit(self, *args, **kwargs)
            pool.shutdown(wait=False)
            return future

        async = decorator.decorator(
            async_wrapper,
            DropFirstParameter.from_function(self._callable))
        async.__name__ = async.__qualname__ = 'async'
        async.__module__ = self._get_import_path()
        del async.__annotations__
        del async.__wrapped__
        self._dynamic_async = async
Example #2
    def _bind_executors(self):
        callable_wrapper = self._get_callable_wrapper()

        # TODO drop function annotations as they only make sense for the
        # "view API". Necessary for `async` too. Simply setting __annotations__
        # to {} or None doesn't work for some reason.

        # TODO the signature (as `inspect` and IPython sees it) isn't correct,
        # it displays `output_dir` when there shouldn't be one
        __call__ = decorator.decorator(
            callable_wrapper, FunctionMakerDropFirstArg.clone_sig_without_first_arg(self._callable)
        )
        __call__.__name__ = "__call__"
        self._dynamic_call = __call__

        # TODO `async` execution has some problems with garbage-collection in
        # the subprocess given certain inputs. Needs further investigation.
        def async_wrapper(*args, **kwargs):
            args = args[1:]
            pool = concurrent.futures.ProcessPoolExecutor(max_workers=1)
            future = pool.submit(self, *args, **kwargs)
            pool.shutdown(wait=False)
            return future

        async = decorator.decorator(
            async_wrapper, FunctionMakerDropFirstArg.clone_sig_without_first_arg(self._callable)
        )
        async.__name__ = "async"
        self._dynamic_async = async
Example #3
def load(function=None, extensions=None, CustomException=IOError):
    """
    Decorator to support loading from compressed files for functions
    whose first argument is the sourcePath

    If the first argument ends with a recognized extension,
    the decorator uncompresses sourcePath to a temporary folder
    and runs the function on each resulting file until it succeeds.

    Archive format is determined by file extension:
    .zip .tar.gz .tar.bz2 .tar

    # Try load(sourcePath) on each file in the archive until one works
    @archiveIO.load
    def load(sourcePath):
        return open(sourcePath).read()
    # Try load(sourcePath) on TXT files and if all fail, try CSV files
    @archiveIO.load(extensions=['.txt', '.csv'])
    def load(sourcePath):
        return open(sourcePath).read()
    # Raise AppError instead of IOError
    @archiveIO.load(CustomException=AppError)
    def load(sourcePath):
        return open(sourcePath).read()
    """
    def load(function, *args, **kw):
        sourcePath = kw.get('sourcePath', args[0])
        try:
            archive = Archive(sourcePath)
        # If we did not recognize the extension, run function as usual
        except ArchiveError:
            return function(*args, **kw)
        # Make temporaryFolder
        with TemporaryFolder() as temporaryFolder:
            try:
                # Convert a disposable generator into a reusable list
                paths = list(archive.load(temporaryFolder))
                # For each path, run function and exit if successful
                for path in select_extensions(paths, extensions or []):
                    try:
                        return function(path, *args[1:], **kw)
                    except:
                        pass
            except:
                raise CustomException('Could not open archive')
            raise CustomException('Could not load archive')
    if function:
        return decorator(load, function)
    else:
        return decorator(load)
Example #4
 def entangle(func):
     def caller(func, *args, **kargs):
         perm()
         # Remove login credentials
         form = parse_formvars(request.environ, include_get_vars=True)
         if form.pop('do_login__', None):
             form.pop('login', None)
             form.pop('password', None)
         return func(*args, **kargs)
     try:
         # Old Decorators version
         return decorator(caller)(func)
     except:
         # New version
         return decorator(caller, func)
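
The try/except above distinguishes two calling conventions of the `decorator` package. A minimal self-contained sketch of both forms, assuming a reasonably recent `decorator` release; the functions are invented for illustration:

from decorator import decorator

def caller(func, *args, **kwargs):
    # Runs around every call to the wrapped function.
    print("calling", func.__name__)
    return func(*args, **kwargs)

# Factory form (the "Old Decorators version" branch): decorator(caller)
# returns a decorator that can be applied to any function.
trace = decorator(caller)

@trace
def add(a, b):
    return a + b

# Direct form (the "New version" branch): decorator(caller, func) wraps
# one specific function immediately.
def mul(a, b):
    return a * b

mul = decorator(caller, mul)

print(add(1, 2), mul(3, 4))  # both keep their original signatures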
Example #5
def cached(project, key_format):
    """Caching decorator for slow functions.

    Usage:
    @cached(project name (used as directory), key format string (used as filename))
    def slow(...):
        pass

    Key format string can include positional args with {0} and keyword args {name} from the function call.
    A special keyword arg func_name is added to the formatting parameters.
    """
    def caching(func, *args, **kwargs):
        key = key_format.format(*args, **dict(kwargs, func_name=func.__name__))
        dirname = os.path.join(CACHES_BASE, project)
        if not os.path.exists(dirname):
            os.mkdir(dirname, 0755)
        filename = os.path.join(dirname, key + '.pickle')
        try:
            with open(filename, 'rb') as f:
                return cPickle.load(f)
        except IOError:
            print('No cache for {0}/{1}, running slow function.'.format(project, key), file=sys.stderr)
            data = func(*args, **kwargs)
            with open(filename, 'wb') as f:
                cPickle.dump(data, f, -1)
            return data
    return decorator(caching)
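
A standalone illustration of the key-format rules described in the docstring above ({0} for positional args, {name} for keyword args, and the injected func_name); the format string and argument values are invented:

# The cache key is built exactly like this inside caching(), with func_name
# injected automatically into the formatting parameters.
key_format = '{func_name}_{0}_{mode}'
args = ('sample42',)
kwargs = {'mode': 'fast'}
key = key_format.format(*args, **dict(kwargs, func_name='slow_stats'))
print(key)  # -> slow_stats_sample42_fast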
Example #6
def memoize_with_expiry(expiry_time=0, _cache=None, num_args=None):
    """Taken from http://seanblanchfield.com/python-memoize-with-expiry/"""
    def _memoize_with_expiry(func, *args, **kw):
        # Determine what cache to use - the supplied one, or one we create inside the
        # wrapped function.
        if _cache is None and not hasattr(func, '_cache'):
            func._cache = {}
        cache = _cache or func._cache

        mem_args = args[:num_args]
        # frozenset is used to ensure hashability
        if kw:
            key = mem_args, frozenset(kw.iteritems())
        else:
            key = mem_args
        if key in cache:
            result, timestamp = cache[key]
            # Check the age.
            age = time() - timestamp
            if not expiry_time or age < expiry_time:
                return result
        result = func(*args, **kw)
        cache[key] = (result, time())
        return result
    return decorator(_memoize_with_expiry)
Example #7
    def magic_deco(arg):
        call = lambda f, *a, **k: f(*a, **k)

        # Find get_ipython() in the caller's namespace
        caller = sys._getframe(1)
        for ns in ['f_locals', 'f_globals', 'f_builtins']:
            get_ipython = getattr(caller, ns).get('get_ipython')
            if get_ipython is not None:
                break
        else:
            raise NameError('Decorator can only run in context where '
                            '`get_ipython` exists')

        ip = get_ipython()

        if callable(arg):
            # "Naked" decorator call (just @foo, no args)
            func = arg
            name = func.__name__
            ip.register_magic_function(func, magic_kind, name)
            retval = decorator(call, func)
        elif isinstance(arg, str):
            # Decorator called with arguments (@foo('bar'))
            name = arg
            def mark(func, *a, **kw):
                ip.register_magic_function(func, magic_kind, name)
                return decorator(call, func)
            retval = mark
        else:
            raise TypeError("Decorator can only be called with "
                             "string or function")
        return retval
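
The naked-versus-argument dispatch above is independent of IPython. A self-contained sketch of the same pattern with the registration replaced by a print, assuming only the `decorator` package:

from decorator import decorator

def logged(arg):
    call = lambda f, *a, **k: f(*a, **k)
    if callable(arg):                  # naked use: @logged
        print("registering", arg.__name__)
        return decorator(call, arg)
    elif isinstance(arg, str):         # called with an argument: @logged("name")
        def mark(func):
            print("registering", arg)
            return decorator(call, func)
        return mark
    raise TypeError("expected a string or a function")

@logged
def a():
    return 1

@logged("custom-name")
def b():
    return 2

print(a(), b())  # -> 1 2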
Example #8
File: db.py Project: bitctrl/odoo
def check_db_management_enabled(method):
    def if_db_mgt_enabled(method, self, *args, **kwargs):
        if not odoo.tools.config['list_db']:
            _logger.error('Database management functions blocked, admin disabled database listing')
            raise AccessDenied()
        return method(self, *args, **kwargs)
    return decorator(if_db_mgt_enabled, method)
Example #9
def deprecated(message):
    """
    Return a decorator to make deprecated functions.

    :param message:
        the message to print the first time the
        deprecated function is used.

    Here is an example of usage:

    >>> @deprecated('Use new_function instead')
    ... def old_function():
    ...     'Do something'

    Notice that if the function is called several times, the deprecation
    warning will be displayed only the first time.
    """
    def _deprecated(func, *args, **kw):
        msg = '%s.%s has been deprecated. %s' % (
            func.__module__, func.__name__, message)
        if not hasattr(func, 'called'):
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            func.called = 0
        func.called += 1
        return func(*args, **kw)
    return decorator(_deprecated)
Example #10
def one(method):
    """ Decorate a record-style method where ``self`` is expected to be a
        singleton instance. The decorated method automatically loops on records,
        and makes a list with the results. In case the method is decorated with
        :func:`returns`, it concatenates the resulting instances. Such a
        method::

            @api.one
            def method(self, args):
                return self.name

        may be called in both record and traditional styles, like::

            # recs = model.browse(cr, uid, ids, context)
            names = recs.method(args)

            names = model.method(cr, uid, ids, args, context=context)

        .. deprecated:: 9.0

            :func:`~.one` often makes the code less clear and behaves in ways
            developers and readers may not expect.

            It is strongly recommended to use :func:`~.multi` and either
            iterate on the ``self`` recordset or ensure that the recordset
            is a single record with :meth:`~odoo.models.Model.ensure_one`.
    """
    def loop(method, self, *args, **kwargs):
        result = [method(rec, *args, **kwargs) for rec in self]
        return aggregate(method, result, self)

    wrapper = decorator(loop, method)
    wrapper._api = 'one'
    return wrapper
Example #11
 def _wrapper(function):
     # Needed to create the Wrapper in the right scope.
     if HAS_DECORATOR:
         # The decorator calls a callable with (func, *args, **kw) signature
         return decorator(Wrapper(function).decorator_call, function)
     else:
         return wraps(function)(Wrapper(function))
Example #12
def deprecated(item):
    def wrapper(item, *args, **kwargs):
        raise_deprecation_warning(item.__name__)
        return item(*args, **kwargs)
    function = decorator(wrapper, item)
    function.maturity = 'deprecated'
    return function
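
A hypothetical usage sketch for the deprecated() decorator above; raise_deprecation_warning is assumed to come from the surrounding module:

@deprecated
def legacy_sum(a, b):
    return a + b

print(legacy_sum.maturity)  # -> 'deprecated'
legacy_sum(1, 2)            # warns via raise_deprecation_warning, then runs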
Example #13
def RequireInternalRequest(methods=['POST', 'GET', 'PUT', 'DELETE']):
    """
    CSRF Spoof Filter

    TODO: There is still a scenario in which an attacker opens an adhocracy
    page in an iframe, extracts a valid modtoken via javascript and uses this
    token to execute the request.
    """
    def _decorate(f, *a, **kw):
        def check():

            method = request.environ.get('REQUEST_METHOD').upper()
            if not method in methods:
                return False

            identifier = request.environ.get(
                'repoze.who.identity', {}).get('identifier')
            if (identifier is not None and
                    isinstance(identifier, BasicAuthPlugin)):
                return True

            if request.params.get(KEY) == token_id():
                return True

            return False
        if check():
            return f(*a, **kw)
        else:
            from adhocracy.lib.templating import ret_abort
            ret_abort(_("I'm sorry, it looks like we made a mistake "
                        "(CSRF alert). Please try again."), code=403)
    return decorator(_decorate)
Example #14
def trace2(f):
    """
    Same as :function:`trace` but more verbose.
    :param f: the function
    :returns: the decorated function
    """
    return decorator(_trace2, f)
Example #15
def f4a_decorator_v1(**kwargs):
    p1 = kwargs.get('p1', False)
    print "f4a_decorator.wrapper: %s" % p1
    def wrapper(func, *args, **kwargs):
        return func(*args, **kwargs)

    return decorator(wrapper)
Example #16
def require_creds(use_slice_urn):
    """Decorator to verify credentials"""
    def require_creds(func, *args, **kw):
        
        logger.debug("Checking creds")
        
        client_cert = kw["request"].META["SSL_CLIENT_CERT"]

        if use_slice_urn:
            slice_urn = args[0]
            credentials = args[1]
        else:
            slice_urn = None
            credentials = args[0]
            
        cred_verifier = CredentialVerifier(settings.GCF_X509_TRUSTED_CERT_DIR)
            
        cred_verifier.verify_from_strings(
            client_cert, credentials,
            slice_urn, PRIVS_MAP[func.func_name])

        logger.debug("Creds pass")
        
        return func(*args, **kw)
        
    return decorator(require_creds)
Example #17
def _from_dbus_type(fn):
    def from_dbus_type(dbusVal):
        def from_dbus_dict(dbusDict):
            d = dict()
            for dbusKey, dbusVal in dbusDict.items():
                d[from_dbus_type(dbusKey)] = from_dbus_type(dbusVal)
            return d

        typeUnwrapper = {
            dbus.types.Dictionary: from_dbus_dict,
            dbus.types.Array: lambda x: list(map(from_dbus_type, x)),
            dbus.types.Double: float,
            dbus.types.Boolean: bool,
            dbus.types.Byte: int,
            dbus.types.Int16: int,
            dbus.types.Int32: int,
            dbus.types.Int64: int,
            dbus.types.UInt32: int,
            dbus.types.UInt64: int,
            dbus.types.ByteArray: str,
            dbus.types.ObjectPath: str,
            dbus.types.Signature: str,
            dbus.types.String: str
        }
        try:
            return typeUnwrapper[type(dbusVal)](dbusVal)
        except KeyError:
            return dbusVal

    def wrapped(fn, self, *args, **kwargs):
            return from_dbus_type(fn(self, *args, **kwargs))

    return decorator(wrapped, fn)
Example #18
def validator(func, *args, **kwargs):
    """
    A decorator that makes given function validator.

    Whenever the given function is called and returns ``False`` value
    this decorator returns :class:`ValidationFailure` object.

    Example::

        >>> @validator
        ... def even(value):
        ...     return not (value % 2)

        >>> even(4)
        True

        >>> even(5)
        ValidationFailure(func=even, args={'value': 5})

    :param func: function to decorate
    :param args: positional function arguments
    :param kwargs: key value function arguments
    """
    def wrapper(func, *args, **kwargs):
        value = func(*args, **kwargs)
        if not value:
            return ValidationFailure(
                func, func_args_as_dict(func, args, kwargs)
            )
        return True
    return decorator(wrapper, func)
Example #19
    def __call__(self, f):
        argspec = misc.getargspec(f)
        self.arg_names = argspec[0]
        self.have_varargs = (argspec[1] is not None) and not self.add_varargs

        assert ((len(self.constraints) == len(self.arg_names)
                    and not self.have_varargs)
             or (len(self.constraints) == len(self.arg_names) + 1
                    and self.have_varargs))

        if self.add_varargs:
            # add varargs to the signature and use decorator.FunctionMaker
            # directly to create the decorated function
            orig_signature = inspect.getargspec(f)
            assert orig_signature.varargs is None
            signature = orig_signature._replace(varargs='args')
            evaldict = self.wrapper.__globals__.copy()
            evaldict['_call_'] = self.wrapper
            evaldict['_func_'] = f
            return decorator.FunctionMaker.create(
                '%s%s' % (f.__name__, inspect.formatargspec(*signature)),
                'return _call_(_func_, %(shortsignature)s)',
                evaldict, undecorated=f, __wrapped__=f, doc=f.__doc__)
        else:
            return decorator.decorator(self.wrapper, f)
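
decorator.FunctionMaker.create, used in the varargs branch above, builds a function from a string signature plus a body template. A minimal sketch, assuming decorator 3.x or later; the function names are invented:

import decorator

def _call_(_func_, x, *args):
    return _func_(x) + list(args)

def single(x):
    return [x]

# Build a wrapper whose source-level signature is "single_plus(x, *args)";
# %(shortsignature)s expands to "x, *args" inside the body template.
wrapper = decorator.FunctionMaker.create(
    'single_plus(x, *args)',
    'return _call_(_func_, %(shortsignature)s)',
    dict(_call_=_call_, _func_=single))

print(wrapper(1, 2, 3))  # -> [1, 2, 3]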
Example #20
def render_thumbnail(width=None, height=None, fit="contain"):
    if width is None and height is None:
        raise Exception("Must specify at least one of width/height for render_thumbnail")

    if fit not in ["cover", "contain", "stretch"]:
        raise Exception("fit must be one of cover, contain, or stretch for render_thumbnail")

    if fit in ["cover", "stretch"] and (width is None or height is None):
        raise Exception("fit=%s requires both width and height to be set for render_thumbnail" % fit)

    def decorated(f, *args, **kwargs):
        upload = f(*args, **kwargs)

        if upload is None:
            return flask.abort(404)

        if isinstance(upload, app.response_class):
            return upload

        if not isinstance(upload, trex.support.model.TrexUpload):
            raise Exception("Can't render upload from %s" % upload)

        if not upload.can_thumbnail():
            raise Exception("Upload doesn't support thumbnailing" % upload)

        return upload.generate_thumbnail_response(width=width, height=height, fit=fit)

    return decorator(decorated)
Example #21
 def entangle(func):
   def method(func, request):
     if request.method in acceptables:
       return func(request)
     else:
       return views.method_not_allowed(request)
   return decorator(method, func)
Example #22
def identify(cls):
    """
    class decorator which designates a class to be used as user_metric
    and which allows the specification of other user_metrics on which this
    one depends.  Those dependencies will be used to do pipeline ordering,
    and automatic dependency fulfillment (i.e. if you try to use class A
    as a metric which depends on a class B already having processed the user
    record, the cohort_eval will automatically add an instance of class B
    to the pipeline).

    specifically, this decorator applies the metric_init decorator to the
    passed in class's __init__ function

    returns:
        the class passed in as argument 'cls', with the added attribute 'id',
        which is created using the metric_init function decorator
    """

    if not hasattr(cls, '__init__') or not hasattr(cls.__init__, 'im_func'):
        # check for case where cls has not declared __init__ function
        def __blank_init__(self):
            pass
        cls.__init__ = __blank_init__
    cls.__init__ = decorator(id_init, cls.__init__.im_func)
    
    def print_id(self):
        return self.id

    return cls
Example #23
File: x.py Project: quad/pif
def thunk(function):
    """Enqueue the method-call into the GObject mainloop."""

    def _(func, *args, **kwargs):
        gobject.idle_add(lambda: func(*args, **kwargs))

    return decorator(_, function)
Example #24
def authorized(expression=None, arg_name="authentication"):
    """
    Decorator that is used to check authorization.

    :param expression: the security expression. If no expression is passed
    then the system just checks to see if a user is authenticated in the system.

    :param arg_name: the optional argument name used to inject the authentication
    object into a method or function that is decorated.
    """
    def _authorized(f, *args, **kwargs):

        if expression is None and not SecurityManager().authenticated():
            raise AuthenticationException("Not Authenticated")
        else:
            attribute = AuthorizedAttribute(f=f, args=args, kwargs=kwargs)
            SecurityManager().authorized(expression, attribute=attribute)

        if arg_name in kwargs:
            kwargs[arg_name] = SecurityManager().get_authentication()
        else:
            sig = inspect.signature(f)
            params = sig.parameters
            for indx, (name, param) in enumerate(params.items()):
                if indx == len(args):
                    break
                if name == arg_name or \
                        (param.annotation != inspect.Parameter.empty and param.annotation == "auth"):
                    args = list(args)
                    args[indx] = SecurityManager().get_authentication()
                    break

        return f(*args, **kwargs)

    return decorator(_authorized)
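
A hypothetical usage sketch for authorized() above; the expression syntax and the SecurityManager backend belong to the surrounding framework and are assumed here:

@authorized(expression="user.has_role('admin')", arg_name="authentication")
def delete_account(account_id, authentication=None):
    return account_id, authentication

# Passing the keyword explicitly (even as None) lets the decorator replace it
# with SecurityManager().get_authentication() before calling the function.
delete_account("acct-1", authentication=None)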
Example #25
def httpdigest(*args, **kwargs):
    """

    :param args:
    :param kwargs:
    May be used in one of three ways:
    * as a decorator factory (with the arguments being parameters to an
      instance of HttpDigestAuthenticator used to protect the decorated view)
    * as a decorator (protecting the decorated view with a default-constructed
      instance of HttpDigestAuthenticator)
    * as a decorator factory (with the argument being a pre-constructed
      HttpDigestAuthenticator instance used to protect the decorated view)
    """
    if len(args) == 1 and not kwargs and isCallable(args[0]):
        authenticator = HttpDigestAuthenticator()
        return decorator(partial(_httpdigest, authenticator), args[0])
    if len(args) == 1 and not kwargs and isinstance(args[0],
                                                    HttpDigestAuthenticator):
        authenticator = args[0]
    else:
        authenticator = HttpDigestAuthenticator(*args, **kwargs)

    def decor(f):
        return decorator(partial(_httpdigest, authenticator), f)
    return decor
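
A hypothetical sketch of the three call styles listed in the docstring; the HttpDigestAuthenticator constructor arguments shown are assumptions, not taken from the source:

# Style 1: bare decorator with a default-constructed authenticator.
@httpdigest
def view_a(request):
    return "a"

# Style 2: factory call; keyword arguments are forwarded to
# HttpDigestAuthenticator (the 'realm' keyword is only an assumption).
@httpdigest(realm="admin area")
def view_b(request):
    return "b"

# Style 3: factory call with a pre-built authenticator shared by views.
shared = HttpDigestAuthenticator()

@httpdigest(shared)
def view_c(request):
    return "c"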
Example #26
    def __call__(self, f):
        """
        If there are decorator arguments, __call__() is only called
        once, as part of the decoration process! You can only give
        it a single argument, which is the function object.
        """
        def wrapped_f(f, *args, **kwargs):
            log.debug('Decorator arguments: ' + self.permission)
            self.session = args[0].session
            self.request = args[0]
            log.debug(self.session)
            
            id = None
            if getSettings('auth.enable') == 'True':
                if 'token' in self.session:
                    id = self.session['token']
                if 'id' in self.request.params:
                    id = self.request.params['id']
                    self.session['token'] = id
                if not self.isAuthorized(id):
                    return self.redirect_to_auth(400)
                if 'permissions' in self.session and self.permission in self.session['permissions']:
                    pass
                else:
                    return self.redirect_to_auth(401)
            value = f(*args, **kwargs)
            log.debug('After f(*args)')
            return value

        """ Note: http://www.siafoo.net/article/68 for this """
        return decorator(wrapped_f)(f)
Example #27
def block_observed(func):
    #Almost copied from gaphas.state
    """
    Simple observer, dispatches events to functions registered in the observers
    list.

    On the function an ``__observer__`` property is set, which references to
    the observer decorator. This is necessary, since the event handlers expect
    the outermost function to be returned (that's what they see).

    Also note that the events are dispatched *before* the function is invoked.
    This is an important feature, esp. for the reverter code.
    """
    def wrapper(func, *args, **kwargs):
        o = func.__observer__
        acquired = mutex.acquire(False)
        try:
            if acquired:
                block_dispatch((o, args, kwargs), queue=block_observers)
            return func(*args, **kwargs)
        finally:
            if acquired:
                mutex.release()
    dec = decorator(wrapper)(func)    
    func.__observer__ = dec
    return dec
Example #28
def feature_flag(flag):
    def call(f, *args, **kw):
        if app.config.get(flag, False) == True:
            return f(*args, **kw)
        return abort(404)

    return decorator(call)
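
A self-contained sketch of the same feature-flag pattern, with a plain dict standing in for Flask's app.config and an exception standing in for abort(404):

from decorator import decorator

CONFIG = {"ENABLE_EXPORT": True}   # stands in for app.config

def feature_flag(flag):
    def call(f, *args, **kw):
        if CONFIG.get(flag, False) == True:
            return f(*args, **kw)
        raise RuntimeError("404: feature %r is disabled" % flag)
    return decorator(call)

@feature_flag("ENABLE_EXPORT")
def export_report():
    return "report.csv"

print(export_report())  # runs because the flag is enabled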
Example #29
def dispatch_on(**method_map):
    """Dispatches to alternate controller methods based on HTTP method

    Multiple keyword arguments should be passed, with the keyword corresponding
    to the HTTP method to dispatch on (DELETE, POST, GET, etc.) and the
    value being the function to call. The value should be a string indicating
    the name of the function to dispatch to.

    Example:

    .. code-block:: Python

        from pylons.decorators import rest

        class SomeController(BaseController):

            @rest.dispatch_on(POST='create_comment')
            def comment(self):
                # Do something with the comment

            def create_comment(self, id):
                # Do something if it's a post to comment
    """
    def dispatcher(func, self, *args, **kwargs):
        """Wrapper for dispatch_on"""
        alt_method = method_map.get(pylons.request.method)
        if alt_method:
            alt_method = getattr(self, alt_method)
            log.debug("Dispatching to %s instead", alt_method)
            return self._inspect_call(alt_method, **kwargs)
        return func(self, *args, **kwargs)
    return decorator(dispatcher)
Example #30
def timeout(s):
    """Create decorator to time out wrapped functions after ``s`` seconds.

    :param s: number of seconds before the decorator will time out
    :rtype: decorator
    """
    def _timeout(f, *args, **kwargs):
        """A decorator that prevents ``f`` from running too long.

        :param f: the function to wrap
        :rtype: whatever ``f`` returns
        """
        def handle_timeout(signal_number, frame):
            """Handle the SIGALRM by raising a ``TimedOutException``."""
            raise TimedOutException

        # Grab a reference to the old handler.
        old = signal.signal(signal.SIGALRM, handle_timeout)

        # Start our timeout logic.
        signal.alarm(s)
        try:
            result = f(*args, **kwargs)
        finally:
            # Put the old handler back.
            old = signal.signal(signal.SIGALRM, old)
        # Wipe out any alarms that are left hanging around.
        signal.alarm(0)

        return result
    return decorator(_timeout)
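
Hypothetical usage of timeout() above. It relies on SIGALRM, so it only works on Unix in the main thread; TimedOutException is assumed to be defined in the original module:

import time

@timeout(2)
def slow_query():
    time.sleep(5)            # longer than the 2-second budget
    return "done"

try:
    slow_query()
except TimedOutException:    # assumed to be defined alongside timeout()
    print("query took longer than 2 seconds")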
Example #31
def deprecated_params(
    params: str | Iterable[str] | None = None,
    since: str | None = None,
    until: str | None = None,
    message: str | None = "",
    redirections: None
    | (Iterable[tuple[str, str] | Callable[..., dict[str, Any]]]) = None,
) -> Callable:
    """Decorator to mark parameters of a callable as deprecated.

    It can also be used to automatically redirect deprecated parameter values to their
    replacements.

    Parameters
    ----------
    params
        The parameters to be deprecated. Can consist of:

        * An iterable of strings, with each element representing a parameter to deprecate
        * A single string, with parameter names separated by commas or spaces.
    since
        The version or date since deprecation.
    until
        The version or date until removal of the deprecated callable.
    message
        The reason for why the callable has been deprecated.
    redirections
        A list of parameter redirections. Each redirection can be one of the following:

        * A tuple of two strings. The first string defines the name of the deprecated
          parameter; the second string defines the name of the parameter to redirect to,
          when attempting to use the first string.

        * A function performing the mapping operation. The parameter names of the
          function determine which parameters are used as input. The function must
          return a dictionary which contains the redirected arguments.

        Redirected parameters are also implicitly deprecated.

    Returns
    -------
    Callable
        The decorated callable.

    Raises
    ------
    ValueError
        If no parameters are defined (neither explicitly nor implicitly).
    ValueError
        If defined parameters are invalid python identifiers.

    Examples
    --------
    Basic usage::

        from manim.utils.deprecation import deprecated_params

        @deprecated_params(params="a, b, c")
        def foo(**kwargs):
            pass

        foo(x=2, y=3, z=4)
        # No warning

        foo(a=2, b=3, z=4)
        # WARNING  The parameters a and b of method foo have been deprecated and may be removed in a later version.

    You can also specify additional information for a more precise warning::

        from manim.utils.deprecation import deprecated_params

        @deprecated_params(
            params="a, b, c",
            since="v0.2",
            until="v0.4",
            message="The letters x, y, z are cooler."
        )
        def foo(**kwargs):
            pass

        foo(a=2)
        # WARNING  The parameter a of method foo has been deprecated since v0.2 and is expected to be removed after v0.4. The letters x, y, z are cooler.

    Basic parameter redirection::

        from manim.utils.deprecation import deprecated_params

        @deprecated_params(redirections=[
            # Two ways to redirect one parameter to another:
            ("old_param", "new_param"),
            lambda old_param2: {"new_param22": old_param2}
        ])
        def foo(**kwargs):
            return kwargs

        foo(x=1, old_param=2)
        # WARNING  The parameter old_param of method foo has been deprecated and may be removed in a later version.
        # returns {"x": 1, "new_param": 2}

    Redirecting using a calculated value::

        from manim.utils.deprecation import deprecated_params

        @deprecated_params(redirections=[
            lambda runtime_in_ms: {"run_time": runtime_in_ms / 1000}
        ])
        def foo(**kwargs):
            return kwargs

        foo(runtime_in_ms=500)
        # WARNING  The parameter runtime_in_ms of method foo has been deprecated and may be removed in a later version.
        # returns {"run_time": 0.5}

    Redirecting multiple parameter values to one::

        from manim.utils.deprecation import deprecated_params

        @deprecated_params(redirections=[
            lambda buff_x=1, buff_y=1: {"buff": (buff_x, buff_y)}
        ])
        def foo(**kwargs):
            return kwargs

        foo(buff_x=2)
        # WARNING  The parameter buff_x of method foo has been deprecated and may be removed in a later version.
        # returns {"buff": (2, 1)}

    Redirect one parameter to multiple::

        from manim.utils.deprecation import deprecated_params

        @deprecated_params(redirections=[
            lambda buff=1: {"buff_x": buff[0], "buff_y": buff[1]} if isinstance(buff, tuple)
                    else {"buff_x": buff,    "buff_y": buff}
        ])
        def foo(**kwargs):
            return kwargs

        foo(buff=0)
        # WARNING  The parameter buff of method foo has been deprecated and may be removed in a later version.
        # returns {"buff_x": 0, buff_y: 0}

        foo(buff=(1,2))
        # WARNING  The parameter buff of method foo has been deprecated and may be removed in a later version.
        # returns {"buff_x": 1, buff_y: 2}


    """
    # Check if decorator is used without parenthesis
    if callable(params):
        raise ValueError(
            "deprecate_parameters requires arguments to be specified.")

    if params is None:
        params = []

    # Construct params list
    params = re.split(r"[,\s]+", params) if isinstance(params,
                                                       str) else list(params)

    # Add params which are only implicitly given via redirections
    if redirections is None:
        redirections = []
    for redirector in redirections:
        if isinstance(redirector, tuple):
            params.append(redirector[0])
        else:
            params.extend(list(inspect.signature(redirector).parameters))
    # Keep ordering of params so that warning message is consistently the same
    # This will also help pass unit testing
    params = list(dict.fromkeys(params))

    # Make sure params only contains valid identifiers
    identifier = re.compile(r"^[^\d\W]\w*\Z", re.UNICODE)
    if not all(re.match(identifier, param) for param in params):
        raise ValueError("Given parameter values are invalid.")

    redirections = list(redirections)

    def warning_msg(func: Callable, used: list[str]):
        """Generate the deprecation warning message.

        Parameters
        ----------
        func
            The callable with deprecated parameters.
        used
            The list of deprecated parameters used in a call.

        Returns
        -------
        str
            The deprecation message.
        """
        what, name = _get_callable_info(func)
        plural = len(used) > 1
        parameter_s = "s" if plural else ""
        used_ = ", ".join(
            used[:-1]) + " and " + used[-1] if plural else used[0]
        has_have_been = "have been" if plural else "has been"
        deprecated = _deprecation_text_component(since, until, message)
        return f"The parameter{parameter_s} {used_} of {what} {name} {has_have_been} {deprecated}"

    def redirect_params(kwargs: dict, used: list[str]):
        """Adjust the keyword arguments as defined by the redirections.

        Parameters
        ----------
        kwargs
            The keyword argument dictionary to be updated.
        used
            The list of deprecated parameters used in a call.
        """
        for redirector in redirections:
            if isinstance(redirector, tuple):
                old_param, new_param = redirector
                if old_param in used:
                    kwargs[new_param] = kwargs.pop(old_param)
            else:
                redirector_params = list(
                    inspect.signature(redirector).parameters)
                redirector_args = {}
                for redirector_param in redirector_params:
                    if redirector_param in used:
                        redirector_args[redirector_param] = kwargs.pop(
                            redirector_param)
                if len(redirector_args) > 0:
                    kwargs.update(redirector(**redirector_args))

    def deprecate_params(func, *args, **kwargs):
        """The actual decorator function used to extend the callables behavior.

        Logs a warning message when a deprecated parameter is used and redirects it if
        specified.

        Parameters
        ----------
        func
            The callable to decorate.
        args
            The arguments passed to the given callable.
        kwargs
            The keyword arguments passed to the given callable.

        Returns
        -------
        Any
            The return value of the given callable when being passed the given
            arguments.

        """
        used = []
        for param in params:
            if param in kwargs:
                used.append(param)

        if len(used) > 0:
            logger.warning(warning_msg(func, used))
            redirect_params(kwargs, used)
        return func(*args, **kwargs)

    return decorator(deprecate_params)
Example #32
def dynamic_programming(f):
    f.cache = {}
    f.data = None
    return decorator(_dynamic_programming, f)
Example #33
 def __new__(cls, clsname, bases, dict):
     if decorator:
         for name, method in dict.items():
             if not name.startswith('_') and inspect.isroutine(method):
                 dict[name] = decorator(_run_on_failure_decorator, method)
     return type.__new__(cls, clsname, bases, dict)
Example #34
 def __call__(self, method):
     self.method = method
     self.determine_key()
     lookup = decorator(self.lookup, method)
     lookup.clear_cache = self.clear
     return lookup
Example #35
def expected(decorator, func):
    """ Decorate ``func`` with ``decorator`` if ``func`` is not wrapped yet. """
    return decorator(func) if not hasattr(func, '_api') else func
Example #36
class CinderCleanableObject(base.CinderPersistentObject):
    """Base class for cleanable OVO resources.

    All cleanable objects must have a host property/attribute.
    """
    worker = None

    cleanable_resource_types = set()

    @classmethod
    def get_rpc_api(cls):
        # By default assume all resources are handled by c-vol services
        return vol_rpcapi.VolumeAPI

    @classmethod
    def cinder_ovo_cls_init(cls):
        """Called on OVO registration, sets set of cleanable resources."""
        # First call persistent object method to store the DB model
        super(CinderCleanableObject, cls).cinder_ovo_cls_init()

        # Add this class to the set of resources
        cls.cleanable_resource_types.add(cls.obj_name())

    @classmethod
    def get_pinned_version(cls):
        # We pin the version by the last service that gets updated, which is
        # c-vol or c-bak
        min_obj_vers_str = cls.get_rpc_api().determine_obj_version_cap()

        # Get current pinned down version for this object
        version = base.OBJ_VERSIONS[min_obj_vers_str][cls.__name__]
        return versionutils.convert_version_to_int(version)

    @staticmethod
    def _is_cleanable(status, obj_version):
        """Check if a specific status for a specific OBJ version is cleanable.

        Each CinderCleanableObject class should implement this method and
        return True for cleanable status for versions equal or higher to the
        ones where the functionality was added.

        :returns: Whether to create a workers DB entry or not
        :param obj_version: Min object version running in the cloud or None if
                            current version.
        :type obj_version: float
        """
        return False

    def is_cleanable(self, pinned=False):
        """Check if cleanable VO status is cleanable.

        :param pinned: If we should check against pinned version or current
                       version.
        :type pinned_version: bool
        :returns: Whether this needs a workers DB entry or not
        """
        if pinned:
            obj_version = self.get_pinned_version()
        else:
            obj_version = None
        return self._is_cleanable(self.status, obj_version)

    def create_worker(self, pinned=True):
        """Create a worker entry at the API."""
        # This method is mostly called from the rpc layer, therefore it checks
        # if it's cleanable given current pinned version.
        if not self.is_cleanable(pinned):
            return False

        resource_type = self.__class__.__name__

        entry_in_db = False

        # This will only loop on very rare race conditions
        while not entry_in_db:
            try:
                # On the common case there won't be an entry in the DB, that's
                # why we try to create first.
                db.worker_create(self._context,
                                 status=self.status,
                                 resource_type=resource_type,
                                 resource_id=self.id)
                entry_in_db = True
            except exception.WorkerExists:
                try:
                    db.worker_update(self._context,
                                     None,
                                     filters={
                                         'resource_type': resource_type,
                                         'resource_id': self.id
                                     },
                                     service_id=None,
                                     status=self.status)
                    entry_in_db = True
                except exception.WorkerNotFound:
                    pass
        return entry_in_db

    def set_worker(self):
        worker = self.worker

        service_id = service.Service.service_id
        resource_type = self.__class__.__name__

        if worker:
            if worker.cleaning:
                return
        else:
            try:
                worker = db.worker_get(self._context,
                                       resource_type=resource_type,
                                       resource_id=self.id)
            except exception.WorkerNotFound:
                # If the call didn't come from an RPC call we still have to
                # create the entry in the DB.
                try:
                    self.worker = db.worker_create(self._context,
                                                   status=self.status,
                                                   resource_type=resource_type,
                                                   resource_id=self.id,
                                                   service_id=service_id)
                    return
                except exception.WorkerExists:
                    # If 2 cleanable operations are competing for this resource
                    # and the other one created the entry first that one won
                    raise exception.CleanableInUse(type=resource_type,
                                                   id=self.id)

        # If we have to claim this work or if the status has changed we have
        # to update DB.
        if (worker.service_id != service_id or worker.status != self.status):
            try:
                db.worker_update(self._context,
                                 worker.id,
                                 filters={
                                     'service_id': worker.service_id,
                                     'status': worker.status,
                                     'updated_at': worker.updated_at
                                 },
                                 service_id=service_id,
                                 status=self.status,
                                 orm_worker=worker)
            except exception.WorkerNotFound:
                self.worker = None
                raise exception.CleanableInUse(type=self.__class__.__name__,
                                               id=self.id)
        self.worker = worker

    def unset_worker(self):
        if self.worker:
            db.worker_destroy(self._context,
                              id=self.worker.id,
                              status=self.worker.status,
                              service_id=self.worker.service_id)
            self.worker = None

    # NOTE(geguileo): To be compatible with decorate v3.4.x and v4.0.x
    decorate = staticmethod(
        getattr(decorator, 'decorate', lambda f, w: decorator.decorator(w, f)))

    @staticmethod
    def set_workers(*decorator_args):
        """Decorator that adds worker DB rows for cleanable versioned  objects.

        By default will take care of all cleanable objects, but we can limit
        which objects we want by passing the name of the arguments we want
        to be added.
        """
        def _decorator(f):
            def wrapper(f, *args, **kwargs):
                if decorator_args:
                    call_args = inspect.getcallargs(f, *args, **kwargs)
                    candidates = [call_args[obj] for obj in decorator_args]
                else:
                    candidates = list(args)
                    candidates.extend(kwargs.values())
                cleanables = [
                    cand for cand in candidates
                    if (isinstance(cand, CinderCleanableObject)
                        and cand.is_cleanable(pinned=False))
                ]
                try:
                    # Create the entries in the workers table
                    for cleanable in cleanables:
                        cleanable.set_worker()

                    # Call the function
                    result = f(*args, **kwargs)
                finally:
                    # Remove entries from the workers table
                    for cleanable in cleanables:
                        # NOTE(geguileo): We check that the status has changed
                        # to avoid removing the worker entry when we finished
                        # the operation due to an unexpected exception and also
                        # when this process stops because the main process has
                        # stopped.
                        if (cleanable.worker and
                                cleanable.status != cleanable.worker.status):
                            try:
                                cleanable.unset_worker()
                            except Exception:
                                pass
                return result

            return CinderCleanableObject.decorate(f, wrapper)

        # If we don't have optional decorator arguments the argument in
        # decorator_args is the function we have to decorate
        if len(decorator_args) == 1 and callable(decorator_args[0]):
            function = decorator_args[0]
            decorator_args = None
            return _decorator(function)
        return _decorator

    def refresh(self):
        # We want to keep the worker entry on refresh
        worker = self.worker
        super(CinderCleanableObject, self).refresh()
        self.worker = worker
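
The NOTE about decorate compatibility in the class above amounts to two equivalent spellings. A minimal sketch, assuming decorator 4.0 or later for the decorate() form:

import decorator

def caller(func, *args, **kwargs):
    return func(*args, **kwargs)

def ping():
    return "pong"

new_style = decorator.decorate(ping, caller)     # decorator >= 4.0
old_style = decorator.decorator(caller, ping)    # also works on 3.4.x
print(new_style(), old_style())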
Example #37
 def foo(f):
     return decorator.decorator(make_errormator_middleware, f)
Example #38
def doc_stub(item):
    """Doc stub decorator"""
    if not inspect.isclass(item):
        item = decorator(_doc_stub, item)
    setattr(item, ATTR_DOC_STUB, item.__name__)
    return item
Example #39
def memo(f):
    f.cache = {}
    return decorator(_memo, f)
Example #40
        # Append global messages if any.
        if len(c.glob_messages):
            data['glob_msgs'] = map(lambda x: x.to_dict(), c.glob_messages)

        return simplejson.dumps(data)

    except Exception, e:
        exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
        if config['debug']:

            # Log exception and debugging informations.
            log.debug("JSON exception:")
            for t in traceback.format_tb(exceptionTraceback):
                log.debug("%-9s: '%s'" % ("trace", str(t)))
            log.debug("%-9s: '%s'" % ("exception", str(e)))

            # Return ajax dictionary with exception details.
            raise KJsonifyException(str(exceptionType), str(exceptionValue),
                                    traceback.format_tb(exceptionTraceback))

        else:
            # Log exception.
            log.debug("JSON exception: '%s'" % (str(e)))

            # Return generic error.
            raise KJsonifyException(str(exceptionType), 'Internal error')


kjsonify = decorator(kjsonify)
Example #41
from paste.request import construct_url
from paste.deploy.converters import asbool


def authorize(valid, handler):
    """
    Checks if the user is authorized to view page
    """
    def validate(func, self, *args, **kwargs):
        try:
            valid.check()
        except NotValidAuth, e:
            return handler(e)
        return func(self, *args, **kwargs)

    return decorator(validate)


class NotValidAuth(Exception):
    pass


class IsLogged(object):
    """
    Checks if the user is logged in
    """
    def __init__(self, *args):
        self.args = args

    def check(self):
        """
Example #42
def ckan_cache(test=lambda *av, **kw: 0,
               key="cache_default",
               expires=None,
               type=None,
               query_args=False,
               cache_headers=(
                   'content-type',
                   'content-length',
               ),
               **cache_kwargs):
    """
    This is a specialised cache decorator that borrows much of its functionality
    from the func:`pylons.decorators.cache.beaker_cache`. The key differences are

    :param expires: is not the expiry of the local disk or memory cache but the
        expiry that gets set in the max-age Cache-Control header. The default is
        not to set Cache-Control
        
    :param test: is a function that takes the same arguments as the wrapped
        controller and returns a numeric value in seconds from the epoch GMT.

    The ''test'' function is crucial for cache expiry. The decorator keeps a
    timestamp for the last time the cache was updated. If the value returned by
    ''test()'' is greater than the timestamp, the cache will be purged and the
    document re-rendered.

    This decorator sets the ''Last-Modified'', ''ETag'' and ''Cache-Control''
    HTTP headers in the response according to the remembered timestamp and the
    given ''expires'' parameter.

    Other parameters as supported by the beaker cache are supported here in the
    same way.
    
    Some examples:

    .. code-block:: python

        # defaults
        @cache()
        def controller():
            return "I never expire, last-modified is the epoch"

        from time import timegm, gmtime
        @cache(test=lambda *av, **kw: timegm(gmtime()))
        def controller():
            return "I am never cached locally but set cache-control headers"

        @cache(query_args=True)
        def controller():
            return "I cache each new combination of GET parameters separately"
        
    """
    cache_headers = set(cache_headers)
    log = __import__("logging").getLogger("ckan_cache")

    def wrapper(func, *args, **kwargs):
        pylons = get_pylons(args)
        if not cache_enabled:
            log.debug("Caching disabled, skipping cache lookup")
            return func(*args, **kwargs)

        cfg_expires = "%s.expires" % _func_cname(func)

        # this section copies entirely too much from beaker cache
        if key:
            if query_args:
                key_dict = pylons.request.GET.mixed()
            else:
                key_dict = kwargs.copy()
            # beaker only does this if !query_args, we do it in
            # all cases to support both query args and method args
            # in the controller
            key_dict.update(_make_dict_from_args(func, args))
            if key != "cache_default":
                if isinstance(key, list):
                    key_dict = dict((k, key_dict[k]) for k in key)
                else:
                    key_dict = {key: key_dict[key]}
        else:
            key_dict = None

        self = None
        if args:
            self = args[0]

        namespace, cache_key = create_cache_key(func, key_dict, self)

        if type:
            cache_kwargs["type"] = type
        my_cache = pylons.cache.get_cache(namespace, **cache_kwargs)

        ## end copy from beaker_cache

        last_modified = test(*args, **kwargs)
        cache_miss = list()

        def render():
            log.debug("Creating new cache copy with key: %s, type: %s",
                      cache_key, type)
            result = func(*args, **kwargs)
            glob_response = pylons.response
            headers = dict(glob_response.headerlist)
            status = glob_response.status
            full_response = dict(headers=headers,
                                 status=status,
                                 cookies=None,
                                 content=result,
                                 timestamp=last_modified)
            cache_miss.append(True)
            return full_response

        response = my_cache.get_value(cache_key, createfunc=render)
        timestamp = response["timestamp"]
        if timestamp < last_modified:
            my_cache.remove(cache_key)
        response = my_cache.get_value(cache_key, createfunc=render)

        glob_response = pylons.response

        if response["status"][0] in ("4", "5"):  # do not cache 4XX, 5XX
            my_cache.remove(cache_key)
        else:
            headers = dict(glob_response.headerlist)
            headers.update(header for header in response["headers"].items()
                           if header[0].lower() in cache_headers)

            headers["Last-Modified"] = strftime("%a, %d %b %Y %H:%M:%S GMT",
                                                gmtime(last_modified))
            headers["ETag"] = str(last_modified)
            if cache_miss:
                headers["X-CKAN-Cache"] = "MISS"
            else:
                headers["X-CKAN-Cache"] = "HIT"

            if expires:
                if "Pragma" in headers: del headers["Pragma"]
                if "Cache-Control" in headers: del headers["Cache-Control"]
            else:
                headers["Pragma"] = "no-cache"
                headers["Cache-Control"] = "no-cache"

            glob_response.headerlist = headers.items()

            if expires:
                glob_response.cache_expires(seconds=expires)
                cc = glob_response.headers["Cache-Control"]
                glob_response.headers[
                    "Cache-Control"] = "%s, must-revalidate" % cc

        glob_response.status = response['status']
        return response["content"]

    return decorator(wrapper)
Example #43
def layer_register(
        log_shape=True,
        use_scope=True):
    """
    Register a layer.

    Args:
        log_shape (bool): log input/output shape of this layer
        use_scope (bool): whether to call this layer with an extra first argument as scope.
            If set to False, will try to figure out whether the first argument
            is scope name or not.
    """

    def wrapper(func):
        @wraps(func)
        def wrapped_func(*args, **kwargs):
            assert args[0] is not None, args
            if use_scope:
                name, inputs = args[0], args[1]
                args = args[1:]  # actual positional args used to call func
                assert isinstance(name, six.string_types), name
            else:
                assert not log_shape
                if isinstance(args[0], six.string_types):
                    name, inputs = args[0], args[1]
                    args = args[1:]  # actual positional args used to call func
                else:
                    inputs = args[0]
                    name = None
            if not (isinstance(inputs, (tf.Tensor, tf.Variable)) or
                    (isinstance(inputs, (list, tuple)) and
                        isinstance(inputs[0], (tf.Tensor, tf.Variable)))):
                raise ValueError("Invalid inputs to layer: " + str(inputs))

            # TODO use inspect.getcallargs to enhance?
            # update from current argument scope
            actual_args = copy.copy(get_arg_scope()[func.__name__])
            actual_args.update(kwargs)

            if name is not None:        # use scope
                with tf.variable_scope(name) as scope:
                    scope_name = re.sub('tower[0-9]+/', '', scope.name)
                    do_log_shape = log_shape and scope_name not in _LAYER_LOGGED
                    if do_log_shape:
                        logger.info("{} input: {}".format(scope.name, get_shape_str(inputs)))

                    # run the actual function
                    outputs = func(*args, **actual_args)

                    if do_log_shape:
                        # log shape info and add activation
                        logger.info("{} output: {}".format(
                            scope.name, get_shape_str(outputs)))
                        _LAYER_LOGGED.add(scope_name)
            else:
                # run the actual function
                outputs = func(*args, **actual_args)
            return outputs

        wrapped_func.symbolic_function = func   # attribute to access the underlying function object
        wrapped_func.use_scope = use_scope
        _register(func.__name__, wrapped_func)
        return wrapped_func

    # need some special handling for sphinx to work with the arguments
    if building_rtfd():
        from decorator import decorator
        wrapper = decorator(wrapper)

    return wrapper
Example #44
 def real_decorator(function):
     def wrapper(function, *args, **kwargs):
         with tmp(path, teardown=teardown):
             return function(*args, **kwargs)
     return decorator.decorator(wrapper, function)
Example #45
 def mark(func, *a, **kw):
     record_magic(magics, magic_kind, name, func.__name__)
     return decorator(call, func)
Example #46
0
 def mark(func, *a, **kw):
     ip.register_magic_function(func, magic_kind, name)
     return decorator(call, func)
Example #47
0
def https(*redirect_args, **redirect_kwargs):
    """Decorator to redirect to the SSL version of a page if not
    currently using HTTPS. Apply this decorator to controller methods
    (actions).

    Takes a url argument: either a string url, or a callable returning a
    string url. The callable will be called with no arguments when the
    decorated method is called. The url's scheme will be rewritten to
    https if necessary.

    Non-HTTPS POST requests are aborted (405 response code) by this
    decorator.

    Example:

    .. code-block:: python

        # redirect to HTTPS /pylons
        @https('/pylons')
        def index(self):
            do_secure()

        # redirect to HTTPS /auth/login, delaying the url() call until
        # later (as the url object may not be functional when the
        # decorator/method are defined)
        @https(lambda: url(controller='auth', action='login'))
        def login(self):
            do_secure()

        # redirect to HTTPS version of myself
        @https()
        def get(self):
            do_secure()

    .. warning::

        Arguments as would be passed to the
        :func:`url_for`/:func:`redirect_to` functions are
        deprecated. Explicitly specify the url or a callable returning
        the url instead.

    """
    def wrapper(func, *args, **kwargs):
        """Decorator Wrapper function"""
        request = get_pylons(args).request
        if request.scheme.lower() == 'https':
            return func(*args, **kwargs)
        if request.method.upper() == 'POST':
            # don't allow POSTs (raises an exception)
            abort(405, headers=[('Allow', 'GET')])

        if redirect_kwargs:
            # XXX: not a foolproof check for url_for arguments, but the
            # best we can do
            import warnings
            from routes import url_for
            warnings.warn(
                'Calling https with url_for args is deprecated, use '
                'https(lambda: url(*args, **kwargs)) instead',
                DeprecationWarning, 2)
            redirect_kwargs['protocol'] = 'https'
            url = url_for(*redirect_args, **redirect_kwargs)
        elif not redirect_args:
            url = request.url
        else:
            url = redirect_args[0]
            if callable(url):
                url = url()
        # Ensure an https scheme, which also needs a host
        parts = urlparse.urlparse(url)
        url = urlparse.urlunparse(('https', parts[1] or request.host) +
                                  parts[2:])

        log.debug('Redirecting non-https request: %s to: %s',
                  request.path_info, url)
        redirect(url)

    return decorator(wrapper)
Example #48
0
def beaker_cache(key="cache_default",
                 expire="never",
                 type=None,
                 query_args=False,
                 cache_headers=('content-type', 'content-length'),
                 invalidate_on_startup=False,
                 cache_response=True,
                 **b_kwargs):
    """Cache decorator utilizing Beaker. Caches action or other
    function that returns a pickle-able object as a result.

    Optional arguments:

    ``key``
        None - No variable key, uses function name as key
        "cache_default" - Uses all function arguments as the key
        string - Use kwargs[key] as key
        list - Use [kwargs[k] for k in list] as key
    ``expire``
        Time in seconds before cache expires, or the string "never".
        Defaults to "never"
    ``type``
        Type of cache to use: dbm, memory, file, memcached, or None for
        Beaker's default
    ``query_args``
        Uses the query arguments as the key, defaults to False
    ``cache_headers``
        A tuple of header names indicating response headers that
        will also be cached.
    ``invalidate_on_startup``
        If True, the cache will be invalidated each time the application
        starts or is restarted.
    ``cache_response``
        Determines whether the response at the time beaker_cache is used
        should be cached or not, defaults to True.

        .. note::
            When cache_response is set to False, the cache_headers
            argument is ignored as none of the response is cached.

    If cache_enabled is set to False in the .ini file, then cache is
    disabled globally.

    """
    if invalidate_on_startup:
        starttime = time.time()
    else:
        starttime = None
    cache_headers = set(cache_headers)

    def wrapper(func, *args, **kwargs):
        """Decorator wrapper"""
        pylons = get_pylons(args)
        log.debug("Wrapped with key: %s, expire: %s, type: %s, query_args: %s",
                  key, expire, type, query_args)
        enabled = pylons.config.get("cache_enabled", "True")
        if not asbool(enabled):
            log.debug("Caching disabled, skipping cache lookup")
            return func(*args, **kwargs)

        if key:
            key_dict = kwargs.copy()
            key_dict.update(_make_dict_from_args(func, args))

            # FIXME: if we can stop these variables from being passed to the
            # controller action (and once the Genshi Markup/pickle problem is
            # fixed, see below), then we can use the stock beaker_cache.
            # Remove some system variables that can cause issues while generating cache keys
            for x in ("pylons", "start_response", "environ"):
                key_dict.pop(x, None)

            if query_args:
                key_dict.update(pylons.request.GET.mixed())

            if key != "cache_default":
                if isinstance(key, list):
                    key_dict = dict((k, key_dict[k]) for k in key)
                else:
                    key_dict = {key: key_dict[key]}
        else:
            key_dict = None

        self = None
        if args:
            self = args[0]
        namespace, cache_key = create_cache_key(func, key_dict, self)

        if type:
            b_kwargs['type'] = type

        cache_obj = getattr(pylons.app_globals, 'cache', None)
        if not cache_obj:
            cache_obj = getattr(pylons, 'cache', None)
        if not cache_obj:
            raise Exception('No cache object found')
        my_cache = cache_obj.get_cache(namespace, **b_kwargs)

        if expire == "never":
            cache_expire = None
        else:
            cache_expire = expire

        def create_func():
            log.debug("Creating new cache copy with key: %s, type: %s",
                      cache_key, type)
            result = func(*args, **kwargs)
            # This is one of the two changes to the stock beaker_cache
            # decorator
            if hasattr(result, '__html__'):
                # Genshi Markup object, can not be pickled
                result = unicode(result.__html__())
            glob_response = pylons.response
            headers = glob_response.headerlist
            status = glob_response.status
            full_response = dict(headers=headers,
                                 status=status,
                                 cookies=None,
                                 content=result)
            return full_response

        response = my_cache.get_value(cache_key,
                                      createfunc=create_func,
                                      expiretime=cache_expire,
                                      starttime=starttime)
        if cache_response:
            glob_response = pylons.response
            glob_response.headerlist = [
                header for header in response['headers']
                if header[0].lower() in cache_headers
            ]
            glob_response.status = response['status']

        return response['content']

    return decorator(wrapper)
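A short sketch of how the decorator above would typically be applied to a Pylons controller action (the controller class, action, and template path are hypothetical, as are BaseController and render, which are assumed to come from a typical Pylons project layout):

# Hypothetical cached controller action (sketch only).
class WidgetsController(BaseController):

    @beaker_cache(expire=600, type='memory', query_args=True)
    def index(self):
        # The pickle-able return value (plus the headers listed in
        # cache_headers) is stored under a key built from the action's
        # arguments and, because query_args=True, the query string.
        return render('/widgets/index.mako')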
Example #49
0
                authfail_callback(action_id=action_id)

            if authfail_exception is not None:
                try:
                    af_exc = authfail_exception(action_id=action_id)
                except:
                    af_exc = authfail_exception()
                raise af_exc

            if authfail_result is AUTHFAIL_DONTCATCH:
                raise

            return authfail_result

    if func is not None:
        return decorator(_enable_proxy, func)
    else:

        def decorate(func):
            return decorator(_enable_proxy, func)

        return decorate


class NotAuthorizedException(dbus.DBusException):
    """Exception which a DBus service method throws if an authorization
    required for executing it can't be obtained."""

    _dbus_error_name = \
        "org.fedoraproject.slip.dbus.service.PolKit.NotAuthorizedException"
Example #50
0
def logged_api_call(func):
    """
    Function decorator that causes the decorated API function or method to log
    calls to itself to a logger.

    The logger's name is the dotted module name of the module defining the
    decorated function (e.g. 'zhmcclient._cpc').

    Parameters:

      func (function object): The original function being decorated.

    Returns:

      function object: The function wrapping the original function being
        decorated.

    Raises:

      TypeError: The @logged_api_call decorator must be used on a function or
        method (and not on top of the @property decorator).
    """

    # Note that in this decorator function, we are in a module loading context,
    # where the decorated functions are being defined. When this decorator
    # function is called, its call stack represents the definition of the
    # decorated functions. Not all global definitions in the module have been
    # defined yet, and methods of classes that are decorated with this
    # decorator are still functions at this point (and not yet methods).

    module = inspect.getmodule(func)
    if not inspect.isfunction(func) or not hasattr(module, '__name__'):
        raise TypeError("The @logged_api_call decorator must be used on a "
                        "function or method (and not on top of the @property "
                        "decorator)")

    try:
        # We avoid the use of inspect.getouterframes() because it is slow,
        # and use the pointers up the stack frame, instead.

        this_frame = inspect.currentframe()  # this decorator function here
        apifunc_frame = this_frame.f_back  # the decorated API function

        apifunc_owner = inspect.getframeinfo(apifunc_frame)[2]

    finally:
        # Recommended way to deal with frame objects to avoid ref cycles
        del this_frame
        del apifunc_frame

    # TODO: For inner functions, show all outer levels instead of just one.

    if apifunc_owner == '<module>':
        # The decorated API function is defined globally (at module level)
        apifunc_str = '{func}()'.format(func=func.__name__)
    else:
        # The decorated API function is defined in a class or in a function
        apifunc_str = '{owner}.{func}()'.format(owner=apifunc_owner,
                                                func=func.__name__)

    logger = get_logger(API_LOGGER_NAME)

    def is_external_call():
        """
        Return a boolean indicating whether the call to the decorated API
        function is an external call (vs. being an internal call).
        """
        try:
            # We avoid the use of inspect.getouterframes() because it is slow,
            # and use the pointers up the stack frame, instead.

            log_it_frame = inspect.currentframe()  # this log_it() function
            log_api_call_frame = log_it_frame.f_back  # the log_api_call() func
            apifunc_frame = log_api_call_frame.f_back  # the decorated API func
            apicaller_frame = apifunc_frame.f_back  # caller of API function
            apicaller_module = inspect.getmodule(apicaller_frame)
            if apicaller_module is None:
                apicaller_module_name = "<unknown>"
            else:
                apicaller_module_name = apicaller_module.__name__
        finally:
            # Recommended way to deal with frame objects to avoid ref cycles
            del log_it_frame
            del log_api_call_frame
            del apifunc_frame
            del apicaller_frame
            del apicaller_module

        # Log only if the caller is not from the zhmcclient package
        return apicaller_module_name.split('.')[0] != 'zhmcclient'

    def log_api_call(func, *args, **kwargs):
        """
        Log entry to and exit from the decorated function, at the debug level.

        Note that this wrapper function is called every time the decorated
        function/method is called, but that the log message only needs to be
        constructed when logging for this logger and for this log level is
        turned on. Therefore, we do as much as possible in the decorator
        function, plus we use %-formatting and lazy interpolation provided by
        the log functions, in order to save resources in this function here.

        Parameters:

          func (function object): The decorated function.

          *args: Any positional arguments for the decorated function.

          **kwargs: Any keyword arguments for the decorated function.
        """

        # Note that in this function, we are in the context where the
        # decorated function is actually called.

        _log_it = is_external_call() and logger.isEnabledFor(logging.DEBUG)

        if _log_it:
            logger.debug("Called: {}, args: {:.500}, kwargs: {:.500}".format(
                apifunc_str, log_escaped(repr(args)),
                log_escaped(repr(kwargs))))

        result = func(*args, **kwargs)

        if _log_it:
            logger.debug("Return: {}, result: {:.1000}".format(
                apifunc_str, log_escaped(repr(result))))

        return result

    if 'decorate' in globals():
        return decorate(func, log_api_call)
    else:
        return decorator(log_api_call, func)
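A sketch of the decorator above applied to an API method (the class, the method, and the self._fetch helper are hypothetical; only logged_api_call itself comes from the source):

# Hypothetical API class using logged_api_call (sketch only).
class Cpc(object):

    @logged_api_call
    def list_partitions(self, full_properties=False):
        # Entry and exit are logged at DEBUG level on the logger named by
        # API_LOGGER_NAME, but only for calls made from outside the
        # zhmcclient package.
        return self._fetch('/api/partitions', full_properties)  # placeholder helper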
Example #51
0
def contracts_decorate(function_, modify_docstring=True, **kwargs):
    """ An explicit way to decorate a given function.
        The decorator :py:func:`decorate` calls this function internally.
    """

    if isinstance(function_, classmethod):
        msg = """
The function is a classmethod; PyContracts cannot decorate a classmethod. 
You can, however, first decorate a function and then turn it into a
classmethod.

For example, instead of doing this:

    class A():
    
        @contract(a='>0')
        @classmethod
        def f(cls, a):
            pass

you can achieve the same goal by inverting the two decorators:

    class A():
    
        @classmethod
        @contract(a='>0')
        def f(cls, a):
            pass
"""
        raise CannotDecorateClassmethods(msg)

    all_args = get_all_arg_names(function_)

    if kwargs:

        returns = kwargs.pop('returns', None)

        for kw in kwargs:
            if kw not in all_args:
                msg = 'Unknown parameter %r; I know %r.' % (kw, all_args)
                raise ContractException(msg)

        accepts_dict = dict(**kwargs)

    else:
        # Py3k: check if there are annotations
        annotations = get_annotations(function_)

        if annotations:
            if 'return' in annotations:
                returns = annotations['return']
                del annotations['return']
            else:
                returns = None

            accepts_dict = annotations
        else:
            # Last resort: get types from documentation string.
            if function_.__doc__ is None:
                # XXX: change name
                raise ContractException(
                    'You did not specify a contract, nor can I '
                    'find a docstring for %r.' % function_)

            accepts_dict, returns = parse_contracts_from_docstring(function_)

            if not accepts_dict and not returns:
                raise ContractException('No contract specified in docstring.')

    if returns is None:
        returns_parsed = None
    else:
        returns_parsed = parse_flexible_spec(returns)

    accepts_parsed = dict([(x, parse_flexible_spec(accepts_dict[x]))
                           for x in accepts_dict])

    is_bound_method = 'self' in all_args

    def contracts_checker(unused, *args, **kwargs):
        do_checks = not all_disabled()
        if not do_checks:
            return function_(*args, **kwargs)

        def get_nice_function_display():
            nice_function_display = '%s()' % function_.__name__
            if is_bound_method:
                klass = type(args[0]).__name__
                nice_function_display = klass + ':' + nice_function_display
            return nice_function_display

        bound = getcallargs(function_, *args, **kwargs)

        context = {}
        # add self if we are a bound method
        if is_bound_method:
            context['self'] = args[0]

        for arg in all_args:
            if arg in accepts_parsed:
                try:
                    accepts_parsed[arg]._check_contract(context, bound[arg])
                except ContractNotRespected as e:
                    msg = ('Breach for argument %r to %s.\n'
                           % (arg, get_nice_function_display()))
                    e.error = msg + e.error
                    raise e

        result = function_(*args, **kwargs)

        if returns_parsed is not None:
            try:
                returns_parsed._check_contract(context, result)
            except ContractNotRespected as e:
                msg = ('Breach for return value of %s.\n'
                       % (get_nice_function_display()))
                e.error = msg + e.error
                raise e

        return result

    # TODO: add rtype statements if missing

    if modify_docstring:
        def write_contract_as_rst(c):
            return '``%s``' % c

        if function_.__doc__ is not None:
            docs = DocStringInfo.parse(function_.__doc__)
        else:
            docs = DocStringInfo("")
        for param in accepts_parsed:
            if param not in docs.params:
                # default = '*not documented*'
                default = ''
                docs.params[param] = Arg(default, None)

            docs.params[param].type = \
                write_contract_as_rst(accepts_parsed[param])

        if returns_parsed is not None:
            if not docs.returns:
                docs.returns.append(Arg(None, None))
            docs.returns[0].type = write_contract_as_rst(returns_parsed)
        new_docs = docs.__str__()

    else:
        new_docs = function_.__doc__

    # XXX: why doesn't this work?
    contracts_checker.__name__ = 'checker-for-%s' % function_.__name__
    contracts_checker.__module__ = function_.__module__

    # TODO: is using functools.wraps better?
    from decorator import decorator

    wrapper = decorator(contracts_checker, function_)

    wrapper.__doc__ = new_docs
    wrapper.__name__ = function_.__name__
    wrapper.__module__ = function_.__module__

    wrapper.__contracts__ = dict(returns=returns_parsed, **accepts_parsed)
    return wrapper
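A sketch of calling contracts_decorate explicitly, as an alternative to the @contract decorator it backs (the function f and the contract strings are illustrative, not taken from the source):

# Hypothetical explicit decoration (sketch only).
def f(a, b):
    return a + b

# Keyword arguments name f's parameters; 'returns' constrains the result.
f_checked = contracts_decorate(f, a='int,>0', b='int,>0', returns='int,>0')

f_checked(1, 2)      # passes the contract checks
# f_checked(-1, 2)   # would raise ContractNotRespected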
Example #52
0
 def decorate(func):
     return decorator(_enable_proxy, func)
Example #53
0
 def sage_method(function):
     return decorator.decorator(_sage_method, function)
Example #54
0
def async_db_test(func):
    def inner(func, *args, **kwargs):
        return async_to_sync(func)(*args, **kwargs)

    return decorator.decorator(inner, func)
Example #55
0
def memoize(f):
    f.cache = {}
    return decorator(_memoize, f)
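The _memoize caller is not shown in this snippet; a minimal compatible sketch (an assumption, not the original helper) that works with decorator(_memoize, f) and the f.cache dict attached above could look like this:

# Hypothetical companion caller for the memoize() snippet above (sketch only).
def _memoize(func, *args, **kwargs):
    # Build a hashable key; frozenset makes keyword arguments order-insensitive.
    key = (args, frozenset(kwargs.items())) if kwargs else args
    cache = func.cache  # attached by memoize() above
    if key not in cache:
        cache[key] = func(*args, **kwargs)
    return cache[key]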
Example #56
0
def timelag_filter(function, pad=True, index=0):
    """Filtering in the time-lag domain.

    This is primarily useful for adapting image filters to operate on
    `recurrence_to_lag` output.

    Using `timelag_filter` is equivalent to the following sequence of
    operations:

    >>> data_tl = librosa.segment.recurrence_to_lag(data)
    >>> data_filtered_tl = function(data_tl)
    >>> data_filtered = librosa.segment.lag_to_recurrence(data_filtered_tl)

    Parameters
    ----------
    function : callable
        The filtering function to wrap, e.g., `scipy.ndimage.median_filter`

    pad : bool
        Whether to zero-pad the structure feature matrix

    index : int >= 0
        If ``function`` accepts input data as a positional argument, it should be
        indexed by ``index``


    Returns
    -------
    wrapped_function : callable
        A new filter function which applies in time-lag space rather than
        time-time space.


    Examples
    --------

    Apply a 31-bin median filter to the diagonal of a recurrence matrix.
    With default parameters, this corresponds to a time window of about
    0.72 seconds.

    >>> y, sr = librosa.load(librosa.ex('nutcracker'), duration=30)
    >>> chroma = librosa.feature.chroma_cqt(y=y, sr=sr)
    >>> chroma_stack = librosa.feature.stack_memory(chroma, n_steps=3, delay=3)
    >>> rec = librosa.segment.recurrence_matrix(chroma_stack)
    >>> from scipy.ndimage import median_filter
    >>> diagonal_median = librosa.segment.timelag_filter(median_filter)
    >>> rec_filtered = diagonal_median(rec, size=(1, 31), mode='mirror')

    Or with affinity weights

    >>> rec_aff = librosa.segment.recurrence_matrix(chroma_stack, mode='affinity')
    >>> rec_aff_fil = diagonal_median(rec_aff, size=(1, 31), mode='mirror')

    >>> import matplotlib.pyplot as plt
    >>> fig, ax = plt.subplots(nrows=2, ncols=2, sharex=True, sharey=True)
    >>> librosa.display.specshow(rec, y_axis='s', x_axis='s', ax=ax[0, 0])
    >>> ax[0, 0].set(title='Raw recurrence matrix')
    >>> ax[0, 0].label_outer()
    >>> librosa.display.specshow(rec_filtered, y_axis='s', x_axis='s', ax=ax[0, 1])
    >>> ax[0, 1].set(title='Filtered recurrence matrix')
    >>> ax[0, 1].label_outer()
    >>> librosa.display.specshow(rec_aff, x_axis='s', y_axis='s',
    ...                          cmap='magma_r', ax=ax[1, 0])
    >>> ax[1, 0].set(title='Raw affinity matrix')
    >>> librosa.display.specshow(rec_aff_fil, x_axis='s', y_axis='s',
    ...                          cmap='magma_r', ax=ax[1, 1])
    >>> ax[1, 1].set(title='Filtered affinity matrix')
    >>> ax[1, 1].label_outer()
    """
    def __my_filter(wrapped_f, *args, **kwargs):
        """Decorator to wrap the filter"""
        # Map the input data into time-lag space
        args = list(args)

        args[index] = recurrence_to_lag(args[index], pad=pad)

        # Apply the filtering function
        result = wrapped_f(*args, **kwargs)

        # Map back into time-time and return
        return lag_to_recurrence(result)

    return decorator(__my_filter, function)
Example #57
0
def profile(method=None, whitelist=None, blacklist=(None,), files=None,
        minimum_time=0, minimum_queries=0):
    """
        Decorate an entry point method.
        If profile is used without params, log as shallow mode else, log
        all methods for all odoo models by applying the optional filters.

        :param whitelist: None or list of model names to display in the log
                        (Default: None)
        :type whitelist: list or None
        :param files: None or list of filenames to display in the log
                        (Default: None)
        :type files: list or None
        :param list blacklist: list model names to remove from the log
                        (Default: remove non odoo model from the log: [None])
        :param int minimum_time: minimum time (ms) to display a method
                        (Default: 0)
        :param int minimum_queries: minimum sql queries to display a method
                        (Default: 0)
        
        .. code-block:: python

          from odoo.tools.profiler import profile

          class SaleOrder(models.Model):
            ...

            @api.model
            @profile                    # log only this create method
            def create(self, vals):
            ...
            @api.multi
            @profile()                  # log all methods for all odoo models
            def unlink(self):
            ...
            @profile(whitelist=['sale.order', 'ir.model.data'])
            def action_quotation_send(self):
            ...
            @profile(files=['/home/openerp/odoo/odoo/addons/sale/models/sale.py'])
            def write(self):
            ...

        NB: The use of the profiler modifies the execution time
    """

    deep = not method

    def _odooProfile(method, *args, **kwargs):
        log_tracer = _LogTracer(whitelist=whitelist, blacklist=blacklist, files=files, deep=deep)
        sys.settrace(log_tracer.tracer)
        try:
            result = method(*args, **kwargs)
        finally:
            sys.settrace(None)

        log = ["\n%-10s%-10s%s\n" % ('calls', 'queries', 'ms')]

        for v in log_tracer.profiles.values():
            v['report'] = {}
            l = len(v['calls'])
            for k, call in enumerate(v['calls']):
                if k+1 >= l:
                    continue

                if call['lineno'] not in v['report']:
                    v['report'][call['lineno']] = {
                        'nb_queries': 0,
                        'delay': 0,
                        'nb': 0,
                    }
                v['report'][call['lineno']]['nb'] += 1

                n = k+1
                while n < l and v['calls'][n]['callno'] != call['callno']:
                    n += 1
                if n >= l:
                    continue
                next_call = v['calls'][n]
                if next_call['queries'] is not None:
                    v['report'][call['lineno']]['nb_queries'] += next_call['queries'] - call.get('queries', 0)
                v['report'][call['lineno']]['delay'] += next_call['time'] - call['time']

            queries = 0
            delay = 0
            for call in v['report'].values():
                queries += call['nb_queries']
                delay += call['delay']

            if minimum_time and minimum_time > delay*1000:
                continue
            if minimum_queries and minimum_queries > queries:
                continue

            # todo: no color if output in a file
            log.append("\033[1;33m%s %s--------------------- %s, %s\033[1;0m\n\n" % (v['model'] or '', '-' * (15-len(v['model'] or '')), v['filename'], v['firstline']))
            for lineno, line in enumerate(v['code']):
                if (lineno + v['firstline']) in v['report']:
                    data = v['report'][lineno + v['firstline']]
                    log.append("%-10s%-10s%-10s%s" % (
                        str(data['nb']) if 'nb_queries' in data else '.',
                        str(data.get('nb_queries', '')),
                        str(round(data['delay']*100000)/100) if 'delay' in data else '',
                        line[:-1]))
                else:
                    log.append(" " * 30)
                    log.append(line[:-1])
                log.append('\n')

            log.append("\nTotal:\n%-10s%-10d%-10s\n\n" % (
                        str(data['nb']),
                        queries,
                        str(round(delay*100000)/100)))

        _logger.info(''.join(log))

        return result

    if not method:
        return lambda method: decorator(_odooProfile, method)

    wrapper = decorator(_odooProfile, method)
    return wrapper
Example #58
0
 def decorate_func(f):
     if args:
         f.__blazeweb_task_attrs = args
     return decorator(_attributes, f)
Example #59
0
 def _rewrite_wrapper_signature(self, wrapper):
     # Convert the callable's signature into the wrapper's signature and set
     # it on the wrapper.
     return decorator.decorator(
         wrapper, self._callable_sig_converter_(self._callable))
Example #60
0
def layer_register(
        summary_activation=False,
        log_shape=True,
        use_scope=True):
    """
    Register a layer.

    Args:
        summary_activation (bool): Define the default behavior of whether to
            summarize the output (activation) of this layer.
            Can be overridden when creating the layer.
        log_shape (bool): log input/output shape of this layer
        use_scope (bool): whether to call this layer with an extra first argument as the scope name.
            If set to False, it will try to figure out whether the first
            argument is a scope name or not.
    """

    def wrapper(func):
        @wraps(func)
        def wrapped_func(*args, **kwargs):
            if use_scope:
                name, inputs = args[0], args[1]
                args = args[1:]  # actual positional args used to call func
                assert isinstance(name, six.string_types), name
            else:
                assert not log_shape and not summary_activation
                if isinstance(args[0], six.string_types):
                    name, inputs = args[0], args[1]
                    args = args[1:]  # actual positional args used to call func
                else:
                    inputs = args[0]
                    name = None
            if not (isinstance(inputs, (tf.Tensor, tf.Variable)) or
                    (isinstance(inputs, (list, tuple)) and
                        isinstance(inputs[0], (tf.Tensor, tf.Variable)))):
                raise ValueError("Invalid inputs to layer: " + str(inputs))
            do_summary = kwargs.pop(
                'summary_activation', summary_activation)

            # TODO use inspect.getcallargs to enhance?
            # update from current argument scope
            actual_args = copy.copy(get_arg_scope()[func.__name__])
            actual_args.update(kwargs)

            if name is not None:
                with tf.variable_scope(name) as scope:
                    do_log_shape = log_shape and scope.name not in _layer_logged
                    do_summary = do_summary and scope.name not in _layer_logged
                    if do_log_shape:
                        logger.info("{} input: {}".format(scope.name, get_shape_str(inputs)))

                    # run the actual function
                    outputs = func(*args, **actual_args)

                    if do_log_shape:
                        # log shape info and add activation
                        logger.info("{} output: {}".format(
                            scope.name, get_shape_str(outputs)))
                        _layer_logged.add(scope.name)

                    if do_summary:
                        if isinstance(outputs, list):
                            for x in outputs:
                                add_activation_summary(x, scope.name)
                        else:
                            add_activation_summary(outputs, scope.name)
            else:
                # run the actual function
                outputs = func(*args, **actual_args)
            return outputs

        wrapped_func.f = func   # attribute to access the underlying function object
        wrapped_func.use_scope = use_scope
        return wrapped_func

    # need some special handling for sphinx to work with the arguments
    if building_rtfd():
        from decorator import decorator
        wrapper = decorator(wrapper)

    return wrapper