def prepare_cache():
    register_backend("dictionary", "client.services.simple_cache",
                     "DictionaryBackend")

    region = make_region("myregion")
    region.configure("dictionary", wrap=[LoggingProxy])

    return region
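
For orientation, a minimal sketch of how a caller might consume the region returned by prepare_cache, using only standard dogpile.cache CacheRegion methods (it assumes the project's DictionaryBackend and LoggingProxy modules are importable; the cached names and values are illustrative):

region = prepare_cache()

# Direct get/set against the wrapped dictionary backend.
region.set("greeting", "hello")
assert region.get("greeting") == "hello"

# Or memoise a function through the region.
@region.cache_on_arguments()
def expensive_lookup(item_id):
    # Illustrative placeholder for a slow computation.
    return {"id": item_id}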
Example #2
File: cache.py Project: 0x15/weasyl
    def set(self, key, value):
        try:
            self.client.async_operation('set', key, value)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass

    def set_multi(self, items):
        try:
            self.client.async_operation('setMultiple', items)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass

    def delete(self, key):
        try:
            self.client.async_operation('delete', key)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass

    def delete_multi(self, keys):
        try:
            self.client.async_operation('deleteMultiple', keys)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass


register_backend('txyam', 'weasyl.cache', 'YamBackend')


__all__ = ['region']
Example #3
    def release(self):
        return

class MockBackend(CacheBackend):
    def __init__(self, arguments):
        self.arguments = arguments
        self._cache = {}

    def get_mutex(self, key):
        return MockMutex(key)

    def get(self, key):
        try:
            return self._cache[key]
        except KeyError:
            return NO_VALUE

    def get_multi(self, keys):
        return [self.get(key) for key in keys]

    def set(self, key, value):
        self._cache[key] = value

    def set_multi(self, mapping):
        for key, value in mapping.items():
            self.set(key, value)

    def delete(self, key):
        self._cache.pop(key, None)

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)


register_backend("mock", __name__, "MockBackend")
Example #4
def includeme(config: pyramid.config.Configurator):
    """
    This function returns a Pyramid WSGI application.
    """

    settings = config.get_settings()

    config.include("c2cgeoportal_commons")

    call_hook(settings, "after_settings", settings)

    get_user_from_request = create_get_user_from_request(settings)
    config.add_request_method(get_user_from_request,
                              name="user",
                              property=True)
    config.add_request_method(get_user_from_request, name="get_user")

    # Configure 'locale' dir as the translation dir for c2cgeoportal app
    config.add_translation_dirs("c2cgeoportal_geoportal:locale/")

    config.include("c2cwsgiutils.pyramid.includeme")
    health_check = HealthCheck(config)
    config.registry["health_check"] = health_check

    metrics_config = config.registry.settings["metrics"]
    if metrics_config["memory_maps_rss"]:
        add_provider(MemoryMapProvider("rss"))
    if metrics_config["memory_maps_size"]:
        add_provider(MemoryMapProvider("size"))
    if metrics_config["memory_cache"]:
        add_provider(
            MemoryCacheSizeProvider(
                metrics_config.get("memory_cache_all", False)))
    if metrics_config["raster_data"]:
        add_provider(RasterDataSizeProvider())
    if metrics_config["total_python_object_memory"]:
        add_provider(TotalPythonObjectMemoryProvider())

    # Initialise DBSessions
    init_dbsessions(settings, config, health_check)

    checker.init(config, health_check)
    check_collector.init(config, health_check)

    # dogpile.cache configuration
    if "cache" in settings:
        register_backend("c2cgeoportal.hybrid",
                         "c2cgeoportal_geoportal.lib.caching", "HybridBackend")
        for name, cache_config in settings["cache"].items():
            caching.init_region(cache_config, name)

            @zope.event.classhandler.handler(InvalidateCacheEvent)
            def handle(event: InvalidateCacheEvent):  # pylint: disable=unused-variable
                del event
                caching.invalidate_region()
                if caching.MEMORY_CACHE_DICT:
                    caching.get_region("std").delete_multi(
                        caching.MEMORY_CACHE_DICT.keys())
                caching.MEMORY_CACHE_DICT.clear()

    # Register a tween to get back the cache buster path.
    if "cache_path" not in config.get_settings():
        config.get_settings()["cache_path"] = ["static"]
    config.add_tween(
        "c2cgeoportal_geoportal.lib.cacheversion.CachebusterTween")
    config.add_tween("c2cgeoportal_geoportal.lib.headers.HeadersTween")

    # Bind the mako renderer to other file extensions
    add_mako_renderer(config, ".html")
    add_mako_renderer(config, ".js")

    # Add the "geojson" renderer
    config.add_renderer("geojson", GeoJSON())

    # Add the "xsd" renderer
    config.add_renderer("xsd", XSD(include_foreign_keys=True))

    # Add the set_user_validator directive, and set a default user validator
    config.add_directive("set_user_validator", set_user_validator)
    config.set_user_validator(default_user_validator)

    config.add_route("dynamic", "/dynamic.json", request_method="GET")

    # Add routes to the mapserver proxy
    config.add_route_predicate("mapserverproxy", MapserverproxyRoutePredicate)
    config.add_route(
        "mapserverproxy",
        "/mapserv_proxy",
        mapserverproxy=True,
        pregenerator=C2CPregenerator(role=True),
        request_method="GET",
    )
    config.add_route(
        "mapserverproxy_post",
        "/mapserv_proxy",
        mapserverproxy=True,
        pregenerator=C2CPregenerator(role=True),
        request_method="POST",
    )
    add_cors_route(config, "/mapserv_proxy", "mapserver")

    # Add route to the tinyows proxy
    config.add_route("tinyowsproxy",
                     "/tinyows_proxy",
                     pregenerator=C2CPregenerator(role=True))

    # Add routes to the entry view class
    config.add_route("base", "/", static=True)
    config.add_route("loginform", "/login.html", request_method="GET")
    add_cors_route(config, "/login", "login")
    config.add_route("login", "/login", request_method="POST")
    add_cors_route(config, "/logout", "login")
    config.add_route("logout", "/logout", request_method="GET")
    add_cors_route(config, "/loginchangepassword", "login")
    config.add_route("change_password",
                     "/loginchangepassword",
                     request_method="POST")
    add_cors_route(config, "/loginresetpassword", "login")
    config.add_route("loginresetpassword",
                     "/loginresetpassword",
                     request_method="POST")
    add_cors_route(config, "/loginuser", "login")
    config.add_route("loginuser", "/loginuser", request_method="GET")
    config.add_route("testi18n", "/testi18n.html", request_method="GET")

    config.add_renderer(".map", AssetRendererFactory)
    config.add_renderer(".css", AssetRendererFactory)
    config.add_renderer(".ico", AssetRendererFactory)
    config.add_route("localejson", "/locale.json", request_method="GET")

    def add_api_route(name: str, attr: str, path: str, renderer: str):
        config.add_route(name, path, request_method="GET")
        config.add_view(Entry, attr=attr, route_name=name, renderer=renderer)

    add_api_route("favicon", "favicon", "/favicon.ico",
                  "/etc/geomapfish/static/images/favicon.ico")
    add_api_route("apijs", "apijs", "/api.js", "/etc/static-ngeo/api.js")
    add_api_route("apijsmap", "apijsmap", "/api.js.map",
                  "/etc/static-ngeo/api.js.map")
    add_api_route("apicss", "apicss", "/api.css", "/etc/static-ngeo/api.css")
    add_api_route("apihelp", "apihelp", "/apihelp/index.html",
                  "/etc/geomapfish/static/apihelp/index.html")
    c2cgeoportal_geoportal.views.add_redirect(config, "apihelp_redirect",
                                              "/apihelp.html", "apihelp")

    config.add_route("themes",
                     "/themes",
                     request_method="GET",
                     pregenerator=C2CPregenerator(role=True))

    config.add_route("invalidate", "/invalidate", request_method="GET")

    # Print proxy routes
    config.add_route("printproxy", "/printproxy", request_method="HEAD")
    add_cors_route(config, "/printproxy/*all", "print")
    config.add_route(
        "printproxy_capabilities",
        "/printproxy/capabilities.json",
        request_method="GET",
        pregenerator=C2CPregenerator(role=True),
    )
    config.add_route(
        "printproxy_report_create",
        "/printproxy/report.{format}",
        request_method="POST",
        header=JSON_CONTENT_TYPE,
    )
    config.add_route("printproxy_status",
                     "/printproxy/status/{ref}.json",
                     request_method="GET")
    config.add_route("printproxy_cancel",
                     "/printproxy/cancel/{ref}",
                     request_method="DELETE")
    config.add_route("printproxy_report_get",
                     "/printproxy/report/{ref}",
                     request_method="GET")

    # Full-text search routes
    add_cors_route(config, "/search", "fulltextsearch")
    config.add_route("fulltextsearch", "/search", request_method="GET")

    # Access to raster data
    add_cors_route(config, "/raster", "raster")
    config.add_route("raster", "/raster", request_method="GET")

    add_cors_route(config, "/profile.json", "profile")
    config.add_route("profile.json", "/profile.json", request_method="POST")

    # Shortener
    add_cors_route(config, "/short/create", "shortener")
    config.add_route("shortener_create",
                     "/short/create",
                     request_method="POST")
    config.add_route("shortener_get", "/s/{ref}", request_method="GET")

    # Geometry processing
    config.add_route("difference", "/difference", request_method="POST")

    # PDF report tool
    config.add_route("pdfreport",
                     "/pdfreport/{layername}/{ids}",
                     request_method="GET")

    # Add routes for the "layers" web service
    add_cors_route(config, "/layers/*all", "layers")
    config.add_route("layers_count",
                     "/layers/{layer_id:\\d+}/count",
                     request_method="GET")
    config.add_route(
        "layers_metadata",
        "/layers/{layer_id:\\d+}/md.xsd",
        request_method="GET",
        pregenerator=C2CPregenerator(role=True),
    )
    config.add_route("layers_read_many",
                     "/layers/{layer_id:\\d+,?(\\d+,)*\\d*$}",
                     request_method="GET")  # supports URLs like /layers/1,2,3
    config.add_route("layers_read_one",
                     "/layers/{layer_id:\\d+}/{feature_id}",
                     request_method="GET")
    config.add_route("layers_create",
                     "/layers/{layer_id:\\d+}",
                     request_method="POST",
                     header=GEOJSON_CONTENT_TYPE)
    config.add_route(
        "layers_update",
        "/layers/{layer_id:\\d+}/{feature_id}",
        request_method="PUT",
        header=GEOJSON_CONTENT_TYPE,
    )
    config.add_route("layers_delete",
                     "/layers/{layer_id:\\d+}/{feature_id}",
                     request_method="DELETE")
    config.add_route(
        "layers_enumerate_attribute_values",
        "/layers/{layer_name}/values/{field_name}",
        request_method="GET",
        pregenerator=C2CPregenerator(),
    )
    # There is no view corresponding to that route, it is to be used from
    # mako templates to get the root of the "layers" web service
    config.add_route("layers_root", "/layers", request_method="HEAD")

    # Resource proxy (load external url, useful when loading non https content)
    config.add_route("resourceproxy", "/resourceproxy", request_method="GET")

    # Dev
    config.add_route("dev", "/dev/*path", request_method="GET")

    # Used memory in caches
    config.add_route("memory", "/memory", request_method="GET")

    # Scan view decorator for adding routes
    config.scan(ignore=[
        "c2cgeoportal_geoportal.lib",
        "c2cgeoportal_geoportal.scaffolds",
        "c2cgeoportal_geoportal.scripts",
    ])

    add_admin_interface(config)

    # Add the project static view with cache buster
    config.add_static_view(
        name="static",
        path="/etc/geomapfish/static",
        cache_max_age=int(config.get_settings()["default_max_age"]),
    )
    config.add_cache_buster("/etc/geomapfish/static", version_cache_buster)

    # Add the project static view without cache buster
    config.add_static_view(
        name="static-ngeo",
        path="/etc/static-ngeo",
        cache_max_age=int(config.get_settings()["default_max_age"]),
    )

    # Handles the other HTTP errors raised by the views. Without that,
    # the client receives a status=200 without content.
    config.add_view(error_handler, context=HTTPException)

    c2cwsgiutils.index.additional_title = (
        '<div class="row"><div class="col-lg-3"><h2>GeoMapFish</h2>'
        '</div><div class="col-lg">')
    c2cwsgiutils.index.additional_auth.extend([
        '<a href="../tiles/admin/">TileCloud chain admin</a><br>',
        '<a href="../tiles/c2c/">TileCloud chain c2c tools</a><br>',
        '<a href="../invalidate">Invalidate the cache</a><br>',
        '<a href="../memory">Memory status</a><br>',
    ])
    if config.get_settings().get("enable_admin_interface", False):
        c2cwsgiutils.index.additional_noauth.append(
            '<a href="../admin/">Admin</a><br>')

    c2cwsgiutils.index.additional_noauth.append(
        '</div></div><div class="row"><div class="col-lg-3"><h3>Interfaces</h3></div><div class="col-lg">'
    )
    c2cwsgiutils.index.additional_noauth.append(
        '<a href="../">Default</a><br>')
    for interface in config.get_settings().get("interfaces", []):
        if not interface.get("default", False):
            c2cwsgiutils.index.additional_noauth.append(
                '<a href="../{interface}">{interface}</a><br>'.format(
                    interface=interface["name"]))
    c2cwsgiutils.index.additional_noauth.append(
        '<a href="../apihelp/index.html">API help</a><br>')
    c2cwsgiutils.index.additional_noauth.append("</div></div><hr>")
Example #5
                         .format(len(self._keylist), self._max_keys, self._size, self._max_size))

    def _enforce_limits(self, new_value):
        while len(self._keylist) > 0 and (len(self._keylist) > self._max_keys
                                          or (self._max_size and self._size > self._max_size)):
            self.logger.debug('shrinking limited file cache')
            key, size = self._keylist.popleft()
            self.delete(key)
            self._size -= size

    def set(self, key, value):
        self._enforce_limits(value)
        value = pymor.core.dumps(value)
        if key not in self._keylist:
            self._new_key(key, len(value))
        with self._dbm_file(True) as dbm:
            dbm[key] = value

    def delete(self, key):
        super(LimitedFileBackend, self).delete(key)
        try:
            # The API says this method is supposed to be idempotent.
            self._keylist.remove(key)
        except ValueError:
            pass
        self._dump_keylist()

dc.register_backend("LimitedMemory", "pymor.core.dogpile_backends", "LimitedMemoryBackend")
dc.register_backend("LimitedFile", "pymor.core.dogpile_backends", "LimitedFileBackend")
dc.register_backend("Dummy", "pymor.core.dogpile_backends", "DummyBackend")
Example #6
        return ret

    def set(self, key, value):
        try:
            self.client.async_operation('set', key, value)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass

    def set_multi(self, items):
        try:
            self.client.async_operation('setMultiple', items)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass

    def delete(self, key):
        try:
            self.client.async_operation('delete', key)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass

    def delete_multi(self, keys):
        try:
            self.client.async_operation('deleteMultiple', keys)
        except MEMCACHED_FAILURE_EXCEPTIONS:
            pass


register_backend('txyam', 'weasyl.cache', 'YamBackend')

__all__ = ['region']
Example #7
from dogpile.cache import register_backend

LRU_CACHE = "lru-cache"

register_backend(LRU_CACHE, "dogpile_lru.backends", "LruBackend")
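
A brief, hedged usage sketch for the backend registered under LRU_CACHE (it assumes the dogpile_lru.backends module named in the registration is installed; the maxsize argument is illustrative):

from dogpile.cache import make_region

lru_region = make_region().configure(
    LRU_CACHE,                    # i.e. "lru-cache", as registered above
    arguments={"maxsize": 128},   # hypothetical backend argument
)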
Example #8
        for arg in arg_blacklist:
            kw.pop(arg, None)

        key = namespace + '|' + ' '.join(
            value_mangler(kw[k]) for k in sorted(kw))
        return key

    return generate_key


ReportRegion = make_region(function_key_generator=make_key_generator)


# TODO: Remove, obsolete by dogpile.cache.null
class DisabledBackend(CacheBackend):
    def __init__(self, arguments):
        pass

    def get(self, key):
        return NO_VALUE

    def set(self, key, value):
        pass

    def delete(self, key):
        pass


register_backend("DisabledBackend", "briefmetrics.lib.cache",
                 "DisabledBackend")
Example #9
    """
    A memory backed cache for individual CherryPy requests.

    This provides a cache backend for dogpile.cache which is designed
    to work in a thread-safe manner using cherrypy.request, a thread local
    storage that only lasts for the duration of a request.
    """
    def __init__(self, arguments):
        pass

    @property
    def _cache(self):
        if not hasattr(cherrypy.request, '_girderCache'):
            setattr(cherrypy.request, '_girderCache', {})

        return cherrypy.request._girderCache


register_backend('cherrypy_request', 'girder.utility._cache', 'CherrypyRequestBackend')

# These caches must be configured with the null backend upon creation due to the fact
# that user-based configuration of the regions doesn't happen until server start, which
# doesn't occur when using Girder as a library.
cache = make_region(name='girder.cache').configure(backend='dogpile.cache.null')
requestCache = make_region(name='girder.request').configure(backend='dogpile.cache.null')

# This cache is not configurable by the user, and will always be configured when the server is.
# It holds data for rate limiting, which is ephemeral, but must be persisted (i.e. it's not optional
# or best-effort).
rateLimitBuffer = make_region(name='girder.rate_limit')
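
Once real settings become available at server start, a region created against the null backend can be pointed at a concrete backend; dogpile.cache supports this through configure(..., replace_existing_backend=True). A hedged sketch, where the memcached backend and its arguments are illustrative rather than Girder's actual configuration:

cache.configure(
    backend="dogpile.cache.memcached",
    arguments={"url": "127.0.0.1:11211"},  # illustrative
    replace_existing_backend=True,
)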
Example #10
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from dogpile import cache

from google.api.auth import suppliers
from google.api.auth import tokens

cache.register_backend("lru_cache", "google.api.auth.caches", "LruBackend")


def create_authenticator(issuers_to_provider_ids, issuer_uri_configs):
    key_uri_supplier = suppliers.KeyUriSupplier(issuer_uri_configs)
    jwks_supplier = suppliers.JwksSupplier(key_uri_supplier)
    return tokens.Authenticator(issuers_to_provider_ids, jwks_supplier)
Example #11
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

from dogpile import cache

from . import suppliers, tokens


cache.register_backend("lru_cache", "endpoints_management.auth.caches", "LruBackend")


def create_authenticator(issuers_to_provider_ids, issuer_uri_configs):
    key_uri_supplier = suppliers.KeyUriSupplier(issuer_uri_configs)
    jwks_supplier = suppliers.JwksSupplier(key_uri_supplier)
    return tokens.Authenticator(issuers_to_provider_ids, jwks_supplier)
Example #12
            pattern = self.key_mangler(pattern)
        return self.client.keys(pattern)

    def pipeline(self):
        return self.client.pipeline()


def make_redis_region(app, prefix):
    expiration_time = app.config.setdefault('REDICA_DEFAULT_EXPIRE', 3600)
    key_mangler = functools.partial(_md5_key_mangler, prefix)
    redica_cache_url = app.config.get('REDICA_CACHE_URL')
    cfg = {
        'backend': 'extended_redis_backend',
        'expiration_time': expiration_time,
        'arguments': {
            'redis_expiration_time': expiration_time + 30,
            'key_mangler': key_mangler,
        }
    }
    if app.config.get('REDICA_CACHE_POOL_BLOCKING', True):
        cfg['arguments']['connection_pool'] = BlockingConnectionPool.from_url(
            redica_cache_url)
    else:
        cfg['arguments']['url'] = redica_cache_url

    return dict(default=make_region().configure(**cfg))


register_backend("extended_redis_backend", "flask_sqlalchemy_redica.redis",
                 "ExtendRedisBackend")
Example #13
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA

import logging
from dogpile.cache import register_backend

register_backend("dogpile.cache.rc.memory_lru",
                 "vcsserver.lib.rc_cache.backends", "LRUMemoryBackend")

register_backend("dogpile.cache.rc.file_namespace",
                 "vcsserver.lib.rc_cache.backends", "FileNamespaceBackend")

register_backend("dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
                 "RedisPickleBackend")

register_backend("dogpile.cache.rc.redis_msgpack",
                 "vcsserver.lib.rc_cache.backends", "RedisMsgPackBackend")

log = logging.getLogger(__name__)

from . import region_meta
from .utils import (get_default_cache_settings, backend_key_generator,
                    make_region)
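
Each registered rc backend is then selectable by its dotted name. A minimal, hypothetical region setup using the plain dogpile API (vcsserver's own make_region wrapper, imported above, presumably layers defaults on top of this):

import dogpile.cache

lru_region = dogpile.cache.make_region().configure(
    "dogpile.cache.rc.memory_lru",
    expiration_time=60,  # illustrative
)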
Example #14
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from dogpile import cache

from google.api.auth import suppliers
from google.api.auth import tokens


cache.register_backend("lru_cache", "google.api.auth.caches", "LruBackend")


def create_authenticator(issuers_to_provider_ids, issuer_uri_configs):
  key_uri_supplier = suppliers.KeyUriSupplier(issuer_uri_configs)
  jwks_supplier = suppliers.JwksSupplier(key_uri_supplier)
  return tokens.Authenticator(issuers_to_provider_ids, jwks_supplier)
Example #15
    This provides a cache backend for dogpile.cache which is designed
    to work in a thread-safe manner using cherrypy.request, a thread local
    storage that only lasts for the duration of a request.
    """
    def __init__(self, arguments):
        pass

    @property
    def _cache(self):
        if not hasattr(cherrypy.request, '_girderCache'):
            cherrypy.request._girderCache = {}

        return cherrypy.request._girderCache


register_backend('cherrypy_request', 'girderformindlogger.utility._cache',
                 'CherrypyRequestBackend')

# These caches must be configured with the null backend upon creation due to the fact
# that user-based configuration of the regions doesn't happen until server start, which
# doesn't occur when using Girder as a library.
cache = make_region(name='girderformindlogger.cache').configure(
    backend='dogpile.cache.null')
requestCache = make_region(name='girderformindlogger.request').configure(
    backend='dogpile.cache.null')

# This cache is not configurable by the user, and will always be configured when the server is.
# It holds data for rate limiting, which is ephemeral, but must be persisted (i.e. it's not optional
# or best-effort).
rateLimitBuffer = make_region(name='girderformindlogger.rate_limit')
Example #16
        self._cache = {}

    def get_mutex(self, key):
        return MockMutex(key)

    def get(self, key):
        try:
            return self._cache[key]
        except KeyError:
            return NO_VALUE

    def get_multi(self, keys):
        return [self.get(key) for key in keys]

    def set(self, key, value):
        self._cache[key] = value

    def set_multi(self, mapping):
        for key, value in mapping.items():
            self.set(key, value)

    def delete(self, key):
        self._cache.pop(key, None)

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)


register_backend("mock", __name__, "MockBackend")
Example #17
# Assumed import: the LRUCache/TTLCache usage below matches the cachetools API
# (the import is missing from the original fragment).
from cachetools import LRUCache, TTLCache

from dogpile.cache import register_backend
from dogpile.cache.api import CacheBackend, NO_VALUE

from dorthy.security import crypto


def sha2_mangle_key(key):
    return crypto.secure_hash(key, crypto.SecureHashAlgorithms.SHA2)


class LRULocalBackend(CacheBackend):
    def __init__(self, arguments):
        maxsize = arguments.get("maxsize", 1024)
        ttl = arguments.get("ttl", None)
        if ttl:
            self.__cache = TTLCache(maxsize, ttl=ttl)
        else:
            self.__cache = LRUCache(maxsize)

    def get(self, key):
        return self.__cache.get(key, NO_VALUE)

    def set(self, key, value):
        self.__cache[key] = value

    def delete(self, key):
        del self.__cache[key]


register_backend("dorthy.cache.local.lru", "dorthy.cache", "LRULocalBackend")