示例#1
0
class Cache(object):
    """Thin helper layer on top of a pylibmc memcached client.

    Adds key prefixing, a per-thread client pool (pylibmc clients are
    not thread-safe) and translation of MemcachedError into CacheError.
    """

    implements(IAppCache)

    def __init__(self, **options):
        # options["servers"] is a comma-separated "host:port" list.
        self.servers = [server.strip()
                        for server in options["servers"].split(",")]
        self.prefix = options["prefix"]
        self._client = Client(self.servers)
        # Each thread reserves its own client from this pool.
        self.pool = ThreadMappedPool(self._client)
        # using a locker to avoid race conditions
        # when several clients for the same user
        # get/set the cached data
        self._locker = threading.RLock()

    def _key(self, *key):
        """Join *key* parts under the configured prefix, colon-separated."""
        return ":".join([self.prefix] + list(key))

    def cleanup(self):
        """Discard the pooled client owned by the current thread."""
        self.pool.pop(thread.get_ident(), None)

    def flush_all(self):
        """Flush every entry on all configured memcached servers."""
        with self.pool.reserve() as mc:
            mc.flush_all()

    def get(self, key):
        """Return the cached value stored under the prefixed *key*.

        Raises:
            CacheError: when the memcached backend cannot be reached.
        """
        key = self._key(key)

        with self.pool.reserve() as mc:
            try:
                return mc.get(key)
            # "except E, err" is Python-2-only syntax; "as" works on 2.6+.
            except MemcachedError as err:
                # memcache seems down
                raise CacheError(str(err))
class CacheManager(object):
    """Thin helper layer on top of a pylibmc memcached client.

    Keeps one client per thread via a ThreadMappedPool and releases the
    current thread's client when the request ends.
    """

    def __init__(self, *args, **kw):
        self._client = Client(*args, **kw)
        # pylibmc clients are not thread-safe; each thread reserves its own.
        self.pool = ThreadMappedPool(self._client)
        # using a locker to avoid race conditions
        # when several clients for the same user
        # get/set the cached data
        self._locker = threading.RLock()
        # Drop this thread's pooled client once the request is done.
        subscribe(REQUEST_ENDS, self._cleanup_pool)

    @property
    def logger(self):
        """Return the current default metlog client.

        NOTE(review): the previous code stored the client in
        self.__dict__['logger'] hoping to bypass this property on later
        reads, but @property is a data descriptor, so the instance-dict
        entry never shadows it and the write was dead code. The lookup is
        cheap, so we simply fetch the client on every access.
        """
        return CLIENT_HOLDER.default_client

    def _cleanup_pool(self, response):
        """REQUEST_ENDS hook: discard the current thread's pooled client."""
        self.pool.pop(thread.get_ident(), None)

    def flush_all(self):
        """Flush every entry on all configured memcached servers."""
        with self.pool.reserve() as mc:
            mc.flush_all()

    @metlog_timeit
    def get(self, key):
        """Return the cached value stored under *key*.

        Raises:
            BackendError: when the memcached backend cannot be reached.
        """
        with self.pool.reserve() as mc:
            try:
                return mc.get(key)
            # "except E, err" is Python-2-only syntax; "as" works on 2.6+.
            except MemcachedError as err:
                # memcache seems down
                raise BackendError(str(err))
 def __init__(self, *args, **kw):
     """Create the memcached client, its per-thread pool and the lock.

     All positional and keyword arguments are forwarded to Client().
     """
     self._client = Client(*args, **kw)
     self.pool = ThreadMappedPool(self._client)
     # Guard against races when several clients for the same user
     # read or write the cached data concurrently.
     self._locker = threading.RLock()
     # Release this thread's pooled client once the request finishes.
     subscribe(REQUEST_ENDS, self._cleanup_pool)
示例#4
0
 def __init__(self, **options):
     """Build the client from *options* ('servers' and 'prefix' keys)."""
     raw_servers = options['servers']
     self.servers = [host.strip() for host in raw_servers.split(',')]
     self.prefix = options['prefix']
     self._client = Client(self.servers)
     self.pool = ThreadMappedPool(self._client)
     # Serialize concurrent get/set calls for the same user so
     # several clients cannot race on the cached data.
     self._locker = threading.RLock()
示例#5
0
 def __init__(self, *args, **kw):
     """Set up the client, the thread-mapped pool and request cleanup."""
     self._client = Client(*args, **kw)
     self.pool = ThreadMappedPool(self._client)
     # An RLock serializes get/set from several clients acting on
     # the same user's cached data, preventing race conditions.
     self._locker = threading.RLock()
     # When a request ends, discard that thread's pooled client.
     subscribe(REQUEST_ENDS, self._cleanup_pool)
示例#6
0
 def __init__(self, **options):
     """Configure servers, key prefix, client pool and lock.

     Expects options["servers"] (comma-separated hosts) and
     options["prefix"].
     """
     hosts = options["servers"].split(",")
     self.servers = [h.strip() for h in hosts]
     self.prefix = options["prefix"]
     self._client = Client(self.servers)
     self.pool = ThreadMappedPool(self._client)
     # Lock shared by all threads so simultaneous get/set calls for
     # the same user cannot corrupt the cached data.
     self._locker = threading.RLock()
示例#7
0
class CacheManager(object):
    """Thin helper layer on top of a pylibmc memcached client.

    Maintains one client per thread (ThreadMappedPool) and cleans the
    current thread's client up at the end of each request.
    """

    def __init__(self, *args, **kw):
        self._client = Client(*args, **kw)
        # One pylibmc client per thread; they are not thread-safe.
        self.pool = ThreadMappedPool(self._client)
        # using a locker to avoid race conditions
        # when several clients for the same user
        # get/set the cached data
        self._locker = threading.RLock()
        # Remove this thread's pooled client once the request ends.
        subscribe(REQUEST_ENDS, self._cleanup_pool)

    @property
    def logger(self):
        """Return the current default metlog client.

        NOTE(review): writing self.__dict__["logger"] to short-circuit
        this property never worked — @property is a data descriptor and
        always wins over the instance dict — so the dead caching write
        was removed; the attribute lookup here is cheap.
        """
        return CLIENT_HOLDER.default_client

    def _cleanup_pool(self, response):
        """REQUEST_ENDS hook: drop the current thread's pooled client."""
        self.pool.pop(thread.get_ident(), None)

    def flush_all(self):
        """Flush every entry on all configured memcached servers."""
        with self.pool.reserve() as mc:
            mc.flush_all()

    @metlog_timeit
    def get(self, key):
        """Return the cached value stored under *key*.

        Raises:
            BackendError: when the memcached backend cannot be reached.
        """
        with self.pool.reserve() as mc:
            try:
                return mc.get(key)
            # "except E, err" is Python-2-only syntax; "as" works on 2.6+.
            except MemcachedError as err:
                # memcache seems down
                raise BackendError(str(err))
示例#8
0
class Cache(object):
    """Thin helper layer on top of a pylibmc memcached client.

    Provides prefixed keys, a per-thread client pool and translation of
    MemcachedError into CacheError.
    """
    implements(IAppCache)

    def __init__(self, **options):
        # options['servers'] is a comma-separated "host:port" list.
        self.servers = [
            server.strip() for server in options['servers'].split(',')
        ]
        self.prefix = options['prefix']
        self._client = Client(self.servers)
        # Each thread reserves its own client; pylibmc clients are not
        # thread-safe.
        self.pool = ThreadMappedPool(self._client)
        # using a locker to avoid race conditions
        # when several clients for the same user
        # get/set the cached data
        self._locker = threading.RLock()

    def _key(self, *key):
        """Join *key* parts under the configured prefix, colon-separated."""
        return ':'.join([self.prefix] + list(key))

    def cleanup(self):
        """Discard the pooled client owned by the current thread."""
        self.pool.pop(thread.get_ident(), None)

    def flush_all(self):
        """Flush every entry on all configured memcached servers."""
        with self.pool.reserve() as mc:
            mc.flush_all()

    def get(self, key):
        """Return the cached value stored under the prefixed *key*.

        Raises:
            CacheError: when the memcached backend cannot be reached.
        """
        key = self._key(key)

        with self.pool.reserve() as mc:
            try:
                return mc.get(key)
            # "except E, err" is Python-2-only syntax; "as" works on 2.6+.
            except MemcachedError as err:
                # memcache seems down
                raise CacheError(str(err))