def __init__(self):
    """Initialize the Redis-backed image cache from the Flask app config."""
    super(ImageRedisCache, self).__init__()
    config = current_app.config
    redis_client = StrictRedis.from_url(config['IIIF_CACHE_REDIS_URL'])
    key_prefix = config.get('IIIF_CACHE_REDIS_PREFIX', 'iiif')
    self.cache = RedisCache(host=redis_client, key_prefix=key_prefix)
def test_rediscache_get_set():
    """Round-trip a single key through RedisCache set()/get()."""
    _check_redis()
    redis_cache = RedisCache()
    redis_cache.set('foo', 'bar')
    assert redis_cache.get('foo') == 'bar'
def decorated_function(*args, **kwargs):
    """Serve the view's response from Redis when cached; otherwise render,
    store under a per-path key, and return it."""
    cache = RedisCache()
    key = 'view:{0}'.format(request.path)
    cached = cache.get(key)
    if cached is not None:
        return cached
    cached = f(*args, **kwargs)
    cache.set(key, cached, timeout)
    return cached
def test_rediscache_inc_dec():
    """Verify inc()/dec() adjust a stored integer and return the new value."""
    _check_redis()
    redis_cache = RedisCache()
    redis_cache.set('foo', 1)
    assert redis_cache.inc('foo') == 2
    assert redis_cache.dec('foo') == 1
def test_rediscache_set_many():
    """set_many() should store every pair of the mapping."""
    _check_redis()
    redis_cache = RedisCache()
    redis_cache.set_many({'foo': 'bar', 'spam': 'eggs'})
    for key, expected in (('foo', 'bar'), ('spam', 'eggs')):
        assert redis_cache.get(key) == expected
def test_rediscache_expire():
    """A key stored with a 1-second TTL must be gone after 2 seconds."""
    import time
    _check_redis()
    redis_cache = RedisCache()
    redis_cache.set('foo', 'bar', 1)
    time.sleep(2)
    assert redis_cache.get('foo') is None
def _configure_cache_redis(self, url):
    """Switch the internal cache to a Redis backend reachable at ``url``."""
    from werkzeug.contrib.cache import RedisCache
    from redis import from_url
    msg = "enable redis cache url[%s] prefix[%s] cache_timeout[%s]"
    logger.info(msg % (url, self.cache_prefix, self.cache_timeout))
    # Hand a ready client to RedisCache instead of host/port parameters.
    self.cache = RedisCache(
        host=from_url(url),
        default_timeout=self.cache_timeout,
        key_prefix=self.cache_prefix,
    )
def test_rediscache_delete_many():
    """delete_many() must remove every named key."""
    _check_redis()
    redis_cache = RedisCache()
    for key, value in (('foo', 'bar'), ('spam', 'eggs')):
        redis_cache.add(key, value)
    redis_cache.delete_many('foo', 'spam')
    assert redis_cache.get('foo') is None
    assert redis_cache.get('spam') is None
def __init__(self, host='localhost', port=6379, password=None, db=0,
             default_timeout=300, key_prefix=None):
    """Forward all connection settings straight to ``BaseRedisCache``."""
    BaseRedisCache.__init__(
        self,
        host=host,
        port=port,
        password=password,
        db=db,
        default_timeout=default_timeout,
        key_prefix=key_prefix,
    )
def redis(app, config, args, kwargs):
    """Factory: build a werkzeug ``RedisCache`` from ``CACHE_REDIS_*`` settings.

    When ``CACHE_REDIS_URL`` is set it supersedes host/port/db: the URL is
    turned into a client object which is passed as the ``host`` argument.
    Raises ``RuntimeError`` if the ``redis`` package is not installed.
    """
    try:
        from redis import from_url as redis_from_url
    except ImportError:
        raise RuntimeError('no redis module found')
    kwargs.update(dict(
        host=config.get('CACHE_REDIS_HOST', 'localhost'),
        port=config.get('CACHE_REDIS_PORT', 6379),
    ))
    # Optional settings only override when truthy.
    password = config.get('CACHE_REDIS_PASSWORD')
    if password:
        kwargs['password'] = password
    key_prefix = config.get('CACHE_KEY_PREFIX')
    if key_prefix:
        kwargs['key_prefix'] = key_prefix
    db_number = config.get('CACHE_REDIS_DB')
    # NOTE(review): a truthiness check skips an explicit db 0, which happens
    # to be redis's default database anyway.
    if db_number:
        kwargs['db'] = db_number
    redis_url = config.get('CACHE_REDIS_URL')
    if redis_url:
        # The db setting moves into the client built from the URL.
        kwargs['host'] = redis_from_url(
            redis_url,
            db=kwargs.pop('db', None),
        )
    return RedisCache(*args, **kwargs)
def test_invenio_access_permission_cache_redis(app):
    """Caching the user using redis.

    Exercises the action-permission cache: a denial and a grant both leave
    the expected ``(needs, excludes)`` sets in the cache.
    """
    cache = RedisCache()
    InvenioAccess(app, cache=cache)
    with app.test_request_context():
        user_can_all = User(email='*****@*****.**')
        user_can_open = User(email='*****@*****.**')
        db.session.add(user_can_all)
        db.session.add(user_can_open)
        # Only the first user is granted the 'open' action initially.
        db.session.add(ActionUsers(action='open', user=user_can_all))
        db.session.flush()
        identity_open = FakeIdentity(UserNeed(user_can_open.id))
        permission_open = DynamicPermission(ActionNeed('open'))
        # user_can_open has no grant yet, so the permission denies it...
        assert not permission_open.allows(identity_open)
        # ...and the lookup populated the action cache with user 1 only.
        assert current_access.get_action_cache('open') == (set(
            [Need(method='id', value=1)]), set([]))
        # Grant 'open' to the second user and rebuild the permission.
        db.session.add(ActionUsers(action='open', user=user_can_open))
        db.session.flush()
        permission_open = DynamicPermission(ActionNeed('open'))
        assert permission_open.allows(identity_open)
        assert current_access.get_action_cache('open') == (set(
            [Need(method='id', value=1), Need(method='id', value=2)]), set([]))
def __call__(self):
    """ Returns redis cache from redis config """
    # Lazily build the cache on first call; later calls reuse self._cache.
    if self._cache is None:
        cache_type = CacheConfig.cache_type
        if cache_type == 'redis':
            # One connection pool is shared process-wide (module global).
            global _connection_pool
            if _connection_pool is None:
                _connection_pool = redis.ConnectionPool(host=CacheConfig.host)
            self._cache = RedisCache(
                key_prefix=CacheConfig.key_prefix,
                host=CacheConfig.host,
                port=CacheConfig.port,
                connection_pool=_connection_pool
            )
        else:
            # Non-redis configuration: share one SimpleCache per process.
            global _cache
            if _cache is None:
                _cache = SimpleCache()
                self._cache = _cache
            else:
                self._cache = _cache
    return self._cache
def prepare(self): self.prepared = True # Cache if self.config.CACHE_TYPE == 'redis': self.cache = RedisCache(host=self.config.CACHE_SERV) elif self.config.CACHE_TYPE == 'memcached': self.cache = MemcachedCache(servers=[self.config.CACHE_SERV]) else: self.cache = FileSystemCache(self.config.CACHE_SERV) # Options from .admin import Option self.options = Option.auto_load() # Timer @self.app.before_request def before_request(): g.start = time.time() # Medias self.app.add_url_rule(self.app.config['UPLOAD_DIRECTORY_URL'] + '<filename>', 'FyPress.uploaded_file', build_only=True) self.app.wsgi_app = SharedDataMiddleware( self.app.wsgi_app, { self.app.config['UPLOAD_DIRECTORY_URL']: self.app.config['UPLOAD_DIRECTORY'] })
def redis(app, config, args, kwargs):
    """Factory: build a werkzeug ``RedisCache`` from ``CACHE_REDIS_*`` settings.

    ``CACHE_REDIS_URL``, when present, supersedes host/port/db: the URL is
    converted into a client object passed as ``host``.  Raises
    ``RuntimeError`` if the ``redis`` package is missing.
    """
    try:
        from redis import from_url as redis_from_url
    except ImportError:
        raise RuntimeError("no redis module found")
    kwargs.update(
        dict(
            host=config.get("CACHE_REDIS_HOST", "localhost"),
            port=config.get("CACHE_REDIS_PORT", 6379),
        ))
    # Optional settings only apply when truthy.
    password = config.get("CACHE_REDIS_PASSWORD")
    if password:
        kwargs["password"] = password
    key_prefix = config.get("CACHE_KEY_PREFIX")
    if key_prefix:
        kwargs["key_prefix"] = key_prefix
    db_number = config.get("CACHE_REDIS_DB")
    # NOTE(review): truthiness skips an explicit db 0 (redis's default).
    if db_number:
        kwargs["db"] = db_number
    redis_url = config.get("CACHE_REDIS_URL")
    if redis_url:
        kwargs["host"] = redis_from_url(redis_url, db=kwargs.pop("db", None))
    return RedisCache(*args, **kwargs)
def redis(app, config, args, kwargs):
    """Factory: assemble ``RedisCache`` constructor arguments from the
    ``CACHE_REDIS_*`` config keys; a ``CACHE_REDIS_URL`` wins over
    host/port/db (``redis_from_url`` is expected in module scope)."""
    kwargs.update(dict(
        host=config.get('CACHE_REDIS_HOST', 'localhost'),
        port=config.get('CACHE_REDIS_PORT', 6379),
    ))
    password = config.get('CACHE_REDIS_PASSWORD')
    if password:
        kwargs['password'] = password
    key_prefix = config.get('CACHE_KEY_PREFIX')
    if key_prefix:
        kwargs['key_prefix'] = key_prefix
    db_number = config.get('CACHE_REDIS_DB')
    # NOTE(review): truthiness skips an explicit db 0 (redis's default).
    if db_number:
        kwargs['db'] = db_number
    redis_url = config.get('CACHE_REDIS_URL')
    if redis_url:
        # The db setting moves into the client built from the URL.
        kwargs['host'] = redis_from_url(
            redis_url,
            db=kwargs.pop('db', None),
        )
    return RedisCache(*args, **kwargs)
def get_cache():
    """Return the request-context cache, creating it lazily.

    Uses Redis when ``REDIS_HOST`` is configured, otherwise an in-process
    SimpleCache.
    """
    if 'cache' not in g:
        use_redis = 'REDIS_HOST' in current_app.config
        g.cache = (RedisCache(current_app.config.get('REDIS_HOST'))
                   if use_redis else SimpleCache())
    return g.cache
def _configure_cache_simple(self):
    """Enable an in-process SimpleCache backend and log its parameters."""
    from werkzeug.contrib.cache import SimpleCache
    msg = "enable memory threshold[%s] cache_timeout[%s]"
    logger.warning(msg % (self.cache_threshold, self.cache_timeout))
    self.cache = SimpleCache(
        threshold=self.cache_threshold,
        default_timeout=self.cache_timeout,
    )
def __init__(self, host='localhost', port=6379):
    """Create a Redis-backed session interface for the given connection."""
    backend = RedisCache(host=host, port=port)
    CacheSessionInterface.__init__(self, backend, prefix='redis_cache_session:')
def get_redis():
    """Return a RedisCache memoized on the Flask application object."""
    if hasattr(current_app, 'redis'):
        return current_app.redis
    host, port = current_app.config["REDIS_CONN"]
    current_app.redis = RedisCache(host, port, default_timeout=300)
    return current_app.redis
class ProductionConfig(BaseConfig):
    """Production specific configuration."""
    # Debug stays off in production.
    DEBUG = False
    # 60 * 24 * 7 == 10080 -- a week if the unit is minutes; werkzeug caches
    # take seconds, so confirm the unit the consumer of this value expects.
    CACHE_TIMEOUT = 60 * 24 * 7
    # Shared Redis-backed cache, built at import time from redis_conf.
    CACHE = RedisCache(host=redis_conf.get('HOST'),
                       port=redis_conf.get('PORT'),
                       password=redis_conf.get('PASSWORD'))
def test_rediscache_delete():
    """delete() must remove a single previously stored key."""
    _check_redis()
    redis_cache = RedisCache()
    redis_cache.add('foo', 'bar')
    assert redis_cache.get('foo') == 'bar'
    redis_cache.delete('foo')
    assert redis_cache.get('foo') is None
def wrapper(*args, **kwargs):
    """Ensure the module-level Redis cache exists, then invoke ``func``.

    NOTE(review): the cache object is initialized but never consulted in
    this wrapper; presumably ``func`` (or code it calls) reads the
    module-level ``redis_cache`` -- confirm.  When ``DISABLE_CACHE`` is set
    the wrapped function is skipped entirely and ``None`` is returned;
    verify callers expect that short-circuit.
    """
    if DISABLE_CACHE:
        return None
    global redis_cache
    # Lazy singleton: connect only on the first call.
    if redis_cache is None:
        redis_cache = RedisCache(host=REDIS_SERVER, password=REDIS_PASS,
                                 default_timeout=DEFAULT_CACHE_TIME)
    return func(*args, **kwargs)
def __init__(self, app):
    """Bind to ``app`` and build a RedisCache from its CACHE_REDIS_* config."""
    self.app = app
    self.config = app.config
    self.default_timeout = self.config.get('CACHE_DEFAULT_TIMEOUT')
    options = {
        'host': self.config.get('CACHE_REDIS_HOST', 'localhost'),
        'port': self.config.get('CACHE_REDIS_PORT', 6379),
    }
    # Optional settings are only forwarded when configured.
    for option, setting in (('password', 'CACHE_REDIS_PASSWORD'),
                            ('db', 'CACHE_REDIS_DB')):
        value = self.config.get(setting)
        if value:
            options[option] = value
    self.cache = RedisCache(**options)
def set_up(cls, config):
    """ Set up Cache: create the shared RedisCache from config["REDIS"]. """
    redis_conf = config["REDIS"]
    cls.instance = RedisCache(
        host=redis_conf["host"],
        port=redis_conf["port"],
        password=redis_conf["password"],
    )
def test_rediscache_add():
    """add() stores a missing key but never overwrites an existing one."""
    _check_redis()
    redis_cache = RedisCache()
    # add() behaves like set() when the key is absent...
    redis_cache.add('foo', 'bar')
    assert redis_cache.get('foo') == 'bar'
    # ...and is a no-op when the key is already present.
    redis_cache.add('foo', 'qux')
    assert redis_cache.get('foo') == 'bar'
def _redis(self, **kwargs):
    """Returns a :class:`RedisCache` instance built from REDIS_* settings."""
    settings = {
        'host': self._config('REDIS_HOST', 'localhost'),
        'port': self._config('REDIS_PORT', 6379),
        'password': self._config('REDIS_PASSWORD', None),
        'db': self._config('REDIS_DB', 0),
        'key_prefix': self._config('KEY_PREFIX', None),
    }
    # Config values override anything the caller passed in.
    kwargs.update(settings)
    return RedisCache(**kwargs)
def _configure_cache_simple(self):
    """Enable the in-memory SimpleCache backend, then swap its store for an
    OrderedDict."""
    from werkzeug.contrib.cache import SimpleCache
    msg = "enable memory threshold[%s] cache_timeout[%s]"
    logger.warning(msg % (self.cache_threshold, self.cache_timeout))
    self.cache = SimpleCache(threshold=self.cache_threshold,
                             default_timeout=self.cache_timeout)
    # Replace the backing dict with an OrderedDict and expose its clear()
    # directly.  NOTE(review): this reaches into SimpleCache's private
    # ``_cache`` attribute -- presumably for deterministic iteration order;
    # confirm against the consumer before touching.
    self.cache._cache = OrderedDict()
    self.cache.clear = self.cache._cache.clear
def test_rediscache_get_many():
    """ test retrieving multiple values from RedisCache """
    _check_redis()
    cache = RedisCache()
    cache.set('foo', 'bar')
    cache.set('spam', 'eggs')
    # get_many returns values in the order the keys were requested.
    assert cache.get_many('foo', 'spam') == ['bar', 'eggs']
def _init_cache(self,app): cache = None prefix=app.name.lower() try: from werkzeug.contrib.cache import RedisCache cache = RedisCache(key_prefix=prefix) except: RuntimeWarning("RedisCache not available") # try: # from werkzeug.contrib.cache import MemcachedCache # cache = MemcachedCache(app.config['MEMCACHED_SERVERS'], key_prefix=prefix) # except: # RuntimeWarning("Memcached not available") if cache is not None: cache.clear() app.cache = cache self.cache = cache
class Meta:
    # DAO/caching configuration -- presumably read via attribute lookup by
    # the surrounding ORM/DAO framework; confirm against its docs.
    table_name = 'folder'
    dao_class = CommonDao
    # Factory is kept lazy so the Redis connection (db=1) is only created
    # when the cache is first needed.
    cache_db_factory = lambda: RedisCache(db=1)
    # Query-signature -> TTL seconds; '*' is the catch-all entry.
    cache_conditions = {
        '*': 3600,
        'folder_id': 3600,
        'name': 3600,
        'folder_id__lt': 3600,
        'folder_id__gt + name__contains': 3500
    }
def _configure_cache_redis(self, url):
    """Point the internal cache at a Redis server.

    ``url`` is a redis URL such as ``redis://localhost:6379``.
    """
    from werkzeug.contrib.cache import RedisCache
    from redis import from_url
    # Hand a ready client to RedisCache instead of host/port arguments.
    self.cache = RedisCache(
        host=from_url(url),
        default_timeout=self.cache_timeout,
        key_prefix=self.cache_prefix,
    )
def create_app():
    """Application factory: logging, Flask app, Redis cache, blueprints."""
    logging.config.dictConfig(import_string('settings.LOGGING_CONFIG'))
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_object(settings)
    # App-wide Redis cache handle, available as current_app.cache.
    app.cache = RedisCache(host=settings.REDIS_HOST, port=settings.REDIS_PORT)
    # Register blueprint here
    app.register_blueprint(auth)
    return app
def redis(config, *args, **kwargs):
    """Instantiate a RedisCache from CACHE_REDIS_* settings.

    Caller-supplied kwargs win over config values; a ``CACHE_REDIS_URL``
    supersedes host/port and absorbs the db number.
    """
    defaults = (
        ('host', config.get('CACHE_REDIS_HOST', DEF_REDIS_HOST)),
        ('port', config.get('CACHE_REDIS_PORT', DEF_REDIS_PORT)),
        ('password', config.get('CACHE_REDIS_PASSWORD')),
        ('key_prefix', config.get('CACHE_KEY_PREFIX')),
        ('db', config.get('CACHE_REDIS_DB')),
    )
    for name, value in defaults:
        kwargs.setdefault(name, value)
    redis_url = config.get('CACHE_REDIS_URL')
    if redis_url:
        kwargs['host'] = from_url(redis_url, db=kwargs.pop('db', None))
    return RedisCache(*args, **kwargs)
def redis(app, args, kwargs):
    """Build a RedisCache from the app config (host, port, password, prefix)."""
    cfg = app.config
    kwargs['host'] = cfg.get('CACHE_REDIS_HOST', 'localhost')
    kwargs['port'] = cfg.get('CACHE_REDIS_PORT', 6379)
    # Optional settings are only forwarded when configured.
    for kw, key in (('password', 'CACHE_REDIS_PASSWORD'),
                    ('key_prefix', 'CACHE_KEY_PREFIX')):
        value = cfg.get(key)
        if value:
            kwargs[kw] = value
    return RedisCache(*args, **kwargs)
def initiate(cls):
    """
    Pick the caching backend to use.

    Prefers Redis (the ``get("test")`` call forces a round trip to verify
    connectivity); falls back to an in-process SimpleCache when Redis is
    unreachable.  Returns the chosen cache, memoized on ``cls.cache``.

    Fix: the original bare ``except:`` also swallowed ``KeyboardInterrupt``
    and ``SystemExit``; narrowed to ``except Exception``.
    """
    if not cls.cache:
        try:
            cls.cache = RedisCache(host=constants.REDIS_HOST)
            cls.cache.get("test")
            logger.info(msg="We are using Redis for Caching")
        except Exception:
            cls.cache = SimpleCache()
            logger.info(msg='We are using SimpleCache for Caching')
    return cls.cache
def test_rediscache_get_many():
    """get_many() returns values in the order the keys were requested."""
    _check_redis()
    redis_cache = RedisCache()
    for key, value in (('foo', 'bar'), ('spam', 'eggs')):
        redis_cache.set(key, value)
    assert redis_cache.get_many('foo', 'spam') == ['bar', 'eggs']
class ImageRedisCache(ImageCache):
    """Redis image cache."""

    def __init__(self):
        """Initialize the cache from ``IIIF_CACHE_REDIS_URL``."""
        super(ImageRedisCache, self).__init__()
        url = current_app.config['IIIF_CACHE_REDIS_URL']
        self.cache = RedisCache(host=StrictRedis.from_url(url))

    def get(self, key):
        """Return the stored object for ``key``.

        :param key: the object's key
        :return: the stored object
        :rtype: `BytesIO` object
        """
        return self.cache.get(key)

    def set(self, key, value, timeout=None):
        """Cache ``value`` under ``key``.

        :param key: the object's key
        :param value: the stored object
        :type value: `BytesIO` object
        :param timeout: the cache timeout in seconds (instance default
            when falsy)
        """
        self.cache.set(key, value, timeout=timeout or self.timeout)

    def delete(self, key):
        """Delete the specific key."""
        self.cache.delete(key)

    def flush(self):
        """Flush the cache."""
        self.cache.clear()
def _configure_null_cache(self):
    """Disable caching by installing a no-op NullCache backend."""
    from werkzeug.contrib.cache import NullCache
    null_backend = NullCache(default_timeout=self.cache_timeout)
    self.cache = null_backend
import os
from werkzeug.contrib.cache import RedisCache, NullCache
from config import config

# Pick the config class named by FLASK_CONFIG, falling back to 'default'.
typ = os.environ.get('FLASK_CONFIG')
if typ and typ in ['dev', 'test', 'prod']:
    Config = config[typ]
else:
    Config = config['default']

# Module-level cache singleton: Redis when caching is enabled, otherwise a
# no-op NullCache so callers never need to branch.
if Config.CACHE:
    cache = RedisCache(default_timeout=3000)
    # Prefix keys with the app name to avoid collisions on a shared Redis.
    cache.key_prefix = Config.APP_NAME
else:
    cache = NullCache()
class Cache(object):
    """Caching facade for dlstats that proxies a werkzeug cache backend.

    The backend is chosen from ``cache_url``: ``"simple"`` gives an
    in-memory cache, a ``redis...`` URL gives Redis, anything else disables
    caching via ``NullCache``.
    """

    DEFAULT_KEY_PREFIX = "dlstats"

    def __init__(self, cache_url="simple", cache_timeout=7200,
                 cache_threshold=20000, cache_prefix=None):  # 2H
        # cache_timeout: default TTL in seconds (7200 == 2 hours).
        self.cache_timeout = cache_timeout
        self.cache = None
        self.cache_prefix = cache_prefix or self.DEFAULT_KEY_PREFIX
        self.cache_threshold = cache_threshold
        if cache_url == "simple":
            self._configure_cache_simple()
        elif cache_url.startswith("redis"):
            self._configure_cache_redis(cache_url)
        else:
            self._configure_null_cache()

    def _configure_null_cache(self):
        # NullCache accepts and forgets everything -> caching disabled.
        from werkzeug.contrib.cache import NullCache
        self.cache = NullCache(default_timeout=self.cache_timeout)
        logger.warning("cache disable")

    def _configure_cache_simple(self):
        from werkzeug.contrib.cache import SimpleCache
        msg = "enable memory threshold[%s] cache_timeout[%s]"
        logger.warning(msg % (self.cache_threshold, self.cache_timeout))
        self.cache = SimpleCache(threshold=self.cache_threshold,
                                 default_timeout=self.cache_timeout)
        # Swap the backing store for an OrderedDict and expose its clear()
        # directly.  NOTE(review): reaches into SimpleCache's private
        # ``_cache`` -- presumably for deterministic ordering; confirm.
        self.cache._cache = OrderedDict()
        self.cache.clear = self.cache._cache.clear

    def _configure_cache_redis(self, url):
        from werkzeug.contrib.cache import RedisCache
        from redis import from_url
        msg = "enable redis cache url[%s] prefix[%s] cache_timeout[%s]"
        logger.info(msg % (url, self.cache_prefix, self.cache_timeout))
        # Pass a ready client to RedisCache instead of host/port.
        client = from_url(url)
        self.cache = RedisCache(host=client,
                                default_timeout=self.cache_timeout,
                                key_prefix=self.cache_prefix)

    @timeit("cache.get", stats_only=True)
    def get(self, key, **kwargs):
        "Proxy function for internal cache object."
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("get from cache key[%s]" % key)
        return self.cache.get(key, **kwargs)

    @timeit("cache.set", stats_only=True)
    def set(self, key, value, timeout=None):
        "Proxy function for internal cache object."
        # Empty/None keys are rejected rather than silently stored.
        if not key:
            raise Exception("Not valid key")
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("set cache key[%s]" % key)
        self.cache.set(key, value, timeout=timeout)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def delete_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete_many(*args, **kwargs)

    def clear(self):
        "Proxy function for internal cache object."
        self.cache.clear()

    def get_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get_many(*args, **kwargs)

    def set_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set_many(*args, **kwargs)
def _configure_null_cache(self):
    """Install a no-op cache backend and log that caching is off."""
    from werkzeug.contrib.cache import NullCache
    null_backend = NullCache(default_timeout=self.cache_timeout)
    self.cache = null_backend
    logger.warning("cache disable")
from pygments import highlight
from pygments.lexers import DiffLexer
from pygments.formatters import HtmlFormatter

# Counter is stdlib from Python 2.7; fall back to a local backport on 2.6.
try:
    from collections import Counter
except ImportError:
    from python26 import Counter

app = Flask(__name__)
app.config.from_object('settings')
# Optional override from a settings file named by the environment.
if 'GIX_SETTINGS' in os.environ:
    app.config.from_envvar('GIX_SETTINGS')

cache = RedisCache(key_prefix='cache-')
cache.clear()  # remove old locks

# Template filters and globals used by the views.
app.jinja_env.filters['prettydate'] = utils.prettydate
app.jinja_env.filters['to_date'] = utils.to_date
app.jinja_env.filters['prettybyte'] = utils.humanize_bytes
app.jinja_env.filters['gravatar'] = utils.to_gravatar
app.jinja_env.filters['strip_ref_name'] = utils.strip_ref_name
app.jinja_env.filters['short'] = utils.short
app.jinja_env.filters['commit_subject'] = utils.commit_subject
app.jinja_env.globals['join_paths'] = utils.join_paths
app.jinja_env.globals['back'] = utils.back

# Routes for the repository view (handler defined below these decorators).
@app.route('/git/<reponame>')
@app.route('/git/<reponame>/<revspec>')
def cache_response(response):
    """Store ``response`` in Redis under the request path, but only for
    requests that carry no query/form values; always return the response."""
    redis_client = Redis.from_url(current_app.config.get('REDISTOGO_URL'))
    cache = RedisCache(redis_client)
    if not request.values:
        cache.set(request.path, response)
    return response
def load_object(self, value):
    """Zlib-inflate ``value`` when possible, then delegate deserialization
    to ``RedisCache.load_object`` (handles compressed and raw payloads)."""
    try:
        payload = zlib.decompress(value)
    except (zlib.error, TypeError):
        # Not compressed (or not bytes): pass through unchanged.
        payload = value
    return RedisCache.load_object(self, payload)
def return_cached():
    """Short-circuit the request with a cached response when one exists;
    requests with query/form values are never served from cache."""
    redis_client = Redis.from_url(current_app.config.get('REDISTOGO_URL'))
    cache = RedisCache(redis_client)
    if request.values:
        return None
    cached = cache.get(request.path)
    if cached:
        return cached
    return None
class SimpleRedisCache(RedisCache):
    """Redis-backed cache with a memoizing ``cached`` view decorator."""

    def __init__(self, app):
        """Bind to ``app`` and build the backing RedisCache from its
        CACHE_REDIS_* settings."""
        self.app = app
        self.config = app.config
        self.default_timeout = self.config.get('CACHE_DEFAULT_TIMEOUT')
        kwargs = dict(
            host=self.config.get('CACHE_REDIS_HOST', 'localhost'),
            port=self.config.get('CACHE_REDIS_PORT', 6379),
        )
        password = self.config.get('CACHE_REDIS_PASSWORD')
        if password:
            kwargs['password'] = password
        db_number = self.config.get('CACHE_REDIS_DB')
        if db_number:
            kwargs['db'] = db_number
        self.cache = RedisCache(**kwargs)

    def get_key(self, *args, **kwargs):
        """Return a deterministic md5 hex key for the call arguments.

        Fix: ``hashlib.md5`` requires bytes, so the key material is encoded
        *before* hashing.  The original ``hashlib.md5(key)`` raises
        ``TypeError`` on Python 3 and then encoded the hex digest,
        producing a ``bytes`` key.
        """
        args_key = ''.join(str(args))
        kwargs_values_key = ''.join(str(kwargs.values()))
        kwargs_keys = ''.join(str(kwargs.keys()))
        key = '%s%s%s' % (args_key, kwargs_values_key, kwargs_keys)
        return hashlib.md5(key.encode('utf-8')).hexdigest()

    def set(self, key, value, timeout=None):
        """Store ``key`` -> ``value``; ``None`` timeout means the default."""
        if timeout is None:
            timeout = self.default_timeout
        self.cache.set(key, value, timeout)

    def get(self, key):
        """Return the cached value for ``key`` (``None`` on a miss)."""
        return self.cache.get(key)

    def delete(self, key):
        """Remove ``key`` from the cache."""
        return self.cache.delete(key)

    def clear(self):
        """Drop every entry in the backing cache."""
        self.cache.clear()

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """Decorator memoizing a view's return value in Redis.

        Cache-backend errors never break the view: on any failure the
        undecorated function's result is returned (errors re-raise in
        debug mode).
        """
        def decorator(f):
            @functools.wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)
                try:
                    cache_key = self.get_key(
                        key_prefix=key_prefix, *args, **kwargs)
                    rv = self.cache.get(cache_key)
                except Exception:
                    if current_app.debug:
                        raise
                    logger.exception(
                        "Exception possibly due to cache backend.")
                    return f(*args, **kwargs)
                if rv is None:
                    rv = f(*args, **kwargs)
                    try:
                        self.cache.set(
                            cache_key, rv,
                            timeout=decorated_function.cache_timeout)
                    except Exception:
                        if current_app.debug:
                            raise
                        logger.exception(
                            "Exception possibly due to cache backend.")
                        # Fix: return the already-computed value instead of
                        # invoking the view a second time as the original
                        # did (double side effects).
                        return rv
                return rv
            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            return decorated_function
        return decorator
def _configure_cache_simple(self):
    """Use an in-memory SimpleCache (default threshold) as the backend."""
    from werkzeug.contrib.cache import SimpleCache
    # threshold (left at its default here) caps how many items the cache
    # stores before pruning.
    self.cache = SimpleCache(default_timeout=self.cache_timeout)
# Module-level cache handle; only populated when --cache is passed.
cache = None

if __name__ == '__main__':
    parser = ArgumentParser(description="API for Riot API Challenge 2016 project")
    parser.add_argument("-d", "--debug", dest="debug", action="store_true",
                        help="sets the debug flag when running Flask")
    parser.add_argument("-p", "--public", dest="public", action="store_true",
                        help="allows the API to run publicly")
    parser.add_argument("-c", "--cache", dest="cache", action="store_true",
                        help="causes the API to cache responses and use local resources")
    parser.add_argument("-t", "--thread", dest="thread", action="store_true",
                        help="causes Flask to run in threaded mode")
    parser.add_argument("-k", "--api-key", dest="api_key", default="",
                        help="the Riot API key")
    args = parser.parse_args()
    # A Riot API key is mandatory.
    if args.api_key == "":
        print("No API key provided. Exiting.")
        sys.exit(1)
    if args.cache:
        # default_timeout=0: cached entries never expire.
        cache = RedisCache(default_timeout=0)

"""
===============================
Database Tools (These are ripped almost straight from the Flask docs on sqlite integration)
===============================
"""

def get_db():
    """Return the per-request sqlite connection, creating it on first use."""
    db = getattr(g, '_database', None)
    if db is None:
        db = g._database = sqlite3.connect(database_url)
        # Rows behave like mappings: columns accessible by name.
        db.row_factory = sqlite3.Row
    return db
def __init__(self):
    """Initialize the Redis-backed cache from ``IIIF_CACHE_REDIS_URL``."""
    super(ImageRedisCache, self).__init__()
    url = current_app.config['IIIF_CACHE_REDIS_URL']
    client = StrictRedis.from_url(url)
    self.cache = RedisCache(host=client)
class Cache(object):
    """Caching facade proxying a werkzeug backend.

    The backend is chosen from ``cache_url``: ``'simple'`` gives an
    in-memory cache, a ``redis...`` URL gives Redis, anything else disables
    caching via ``NullCache``.

    For a Redis cache::

        global cache
        cache = Cache('redis://192.168.0.188:6379', cache_timeout=300)

    For a simple in-memory cache::

        global cache
        cache = Cache('simple', cache_timeout=300)

    To disable caching, pass anything else (the default key prefix still
    applies)::

        global cache
        cache = Cache(None and 'off')
    """

    DEFAULT_KEY_PREFIX = 'mongrey-cache'

    def __init__(self, cache_url='simple', cache_timeout=300,
                 cache_prefix=None, ):
        # cache_timeout: default TTL seconds for the chosen backend.
        self.cache_timeout = cache_timeout
        self.cache = None
        self.cache_prefix = cache_prefix or self.DEFAULT_KEY_PREFIX
        if cache_url == 'simple':
            self._configure_cache_simple()
        elif cache_url.startswith('redis'):
            self._configure_cache_redis(cache_url)
        else:
            self._configure_null_cache()

    def _configure_null_cache(self):
        # NullCache accepts and forgets everything -> caching disabled.
        from werkzeug.contrib.cache import NullCache
        self.cache = NullCache(default_timeout=self.cache_timeout)

    def _configure_cache_simple(self):
        from werkzeug.contrib.cache import SimpleCache
        #threshold=500, cache_timeout=300
        #threshold : the maximum number of items the cache stores before
        self.cache = SimpleCache(default_timeout=self.cache_timeout)

    def _configure_cache_redis(self, url):
        from werkzeug.contrib.cache import RedisCache
        from redis import from_url
        #'cache_url': 'redis://localhost:6379',
        # Pass a ready client to RedisCache instead of host/port.
        client = from_url(url)
        self.cache = RedisCache(host=client,
                                default_timeout=self.cache_timeout,
                                key_prefix=self.cache_prefix
                                )

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, key, value, timeout=None):
        "Proxy function for internal cache object."
        # Empty/None keys are rejected rather than silently stored.
        if not key:
            raise Exception("Not valid key")
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("set cache key[%s]" % key)
        self.cache.set(key, value, timeout=timeout)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def delete_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete_many(*args, **kwargs)

    def clear(self):
        "Proxy function for internal cache object."
        self.cache.clear()

    def get_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get_many(*args, **kwargs)

    def set_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set_many(*args, **kwargs)
def dump_object(self, value):
    """Serialize ``value`` via ``RedisCache.dump_object`` and zlib-compress
    the payload, falling back to the uncompressed bytes on failure."""
    payload = RedisCache.dump_object(self, value)
    try:
        return zlib.compress(payload)
    except zlib.error:
        return payload