def __init__(self, table, params, key_prefix='', version=1, key_func=None):
    BaseCache.__init__(self, params, key_prefix, version, key_func)
    self._table = table

    class CacheEntry(object):
        _meta = Options(table)
    self.cache_model_class = CacheEntry

def __init__(self, table, params):
    BaseCache.__init__(self, params)
    self._table = table

    class CacheEntry(object):
        _meta = Options(table)
    self.cache_model_class = CacheEntry

def __init__(self, server, params):
    BaseCache.__init__(self, params)
    self.server = redis.Redis(
        host=server.split(':')[0],
        port=int(server.split(':')[1]),  # redis-py expects an integer port
        db=params.get('db', 0),
    )

def __init__(self, location, params):
    options = params.get('OPTIONS', {})
    if 'timeout' not in params and 'TIMEOUT' not in params:
        params['TIMEOUT'] = None
    if 'max_entries' not in params and 'MAX_ENTRIES' not in options:
        options['MAX_ENTRIES'] = -1
    BaseCache.__init__(self, params)

    self._host, self._port = get_host_and_port(location)
    self._database = options.get('DATABASE', None)
    self._username = options.get('USERNAME') or None
    self._password = options.get('PASSWORD') or None
    self._collection_name = options.get('COLLECTION', None) or 'django_cache'

    if self._max_entries is not None and self._max_entries <= 0:
        self._max_entries = None
    if self.default_timeout is not None and self.default_timeout <= 0:
        self.default_timeout = None

    if self.default_timeout is not None and self._max_entries is not None:
        raise ImproperlyConfigured(
            'MongoDBCache shall be configured either with TIMEOUT or MAX_ENTRIES, not both.')
    if self.default_timeout is None and self._max_entries is None:
        raise ImproperlyConfigured(
            'MongoDBCache shall be configured with TIMEOUT or MAX_ENTRIES. Specify one or the other.')

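# A minimal sketch of how this backend might be configured in settings.py, assuming a
# hypothetical dotted BACKEND path; the OPTIONS keys and the rule that TIMEOUT and
# MAX_ENTRIES are mutually exclusive come from the __init__ above.
CACHES = {
    'default': {
        'BACKEND': 'mongodb_cache.MongoDBCache',  # hypothetical import path
        'LOCATION': 'localhost:27017',
        'TIMEOUT': 300,  # expiry-based culling ...
        'OPTIONS': {
            'DATABASE': 'django_cache',
            'COLLECTION': 'django_cache',
            # 'MAX_ENTRIES': 10000,  # ... or size-based culling, but never both
        },
    },
}
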
def __init__(self, host, params, *args, **kwargs):
    BaseCache.__init__(self, params, *args, **kwargs)
    self.caches = {}
    for rule, cache in params.get('RULES', {}).items():
        # print('creating cache rule: %s -> %s' % (rule, cache))
        self.caches[rule] = lazy_cache(cache)
    self.default_cache = lazy_cache(params.get('DEFAULT', 'default'))

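# A minimal sketch of the settings this router-style backend appears to expect: RULES
# maps a rule to the name of another configured cache and DEFAULT names the fallback.
# The dotted BACKEND path and the rule semantics shown are assumptions.
CACHES = {
    'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
    'sessions': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
    'router': {
        'BACKEND': 'myproject.cache.RuleBasedCache',  # hypothetical import path
        'RULES': {'sessions': 'sessions'},            # rule -> cache alias (assumed semantics)
        'DEFAULT': 'default',
    },
}
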
def __init__(self, server, params, timeout=0.5):
    "Connect to Tokyo Tyrant, and set up cache backend."
    BaseCache.__init__(self, params)
    host, port = server.split(':')
    self.host = host
    self.port = int(port)
    self.timeout = timeout
    self.reset_cache_connection()

def __init__(self, location, params):
    BaseCache.__init__(self, params)
    self.location = location
    options = params.get('OPTIONS', {})
    self._host = options.get('HOST', 'localhost')
    self._port = options.get('PORT', 27017)
    self._database = options.get('DATABASE', 'django_cache')
    self._collection = location

def test_cache_key_memcache_validation(self):
    """
    Handle cache key creation correctly, see #17861.
    """
    name = b"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/" + chr(22) + chr(180)
    cache_key = storage.staticfiles_storage.cache_key(name)
    cache_validator = BaseCache({})
    cache_validator.validate_key(cache_key)
    self.assertEqual(cache_key, 'staticfiles:e95bbc36387084582df2a70750d7b351')

def test_cache_key_memcache_validation(self):
    """
    Handle cache key creation correctly, see #17861.
    """
    name = "/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/" + "\x16" + "\xb4"
    cache_key = storage.staticfiles_storage.cache_key(name)
    cache_validator = BaseCache({})
    cache_validator.validate_key(cache_key)
    self.assertEqual(cache_key, 'staticfiles:821ea71ef36f95b3922a77f7364670e7')

def __init__(self, location, params): """ location is not used but otherwise Django crashes. """ BaseCache.__init__(self, params) # looks like Amazon or boto has a maximum limit of 1000 for # get_all_keys() which is not documented, so we play it safe here. if self._max_entries > 1000: self._max_entries = 1000 self._options = params.get('OPTIONS', {}) # we use S3 compatible varibale names while django-storages doesn't _ACCESS_KEY_ID = self._options.get('ACCESS_KEY_ID', s3boto.ACCESS_KEY_NAME) # NB _ID vs. _NAME _SECRET_ACCESS_KEY = self._options.get('SECRET_ACCESS_KEY', s3boto.SECRET_KEY_NAME) # NB _ACCESS_KEY vs. _KEY_NAME _HEADERS = self._options.get('HEADERS', s3boto.HEADERS) _STORAGE_BUCKET_NAME = self._options.get('STORAGE_BUCKET_NAME', s3boto.STORAGE_BUCKET_NAME) _AUTO_CREATE_BUCKET = self._options.get('AUTO_CREATE_BUCKET', s3boto.AUTO_CREATE_BUCKET) _DEFAULT_ACL = self._options.get('DEFAULT_ACL', 'private') _BUCKET_ACL = self._options.get('BUCKET_ACL', _DEFAULT_ACL) _QUERYSTRING_AUTH = self._options.get('QUERYSTRING_AUTH', s3boto.QUERYSTRING_AUTH) _QUERYSTRING_EXPIRE = self._options.get('QUERYSTRING_EXPIRE', s3boto.QUERYSTRING_EXPIRE) _REDUCED_REDUNDANCY = self._options.get('REDUCED_REDUNDANCY', s3boto.REDUCED_REDUNDANCY) _LOCATION = self._options.get('LOCATION', s3boto.LOCATION) _CUSTOM_DOMAIN = self._options.get('CUSTOM_DOMAIN', s3boto.CUSTOM_DOMAIN) _CALLING_FORMAT = self._options.get('CALLING_FORMAT', s3boto.CALLING_FORMAT) _SECURE_URLS = self._options.get('SECURE_URLS', s3boto.SECURE_URLS) _FILE_NAME_CHARSET = self._options.get('FILE_NAME_CHARSET', s3boto.FILE_NAME_CHARSET) _FILE_OVERWRITE = self._options.get('FILE_OVERWRITE', s3boto.FILE_OVERWRITE) _IS_GZIPPED = self._options.get('IS_GZIPPED', s3boto.IS_GZIPPED) _PRELOAD_METADATA = self._options.get('PRELOAD_METADATA', s3boto.PRELOAD_METADATA) _GZIP_CONTENT_TYPES = self._options.get('GZIP_CONTENT_TYPES', s3boto.GZIP_CONTENT_TYPES) self._storage = s3boto.S3BotoStorage( bucket=_STORAGE_BUCKET_NAME, access_key=_ACCESS_KEY_ID, secret_key=_SECRET_ACCESS_KEY, bucket_acl=_BUCKET_ACL, acl=_DEFAULT_ACL, headers=_HEADERS, gzip=_IS_GZIPPED, gzip_content_types=_GZIP_CONTENT_TYPES, querystring_auth=_QUERYSTRING_AUTH, querystring_expire=_QUERYSTRING_EXPIRE, reduced_redundancy=_REDUCED_REDUNDANCY, custom_domain=_CUSTOM_DOMAIN, secure_urls=_SECURE_URLS, location=_LOCATION, file_name_charset=_FILE_NAME_CHARSET, preload_metadata=_PRELOAD_METADATA, calling_format=_CALLING_FORMAT )
def __init__(self, server, params): "Connect to Redis, and set up cache backend." BaseCache.__init__(self, params) if 'db' in params: db = int(params['db']) else: db = 1 self._cache = redis.Redis(server.split(':')[0], db=db) self._headers = {'zlib': '!zlib!', 'pickle': '!pickle!'}
def __init__(self, _location, params):
    """
    location is not used but otherwise Django crashes.
    """
    BaseCache.__init__(self, params)

    # Amazon and boto have a maximum limit of 1000 for get_all_keys(). See:
    # http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
    # "This implementation of the GET operation returns some or all (up to 1000)
    # of the objects in a bucket...."
    if self._max_entries > 1000:
        self._max_entries = 1000

    self._options = params.get('OPTIONS', {})

    # backward compatible syntax for s3cache users before v1.2 for easy upgrades.
    # In v1.2 we updated to the latest django-storages 1.1.8, which changed variable
    # names in a non-backward compatible fashion.
    if 'ACCESS_KEY' not in self._options.keys():
        self._options['ACCESS_KEY'] = self._options.get('ACCESS_KEY_ID', None)
    if 'SECRET_KEY' not in self._options.keys():
        self._options['SECRET_KEY'] = self._options.get('SECRET_ACCESS_KEY', None)
    if 'BUCKET_NAME' not in self._options.keys():
        self._options['BUCKET_NAME'] = self._options.get('STORAGE_BUCKET_NAME', None)

    # we use S3 compatible variable names while django-storages doesn't
    _bucket_name = self._options.get('BUCKET_NAME', None)
    _default_acl = self._options.get('DEFAULT_ACL', 'private')
    _bucket_acl = self._options.get('BUCKET_ACL', _default_acl)
    # in case it was not specified in OPTIONS default to 'private'
    self._options['BUCKET_ACL'] = _bucket_acl

    self._location = self._options.get('LOCATION', self._options.get('location', ''))
    # sanitize location by removing leading and trailing slashes
    self._options['LOCATION'] = self._location.strip('/')

    # S3BotoStorage wants lower case names
    lowercase_options = []
    for name, value in self._options.items():
        if value:  # skip None values
            lowercase_options.append((name.lower(), value))
    # this avoids RuntimeError: dictionary changed size during iteration
    # with Python 3 if we assign to the dictionary directly
    for _n, _v in lowercase_options:
        self._options[_n] = _v

    self._storage = s3boto.S3BotoStorage(
        acl=_default_acl,
        bucket=_bucket_name,
        **self._options
    )

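# A minimal sketch of a CACHES entry for this S3 backend (the dotted BACKEND path is
# hypothetical). The OPTIONS keys mirror the code above: the post-v1.2 names
# (ACCESS_KEY, SECRET_KEY, BUCKET_NAME) are preferred, while the older *_ID /
# STORAGE_* spellings are still accepted for backward compatibility.
CACHES = {
    'default': {
        'BACKEND': 's3cache.AmazonS3Cache',  # hypothetical import path
        'OPTIONS': {
            'ACCESS_KEY': 'your-access-key',
            'SECRET_KEY': 'your-secret-key',
            'BUCKET_NAME': 'your-cache-bucket',
            'LOCATION': 'cache/',  # leading/trailing slashes are stripped above
            # 'DEFAULT_ACL': 'private',  # already the default
        },
    },
}
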
def __init__(self, location, params):
    BaseCache.__init__(self, params)
    self.location = location
    options = params.get('OPTIONS', {})
    self._host = options.get('HOST', 'localhost')
    self._port = options.get('PORT', 27017)
    self._database = options.get('DATABASE', 'django_cache')
    self._rshosts = options.get('RSHOSTS')
    self._rsname = options.get('RSNAME')
    self._read_preference = options.get("READ_PREFERENCE")
    self._collection = location

def test_cache_key_memcache_validation(self):
    """
    Handle cache key creation correctly, see #17861.
    """
    name = "/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/" + chr(22) + chr(180)
    cache_key = storage.staticfiles_storage.cache_key(name)
    self.save_warnings_state()
    cache_validator = BaseCache({})
    warnings.filterwarnings('error', category=CacheKeyWarning)
    cache_validator.validate_key(cache_key)
    self.restore_warnings_state()
    self.assertEqual(cache_key, 'staticfiles:e95bbc36387084582df2a70750d7b351')

def __init__(self, table, params):
    BaseCache.__init__(self, params)
    self._table = table

    max_entries = params.get('max_entries', 300)
    try:
        self._max_entries = int(max_entries)
    except (ValueError, TypeError):
        self._max_entries = 300

    cull_frequency = params.get('cull_frequency', 3)
    try:
        self._cull_frequency = int(cull_frequency)
    except (ValueError, TypeError):
        self._cull_frequency = 3

def __init__(self, _, params):
    BaseCache.__init__(self, params)
    self._params = params
    self._cache = {}  # entry: (val, expire_time)
    try:
        self._max_entries = int(params.get('max_entries'))
    except (TypeError, ValueError):  # was a bare except; narrowed to conversion errors
        self._max_entries = MAX_KEYS
    self._call_seq = {}
    self._call_list = []
    self._lock = RWLock()

def __init__(self, location, params): """ location is not used but otherwise Django crashes. """ BaseCache.__init__(self, params) # Amazon and boto has a maximum limit of 1000 for get_all_keys(). See: # http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html # This implementation of the GET operation returns some or all (up to 1000) of the objects in a bucket.... if self._max_entries > 1000: self._max_entries = 1000 self._options = params.get('OPTIONS', {}) # backward compatible syntax for s3cache users before v1.2 for easy upgrades # in v1.2 we update to latest django-storages 1.1.8 which changes variable names # in non-backward compatible fashion if 'ACCESS_KEY' not in self._options.keys(): self._options['ACCESS_KEY'] = self._options.get('ACCESS_KEY_ID', None) if 'SECRET_KEY' not in self._options.keys(): self._options['SECRET_KEY'] = self._options.get('SECRET_ACCESS_KEY', None) if 'BUCKET_NAME' not in self._options.keys(): self._options['BUCKET_NAME'] = self._options.get('STORAGE_BUCKET_NAME', None) # we use S3 compatible varibale names while django-storages doesn't _BUCKET_NAME = self._options.get('BUCKET_NAME', None) _DEFAULT_ACL = self._options.get('DEFAULT_ACL', 'private') _BUCKET_ACL = self._options.get('BUCKET_ACL', _DEFAULT_ACL) # in case it was not specified in OPTIONS default to 'private' self._options['BUCKET_ACL'] = _BUCKET_ACL self._LOCATION = self._options.get('LOCATION', self._options.get('location', '')) # sanitize location by removing leading and traling slashes self._options['LOCATION'] = self._LOCATION.strip('/') # S3BotoStorage wants lower case names options_lowercase = dict() for name, value in self._options.items(): options_lowercase[name.lower()] = value self._options = options_lowercase self._storage = s3boto.S3BotoStorage( acl=_DEFAULT_ACL, bucket=_BUCKET_NAME, **self._options )
def __init__(self, host, params):
    BaseCache.__init__(self, params)
    self._cache = {}
    self._expire_info = {}

    max_entries = params.get('max_entries', 300)
    try:
        self._max_entries = int(max_entries)
    except (ValueError, TypeError):
        self._max_entries = 300

    cull_frequency = params.get('cull_frequency', 3)
    try:
        self._cull_frequency = int(cull_frequency)
    except (ValueError, TypeError):
        self._cull_frequency = 3

def __init__(self, server, params):
    from django.conf import settings

    BaseCache.__init__(self, params)

    if is_pylibmc:
        # use the binary protocol by default.
        binary = getattr(settings, "PYLIBMC_USE_BINARY", True)
        # default is to use compression for objects larger than 10kb.
        self.compress_at = getattr(settings, "PYLIBMC_COMPRESS_AT", 10 * 1024)
        # Manage behaviors.
        behaviors = getattr(settings, "PYLIBMC_BEHAVIORS", {})
        behaviors = dict(DEFAULT_BEHAVIORS, **behaviors)

        self._cache = memcache.Client(server.split(';'), binary=binary)
        self._cache.behaviors = behaviors
    else:
        self._cache = memcache.Client(server.split(';'))

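# A minimal sketch of the Django settings this pylibmc-aware __init__ reads via
# getattr(settings, ...); the first two values shown are the defaults from the code
# above, and PYLIBMC_BEHAVIORS is merged over DEFAULT_BEHAVIORS. The behavior key is
# just an illustrative pylibmc behavior, not something the backend requires.
PYLIBMC_USE_BINARY = True              # use the memcached binary protocol
PYLIBMC_COMPRESS_AT = 10 * 1024        # compress objects larger than 10 kB
PYLIBMC_BEHAVIORS = {'tcp_nodelay': True}
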
def __init__(self, server, params): """ Connect to Redis, and sets up cache backend. Additional params: key: ``db`` type: ``int`` default: ``1`` description: ``Specifies redis db number to use`` key: ``fail_silently`` type: ``int`` default: ``0`` description: ``When non-zero integer, swallows exceptions, emitting them as warnings instead. Allows cache server to go down without killing the site`` """ BaseCache.__init__(self, params) db = params.get('db', 1) try: db = int(db) except (ValueError, TypeError): db = 1 fail_silently = params.get('fail_silently', 0) try: self.fail_silently = bool(int(fail_silently)) except (ValueError, TypeError): self.fail_silently = False password = params.get('password', None) if ':' in server: host, port = server.split(':') try: port = int(port) except (ValueError, TypeError): port = 6379 else: host = 'localhost' port = 6379 try: self._cache = redis.Redis(host=host, port=port, db=db, password=password) except Exception as err: self.warn_or_error(err)
def __getattribute__(self, attr):
    if attr == 'cache':
        return BaseCache.__getattribute__(self, attr)
    # Don't wrap default_timeout in a functools.partial object (see wrap).
    # This ensures compatibility with django-parler 1.6 under Django 1.9.
    if attr == 'default_timeout':
        return self.cache.default_timeout
    return wrap(getattr(self.cache, attr), key(self.cache, attr))

def _init(self, server, params): """ OPTIONS have HOST PORT NAMESPACE SET BIN TIMEOUT """ BaseCache.__init__(self, params) self._server = server self._params = params if ':' in self.server: host, port = self.server.rsplit(':', 1) try: port = int(port) except (ValueError, TypeError): raise ImproperlyConfigured("port value must be an integer") else: host, port = None, None config = { "hosts": [ (host, port) ], "policies": { "shm": { "max_nodes": 16, "max_namespaces": 8 }, 'timeout': 10000 } } self._client = aerospike.client(config) #community edition does not need username/password if self.username is None and self.password is None: self._client.connect() #check for username/password for enterprise versions else: self._client.connect(self.username, self.password) logging.debug("Aerospike client connection object for %s initialized" % self.server)
def __init__(self, location, params):
    BaseCache.__init__(self, params)
    self.location = location
    options = params.get('OPTIONS', {})
    self._host = options.get('HOST', 'localhost')
    self._port = options.get('PORT', 27017)
    self._database = options.get('DATABASE', 'django_cache')
    self._rshosts = options.get('RSHOSTS')
    self._rsname = options.get('RSNAME')
    self._user = options.get('USER', None)
    self._password = options.get('PASSWORD', None)
    self._socket_timeout_ms = options.get('SOCKET_TIMEOUT_MS', None)
    self._connect_timeout_ms = options.get('CONNECT_TIMEOUT_MS', 20000)
    self.compression_level = options.get('COMPRESSION_LEVEL', 0)
    self._tag_sets = options.get('TAG_SETS', None)
    self._read_preference = options.get("READ_PREFERENCE")
    self._collection = location
    self.log = logging.getLogger(__name__)

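# A minimal sketch of a CACHES entry wiring up the replica-set aware options read in
# the __init__ above; the dotted BACKEND path is hypothetical and the values are
# placeholders (RSHOSTS format and READ_PREFERENCE spelling are assumptions).
CACHES = {
    'default': {
        'BACKEND': 'django_mongodb_cache.MongoDBCache',  # hypothetical import path
        'LOCATION': 'django_cache',                      # used as the collection name
        'OPTIONS': {
            'HOST': 'localhost',
            'PORT': 27017,
            'DATABASE': 'django_cache',
            'RSHOSTS': 'mongo1:27017,mongo2:27017',  # replica-set members
            'RSNAME': 'rs0',
            'READ_PREFERENCE': 'secondaryPreferred',
            'CONNECT_TIMEOUT_MS': 20000,
            'COMPRESSION_LEVEL': 0,
        },
    },
}
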
def __init__(self, table, params):
    BaseCache.__init__(self, params)
    self._table = table

    class CacheEntry(object):
        _meta = Options(table)
    self.cache_model_class = CacheEntry

    max_entries = params.get('max_entries', 300)
    try:
        self._max_entries = int(max_entries)
    except (ValueError, TypeError):
        self._max_entries = 300

    cull_frequency = params.get('cull_frequency', 3)
    try:
        self._cull_frequency = int(cull_frequency)
    except (ValueError, TypeError):
        self._cull_frequency = 3

def __init__(self, dir, params):
    BaseCache.__init__(self, params)

    max_entries = params.get('max_entries', 300)
    try:
        self._max_entries = int(max_entries)
    except (ValueError, TypeError):
        self._max_entries = 300

    cull_frequency = params.get('cull_frequency', 3)
    try:
        self._cull_frequency = int(cull_frequency)
    except (ValueError, TypeError):
        self._cull_frequency = 3

    self._dir = dir
    if not os.path.exists(self._dir):
        self._createdir()

def _init(self, server, params): """ OPTIONS have HOST PORT NAMESPACE SET BIN TIMEOUT """ BaseCache.__init__(self, params) self._server = server self._params = params if ':' in self.server: host, port = self.server.rsplit(':', 1) try: port = int(port) except (ValueError, TypeError): raise ImproperlyConfigured("port value must be an integer") else: host, port = None, None config = { "hosts": [ ( host, port ) ], "policies": { #aerospike timeout has no equivalent in django cache #"timeout": self.timeout # milliseconds } } self._client = aerospike.client(config) #community edition does not need username/password if self.username is None and self.password is None: self._client.connect() #check for username/password for enterprise versions else: self._client.connect(self.username, self.password)
def __init__(self, name, params, key_prefix='', version=1, key_func=None):
    BaseCache.__init__(self, params)
    global _caches, _expire_info, _locks
    self._cache = _caches.setdefault(name, {})
    self._expire_info = _expire_info.setdefault(name, {})
    self._lock = _locks.setdefault(name, RWLock())

    max_entries = params.get('max_entries', 300)
    try:
        self._max_entries = int(max_entries)
    except (ValueError, TypeError):
        self._max_entries = 300

    self._cull_frequency = params.get('cull_frequency', None)
    if self._cull_frequency is None:
        try:
            self._cull_frequency = int(max_entries)
        except (ValueError, TypeError):
            self._cull_frequency = 3

    self.key_prefix = smart_str(key_prefix)
    self.version = version
    self.key_func = key_func or default_key_func

def __init__(self, server, params): """ Connect to Redis, and set up cache backend. """ BaseCache.__init__(self, params) db = params.get('db', 1) try: db = int(db) except (ValueError, TypeError): db = 1 password = params.get('password', None) if ':' in server: host, port = server.split(':') try: port = int(port) except (ValueError, TypeError): port = 6379 else: host = 'localhost' port = 6379 self._cache = redis.Redis(host=host, port=port, db=db, password=password)
__FILENAME__ = cachebe
import hashlib

from django.core.cache import cache
from django.core.cache.backends.base import BaseCache

from brake.backends import BaseBackend


CACHE_PREFIX = 'rl:'
BASE_CACHE = BaseCache({})
IP_PREFIX = 'ip:'
KEY_TEMPLATE = 'func:%s:%s%s:%s%s'
PERIOD_PREFIX = 'period:'


class CacheBackend(BaseBackend):

    def get_ip(self, request):
        """This gets the IP we wish to use for ratelimiting.

        It defaults to 'REMOTE_ADDR'. It's recommended that you override
        this function if you're using loadbalancers or any kind of upstream
        proxy service to route requests to Django.
        """
        return request.META['REMOTE_ADDR']

    def _keys(self, func_name, request, ip=True, field=None, period=None):
        keys = []
        if ip:
            keys.append(KEY_TEMPLATE % (
                func_name, PERIOD_PREFIX, period,
                IP_PREFIX, self.get_ip(request)
            ))

def __init__(self, name, params):
    BaseCache.__init__(self, params)
    self._cache = _caches.setdefault(name, LRU(self._max_entries))
    self._lock = _locks.setdefault(name, RWLock())

def __init__(self, server, params):
    BaseCache.__init__(self, params)
    self._cache = memcache

def __init__(self, dir, params):
    BaseCache.__init__(self, params)
    self._dir = dir
    if not os.path.exists(self._dir):
        self._createdir()

def __init__(self, table, params):
    BaseCache.__init__(self, params)
    self._table = table

def __init__(self, server, params):
    BaseCache.__init__(self, params)
    self._wrapped_cache = PylibmcCacheClass(server, params)
    if not hasattr(settings, 'CACHE_PREFIX'):
        settings.CACHE_PREFIX = ''

def __init__(self, server, params):
    BaseCache.__init__(self, params)
    self._cache = memcache.Client(server.split(';'))

def __init__(self, name, params):
    BaseCache.__init__(self, params)
    self._cache = _caches.setdefault(name, {})
    self._expire_info = _expire_info.setdefault(name, {})
    self._lock = _locks.setdefault(name, RWLock())

def __init__(self, server, params):
    BaseCache.__init__(self, params)
    self._cache = uwsgi
    self._server = server

def __getattribute__(self, attr):
    if attr == 'cache':
        return BaseCache.__getattribute__(self, attr)
    return wrap(getattr(self.cache, attr), key(self.cache, attr))

def __init__(self, host, *args, **kwargs):
    BaseCache.__init__(self, *args, **kwargs)

def __init__(self, server, params): "Connect to Redis, and set up cache backend." BaseCache.__init__(self, params) self._cache = redis.Redis(server.split(':')[0], db=int(params.get('db', 1))) self._headers = {'zlib': '!zlib!', 'pickle': '!pickle!'}