def install(self, params):
    from django.core.cache import _create_cache
    params.update({
        'KEY_PREFIX': '',
        'VERSION': '',
        'KEY_FUNCTION': self.make_key,
    })
    self._cache = _create_cache(self.backend, **params)
def __init__(self, location, params):
    # Do a .copy() dance to avoid modifying `OPTIONS` in the actual
    # settings object.
    options = params['OPTIONS'].copy()
    actual_backend = options.pop('ACTUAL_BACKEND')
    self._real_cache = _create_cache(actual_backend, **options)
    self.requests_log = []
    self._setup_proxies()
def create_cache(backend, **kwargs):
    """
    Create a cache backend. This custom function is used to avoid deprecation warnings.
    """
    cache = _create_cache(backend, **kwargs)
    # Some caches -- python-memcached in particular -- need to do a cleanup at the
    # end of a request cycle. If not implemented in a particular backend,
    # cache.close is a no-op.
    signals.request_finished.connect(cache.close)
    return cache
def get_cache(backend, **kwargs):
    """
    Compatibility wrapper for getting Django's cache backend instance.
    """
    cache = _create_cache(backend, **kwargs)
    # Some caches -- python-memcached in particular -- need to do a cleanup
    # at the end of a request cycle. If not implemented in a particular
    # backend, cache.close() is a no-op.
    signals.request_finished.connect(cache.close)
    return cache
def __new__(cls, name):
    # Create a new cache instance.
    cache_instance = _create_cache('default')
    # Derive this cache's version from the cache instance name.
    version = cls.get_version(name)
    cache_instance.version = version
    setattr(cache_instance, 'reset', types.MethodType(delete_cache, cache_instance))
    return cache_instance
def test_get_no_retry_on_broken_connection(self, mock_get):
    from django.core.cache import _create_cache
    cache_backend = _create_cache(self.cache_name)

    # Raise a connection error (retcode 3).
    mock_get.side_effect = MemcachedError()
    mock_get.side_effect.retcode = 3

    cache_backend.get('test')
    self.assertEqual(mock_get.call_count, 1)
def get_cache_backend():
    """
    Compatibility wrapper for getting Django's cache backend instance.
    """
    from django.core.cache import _create_cache
    cache = _create_cache(settings.DBTEMPLATES_CACHE_BACKEND)
    # Some caches -- python-memcached in particular -- need to do a cleanup at
    # the end of a request cycle. If not implemented in a particular backend,
    # cache.close is a no-op.
    signals.request_finished.connect(cache.close)
    return cache
def get_cache(backend, **kwargs):
    """
    Django cache backend compatibility.
    """
    try:
        from django.core.cache import _create_cache
    except ImportError:
        # Django < 1.7 exposes get_cache() instead of _create_cache().
        from django.core.cache import get_cache as _get_cache
        return _get_cache(backend, **kwargs)
    cache = _create_cache(backend, **kwargs)
    signals.request_finished.connect(cache.close)
    return cache
def __init__(self, location, params):
    custom_params = params.copy()
    options = custom_params['OPTIONS'].copy()
    custom_params['BACKEND'] = options.pop('ACTUAL_BACKEND')
    custom_params['OPTIONS'] = options
    # Patch back in the `location` for the memcached backend to pick up.
    custom_params['LOCATION'] = location
    self._real_cache = _create_cache(custom_params['BACKEND'], **custom_params)
    self.requests_log = []
    self._setup_proxies()
def __new__(cls, name):
    # Create a new cache instance.
    cache_instance = _create_cache('default')
    # Derive the version-number key for this cache from its name.
    version_key = cls.get_version_key(name)
    cache_instance.version_key = version_key
    # Look up the current version for that key.
    version = cache.get(version_key)
    if version is None:
        # If none is found, generate a unique version number and store it without
        # expiry (memcached caps timeouts at 30 days; redis?).
        version = cls.get_unique_version()
        cache.set(version_key, version, None)
    cache_instance.version = version
    return cache_instance
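The version-key pattern above supports bulk invalidation: bumping the stored version makes every entry written under the old version unreachable. A minimal sketch of that idea against Django's default cache, assuming a hypothetical reset() helper that is not part of the original snippet:

def reset(version_key):
    """Bump the shared version so entries cached under the old one expire unused."""
    from django.core.cache import cache
    try:
        cache.incr(version_key)
    except ValueError:
        # Version key missing (e.g. evicted): start over with a fresh version,
        # stored without expiry.
        cache.set(version_key, 1, None)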
def get_cache(backend, **kwargs):
    from django.core import cache as dj_cache
    # django.VERSION is a 5-tuple (e.g. (1, 6, 5, 'final', 0)), so compare
    # against (1, 7) rather than (1, 6).
    if django.VERSION < (1, 7):
        cache = dj_cache.get_cache(backend, **kwargs)
    elif django.VERSION >= (3, 2):
        cache = dj_cache.caches.create_connection(backend)
    else:
        # Django 1.7 to 3.1
        cache = dj_cache._create_cache(backend, **kwargs)
    # Some caches -- python-memcached in particular -- need to do a cleanup at the
    # end of a request cycle. If not implemented in a particular backend,
    # cache.close is a no-op. Not available in Django 1.5.
    if hasattr(cache, "close"):
        signals.request_finished.connect(cache.close)
    return cache
def get_cache(backend, **kwargs):
    """
    Compatibility wrapper for getting Django's cache backend instance.

    Original source:
    https://github.com/vstoykov/django-imagekit/commit/c26f8a0538778969a64ee471ce99b25a04865a8e
    """
    try:
        from django.core.cache import _create_cache
    except ImportError:
        # Django < 1.7
        from django.core.cache import get_cache as _get_cache
        return _get_cache(backend, **kwargs)
    cache = _create_cache(backend, **kwargs)
    # Some caches -- python-memcached in particular -- need to do a cleanup at the
    # end of a request cycle. If not implemented in a particular backend,
    # cache.close is a no-op.
    signals.request_finished.connect(cache.close)
    return cache
def get_cache(backend, **kwargs):
    """
    Compatibility wrapper for getting Django's cache backend instance.

    Original source:
    https://github.com/vstoykov/django-imagekit/commit/c26f8a0538778969a64ee471ce99b25a04865a8e
    """
    from django.core import cache
    # Django < 1.7
    if not hasattr(cache, '_create_cache'):
        return cache.get_cache(backend, **kwargs)
    cache = cache._create_cache(backend, **kwargs)
    # Some caches -- python-memcached in particular -- need to do a cleanup at the
    # end of a request cycle. If not implemented in a particular backend,
    # cache.close is a no-op. Not available in Django 1.5.
    if hasattr(cache, 'close'):
        signals.request_finished.connect(cache.close)
    return cache
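The wrappers above all expose the same call shape regardless of Django version. A hedged usage sketch, assuming one of them is importable from a local compat module and using an illustrative locmem backend (neither name comes from the original snippets):

from compat import get_cache

# Keyword arguments mirror a CACHES entry; the helper wires cache.close()
# to request_finished where the backend provides it.
cache = get_cache(
    'django.core.cache.backends.locmem.LocMemCache',
    LOCATION='images_cache',
    TIMEOUT=300,
)
cache.set('thumbnail:42', '/media/cache/42.jpg')
assert cache.get('thumbnail:42') == '/media/cache/42.jpg'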
def django_cache_add_xdist_key_prefix(request):
    skip_if_no_django()
    xdist_prefix = getattr(request.config, 'slaveinput', {}).get('slaveid')
    if not xdist_prefix:
        return

    from django.conf import settings
    from django.core.cache import caches, _create_cache

    for cache_alias, cache_settings in settings.CACHES.items():
        new_prefix = xdist_prefix + '_' + cache_settings.get('KEY_PREFIX', '')
        cache_settings['KEY_PREFIX'] = new_prefix
        new_cache = copy(settings.CACHES[cache_alias])
        new_cache['KEY_PREFIX'] = new_prefix
        settings.CACHES[cache_alias] = new_cache
        if getattr(caches._caches, 'caches', None) is None:
            continue
        caches._caches.caches[cache_alias] = _create_cache(cache_alias)
def __init__(self, name, server):
    self._server = server
    self._name = name
    self._conf = settings.GET_CACHE_CONF(server)
    settings.CACHES[self._name] = self._conf
    self._cache = _create_cache(self._name)
def import_caches(import_dir="./cached_data"):
    """
    Import the cached data from the file system into the current cache servers.
    """
    keys_file = os.path.join(import_dir, "cached_keys.json")
    session_cache_dir = os.path.join(import_dir, "session")
    user_cache_dir = os.path.join(import_dir, "user")
    data_cache_dir = os.path.join(import_dir, "data")

    settings.CACHES["__import_session"] = {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': session_cache_dir,
        "OPTIONS": {
            "MAX_ENTRIES": 40000000
        }
    }
    settings.CACHES["__import_user"] = {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': user_cache_dir,
        "OPTIONS": {
            "MAX_ENTRIES": 40000000
        }
    }
    settings.CACHES["__import_data"] = {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': data_cache_dir,
        "OPTIONS": {
            "MAX_ENTRIES": 40000000
        }
    }

    session_cache = _create_cache("__import_session")
    user_cache = _create_cache("__import_user")
    data_cache = _create_cache("__import_data")

    expired_keys = []
    session_keys = 0
    guest_session_keys = 0
    user_keys = 0
    data_keys = 0
    process_seq_keys = 0
    processed_keys = 0
    session_report = {}

    with open(keys_file, 'rt') as f:
        data = f.readline()
        while data:
            data = data.strip()
            try:
                if data:
                    #print("Processing key:{}".format(data))
                    datatype, cachekey, expireat = json.loads(data)
                    expireat = timezone.make_aware(
                        datetime.strptime(
                            expireat, "%Y-%m-%d %H:%M:%S.%f")) if expireat else None
                    if expireat and expireat < timezone.now():
                        expired_keys.append("{}, expireat:{}".format(
                            cachekey, expireat.strftime("%Y-%m-%d %H:%M:%S.%f")))
                        continue
                    value = None
                    if datatype == "session":
                        value = session_cache.get(cachekey["key"])
                        if value.get(USER_SESSION_KEY):
                            _save_session(cachekey["key"], value, expireat)
                            session_report[value.get(
                                USER_SESSION_KEY)] = session_report.get(
                                    value.get(USER_SESSION_KEY), 0) + 1
                            session_keys += 1
                        elif len(value.keys()) == 0:
                            session_report["EMPTY"] = session_report.get(
                                "EMPTY", 0) + 1
                            guest_session_keys += 1
                        else:
                            keys = [k for k in value.keys()]
                            keys.sort()
                            keys = tuple(keys)
                            session_report[keys] = session_report.get(keys, 0) + 1
                            session_report["GUEST"] = session_report.get(
                                "GUEST", 0) + 1
                            guest_session_keys += 1
                    elif datatype == "user":
                        value = user_cache.get(cachekey["key"])
                        _save_user(cachekey["key"], value, expireat)
                        user_keys += 1
                    elif datatype == "data":
                        value = data_cache.get(cachekey["key"])
                        _save_data(cachekey["key"], value, expireat)
                        data_keys += 1
                    elif datatype == "process_seq":
                        value = session_cache.get(cachekey["key"])
                        _save_process_seq(cachekey["key"], value, expireat)
                        process_seq_keys += 1
                    #print("{} : {}={}".format(datatype, cachekey["key"], value))
            finally:
                processed_keys += 1
                if processed_keys % 1000 == 0:
                    print(
                        "Processed {} keys, Session Keys : {} , Guest Session Keys : {} , User Keys : {} , Process Seq Keys : {} , Data Keys : {} , Expired Keys : {}"
                        .format(processed_keys, session_keys, guest_session_keys,
                                user_keys, process_seq_keys, data_keys,
                                len(expired_keys)))
            data = f.readline()

    print("""Imported Cache Data:
    Total Imported Keys : {}
    Imported Session Keys : {}
    Ignored Guest Session Keys : {}
    Imported User Keys : {}
    Imported Data Keys : {}
    Imported Process Seq Keys : {}
    Imported Expired Keys : {}
""".format(processed_keys, session_keys, guest_session_keys, user_keys,
           data_keys, process_seq_keys, len(expired_keys)))
    print(" =========================================================================")
    print(" Expired Session Keys : {}".format(session_keys))
    for k, v in session_report.items():
        if k in ("GUEST", "EMPTY"):
            continue
        print(" User({}) : {}".format(k, v))
    print(" User(GUEST) : {}".format(session_report.get("GUEST", 0)))
    print(" User(EMPTY) : {}".format(session_report.get("EMPTY", 0)))
    if expired_keys:
        print(" =========================================================================")
        print(" Expired Keys")
        for key in expired_keys:
            print(" {}".format(key))
def export_caches(source_caches,
                  default_cache_key_re,
                  user_cache_key_re,
                  session_cache_key_re,
                  exported_dir="./cached_data",
                  process_seq_key_re=process_seq_key_v1_re):
    """
    Export cache data from the source cache servers to the file system; if no
    source cache servers are given, use the current cache servers as the source.

    Only memcached and redis are supported.
    """
    if not source_caches:
        source_caches = []
        if settings.CACHE_SERVER:
            source_caches.append(settings.CACHE_SERVER)
        if settings.CACHE_USER_SERVER:
            for s in settings.CACHE_USER_SERVER:
                if s not in source_caches:
                    source_caches.append(s)
        if settings.CACHE_SESSION_SERVER:
            for s in settings.CACHE_SESSION_SERVER:
                if s not in source_caches:
                    source_caches.append(s)

    print("Source caches = {}".format(source_caches))

    unrecognized_keys = []
    session_keys = 0
    guest_session_keys = 0
    user_keys = 0
    data_keys = 0
    process_seq_keys = 0
    empty_keys = []

    # Prepare the file system cache to store the cached data.
    if os.path.exists(exported_dir):
        raise Exception("Exported folder {} already exists".format(exported_dir))
    os.makedirs(exported_dir, exist_ok=True)
    keys_file = os.path.join(exported_dir, "cached_keys.json")
    session_cache_dir = os.path.join(exported_dir, "session")
    os.mkdir(session_cache_dir)
    user_cache_dir = os.path.join(exported_dir, "user")
    os.mkdir(user_cache_dir)
    data_cache_dir = os.path.join(exported_dir, "data")
    os.mkdir(data_cache_dir)

    settings.CACHES["__exported_session"] = {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': session_cache_dir,
        "OPTIONS": {
            "MAX_ENTRIES": 40000000
        }
    }
    settings.CACHES["__exported_user"] = {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': user_cache_dir,
        "OPTIONS": {
            "MAX_ENTRIES": 40000000
        }
    }
    settings.CACHES["__exported_data"] = {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': data_cache_dir,
        "OPTIONS": {
            "MAX_ENTRIES": 40000000
        }
    }

    session_cache = _create_cache("__exported_session")
    user_cache = _create_cache("__exported_user")
    data_cache = _create_cache("__exported_data")

    processed_keys = 0
    with open(keys_file, 'wt') as f:
        cacheid = 0
        for source_cache in source_caches:
            cachename = "__sourcecache{}".format(cacheid)
            cacheid += 1
            cache_server_client = CacheServerClient.create_server_client(
                cachename, source_cache)
            try:
                for cachekey, value, expireat in cache_server_client.items():
                    try:
                        if not value:
                            empty_keys.append("{} expireat:{}".format(
                                cachekey,
                                expireat.strftime("%Y-%m-%d %H:%M:%S.%f")
                                if expireat else None))
                            continue

                        m = session_cache_key_re.search(cachekey)
                        if m:
                            if value.get(USER_SESSION_KEY):
                                f.write(
                                    json.dumps([
                                        "session",
                                        m.groupdict(),
                                        expireat.strftime("%Y-%m-%d %H:%M:%S.%f")
                                        if expireat else None
                                    ]))
                                f.write("\n")
                                if not session_cache.add(
                                        m.group("key"), value, timeout=None):
                                    raise Exception(
                                        "Failed to save the key({}) to file system cache."
                                        .format(m.group("key")))
                                session_keys += 1
                            else:
                                guest_session_keys += 1
                            continue

                        m = user_cache_key_re.search(cachekey)
                        if m:
                            f.write(
                                json.dumps([
                                    "user",
                                    m.groupdict(),
                                    expireat.strftime("%Y-%m-%d %H:%M:%S.%f")
                                    if expireat else None
                                ]))
                            f.write("\n")
                            if not user_cache.add(
                                    m.group("key"), value, timeout=None):
                                raise Exception(
                                    "Failed to save the key({}) to file system cache."
                                    .format(m.group("key")))
                            user_keys += 1
                            continue

                        m = process_seq_key_re.search(cachekey)
                        if m:
                            f.write(
                                json.dumps([
                                    "process_seq",
                                    m.groupdict(),
                                    expireat.strftime("%Y-%m-%d %H:%M:%S.%f")
                                    if expireat else None
                                ]))
                            f.write("\n")
                            if not session_cache.add(
                                    m.group("key"), value, timeout=None):
                                raise Exception(
                                    "Failed to save the key({}) to file system cache."
                                    .format(m.group("key")))
                            process_seq_keys += 1
                            continue

                        m = default_cache_key_re.search(cachekey)
                        if m:
                            f.write(
                                json.dumps([
                                    "data",
                                    m.groupdict(),
                                    expireat.strftime("%Y-%m-%d %H:%M:%S.%f")
                                    if expireat else None
                                ]))
                            f.write("\n")
                            if not data_cache.add(
                                    m.group("key"), value, timeout=None):
                                raise Exception(
                                    "Failed to save the key({}) to file system cache."
                                    .format(m.group("key")))
                            data_keys += 1
                            continue

                        unrecognized_keys.append("{}={}, expireat:{}".format(
                            cachekey, value,
                            expireat.strftime("%Y-%m-%d %H:%M:%S.%f")
                            if expireat else None))
                    finally:
                        processed_keys += 1
                        if processed_keys % 1000 == 0:
                            print(
                                "Processed {} keys, Session Keys : {} , Guest Session Keys : {} , User Keys : {} , Process Seq Keys : {} , Data Keys : {}"
                                .format(processed_keys, session_keys,
                                        guest_session_keys, user_keys,
                                        process_seq_keys, data_keys))
            finally:
                cache_server_client.close()

    print("""Exported Cached Data:
    Total Exported Keys : {}
    Exported Session Keys : {}
    Ignored Guest Session Keys : {}
    Exported User Keys : {}
    Exported Data Keys : {}
    Exported Process Seq Keys : {}
""".format(processed_keys, session_keys, guest_session_keys, user_keys,
           data_keys, process_seq_keys))
    if empty_keys:
        print(" =========================================================================")
        print(" Empty Keys")
        for key in empty_keys:
            print(" {}".format(key))
    if unrecognized_keys:
        print(" =========================================================================")
        print(" Unrecognized Keys")
        for key in unrecognized_keys:
            print(" {}".format(key))
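A hedged invocation sketch for the export/import pair above. The regular expressions are illustrative placeholders rather than the project's real key formats; each must expose a named group called key, since both functions rely on m.group("key"):

import re

export_caches(
    source_caches=None,  # fall back to the cache servers configured in settings
    default_cache_key_re=re.compile(r"^(?P<key>.+)$"),
    user_cache_key_re=re.compile(r"^user_(?P<key>.+)$"),
    session_cache_key_re=re.compile(r"^session_(?P<key>.+)$"),
    exported_dir="./cached_data",
)
import_caches(import_dir="./cached_data")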