def clean_cache_backend(self):
    """Validates that the specified cache backend is parseable by Django."""
    backend = self.cleaned_data['cache_backend'].strip()

    if backend:
        try:
            parse_backend_uri(backend)
        except InvalidCacheBackendError, e:
            raise forms.ValidationError(e)
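# A minimal sketch (not part of the snippets in this corpus) showing what the
# pre-Django-1.3 parse_backend_uri() helper returns for two old-style
# CACHE_BACKEND strings, per its (scheme, host, params) contract. The URIs
# below are placeholders.
from django.core.cache import parse_backend_uri

scheme, host, params = parse_backend_uri('memcached://127.0.0.1:11211/')
# scheme == 'memcached', host == '127.0.0.1:11211', params == {}

scheme, host, params = parse_backend_uri('locmem://?timeout=300')
# scheme == 'locmem', host == '', params == {'timeout': '300'}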
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Copied from Django 1.2.4 django.db.backends.creation.BaseDatabaseCreation

    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.
    """
    if verbosity >= 1:
        print "Creating test database '%s'..." % self.connection.alias

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name
    can_rollback = self.connection.creation._rollback_works()
    self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback

    call_command('syncdb', verbosity=verbosity, interactive=False,
                 database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri, cache
        from django.db import router

        if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
            _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
            call_command('createcachetable', cache_name,
                         database=self.connection.alias)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
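# Illustrative only: the table name below is a placeholder. With a database
# cache backend configured old-style, the host portion of the URI is the
# cache table name, which is what the snippet above passes to Django's
# createcachetable management command.
from django.core.cache import parse_backend_uri

# settings.py:
#   CACHE_BACKEND = 'db://my_cache_table'
scheme, cache_name, params = parse_backend_uri('db://my_cache_table')
# scheme == 'db', cache_name == 'my_cache_table'
# call_command('createcachetable', cache_name) then creates that table.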
def on_task_init(self, task_id, task):
    """This method is called before a task is executed.

    Does everything necessary for Django to work in a long-living,
    multiprocessing environment.

    """
    # See: http://groups.google.com/group/django-users/browse_thread/
    #       thread/78200863d0c07c6d/38402e76cf3233e8?hl=en&lnk=gst&
    #       q=multiprocessing#38402e76cf3233e8
    from django.db import connection
    connection.close()

    # Reset cache connection only if using memcached/libmemcached
    from django.core import cache
    # XXX At Opera we use a custom memcached backend that uses libmemcached
    # instead of libmemcache (cmemcache). Should find a better solution for
    # this, but for now "memcached" should probably be unique enough of a
    # string to not make problems.
    cache_backend = cache.settings.CACHE_BACKEND
    if hasattr(cache, "parse_backend_uri"):
        cache_scheme = cache.parse_backend_uri(cache_backend)[0]
    else:
        # Django <= 1.0.2
        cache_scheme = cache_backend.split(":", 1)[0]
    if "memcached" in cache_scheme:
        cache.cache.close()
def normalize_cache_backend(cache_backend, cache_name=DEFAULT_CACHE_ALIAS):
    """Return a new-style ``CACHES`` dictionary from any given cache_backend.

    Over time, Django has had support for two formats for a cache backend.
    The old-style :django:setting:`CACHE_BACKEND` string, and the new-style
    :django:setting:`CACHES` dictionary.

    This function will accept either as input and return a dictionary
    containing a single normalized entry for the given cache backend. This
    can be merged into the existing :django:setting:`CACHES` setting.

    If a :django:setting:`CACHES` dictionary is passed, the "default" cache
    will be the result.

    Args:
        cache_backend (dict or str):
            The new-style or old-style cache backend dictionary or str to
            normalize.

        cache_name (str):
            The name of the cache backend to look up in ``cache_backend``,
            if a new-style dictionary is provided.

    Returns:
        A new-style cache backend dictionary containing the single cache
        backend referenced. If there were any parsing issues, an empty
        dictionary will be returned.
    """
    if not cache_backend:
        return {}

    if isinstance(cache_backend, dict):
        backend_info = cache_backend.get(cache_name, {})
        backend_name = backend_info.get('BACKEND')

        if backend_name in RENAMED_BACKENDS:
            backend_info['BACKEND'] = RENAMED_BACKENDS[backend_name]

        return backend_info

    try:
        engine, host, params = parse_backend_uri(cache_backend)
    except InvalidCacheBackendError as e:
        logger.error(
            'Invalid cache backend (%s) found while loading '
            'siteconfig: %s',
            cache_backend, e)
        return {}

    if engine in BACKEND_CLASSES:
        engine = 'django.core.cache.backends.%s' % BACKEND_CLASSES[engine]
    else:
        engine = '%s.CacheClass' % engine

    defaults = {
        'BACKEND': engine,
        'LOCATION': host,
    }
    defaults.update(params)

    return defaults
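# Hypothetical calls for the helper above. RENAMED_BACKENDS and
# BACKEND_CLASSES are module-level mappings defined alongside it in its
# original module and are not shown in these snippets; assuming
# BACKEND_CLASSES maps 'memcached' to something like
# 'memcached.MemcachedCache', an old-style string normalizes roughly to:
normalize_cache_backend('memcached://localhost:11211/')
# -> {'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#     'LOCATION': 'localhost:11211'}

# A new-style dictionary simply has its named entry extracted (and renamed
# if the backend appears in RENAMED_BACKENDS):
normalize_cache_backend({
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
})
# -> {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}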
def get_redis_backend(backend_uri):
    # From django-redis-cache
    # This is temporary https://github.com/washort/nuggets/pull/1
    _, server, params = parse_backend_uri(backend_uri)
    db = params.pop('db', 1)
    try:
        db = int(db)
    except (ValueError, TypeError):
        db = 0
    try:
        socket_timeout = float(params.pop('socket_timeout'))
    except (KeyError, ValueError):
        socket_timeout = None
    password = params.pop('password', None)
    if ':' in server:
        host, port = server.split(':')
        try:
            port = int(port)
        except (ValueError, TypeError):
            port = 6379
    else:
        host = server
        port = 6379
    return redislib.Redis(host=host, port=port, db=db, password=password,
                          socket_timeout=socket_timeout)
def get_redis_backend():
    """Connect to redis from a string like CACHE_BACKEND."""
    # From django-redis-cache.
    _, server, params = parse_backend_uri(settings.REDIS_BACKEND)
    db = params.pop('db', 1)
    try:
        db = int(db)
    except (ValueError, TypeError):
        db = 1
    try:
        socket_timeout = float(params.pop('socket_timeout'))
    except (KeyError, ValueError):
        socket_timeout = None
    password = params.pop('password', None)
    if ':' in server:
        host, port = server.split(':')
        try:
            port = int(port)
        except (ValueError, TypeError):
            port = 6379
    else:
        host = 'localhost'
        port = 6379
    return redislib.Redis(host=host, port=port, db=db, password=password,
                          socket_timeout=socket_timeout)
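# A minimal usage sketch for the settings-driven variant above. The
# REDIS_BACKEND string is illustrative only; redislib is the redis-py
# package imported elsewhere in these snippets.
# settings.py:
#   REDIS_BACKEND = 'redis://localhost:6379?db=0&socket_timeout=0.5'
client = get_redis_backend()
client.set('example-key', 'example-value')
print client.get('example-key')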
def normalize_cache_backend(cache_backend):
    """Returns a new-style CACHES dictionary from any given cache_backend.

    Django has supported two formats for a cache backend. The old-style
    CACHE_BACKEND string, and the new-style CACHES dictionary.

    This function will accept either as input and return a cache backend in
    the form of a CACHES dictionary as a result.

    The result won't be a full CACHES setting with named cache entries
    inside. Rather, it will be a single cache entry.

    If a CACHES dictionary is passed, the "default" cache will be the
    result.
    """
    if not cache_backend:
        return {}

    if isinstance(cache_backend, dict):
        if DEFAULT_CACHE_ALIAS in cache_backend:
            return cache_backend[DEFAULT_CACHE_ALIAS]

        return {}

    try:
        engine, host, params = parse_backend_uri(cache_backend)
    except InvalidCacheBackendError, e:
        logging.error('Invalid cache backend (%s) found while loading '
                      'siteconfig: %s' % (cache_backend, e))
        return {}
def normalize_cache_backend(cache_backend, cache_name=DEFAULT_CACHE_ALIAS):
    """Return a new-style ``CACHES`` dictionary from any given cache_backend.

    Over time, Django has had support for two formats for a cache backend.
    The old-style :django:setting:`CACHE_BACKEND` string, and the new-style
    :django:setting:`CACHES` dictionary.

    This function will accept either as input and return a dictionary
    containing a single normalized entry for the given cache backend. This
    can be merged into the existing :django:setting:`CACHES` setting.

    If a :django:setting:`CACHES` dictionary is passed, the "default" cache
    will be the result.

    Args:
        cache_backend (dict or str):
            The new-style or old-style cache backend dictionary or str to
            normalize.

        cache_name (str):
            The name of the cache backend to look up in ``cache_backend``,
            if a new-style dictionary is provided.

    Returns:
        A new-style cache backend dictionary containing the single cache
        backend referenced. If there were any parsing issues, an empty
        dictionary will be returned.
    """
    if not cache_backend:
        return {}

    if isinstance(cache_backend, dict):
        backend_info = cache_backend.get(cache_name, {})
        backend_name = backend_info.get('BACKEND')

        if backend_name in RENAMED_BACKENDS:
            backend_info['BACKEND'] = RENAMED_BACKENDS[backend_name]

        return backend_info

    try:
        engine, host, params = parse_backend_uri(cache_backend)
    except InvalidCacheBackendError as e:
        logger.error('Invalid cache backend (%s) found while loading '
                     'siteconfig: %s', cache_backend, e)
        return {}

    if engine in BACKEND_CLASSES:
        engine = 'django.core.cache.backends.%s' % BACKEND_CLASSES[engine]
    else:
        engine = '%s.CacheClass' % engine

    defaults = {
        'BACKEND': engine,
        'LOCATION': host,
    }
    defaults.update(params)

    return defaults
def get_cache_server_list():
    """Returns configured memcached servers.

    Works with both old-style (CACHE_BACKEND) and new-style (CACHES)
    cache configurations.
    """
    engine = ''

    # Django >= 1.3
    #
    # If somebody uses CACHE_BACKEND instead of CACHES in 1.3, it
    # automatically converts their CACHE_BACKEND configuration to the
    # appropriate CACHES configuration. So we can safely use
    # parse_backend_conf here and it'll work with both old and new styles.
    try:
        from django.core.cache import parse_backend_conf, DEFAULT_CACHE_ALIAS
        engine, hosts, params = parse_backend_conf(DEFAULT_CACHE_ALIAS)
    # Django < 1.3
    #
    # No parse_backend_conf and DEFAULT_CACHE_ALIAS.
    except ImportError:
        from django.core.cache import parse_backend_uri
        engine, hosts, params = parse_backend_uri(settings.CACHE_BACKEND)

    if 'memcached' not in engine:
        raise ImproperlyConfigured(
            "django-memcached2 only works with memcached. Currently using '%s'"
            % engine)

    return hosts if isinstance(hosts, list) else hosts.split(';')
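# Illustrative only: placeholder hosts, not taken from the original sources.
# On pre-1.3 Django, the host portion of an old-style CACHE_BACKEND string
# holds semicolon-separated servers, which is why the helper above splits
# on ';' when it does not already receive a LOCATION list.
from django.core.cache import parse_backend_uri

engine, hosts, params = parse_backend_uri(
    'memcached://10.0.0.1:11211;10.0.0.2:11211/')
assert hosts.split(';') == ['10.0.0.1:11211', '10.0.0.2:11211']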
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.
    """
    if verbosity >= 1:
        print "Creating test database '%s'..." % self.connection.alias

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name
    can_rollback = self._rollback_works()
    self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback

    call_command('syncdb', verbosity=verbosity, interactive=False,
                 database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri
        _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
        call_command('createcachetable', cache_name)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def get_cache_server_list():
    """Returns configured memcached servers.

    Works with both old-style (CACHE_BACKEND) and new-style (CACHES)
    cache configurations.
    """
    engine = ''

    # Django >= 1.3
    #
    # If somebody uses CACHE_BACKEND instead of CACHES in 1.3, it
    # automatically converts their CACHE_BACKEND configuration to the
    # appropriate CACHES configuration. So we can safely use
    # parse_backend_conf here and it'll work with both old and new styles.
    try:
        from django.core.cache import parse_backend_conf, DEFAULT_CACHE_ALIAS
        engine, hosts, params = parse_backend_conf(DEFAULT_CACHE_ALIAS)
    # Django < 1.3
    #
    # No parse_backend_conf and DEFAULT_CACHE_ALIAS.
    except ImportError:
        from django.core.cache import parse_backend_uri
        engine, hosts, params = parse_backend_uri(settings.CACHE_BACKEND)

    if 'memcached' not in engine:
        raise ImproperlyConfigured(
            "django-memcached2 only works with memcached. Currently using '%s'"
            % engine)

    return hosts if isinstance(hosts, list) else hosts.split(';')
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.
    """
    if verbosity >= 1:
        print "Creating test database '%s'..." % self.connection.alias

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name

    # Confirm the feature set of the test database
    self.connection.features.confirm()

    # Report syncdb messages at one level lower than that requested.
    # This ensures we don't get flooded with messages during testing
    # (unless you really ask to be flooded)
    call_command("syncdb",
                 verbosity=max(verbosity - 1, 0),
                 interactive=False,
                 database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith("db://"):
        from django.core.cache import parse_backend_uri, cache
        from django.db import router

        if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
            _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
            call_command("createcachetable", cache_name,
                         database=self.connection.alias)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def get_caches():
    caches = []
    schema, hosts, params = parse_backend_uri(conf.BACKEND)
    for host in hosts.split(';'):
        caches.append(
            (host, get_cache('%s://%s?%s' % (schema, host, params))._cache))
    return caches
def monitor(request, format=None):
    # For each check, a boolean pass/fail status to show in the template
    status_summary = {}
    status = 200

    # Check all memcached servers
    scheme, servers, _ = parse_backend_uri(settings.CACHE_BACKEND)
    memcache_results = []
    status_summary['memcache'] = True
    if 'memcached' in scheme:
        hosts = servers.split(';')
        for host in hosts:
            ip, port = host.split(':')
            try:
                s = socket.socket()
                s.connect((ip, int(port)))
            except Exception, e:
                result = False
                status_summary['memcache'] = False
                monitor_log.critical('Failed to connect to memcached (%s): %s'
                                     % (host, e))
            else:
                result = True
            finally:
def monitor(request):
    # For each check, a boolean pass/fail to show in the template.
    status_summary = {}
    status = 200

    # Check all memcached servers.
    scheme, servers, _ = parse_backend_uri(settings.CACHE_BACKEND)
    memcache_results = []
    status_summary['memcache'] = True
    if 'memcached' in scheme:
        hosts = servers.split(';')
        for host in hosts:
            ip, port = host.split(':')
            try:
                s = socket.socket()
                s.connect((ip, int(port)))
            except Exception, e:
                result = False
                status_summary['memcache'] = False
                log.critical('Failed to connect to memcached (%s): %s'
                             % (host, e))
            else:
                result = True
            finally:
def get_redis_backend(backend_uri):
    _, server, params = parse_backend_uri(backend_uri)
    db = params.pop('db', 0)
    try:
        db = int(db)
    except (ValueError, TypeError):
        db = 0
    try:
        socket_timeout = float(params.pop('socket_timeout'))
    except (KeyError, ValueError):
        socket_timeout = None
    password = params.pop('password', None)
    if ':' in server:
        host, port = server.split(':')
        try:
            port = int(port)
        except (ValueError, TypeError):
            port = 6379
    else:
        host = server
        port = 6379
    return redislib.Redis(host=host, port=port, db=db, password=password,
                          socket_timeout=socket_timeout)
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.

    This method is overloaded to load up the SpatiaLite initialization
    SQL prior to calling the `syncdb` command.
    """
    if verbosity >= 1:
        print "Creating test database '%s'..." % self.connection.alias

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name

    # Confirm the feature set of the test database
    self.connection.features.confirm()

    # Need to load the SpatiaLite initialization SQL before running `syncdb`.
    self.load_spatialite_sql()
    call_command('syncdb', verbosity=verbosity, interactive=False,
                 database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri
        _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
        call_command('createcachetable', cache_name)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def redis_client(name):
    """Get a Redis client.

    Uses the name argument to lookup the connection string in the
    settings.REDIS_BACKENDS dict.
    """
    uri = settings.REDIS_BACKENDS[name]
    _, server, params = parse_backend_uri(uri)
    db = params.pop('db', 1)
    try:
        db = int(db)
    except (ValueError, TypeError):
        db = 1
    try:
        socket_timeout = float(params.pop('socket_timeout'))
    except (KeyError, ValueError):
        socket_timeout = None
    password = params.pop('password', None)
    if ':' in server:
        host, port = server.split(':')
        try:
            port = int(port)
        except (ValueError, TypeError):
            port = 6379
    else:
        host = server
        port = 6379
    return Redis(host=host, port=port, db=db, password=password,
                 socket_timeout=socket_timeout)
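# An illustrative settings value and call for redis_client() above; the
# alias name and URI are placeholders, not from the original sources.
# settings.py:
#   REDIS_BACKENDS = {
#       'master': 'redis://localhost:6379?db=1&socket_timeout=0.5',
#   }
master = redis_client('master')
master.incr('hit-counter')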
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.
    """
    if verbosity >= 1:
        print "Creating test database..."

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    settings.DATABASE_NAME = test_database_name
    self.connection.settings_dict["DATABASE_NAME"] = test_database_name

    can_rollback = self._rollback_works()
    settings.DATABASE_SUPPORTS_TRANSACTIONS = can_rollback
    self.connection.settings_dict["DATABASE_SUPPORTS_TRANSACTIONS"] = can_rollback

    call_command('syncdb', verbosity=verbosity, interactive=False)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri
        _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
        call_command('createcachetable', cache_name)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.

    This method is overloaded to load up the SpatiaLite initialization
    SQL prior to calling the `syncdb` command.
    """
    if verbosity >= 1:
        print "Creating test database '%s'..." % self.connection.alias

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name
    can_rollback = self._rollback_works()
    self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback

    # Need to load the SpatiaLite initialization SQL before running `syncdb`.
    self.load_spatialite_sql()
    call_command('syncdb', verbosity=verbosity, interactive=False,
                 database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri
        _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
        call_command('createcachetable', cache_name)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def get_caches(self):
    caches = []
    schema, hosts, params = parse_backend_uri(conf.BACKEND)
    for host in hosts.split(';'):
        try:
            caches.append(
                (host, get_cache('%s://%s?%s' % (schema, host, params))._cache))
        except Exception, e:
            self.logger.exception(e)
def openMongoDB(new=False):
    global _mongo
    if _mongo and new == False:
        return _mongo

    (type, servers, params) = parse_backend_uri(SESSION_BACKEND_MEMCACHE)
    if type != "memcached":
        raise InvalidCacheBackendError("Unsupported CACHE_BACKEND type")

    (host, port) = parse_backend_uri(SESSION_BACKEND_MONGO)[1].split(";")[0].split(":")
    mongo = pymongo.Connection(host, int(port))

    if new == False:
        _mongo = mongo
    return mongo
def jail(task_id, task_name, func, args, kwargs):
    """Wraps the task in a jail, which catches all exceptions, and
    saves the status and result of the task execution to the task meta backend.

    If the call was successful, it saves the result to the task result
    backend, and sets the task status to ``"DONE"``.

    If the call results in an exception, it saves the exception as the task
    result, and sets the task status to ``"FAILURE"``.

    :param task_id: The id of the task.
    :param task_name: The name of the task.
    :param func: Callable object to execute.
    :param args: List of positional args to pass on to the function.
    :param kwargs: Keyword arguments mapping to pass on to the function.

    :returns: the function return value on success, or
        the exception instance on failure.

    """
    ignore_result = getattr(func, "ignore_result", False)
    timer_stat = TaskTimerStats.start(task_id, task_name, args, kwargs)

    # See: http://groups.google.com/group/django-users/browse_thread/
    #       thread/78200863d0c07c6d/38402e76cf3233e8?hl=en&lnk=gst&
    #       q=multiprocessing#38402e76cf3233e8
    from django.db import connection
    connection.close()

    # Reset cache connection only if using memcached/libmemcached
    from django.core import cache
    # XXX At Opera we use a custom memcached backend that uses libmemcached
    # instead of libmemcache (cmemcache). Should find a better solution for
    # this, but for now "memcached" should probably be unique enough of a
    # string to not make problems.
    cache_backend = cache.settings.CACHE_BACKEND
    if hasattr(cache, "parse_backend_uri"):
        cache_scheme = cache.parse_backend_uri(cache_backend)[0]
    else:
        # Django <= 1.0.2
        cache_scheme = cache_backend.split(":", 1)[0]
    if "memcached" in cache_scheme:
        cache.cache.close()

    # Backend process cleanup
    default_backend.process_cleanup()

    try:
        result = func(*args, **kwargs)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception, exc:
        stored_exc = default_backend.mark_as_failure(task_id, exc)
        type_, _, tb = sys.exc_info()
        retval = ExceptionInfo((type_, stored_exc, tb))
def get_memcached_hosts():
    """
    Returns the hosts currently configured for memcached.
    """
    if not memcache:
        return None

    scheme, host, params = parse_backend_uri(settings.CACHE_BACKEND)

    if scheme == "memcached":
        return host.split(";")

    return None
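# Illustrative only: a placeholder CACHE_BACKEND value showing what the
# helper above returns for a two-server memcached configuration.
# settings.py:
#   CACHE_BACKEND = 'memcached://10.0.0.1:11211;10.0.0.2:11211/'
hosts = get_memcached_hosts()
# hosts == ['10.0.0.1:11211', '10.0.0.2:11211']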
def openMemcache(new=False):
    global _mc
    if _mc and new == False:
        return _mc

    (type, servers, params) = parse_backend_uri(SESSION_BACKEND_MEMCACHE)
    if type != "memcached":
        raise InvalidCacheBackendError("Unsupported CACHE_BACKEND type")

    mc = memcache.Client(servers.split(";"))

    if new == False:
        _mc = mc
    return mc
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.
    """
    if verbosity >= 1:
        print "Creating test database '%s'..." % self.connection.alias

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name
    can_rollback = self._rollback_works()
    self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback

    call_command('syncdb', verbosity=verbosity, interactive=False,
                 database=self.connection.alias, load_initial_data=False)

    # We need to then do a flush to ensure that any data installed by
    # custom SQL has been removed. The only test data should come from
    # test fixtures, or autogenerated from post_syncdb triggers.
    # This has the side effect of loading initial data (which was
    # intentionally skipped in the syncdb).
    call_command('flush', verbosity=verbosity, interactive=False,
                 database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri, cache
        from django.db import router

        if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
            _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
            call_command('createcachetable', cache_name,
                         database=self.connection.alias)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def redis_client(name):
    """Get a Redis client.

    Uses the name argument to lookup the connection string in the
    settings.REDIS_BACKENDS dict.
    """
    if name not in settings.REDIS_BACKENDS:
        raise RedisError(
            '{k} is not defined in settings.REDIS_BACKENDS'.format(k=name))

    uri = settings.REDIS_BACKENDS[name]
    _, server, params = parse_backend_uri(uri)
    db = params.pop('db', 1)
    try:
        db = int(db)
    except (ValueError, TypeError):
        db = 1
    try:
        socket_timeout = float(params.pop('socket_timeout'))
    except (KeyError, ValueError):
        socket_timeout = None
    password = params.pop('password', None)
    if ':' in server:
        host, port = server.split(':')
        try:
            port = int(port)
        except (ValueError, TypeError):
            port = 6379
    else:
        host = server
        port = 6379

    redis = Redis(host=host, port=port, db=db, password=password,
                  socket_timeout=socket_timeout)
    try:
        # Make a cheap call to verify we can connect.
        redis.exists('dummy-key')
    except ConnectionError:
        raise RedisError(
            'Unable to connect to redis backend: {k}'.format(k=name))
    return RetryForeverRedis(redis)
def normalize_cache_backend(cache_backend, cache_name=DEFAULT_CACHE_ALIAS):
    """Returns a new-style CACHES dictionary from any given cache_backend.

    Django has supported two formats for a cache backend. The old-style
    CACHE_BACKEND string, and the new-style CACHES dictionary.

    This function will accept either as input and return a cache backend in
    the form of a CACHES dictionary as a result.

    The result won't be a full CACHES setting with named cache entries
    inside. Rather, it will be a single cache entry.

    If a CACHES dictionary is passed, the "default" cache will be the
    result.
    """
    if not cache_backend:
        return {}

    if isinstance(cache_backend, dict):
        backend_info = cache_backend.get(cache_name, {})
        backend_name = backend_info.get('BACKEND')

        if backend_name in RENAMED_BACKENDS:
            backend_info['BACKEND'] = RENAMED_BACKENDS[backend_name]

        return backend_info

    try:
        engine, host, params = parse_backend_uri(cache_backend)
    except InvalidCacheBackendError as e:
        logging.error('Invalid cache backend (%s) found while loading '
                      'siteconfig: %s' % (cache_backend, e))
        return {}

    if engine in BACKEND_CLASSES:
        engine = 'django.core.cache.backends.%s' % BACKEND_CLASSES[engine]
    else:
        engine = '%s.CacheClass' % engine

    defaults = {
        'BACKEND': engine,
        'LOCATION': host,
    }
    defaults.update(params)

    return defaults
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.
    """
    if verbosity >= 1:
        print ("Creating test database '%s'..." % self.connection.alias)

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name
    can_rollback = self._rollback_works()
    self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback

    call_command('syncdb', verbosity=verbosity, interactive=False,
                 database=self.connection.alias, load_initial_data=False)

    # We need to then do a flush to ensure that any data installed by
    # custom SQL has been removed. The only test data should come from
    # test fixtures, or autogenerated from post_syncdb triggers.
    # This has the side effect of loading initial data (which was
    # intentionally skipped in the syncdb).
    call_command('flush', verbosity=verbosity, interactive=False,
                 database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri, cache
        from django.db import router

        if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
            _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
            call_command('createcachetable', cache_name,
                         database=self.connection.alias)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def redis_client(name):
    """Get a Redis client.

    Uses the name argument to lookup the connection string in the
    settings.REDIS_BACKENDS dict.
    """
    if name not in settings.REDIS_BACKENDS:
        raise RedisError(
            '{k} is not defined in settings.REDIS_BACKENDS'.format(k=name))

    uri = settings.REDIS_BACKENDS[name]
    _, server, params = parse_backend_uri(uri)
    db = params.pop('db', 1)
    try:
        db = int(db)
    except (ValueError, TypeError):
        db = 1
    try:
        socket_timeout = float(params.pop('socket_timeout'))
    except (KeyError, ValueError):
        socket_timeout = None
    password = params.pop('password', None)
    if ':' in server:
        host, port = server.split(':')
        try:
            port = int(port)
        except (ValueError, TypeError):
            port = 6379
    else:
        host = server
        port = 6379

    redis = Redis(host=host, port=port, db=db, password=password,
                  socket_timeout=socket_timeout)
    try:
        # Make a cheap call to verify we can connect.
        redis.exists('dummy-key')
    except ConnectionError:
        raise RedisError(
            'Unable to connect to redis backend: {k}'.format(k=name))
    return redis
def test_mysterious_contribution(self, urlopen):
    scheme, servers, _ = parse_backend_uri(settings.CACHE_BACKEND)
    if 'dummy' in scheme:
        raise SkipTest()

    urlopen.return_value = self.urlopener('VERIFIED')

    key = "%s%s:%s" % (settings.CACHE_PREFIX, 'contrib', self.item)

    data = {'txn_id': 100,
            'payer_email': '*****@*****.**',
            'receiver_email': '*****@*****.**',
            'mc_gross': '99.99',
            'item_number': self.item,
            'payment_status': 'Completed'}
    response = self.client.post(self.url, data)
    assert isinstance(response, http.HttpResponseServerError)
    eq_(cache.get(key), 1)

    cache.set(key, 10, 1209600)
    response = self.client.post(self.url, data)
    assert isinstance(response, http.HttpResponse)
    eq_(cache.get(key), None)
def create_test_db(self, verbosity=1, autoclobber=False):
    """
    Creates a test database, prompting the user for confirmation if the
    database already exists. Returns the name of the test database created.
    """
    if verbosity >= 1:
        print "Creating test database '%s'..." % self.connection.alias

    test_database_name = self._create_test_db(verbosity, autoclobber)

    self.connection.close()
    self.connection.settings_dict["NAME"] = test_database_name

    # Confirm the feature set of the test database
    self.connection.features.confirm()

    # Report syncdb messages at one level lower than that requested.
    # This ensures we don't get flooded with messages during testing
    # (unless you really ask to be flooded)
    call_command('syncdb', verbosity=max(verbosity - 1, 0),
                 interactive=False, database=self.connection.alias)

    if settings.CACHE_BACKEND.startswith('db://'):
        from django.core.cache import parse_backend_uri, cache
        from django.db import router

        if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
            _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
            call_command('createcachetable', cache_name,
                         database=self.connection.alias)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = self.connection.cursor()

    return test_database_name
def get_cache():
    """Get a Jinja2 bytecode cache which uses the configured Django cache."""
    from django.conf import settings
    from django.core import cache

    cache_backend = cache.parse_backend_uri(settings.CACHE_BACKEND)[0]
    memcached_client = None
    if cache_backend == 'memcached':
        # We can get the actual memcached client object itself. This will
        # avoid the Django problem of storing binary data (as Django tries to
        # coerce everything to Unicode).
        # Here, we look for either `cache.cache._cache` or
        # `cache.cache._client`; I believe there is some discrepancy between
        # different versions of Django and where they put this.
        memcached_client = getattr(
            cache.cache, '_cache', getattr(cache.cache, '_client', None))

    memcached_client = memcached_client or B64CacheClient(cache.cache)

    return jinja2.MemcachedBytecodeCache(memcached_client)
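# A minimal sketch of wiring the bytecode cache above into a Jinja2
# environment. The template directory is a placeholder, and B64CacheClient
# is assumed to be the wrapper defined alongside get_cache() in its
# original module.
import jinja2

env = jinja2.Environment(
    loader=jinja2.FileSystemLoader('templates'),
    bytecode_cache=get_cache(),
)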
def __getitem__(self, name):
    try:
        return self.connections[name]
    except KeyError:
        for alias, backend in settings.REDIS_BACKENDS.items():
            if alias != name:
                continue
            _, server, params = parse_backend_uri(backend)
            try:
                socket_timeout = float(params.pop('socket_timeout'))
            except (KeyError, ValueError):
                socket_timeout = None
            try:
                db = int(params.pop('db'))
            except (KeyError, ValueError):
                db = 0
            password = params.pop('password', None)
            if ':' in server:
                host, port = server.split(':')
                try:
                    port = int(port)
                except (ValueError, TypeError):
                    port = 6379
            else:
                host = 'localhost'
                port = 6379
            self.connections[alias] = redislib.Redis(
                host=host,
                port=port,
                db=db,
                password=password,
                socket_timeout=socket_timeout
            )
            break
        else:
            raise ImproperlyConfigured('No backend called %s' % name)
        return self.connections[name]
from django.db import models
from django.db.models import signals
from django.db.models.sql import query
from django.utils import encoding, translation

log = logging.getLogger('caching')

FOREVER = 0
NO_CACHE = -1
CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '')
FLUSH = CACHE_PREFIX + ':flush:'
CACHE_COUNT_TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', None)

CACHE_BACKEND = getattr(settings, 'CACHE_BACKEND', '')
if CACHE_BACKEND:
    scheme, _, _ = parse_backend_uri(CACHE_BACKEND)
else:
    scheme, _, args = parse_backend_conf('default')
    CACHE_COUNT_TIMEOUT = args.get('TIMEOUT', None)
cache.scheme = scheme


class CachingManager(models.Manager):

    # Tell Django to use this manager when resolving foreign keys.
    use_for_related_fields = True

    def get_query_set(self):
        return CachingQuerySet(self.model)

    def contribute_to_class(self, cls, name):
def is_dict(obj):
    return isinstance(obj, collections.Mapping)


if not JINJA2_CACHE_ENABLED or JINJA2_CACHE_BACKEND is None:
    cache = None
    bccache = None
else:
    cache_kwargs = {}
    if is_dict(JINJA2_CACHE_BACKEND) and 'BACKEND' in JINJA2_CACHE_BACKEND:
        cache_kwargs = copy.deepcopy(JINJA2_CACHE_BACKEND)
        cache_backend = cache_kwargs.pop('BACKEND')
        cache = get_cache(cache_backend, **cache_kwargs)
    elif isinstance(JINJA2_CACHE_BACKEND, basestring):
        scheme, host, params = parse_backend_uri(JINJA2_CACHE_BACKEND)
        if scheme == 'locmem':
            from .backends.locmem import LocMemCache
            cache = LocMemCache('templates', {'timeout': None})
            bccache = MemcachedBytecodeCache(cache)
        else:
            cache = get_cache(JINJA2_CACHE_BACKEND)
            if hasattr(cache, '_cache'):
                bccache = MemcachedBytecodeCache(cache._cache)
            else:
                bccache = MemcachedBytecodeCache(cache)

if not JINJA2_CACHE_ENABLED or cache is None:
    cache_loader = None
else:
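# An illustrative pair of settings values accepted by the configuration
# block above. The JINJA2_* names come from that snippet; the URI and the
# dictionary contents are placeholders, not from the original sources.
JINJA2_CACHE_ENABLED = True

# Old-style string form, parsed with parse_backend_uri():
JINJA2_CACHE_BACKEND = 'memcached://127.0.0.1:11211/'

# Or the new-style dictionary form, where BACKEND is popped off and the
# remaining keys are passed through to Django's get_cache():
JINJA2_CACHE_BACKEND = {
    'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
    'LOCATION': '127.0.0.1:11211',
}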
from django.conf import settings

try:
    memcache_settings = settings.memcache_settings
except:
    mdefault = "memcached://127.0.0.1:11211/"
    memcache_settings = {
        "CACHE_BACKEND": mdefault,
        "PAGE_CACHE_BACKEND": mdefault,
    }
    memcache_settings.update({"NINGX_CACHE_BACKEND": mdefault})

try:
    import pylibmc
    scheme, host, params = parse_backend_uri(
        memcache_settings.get("CACHE_BACKEND"))
    model_cache = pylibmc.Client([host])
except Exception, e:
    import memcache
    scheme, host, params = parse_backend_uri(
        memcache_settings.get("CACHE_BACKEND"))
    model_cache = memcache.Client([host])

try:
    import pylibmc
    scheme, host, params = parse_backend_uri(
        memcache_settings.get("NINGX_CACHE_BACKEND"))
    nginx_cache = pylibmc.Client([host])
except Exception, e:
    except ZeroDivisionError:
        stats['hit_rate'] = stats['get_hits']
    return stats


# views.py
from django.http import Http404
from django.shortcuts import render_to_response
from django.conf import settings
from django.template import RequestContext
from django.core.cache import parse_backend_uri
from django_memcached.util import get_memcached_stats
from django.contrib.auth.decorators import user_passes_test

_, hosts, _ = parse_backend_uri(settings.CACHE_BACKEND)
SERVERS = hosts.split(';')


def server_list(request):
    statuses = zip(range(len(SERVERS)), SERVERS,
                   map(get_memcached_stats, SERVERS))
    context = {
        'statuses': statuses,
    }
    return render_to_response(
        'memcached/server_list.html',
        context,
        context_instance=RequestContext(request)
    )


def server_status(request, index):
    try:
    def emit(self, record):
        pass


log = logging.getLogger('caching')
log.setLevel(logging.INFO)
log.addHandler(NullHandler())

FOREVER = 0
NO_CACHE = -1
CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '')
FLUSH = CACHE_PREFIX + ':flush:'
CACHE_SET_TIMEOUT = getattr(settings, 'CACHE_SET_TIMEOUT', 1)

scheme, _, _ = parse_backend_uri(settings.CACHE_BACKEND)
cache.scheme = scheme


class CachingManager(models.Manager):

    # Tell Django to use this manager when resolving foreign keys.
    use_for_related_fields = True

    def get_query_set(self):
        return CachingQuerySet(self.model)

    def contribute_to_class(self, cls, name):
        signals.post_save.connect(self.post_save, sender=cls)
        signals.post_delete.connect(self.post_delete, sender=cls)
        return super(CachingManager, self).contribute_to_class(cls, name)
class NullHandler(logging.Handler):

    def emit(self, record):
        pass


log = logging.getLogger('caching')
log.setLevel(logging.INFO)
log.addHandler(NullHandler())

FOREVER = 0
NO_CACHE = -1
CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '')
FLUSH = CACHE_PREFIX + ':flush:'

scheme, _, _ = parse_backend_uri(settings.CACHE_BACKEND)
cache.scheme = scheme


class CachingManager(models.Manager):

    # Tell Django to use this manager when resolving foreign keys.
    use_for_related_fields = True

    def get_query_set(self):
        return CachingQuerySet(self.model)

    def contribute_to_class(self, cls, name):
        signals.post_save.connect(self.post_save, sender=cls)
        signals.post_delete.connect(self.post_delete, sender=cls)
        return super(CachingManager, self).contribute_to_class(cls, name)
def get_caches():
    caches = []
    schema, hosts, params = parse_backend_uri(conf.BACKEND)
    for host in hosts.split(';'):
        caches.append(
            (host, get_cache('%s://%s?%s' % (schema, host, params))._cache))
    return caches
from django.conf import settings
from django.core.cache import parse_backend_uri

try:
    import redis as redislib
except:
    redislib = None

connections = {}

if not connections:
    # don't set this repeatedly
    for alias, backend in settings.REDIS_BACKENDS.items():
        _, server, params = parse_backend_uri(backend)
        try:
            socket_timeout = float(params.pop('socket_timeout'))
        except (KeyError, ValueError):
            socket_timeout = None
        password = params.pop('password', None)
        if ':' in server:
            host, port = server.split(':')
            try:
                port = int(port)
            except (ValueError, TypeError):
                port = 6379
        else:
            host = 'localhost'
            port = 6379
        connections[alias] = redislib.Redis(host=host, port=port,
                                            db=0, password=password,
from django.conf import settings
from django.core.cache import parse_backend_uri

try:
    import redis as redislib
except:
    redislib = None

connections = {}

if not connections:
    # don't set this repeatedly
    for alias, backend in settings.REDIS_BACKENDS.items():
        _, server, params = parse_backend_uri(backend)
        try:
            socket_timeout = float(params.pop('socket_timeout'))
        except (KeyError, ValueError):
            socket_timeout = None
        password = params.pop('password', None)
        if ':' in server:
            host, port = server.split(':')
            try:
                port = int(port)
            except (ValueError, TypeError):
                port = 6379
        else:
            host = 'localhost'
            port = 6379
        connections[alias] = redislib.Redis(host=host, port=port,