class BaseConfig:
    """Base class all configuration inherits from."""

    # Debug enabled by default; deployment configs should override this.
    DEBUG = True
    # No-op cache backend by default.
    CACHE = NullCache()
    CACHE_TIMEOUT = 0
    # Must be set by concrete configurations before deployment.
    SECRET_KEY = None
def apply_cache_strategy(self, **kwargs):
    """Install a :class:`NullCache` as this object's cache strategy.

    Keyword Args:
        default_timeout: default timeout in seconds; any falsy value
            (missing, ``None``, ``0``) falls back to ``0``.
    """
    timeout = kwargs.get('default_timeout') or 0
    self.cache_strategy = NullCache(default_timeout=timeout)
def __init__(self, resource, name=None, logger=None, cache=None, dispatcher=None):
    """ Initiate the XMLResolver

    :param resource: Resources the resolver should parse
    :param name: Name of the resolver (defaults to "repository")
    :param logger: Logger instance; a named logger is created when omitted
    :param cache: Werkzeug-style cache backend (defaults to NullCache)
    :param dispatcher: Collection dispatcher; a default one is built when omitted
    """
    if dispatcher is None:
        # Build a default dispatcher around a single default inventory.
        inventory_collection = TextInventoryCollection(identifier="defaultTic")
        ti = TextInventory("default")
        ti.parent = inventory_collection
        ti.set_label("Default collection", "eng")
        self.dispatcher = CollectionDispatcher(inventory_collection)
    else:
        self.dispatcher = dispatcher

    self.__inventory__ = None
    self.__texts__ = []

    # Fix: resolve the name fallback BEFORE building the logger. The
    # previous order called logging.getLogger(None) (the root logger)
    # whenever no name was given, instead of a "repository"-named logger.
    self.name = name if name else "repository"
    self.logger = logger if logger else logging.getLogger(self.name)

    if cache is None:
        cache = NullCache()
    self.__cache__ = cache

    self.__resources__ = resource

    self.inventory_cache_key = _cache_key("Nautilus", self.name, "Inventory", "Resources")
    self.texts_parsed_cache_key = _cache_key("Nautilus", self.name, "Inventory", "TextsParsed")
def connect(self):
    """Return a memcached-backed cache, falling back to a no-op cache.

    Reads the ``MEMCACHED`` mapping from the app config and uses it as
    keyword arguments for :class:`MemcachedCache`.

    :return: a :class:`MemcachedCache`, or a :class:`NullCache` when the
        configuration is missing or the client cannot be built.
    """
    try:
        conf = app.config.get('MEMCACHED')
        cache = MemcachedCache(**conf)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        cache = NullCache()
    return cache
def test_set_cache(self):
    """Test BaseModel.set_cache"""
    # Default class-level cache is a FileSystemCache.
    self.assertIsInstance(BaseModel._cache, FileSystemCache)
    old_cache = BaseModel._cache
    BaseModel.set_cache(NullCache())
    self.assertIsInstance(BaseModel._cache, NullCache)
    # Setting the cache on a subclass creates its own class attribute and
    # must not touch the base class: BaseModel keeps the NullCache while
    # SampleModel now holds the original FileSystemCache.
    SampleModel.set_cache(old_cache)
    self.assertIsInstance(BaseModel._cache, NullCache)
    self.assertIsInstance(SampleModel._cache, FileSystemCache)
def github():
    """Provides the 'github' module with everything mocked"""
    original_cache = github_module.cache
    original_get_issues = github_module.get_issues
    github_module.cache = NullCache()
    github_module.get_issues = lambda self, *args, **kwargs: []
    try:
        yield github_module
    finally:
        # Restore the originals even when the consuming test raises, so a
        # failing test cannot leak the mocks into subsequent tests.
        github_module.cache = original_cache
        github_module.get_issues = original_get_issues
def decorated_function(*args, **kwargs):
    # Derive the cache key from the request when none was supplied.
    if cache_key is None:
        ck = 'view:%s?%s' % (request.path, request.query_string)
    else:
        ck = cache_key
    # Companion key storing the absolute expiry timestamp of the entry.
    ek = '%s.expires' % ck
    response = None
    expires = None
    # pylibmc will throw an error when trying to communicate with memcached, not upon a bad connection
    try:
        cached = app.cache.get_many(ck, ek)
        if cached[0] is not None:
            # Cache hit: serve the stored response.
            response = cached[0]
            app.logger.debug(
                'Cache hit for %s, returning cached content, expires=%d',
                ck, cached[1])
            if cached[1] is not None and set_expires:
                expires = cached[1]
        else:
            # Cache miss: build the response and store it with its expiry.
            response = f(*args, **kwargs)
            expires = int(time() + timeout)
            app.cache.set_many({
                ck: response,
                ek: expires
            }, timeout=timeout)
            app.logger.debug(
                'Cache miss for %s, refreshed content and saved in cache, expires=%d',
                ck, expires)
        if set_expires and expires is not None:
            response.headers['Expires'] = email.utils.formatdate(
                expires)
    except Exception as e:
        # Any cache failure degrades to a plain (uncached) response.
        app.logger.error('Cache error, returning miss: %s', e)
        if response is None:
            response = f(*args, **kwargs)
        # Re-initialize the cache a bounded number of times before
        # permanently degrading to a NullCache.
        if (type(app.cache) is not NullCache):
            if (app.cache_retry < MAX_CACHE_RETRIES):
                app.cache_retry += 1
                app.logger.error('Attempting to restore cache')
                _setup_cache(app)
            else:
                app.logger.error(
                    'Exhausted retry attempts. Converting cache to NullCache. Fix ASAP!'
                )
                app.cache = NullCache()
    return response
def _setup_cache(app):
    """Attach a cache backend to *app*.

    A NullCache dummy is installed while testing or when the
    ``DISABLE_CACHE`` environment variable is set.  Otherwise a
    memcached client is built with binary communications (required for
    SASL).  pylibmc gives no warning on connection but throws when the
    cache is actually used; in that scenario a few retries are made
    elsewhere and the cache is eventually replaced with a NullCache.
    """
    # First call: start the communication-failure retry counter.
    if not hasattr(app, 'cache_retry'):
        app.cache_retry = 0

    caching_disabled = (app.config['TESTING']
                        or os.environ.get('DISABLE_CACHE', None) is not None)
    if caching_disabled:
        app.cache = NullCache()
        app.logger.debug('Cache initialized as NullCache')
        return

    MEMCACHED_SERVERS = os.environ.get('MEMCACHEDCLOUD_SERVERS',
                                       '127.0.0.1:11211')
    try:
        memcached_client = Client(
            servers=MEMCACHED_SERVERS.split(','),
            username=os.environ.get('MEMCACHEDCLOUD_USERNAME'),
            password=os.environ.get('MEMCACHEDCLOUD_PASSWORD'),
            binary=True)
        app.cache = MemcachedCache(memcached_client)
        app.logger.debug(
            'Cache initialized as MemcachedCache with servers: %s',
            MEMCACHED_SERVERS)
    except Exception as e:
        # very unlikely to have an exception here. pylibmc mostly throws when trying to communicate, not connect
        app.logger.error('Error initializing MemcachedCache: %s', e)
        app.logger.error('Initializing cache as NullCache. Fix ASAP!')
        app.cache = NullCache()
def _commandline(repositories, port=8000, host="127.0.0.1", debug=False,
                 cache=None, cache_path="./cache", redis=None):
    """ Run a CTS API from command line.

    .. warning:: This function should not be used in the production context

    :param repositories: Resources the resolver should parse
    :param port: Port to listen on
    :param host: Host address to bind to
    :param debug: Run the Flask development server in debug mode
    :param cache: Cache backend name ("redis", "filesystem", else no cache)
    :param cache_path: Directory used by the filesystem cache
    :param redis: Redis connection information for the redis cache
    :return:
    """
    # Select the nautilus cache backend from the requested type.
    if cache == "redis":
        nautilus_cache = RedisCache(redis)
        cache_type = "redis"
    elif cache == "filesystem":
        nautilus_cache = FileSystemCache(cache_path)
        cache_type = "simple"
    else:
        nautilus_cache = NullCache()
        cache_type = "simple"

    app = Flask("Nautilus")
    if debug:
        app.logger.setLevel(logging.INFO)

    resolver = NautilusCTSResolver(resource=repositories)
    nautilus = FlaskNautilus(
        app=app,
        resolver=resolver
        #parser_cache=WerkzeugCacheWrapper(nautilus_cache),
        #logger=None
    )
    nautilus.resolver.parse()

    if debug:
        # Flask development server for debugging.
        app.run(debug=debug, port=port, host=host)
    else:
        # Serve through Tornado otherwise; start(0) forks one process per CPU.
        app.debug = debug
        http_server = HTTPServer(WSGIContainer(app))
        http_server.bind(port=port, address=host)
        http_server.start(0)
        IOLoop.current().start()
def __init__(self, api_uri: str, matchbox_path: str, ignition_dict: dict,
             extra_selector_dict=None):
    """
    :param api_uri: http://1.1.1.1:5000
    :param matchbox_path: /var/lib/matchbox
    :param ignition_dict: ignition.yaml
    :param extra_selector_dict: optional extra matchbox selectors
    """
    self.api_uri = api_uri
    # Expose the API URI to anything reading the process environment.
    os.environ["API_URI"] = self.api_uri
    self.matchbox_path = matchbox_path
    self.ignition_dict = ignition_dict
    self._reporting_ignitions()
    self.extra_selector = extra_selector_dict if extra_selector_dict else {}
    # inMemory cache for http queries; a TTL of 0 disables caching entirely.
    ttl = EC.sync_cache_ttl
    if ttl > 0:
        self._cache_query = SimpleCache(default_timeout=ttl)
    else:
        self._cache_query = NullCache()
def _set_cache(self):
    # Use a no-op cache while the application is under test.
    if self.app.config['TESTING']:
        self.cache = NullCache()
    else:
        import_me = self.app.config['CACHE_TYPE']
        # Short names refer to the bundled backends package.
        if '.' not in import_me:
            import_me = 'flaskext.cache.backends.' + \
                        import_me
        cache_obj = import_string(import_me)
        # Copy the configured args so the factory cannot mutate the config.
        cache_args = self.app.config['CACHE_ARGS'][:]
        cache_options = dict(default_timeout= \
                             self.app.config['CACHE_DEFAULT_TIMEOUT'])
        if self.app.config['CACHE_OPTIONS']:
            cache_options.update(self.app.config['CACHE_OPTIONS'])
        self.cache = cache_obj(self.app, cache_args, cache_options)
        # The backend factory must produce a werkzeug cache object.
        if not isinstance(self.cache, BaseCache):
            raise TypeError("Cache object must subclass "
                            "werkzeug.contrib.cache.BaseCache")
def __init__(self, app, config_prefix='AUTHLIB', **kwargs):
    """Deprecated cache wrapper built from the Flask app configuration.

    Reads ``<prefix>_TYPE`` style settings via ``self._config`` and
    registers the resulting cache in ``app.extensions``.
    """
    deprecate(DEPRECATE_MESSAGE, 0.7)
    self.config_prefix = config_prefix
    self.config = app.config

    cache_type = self._config('type')
    kwargs['default_timeout'] = self._config('DEFAULT_TIMEOUT', 100)

    if cache_type == 'null':
        self.cache = NullCache()
    elif cache_type == 'simple':
        kwargs['threshold'] = self._config('threshold', 500)
        self.cache = SimpleCache(**kwargs)
    elif cache_type == 'memcache':
        kwargs['servers'] = self._config('MEMCACHED_SERVERS')
        kwargs['key_prefix'] = self._config('KEY_PREFIX', None)
        self.cache = MemcachedCache(**kwargs)
    elif cache_type == 'redis':
        kwargs['host'] = self._config('REDIS_HOST', 'localhost')
        kwargs['port'] = self._config('REDIS_PORT', 6379)
        kwargs['password'] = self._config('REDIS_PASSWORD', None)
        kwargs['db'] = self._config('REDIS_DB', 0)
        kwargs['key_prefix'] = self._config('KEY_PREFIX', None)
        self.cache = RedisCache(**kwargs)
    elif cache_type == 'filesystem':
        kwargs['threshold'] = self._config('threshold', 500)
        self.cache = FileSystemCache(self._config('DIR'), **kwargs)
    else:
        raise RuntimeError('`%s` is not a valid cache type!' % cache_type)

    # Register the cache on the app so other extensions can find it.
    app.extensions[config_prefix.lower() + '_cache'] = self.cache
# -*- coding: utf-8 -*- """ Utilities for interacting with the filesystem. """ import os from functools import wraps from werkzeug.contrib.cache import SimpleCache, NullCache LOG = __import__('logging').getLogger() MINUTE = 60 CACHE = SimpleCache() if os.getenv('FLASK_TESTING'): CACHE = NullCache() def cached(timeout=0, ignore=None): """Caches Result of function call. The cache key is generated from the function name any arguments. Args: timeout (int): Time in seconds to store the response in cache ignore (list(int), optional): List of values that would not be cached. Returns: function: Wrapped function """
class BaseModel(object):
    """Base class for models backed by a database table.

    Subclasses set ``_table`` and ``_pk``; a shared class-level cache
    stores table descriptions.
    """

    # Table name and primary-key column, set by subclasses.
    _table = None
    _pk = None
    # Class-level cache shared via set_cache(); no-op by default.
    _cache = NullCache()
    _data = {}

    @classmethod
    def set_cache(cls, cache):
        """Set a class level cache instance"""
        if isinstance(cache, BaseCache):
            cls._cache = cache

    def clean_data(self, data):
        """Clean the given dict from non-savable data"""
        cache_key = 'DESCRIBE_%s' % self._table
        desc = self.__class__._cache.get(cache_key)
        if type(desc) is not dict:
            # Description not cached yet: fetch from the DB and memoize.
            desc = DBHelper().describe_table(self._table)
            self.__class__._cache.set(cache_key, desc)
        clean_data = {}
        for key in data.keys():
            if key == self._pk:
                continue
            elif key in desc:
                #TODO: type cast?
                clean_data[key] = data[key]
        return clean_data

    def get_table(self):
        """Return the table name for this model."""
        return self._table or self.__class__._table

    def __init__(self, data=None):
        """Initialize class instance.

        ``data`` defaults to None rather than a mutable ``{}`` default so
        a shared dict can never leak between instances; a copy is taken
        either way, so callers are unaffected.
        """
        self._data = {} if data is None else data.copy()
        self._init()

    def _init(self):
        """Internal constuctor"""
        pass

    def id(self):
        """Return primary key value of this model instance"""
        return self.get_data(self.__class__._pk)

    def get_data(self, key=None):
        """Retrieve attribute values from this model"""
        if key is None:
            return self._data
        elif key in self._data:
            return self._data[key]
        else:
            return None

    def set_data(self, key, value=None):
        """Update attribute values of this model"""
        if type(key) is dict:
            self._data = key
        else:
            self._data[key] = value
        return self

    def has_data(self, key):
        """Check if this model has data for a given key"""
        return key in self._data

    def unset_data(self, key=None):
        """Remove attribute values from this model"""
        # Bug fix: the original tested ``type(key) is None``, which is
        # always False (a type object is never None), so calling
        # unset_data() with no key never cleared the data.
        if key is None:
            self._data = {}
        elif key in self._data:
            del self._data[key]
        return self

    def add_data(self, data):
        """Add attribute values to this model"""
        self._data.update(data)
        return self

    def __getattr__(self, name):
        """Magic proxy for 'get_data' and 'set_data'"""
        def getfnc(value=type(None)):
            # ``type(None)`` serves as the no-argument sentinel so that
            # ``None`` itself can still be stored as a value.
            if value is type(None):
                return self.get_data(name)
            else:
                return self.set_data(name, value)
        return getfnc

    def _db_select(self, key=None):
        """Return a DBSelect querying for this model"""
        if not key:
            key = self.__class__._pk
        where = '%s = ?' % (DBHelper.quote_identifier(key), )
        return DBSelect(self.get_table()).where(where, self.get_data(key)).limit(1)

    def load(self, value, key=None):
        """Load data matching 'value' into this model"""
        if not key:
            key = self.__class__._pk
        self.set_data(key, value)
        data = self._db_select(key).query().fetchone()
        if type(data) is dict:
            self.add_data(data)
        else:
            # No matching row: drop the lookup value again.
            self.unset_data(key)
        return self

    def save(self):
        """Store this models data"""
        data = self.clean_data(self._data)
        if self.id():
            self._db_select().query_update(data)
        else:
            ids = DBHelper().insert(self.get_table(), data)
            self.set_data(self.__class__._pk, ids[0])
        return self

    def delete(self):
        """Remove this model from the database"""
        if self.id():
            self._db_select().query_delete()
            self.unset_data(self.__class__._pk)
        return self

    @classmethod
    def _install(cls):
        """Return a list of install routines"""
        return ()

    @classmethod
    def install(cls):
        """Install model"""
        InstallHelper.install(cls._table, cls._install())

    @classmethod
    def all(cls):
        """Get a model collection"""
        return BaseModelSet(cls)
def github():
    """Provides the 'github' module with caching disabled"""
    original_cache = github_module.cache
    github_module.cache = NullCache()
    try:
        yield github_module
    finally:
        # Restore the real cache even when the consuming test raises, so a
        # failing test cannot leak the NullCache into later tests.
        github_module.cache = original_cache
from werkzeug.contrib.cache import MemcachedCache, NullCache
import os

# Disable caching entirely when running with DEBUG=true; use memcached
# (default local server) otherwise.
cache = NullCache() if os.getenv("DEBUG") == 'true' else MemcachedCache()
def null(config, *args, **kwargs):
    # Cache factory returning a no-op backend; the config and any extra
    # arguments are accepted for a uniform factory signature but ignored.
    return NullCache()
""" global _memoized def deletes(item): if item[0] == 'memoize' and item[1] in keys: cache.delete(item) return True return False _memoized[:] = [x for x in _memoized if not deletes(x)] #: the cache system factories. CACHE_SYSTEMS = { 'null': lambda: NullCache(), 'simple': lambda: SimpleCache(ctx.cfg['caching.timeout']), 'memcached': lambda: MemcachedCache([ x.strip() for x in ctx.cfg['caching.memcached_servers'].split(',') ], ctx.cfg['caching.timeout']), 'filesystem': lambda: FileSystemCache(join(ctx.cfg['caching.filesystem_cache_path']), threshold=500, default_timeout=ctx.cfg['caching.timeout']), 'database': lambda: DatabaseCache(ctx.cfg['caching.timeout']), 'gaememcached': lambda: GAEMemcachedCache(ctx.cfg['caching.timeout']) }
def __init__(self, conf):
    # Lock guarding concurrent access to the cache.
    self.lock = threading.Lock()
    # Start with a no-op backend wrapped in the project's Cache facade.
    # NOTE(review): ``conf`` is accepted but unused here — confirm callers.
    self.cache = Cache(NullCache())
def _configure_null_cache(self):
    """Install a no-op cache backend, effectively disabling caching."""
    from werkzeug.contrib.cache import NullCache
    self.cache = NullCache(default_timeout=self.cache_timeout)
    # Fixed log message grammar ("cache disable" -> "cache disabled").
    logger.warning("cache disabled")
def cache(self):
    """Return the cache backend: a NullCache in debug mode, SimpleCache otherwise."""
    if not self.config['DEBUG']:
        return SimpleCache()
    # Imported lazily, only when debug mode actually needs it.
    from werkzeug.contrib.cache import NullCache
    return NullCache()
if use_cache and response.status_code == 200: response.freeze() request.app.cache.set(key, response, timeout) response.make_conditional(request) return response oncall.__name__ = f.__name__ oncall.__module__ = f.__module__ oncall.__doc__ = f.__doc__ return oncall return decorator #: the cache system factories. systems = { 'null': lambda app: NullCache(), 'simple': lambda app: SimpleCache(app.cfg['cache_timeout']), 'memcached': lambda app: MemcachedCache( [x.strip() for x in app.cfg['memcached_servers']], app.cfg['cache_timeout']), 'filesystem': lambda app: FileSystemCache( os.path.join(app.instance_folder, app.cfg['filesystem_cache_path']), 500, app.cfg['cache_timeout']) }
db = SQLAlchemy(app)

# Select the cache backend from configuration; backends are imported
# lazily so only the configured one is loaded.
if app.config["CACHE_TYPE"].lower() == "memcached":
    from werkzeug.contrib.cache import MemcachedCache
    flask_cache = MemcachedCache([
        '{0}:{1}'.format(app.config["MEMCACHED_HOST"],
                         app.config["MEMCACHED_PORT"])
    ], key_prefix=app.config["MEMCACHED_KEY_PREFIX"])
elif app.config["CACHE_TYPE"].lower() == "simple":
    from werkzeug.contrib.cache import SimpleCache
    flask_cache = SimpleCache()
else:
    from werkzeug.contrib.cache import NullCache
    flask_cache = NullCache()

# Trust proxy headers when deployed behind a reverse proxy.
if app.config["PROXY_SETUP"]:
    app.wsgi_app = ProxyFix(app.wsgi_app)

# OpenID login support (including the login blueprint) is optional.
if app.config["OPENID_ENABLED"]:
    from flask_openid import OpenID
    from openid_teams import teams
    oid = OpenID(app, safe_roots=[], extension_responses=[teams.TeamsResponse])
    from login import login
    app.register_blueprint(login)

from dumpdirs import dumpdirs
app.register_blueprint(dumpdirs, url_prefix="/dumpdirs")
from reports import reports
app.register_blueprint(reports, url_prefix="/reports")
def _null(self, **kwargs):
    """Returns a :class:`NullCache` instance"""
    # kwargs are accepted for a uniform backend-factory signature but
    # ignored: NullCache needs no configuration.
    return NullCache()
# For test purposes from capitains_nautilus.flask_ext import WerkzeugCacheWrapper, FlaskNautilus from flask import Flask from werkzeug.contrib.cache import NullCache nautilus_cache = WerkzeugCacheWrapper(NullCache()) app = Flask("Nautilus") nautilus = FlaskNautilus(app=app, resources=["./tests/test_data/latinLit"], parser_cache=nautilus_cache) app.debug = True app.run("0.0.0.0", 5000)
def __init__(self, cache=None, prefix='mysession:'):
    """Session store backed by a werkzeug cache.

    :param cache: cache backend; defaults to a no-op :class:`NullCache`
    :param prefix: key prefix applied to every session entry
    """
    self.cache = NullCache() if cache is None else cache
    self.prefix = prefix
def null(app, config, args, kwargs):
    # Backend factory returning a no-op cache; all arguments are accepted
    # for a uniform factory signature but ignored.
    return NullCache()