def init_app(self, app):
    """Initialise the cache extension for *app*.

    Selects a backend from ``app.config['APPENV']``:

    * ``DEBUG`` truthy -> ``NullCache`` (no caching while developing)
    * ``'bae'``        -> Baidu App Engine memcache
    * ``'sae'``        -> Sina App Engine memcache (pylibmc)
    * ``'production'`` -> in-process ``SimpleCache``
    * anything else    -> ``NullCache``

    The chosen cache is stored in ``app.extensions['cache']``.
    """
    self.app = app
    config = app.config.copy()
    appenv = config.get('APPENV')
    # Sentinel key/value written once to probe that the remote
    # memcache backend is actually reachable.
    (_k, _v) = ('key31415926', '3.1415926')
    if config['DEBUG']:
        _cache = NullCache()
    else:
        if appenv == 'bae':
            from bae_memcache.cache import BaeMemcache
            CACHE_USER = config.get('CACHE_USER')
            CACHE_PWD = config.get('CACHE_PWD')
            CACHE_ID = config.get('CACHE_ID')
            CACHE_ADDR = config.get('CACHE_ADDR')
            _cache = BaeMemcache(CACHE_ID, CACHE_ADDR, CACHE_USER, CACHE_PWD)
            try:
                _cache.set(_k, _v)
            # Was a bare ``except:`` — narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            except Exception:
                _cache = NullCache()
        elif appenv == 'sae':
            import pylibmc as memcache
            _cache = memcache.Client()
            try:
                _cache.set(_k, _v)
            except Exception:
                _cache = NullCache()
        elif appenv == 'production':
            _cache = SimpleCache()
        else:
            _cache = NullCache()
    app.extensions['cache'] = _cache
def apply_cache_strategy(self, **kwargs):
    """Install a ``NullCache`` as this object's cache strategy.

    Accepts an optional ``default_timeout`` keyword (seconds); any
    missing or falsy value is normalised to ``0``.
    """
    timeout = kwargs.get('default_timeout') or 0
    self.cache_strategy = NullCache(default_timeout=timeout)
class BaseConfig:
    """Base class all configuration inherits from."""
    # Development default; production subclasses should override.
    DEBUG = True
    # No-op cache backend by default; subclasses may swap in a real one.
    CACHE = NullCache()
    # Cache timeout in seconds used with the configured cache.
    CACHE_TIMEOUT = 0
    # Must be provided by deployment-specific configuration.
    SECRET_KEY = None
def __init__(self, resource, name=None, logger=None, cache=None, dispatcher=None):
    """Initiate the XMLResolver.

    :param resource: resources the resolver will serve
    :param name: repository name (defaults to "repository")
    :param logger: logger instance (defaults to ``logging.getLogger(name)``)
    :param cache: cache backend (defaults to ``NullCache``)
    :param dispatcher: collection dispatcher; when omitted a default
        dispatcher wrapping a single "default" inventory is built
    """
    if dispatcher is not None:
        self.dispatcher = dispatcher
    else:
        # Build a default dispatcher around a one-inventory collection.
        collection = TextInventoryCollection(identifier="defaultTic")
        inventory = TextInventory("default")
        inventory.parent = collection
        inventory.set_label("Default collection", "eng")
        self.dispatcher = CollectionDispatcher(collection)
    self.__inventory__ = None
    self.__texts__ = []
    # NB: the logger fallback uses the *original* name argument (possibly
    # None), before the repository-name default below is applied.
    self.logger = logger or logging.getLogger(name)
    self.name = name or "repository"
    self.__cache__ = NullCache() if cache is None else cache
    self.__resources__ = resource
    self.inventory_cache_key = _cache_key("Nautilus", self.name, "Inventory", "Resources")
    self.texts_parsed_cache_key = _cache_key("Nautilus", self.name, "Inventory", "TextsParsed")
def __init__(self, api_uri: str, matchbox_path: str, ignition_dict: dict, extra_selector_dict=None):
    """
    :param api_uri: http://1.1.1.1:5000
    :param matchbox_path: /var/lib/matchbox
    :param ignition_dict: ignition.yaml
    :param extra_selector_dict: optional extra matchbox selectors
    """
    self.api_uri = api_uri
    # Export for child processes / templates that read API_URI.
    os.environ["API_URI"] = self.api_uri
    self.matchbox_path = matchbox_path
    self.ignition_dict = ignition_dict
    self._reporting_ignitions()
    self.extra_selector = extra_selector_dict or {}
    # In-memory cache for HTTP queries; disabled when the TTL is not positive.
    ttl = EC.sync_cache_ttl
    self._cache_query = SimpleCache(default_timeout=ttl) if ttl > 0 else NullCache()
def connect(self):
    """Return a Memcached-backed cache, falling back to ``NullCache``.

    Connection settings come from ``app.config['MEMCACHED']``. Any
    failure (missing/invalid config, backend error) yields a
    ``NullCache`` so callers always receive a usable cache object.
    """
    try:
        conf = app.config.get('MEMCACHED')
        cache = MemcachedCache(**conf)
    # Was a bare ``except:``; narrowed so SystemExit/KeyboardInterrupt
    # are not silently converted into a NullCache.
    except Exception:
        cache = NullCache()
    return cache
def test_set_cache(self):
    """Test BaseModel.set_cache"""
    # The suite starts with a FileSystemCache installed on BaseModel.
    self.assertIsInstance(BaseModel._cache, FileSystemCache)
    old_cache = BaseModel._cache
    BaseModel.set_cache(NullCache())
    self.assertIsInstance(BaseModel._cache, NullCache)
    # Setting the cache via a subclass shadows it on that subclass only:
    # BaseModel keeps the NullCache installed above, while SampleModel
    # gets the original FileSystemCache back.
    SampleModel.set_cache(old_cache)
    self.assertIsInstance(BaseModel._cache, NullCache)
    self.assertIsInstance(SampleModel._cache, FileSystemCache)
def github():
    """Yield the 'github' module with its cache and issue lookup stubbed out.

    The originals are restored after the fixture is consumed.
    """
    saved = (github_module.cache, github_module.get_issues)
    github_module.cache = NullCache()
    github_module.get_issues = lambda self, *args, **kwargs: []
    yield github_module
    github_module.cache, github_module.get_issues = saved
def _set_cache(self):
    """Instantiate the configured cache backend (``NullCache`` under TESTING)."""
    config = self.app.config
    if config['TESTING']:
        self.cache = NullCache()
        return
    # Short backend names resolve inside flaskext.cache.backends.
    backend_path = config['CACHE_TYPE']
    if '.' not in backend_path:
        backend_path = 'flaskext.cache.backends.' + backend_path
    factory = import_string(backend_path)
    args = config['CACHE_ARGS'][:]
    options = {'default_timeout': config['CACHE_DEFAULT_TIMEOUT']}
    if config['CACHE_OPTIONS']:
        options.update(config['CACHE_OPTIONS'])
    self.cache = factory(self.app, args, options)
    if not isinstance(self.cache, BaseCache):
        raise TypeError("Cache object must subclass "
                        "werkzeug.contrib.cache.BaseCache")
def decorated_function(*args, **kwargs):
    # Derive the cache key: either the explicit key from the enclosing
    # decorator, or one built from the request path + query string.
    if cache_key is None:
        ck = 'view:%s?%s' % (request.path, request.query_string)
    else:
        ck = cache_key
    # Companion key holding the absolute expiry timestamp for ck.
    ek = '%s.expires' % ck
    response = None
    expires = None
    # pylibmc will throw an error when trying to communicate with memcached, not upon a bad connection
    try:
        cached = app.cache.get_many(ck, ek)
        if cached[0] is not None:
            # Cache hit: serve the stored response.
            response = cached[0]
            app.logger.debug(
                'Cache hit for %s, returning cached content, expires=%d',
                ck, cached[1])
            if cached[1] is not None and set_expires:
                expires = cached[1]
        else:
            # Cache miss: build the response and store it together with
            # its expiry timestamp.
            response = f(*args, **kwargs)
            expires = int(time() + timeout)
            app.cache.set_many({
                ck: response,
                ek: expires
            }, timeout=timeout)
            app.logger.debug(
                'Cache miss for %s, refreshed content and saved in cache, expires=%d',
                ck, expires)
        if set_expires and expires is not None:
            response.headers['Expires'] = email.utils.formatdate(
                expires)
    except Exception as e:
        # Cache backend failure: fall back to computing the response,
        # then either retry restoring the cache or give up and install
        # a NullCache after MAX_CACHE_RETRIES attempts.
        app.logger.error('Cache error, returning miss: %s', e)
        if response is None:
            response = f(*args, **kwargs)
        if (type(app.cache) is not NullCache):
            if (app.cache_retry < MAX_CACHE_RETRIES):
                app.cache_retry += 1
                app.logger.error('Attempting to restore cache')
                _setup_cache(app)
            else:
                app.logger.error(
                    'Exhausted retry attempts. Converting cache to NullCache. Fix ASAP!'
                )
                app.cache = NullCache()
    return response
def _setup_cache(app):
    """Attach a cache object to *app*.

    Under TESTING, or when the 'DISABLE_CACHE' environment variable is
    set, a NullCache dummy is installed. Otherwise a MemcachedCache is
    built from the MEMCACHEDCLOUD_* environment variables using binary
    communications (required for SASL). Note that pylibmc gives no
    warning on connection and only throws when actually talking to the
    server, so problems surface later; the caller's retry logic will
    eventually replace a broken cache with a NullCache.
    """
    if not hasattr(app, 'cache_retry'):
        # First call: start the retry counter used by the caching decorator.
        app.cache_retry = 0
    cache_disabled = os.environ.get('DISABLE_CACHE', None) is not None
    if app.config['TESTING'] or cache_disabled:
        app.cache = NullCache()
        app.logger.debug('Cache initialized as NullCache')
        return
    servers = os.environ.get('MEMCACHEDCLOUD_SERVERS', '127.0.0.1:11211')
    try:
        client = Client(
            servers=servers.split(','),
            username=os.environ.get('MEMCACHEDCLOUD_USERNAME'),
            password=os.environ.get('MEMCACHEDCLOUD_PASSWORD'),
            binary=True)
        app.cache = MemcachedCache(client)
        app.logger.debug(
            'Cache initialized as MemcachedCache with servers: %s',
            servers)
    except Exception as e:
        # Very unlikely: pylibmc mostly throws when communicating, not on connect.
        app.logger.error('Error initializing MemcachedCache: %s', e)
        app.logger.error('Initializing cache as NullCache. Fix ASAP!')
        app.cache = NullCache()
def _commandline(repositories,
                 port=8000, host="127.0.0.1", debug=False,
                 cache=None, cache_path="./cache", redis=None):
    """Run a CTS API from command line.

    .. warning:: This function should not be used in the production context

    :param repositories: resources handed to the resolver
    :param port: port to listen on
    :param host: address to bind to
    :param debug: run the Flask dev server instead of Tornado
    :param cache: cache flavour ("redis", "filesystem" or anything else for null)
    :param cache_path: directory used by the filesystem cache
    :param redis: redis connection information
    :return:
    """
    if cache == "redis":
        nautilus_cache, cache_type = RedisCache(redis), "redis"
    elif cache == "filesystem":
        nautilus_cache, cache_type = FileSystemCache(cache_path), "simple"
    else:
        nautilus_cache, cache_type = NullCache(), "simple"
    app = Flask("Nautilus")
    if debug:
        app.logger.setLevel(logging.INFO)
    cts_resolver = NautilusCTSResolver(resource=repositories)
    nautilus = FlaskNautilus(
        app=app,
        resolver=cts_resolver
        #parser_cache=WerkzeugCacheWrapper(nautilus_cache),
        #logger=None
    )
    nautilus.resolver.parse()
    if debug:
        # Development: use Flask's built-in server.
        app.run(debug=debug, port=port, host=host)
        return
    # Otherwise serve through Tornado's WSGI container.
    app.debug = debug
    server = HTTPServer(WSGIContainer(app))
    server.bind(port=port, address=host)
    server.start(0)
    IOLoop.current().start()
def _set_cache(self):
    """Create the cache backend from app config; ``NullCache`` when TESTING."""
    cfg = self.app.config
    if cfg["TESTING"]:
        self.cache = NullCache()
        return
    # Bare backend names are resolved inside flaskext.cache.backends.
    dotted = cfg["CACHE_TYPE"]
    if "." not in dotted:
        dotted = "flaskext.cache.backends." + dotted
    backend_cls = import_string(dotted)
    positional = cfg["CACHE_ARGS"][:]
    options = dict(default_timeout=cfg["CACHE_DEFAULT_TIMEOUT"])
    extra = cfg["CACHE_OPTIONS"]
    if extra:
        options.update(extra)
    self.cache = backend_cls(self.app, positional, options)
    if not isinstance(self.cache, BaseCache):
        raise TypeError("Cache object must subclass "
                        "werkzeug.contrib.cache.BaseCache")
def __init__(self, app, config_prefix='AUTHLIB', **kwargs):
    """Build the configured werkzeug cache backend for *app*.

    The backend is chosen by the 'type' configuration value ('null',
    'simple', 'memcache', 'redis' or 'filesystem'); extra keyword
    arguments are forwarded to the backend constructor. The resulting
    cache is stored on ``self.cache`` and in ``app.extensions``.

    :raises RuntimeError: for an unrecognised cache type
    """
    deprecate(DEPRECATE_MESSAGE, 0.7)
    self.config_prefix = config_prefix
    self.config = app.config
    cache_type = self._config('type')
    kwargs['default_timeout'] = self._config('DEFAULT_TIMEOUT', 100)
    if cache_type == 'null':
        self.cache = NullCache()
    elif cache_type == 'simple':
        kwargs['threshold'] = self._config('threshold', 500)
        self.cache = SimpleCache(**kwargs)
    elif cache_type == 'memcache':
        kwargs['servers'] = self._config('MEMCACHED_SERVERS')
        kwargs['key_prefix'] = self._config('KEY_PREFIX', None)
        self.cache = MemcachedCache(**kwargs)
    elif cache_type == 'redis':
        kwargs['host'] = self._config('REDIS_HOST', 'localhost')
        kwargs['port'] = self._config('REDIS_PORT', 6379)
        kwargs['password'] = self._config('REDIS_PASSWORD', None)
        kwargs['db'] = self._config('REDIS_DB', 0)
        kwargs['key_prefix'] = self._config('KEY_PREFIX', None)
        self.cache = RedisCache(**kwargs)
    elif cache_type == 'filesystem':
        kwargs['threshold'] = self._config('threshold', 500)
        self.cache = FileSystemCache(self._config('DIR'), **kwargs)
    else:
        raise RuntimeError('`%s` is not a valid cache type!' % cache_type)
    app.extensions[config_prefix.lower() + '_cache'] = self.cache
class Cache(object):
    """
    This class is used to control the cache objects. If TESTING is True it
    will use NullCache.
    """

    def __init__(self, app=None):
        # Concrete werkzeug cache backend; created by init_app().
        self.cache = None
        if app is not None:
            self.init_app(app)
        else:
            self.app = None
        # Bookkeeping of (function_name, cache_key) pairs for memoized calls.
        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"
        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')
        self.app = app
        self._set_cache()

    def _set_cache(self):
        # Pick the backend: a no-op cache under TESTING, otherwise import
        # the class named by CACHE_TYPE (bare names resolve inside
        # flaskext.cache.backends).
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me
            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(default_timeout= \
                self.app.config['CACHE_DEFAULT_TIMEOUT'])
            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])
            self.cache = cache_obj(self.app, cache_args, cache_options)
            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with
        `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

            my_list = get_list()

        .. note::

            You MUST have a request context to actually called any
            functions that are cached.

        .. versionadded:: 0.4
            The returned decorated function now has three function
            attributes assigned to it. These attributes are
            readable/writable.

                **uncached**
                    The original undecorated function

                **cache_timeout**
                    The cache timeout value for this function. For a
                    custom value to take affect, this must be set before
                    the function is called.

                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for
                        that amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key
                           to . use for the cache key.

                           .. versionadded:: 0.3.4
                               Can optionally be a callable which takes no
                               arguments but returns a string that will be
                               used as the cache_key.
        :param unless: Default None. Cache will *always* execute the
                       caching facilities unless this callable is true.
                       This will bypass the caching entirely.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)
                cache_key = decorated_function.make_cache_key(*args, **kwargs)
                rv = self.cache.get(cache_key)
                if rv is None:
                    # Cache miss: compute and store the value.
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            def make_cache_key(*args, **kwargs):
                # Key is the prefix with request.path substituted, the
                # result of a callable prefix, or the prefix verbatim.
                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                elif callable(key_prefix):
                    cache_key = key_prefix()
                else:
                    cache_key = key_prefix
                cache_key = cache_key.encode('utf-8')
                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key
            return decorated_function
        return decorator

    def get_memoize_names(self):
        """
        Returns all function names used for memoized functions.

        This *will* include multiple function names when the memoized
        function has been called with differing arguments.

        :return: set of function names
        """
        return set([item[0] for item in self._memoized])

    def get_memoize_keys(self):
        """
        Returns all cache_keys used for memoized functions.

        :return: list generator of cache_keys
        """
        return [item[1] for item in self._memoized]

    def memoize(self, timeout=None):
        """
        Use this to cache the result of a function, taking its arguments
        into account in the cache key. Information on `Memoization
        <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. versionadded:: 0.4
            The returned decorated function now has three function
            attributes assigned to it. These attributes are
            readable/writable.

                **uncached**
                    The original undecorated function

                **cache_timeout**
                    The cache timeout value for this function. For a
                    custom value to take affect, this must be set before
                    the function is called.

                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for
                        that amount of time. Unit of time is in seconds.
        """
        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                cache_key = decorated_function.make_cache_key(*args, **kwargs)
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                    # Remember the key so delete_memoized() can find it.
                    self._memoized.append((f.__name__, cache_key))
                return rv

            def make_cache_key(*args, **kwargs):
                # md5(name + args + kwargs), base64-trimmed to 22 chars.
                # NOTE(review): Python 2 idiom — md5.update() over a str
                # and str.encode('base64') do not work on Python 3.
                cache_key = hashlib.md5()
                try:
                    updated = "{0}{1}{2}".format(f.__name__, args, kwargs)
                except AttributeError:
                    updated = "%s%s%s" % (f.__name__, args, kwargs)
                cache_key.update(updated)
                cache_key = cache_key.digest().encode('base64')[:22]
                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key
            return decorated_function
        return memoize

    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified functions caches, based by given parameters.
        If parameters are given, only the functions that were memoized
        with them will be erased. Otherwise all the versions of the caches
        will be deleted.

        Example::

            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)

            @cache.memoize()
            def param_func(a, b):
                return a+b+random.randrange(1, 50)

        .. code-block:: pycon

            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16
            >>> param_func(1, 2)
            32
            >>> param_func(1, 2)
            32
            >>> param_func(2, 2)
            47
            >>> cache.delete_memoized('param_func', 1, 2)
            >>> param_func(1, 2)
            13
            >>> param_func(2, 2)
            47

        :param fname: Name of the memoized function.
        :param \*args: A list of positional parameters used with memoized
                       function.
        :param \**kwargs: A dict of named parameters used with memoized
                          function.
        """
        def deletes(item):
            # If no parameters given, delete all memoized versions of the function
            if not args and not kwargs:
                if item[0] == fname:
                    self.cache.delete(item[1])
                    return True
                return False
            # Construct the cache key as in memoized function
            cache_key = hashlib.md5()
            try:
                updated = "{0}{1}{2}".format(fname, args, kwargs)
            except AttributeError:
                updated = "%s%s%s" % (fname, args, kwargs)
            cache_key.update(updated)
            cache_key = cache_key.digest().encode('base64')[:22]
            if item[1] == cache_key:
                self.cache.delete(item[1])
                return True
            return False

        # Keep only entries that were not deleted.
        self._memoized[:] = [x for x in self._memoized if not deletes(x)]
class Cache(object):
    """
    This class is used to control the cache objects. If TESTING is True it
    will use NullCache.
    """

    def __init__(self, app=None, with_jinja2_ext=True):
        # Whether to register the Jinja2 cache extension in init_app().
        self.with_jinja2_ext = with_jinja2_ext
        # Concrete werkzeug cache backend; created by init_app().
        self.cache = None
        if app is not None:
            self.init_app(app)
        else:
            self.app = None
        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"
        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')
        if self.with_jinja2_ext:
            # Expose this cache to templates via the {% cache %} extension.
            setattr(app.jinja_env, JINJA_CACHE_ATTR_NAME, self)
            app.jinja_env.add_extension(CacheExtension)
        self.app = app
        self._set_cache()

    def _set_cache(self):
        # Pick the backend: a no-op cache under TESTING, otherwise import
        # the class named by CACHE_TYPE (bare names resolve inside
        # flaskext.cache.backends).
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me
            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(default_timeout= \
                self.app.config['CACHE_DEFAULT_TIMEOUT'])
            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])
            self.cache = cache_obj(self.app, cache_args, cache_options)
            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def delete_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete_many(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with
        `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

            my_list = get_list()

        .. note::

            You MUST have a request context to actually called any
            functions that are cached.

        .. versionadded:: 0.4
            The returned decorated function now has three function
            attributes assigned to it. These attributes are
            readable/writable.

                **uncached**
                    The original undecorated function

                **cache_timeout**
                    The cache timeout value for this function. For a
                    custom value to take affect, this must be set before
                    the function is called.

                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for
                        that amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key
                           to . use for the cache key.

                           .. versionadded:: 0.3.4
                               Can optionally be a callable which takes no
                               arguments but returns a string that will be
                               used as the cache_key.
        :param unless: Default None. Cache will *always* execute the
                       caching facilities unless this callable is true.
                       This will bypass the caching entirely.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)
                cache_key = decorated_function.make_cache_key(*args, **kwargs)
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            def make_cache_key(*args, **kwargs):
                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                elif callable(key_prefix):
                    cache_key = key_prefix()
                else:
                    cache_key = key_prefix
                cache_key = cache_key.encode('utf-8')
                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key
            return decorated_function
        return decorator

    def _memvname(self, funcname):
        # Key under which the per-function version hash is stored.
        return funcname + '_memver'

    def memoize_make_version_hash(self):
        # Short random token; swapping it invalidates all memoized keys
        # for a function without touching the stored values.
        # NOTE(review): Python 2 idiom — bytes.encode('base64') does not
        # work on Python 3.
        return uuid.uuid4().bytes.encode('base64')[:6]

    def memoize_make_cache_key(self, fname, make_name=None):
        """
        Function used to create the cache_key for memoized functions.
        """
        def make_cache_key(f, *args, **kwargs):
            version_key = self._memvname(fname)
            version_data = self.cache.get(version_key)
            if version_data is None:
                version_data = self.memoize_make_version_hash()
                self.cache.set(version_key, version_data)
            cache_key = hashlib.md5()
            #: this should have to be after version_data, so that it
            #: does not break the delete_memoized functionality.
            if callable(make_name):
                altfname = make_name(fname)
            else:
                altfname = fname
            if callable(f):
                # Normalise kwargs into positional order so equivalent
                # calls hash identically.
                args, kwargs = self.memoize_kwargs_to_args(f, *args, **kwargs)
            try:
                updated = "{0}{1}{2}".format(altfname, args, kwargs)
            except AttributeError:
                updated = "%s%s%s" % (altfname, args, kwargs)
            cache_key.update(updated)
            cache_key = cache_key.digest().encode('base64')[:16]
            cache_key += version_data
            return cache_key
        return make_cache_key

    def memoize_kwargs_to_args(self, f, *args, **kwargs):
        #: Inspect the arguments to the function
        #: This allows the memoization to be the same
        #: whether the function was called with
        #: 1, b=2 is equivilant to a=1, b=2, etc.
        new_args = []
        arg_num = 0
        m_args = inspect.getargspec(f)[0]
        for i in range(len(m_args)):
            if m_args[i] in kwargs:
                new_args.append(kwargs[m_args[i]])
            elif arg_num < len(args):
                new_args.append(args[arg_num])
                arg_num += 1
        return tuple(new_args), {}

    def memoize(self, timeout=None, make_name=None, unless=None):
        """
        Use this to cache the result of a function, taking its arguments
        into account in the cache key. Information on `Memoization
        <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. versionadded:: 0.4
            The returned decorated function now has three function
            attributes assigned to it.

                **uncached**
                    The original undecorated function. readable only

                **cache_timeout**
                    The cache timeout value for this function. For a
                    custom value to take affect, this must be set before
                    the function is called. readable and writable

                **make_cache_key**
                    A function used in generating the cache_key used.
                    readable and writable

        :param timeout: Default None. If set to an integer, will cache for
                        that amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that
                          accepts a single argument, the function name, and
                          returns a new string to be used as the function
                          name. If not set then the function name is used.
        :param unless: Default None. Cache will *always* execute the
                       caching facilities unless this callable is true.
                       This will bypass the caching entirely.

        .. versionadded:: 0.5
            params ``make_name``, ``unless``
        """
        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: bypass cache
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)
                cache_key = decorated_function.make_cache_key(
                    f, *args, **kwargs)
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self.memoize_make_cache_key(
                f.__name__, make_name)
            return decorated_function
        return memoize

    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified functions caches, based by given parameters.
        If parameters are given, only the functions that were memoized
        with them will be erased. Otherwise all the versions of the caches
        will be deleted.

        Example::

            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)

            @cache.memoize()
            def param_func(a, b):
                return a+b+random.randrange(1, 50)

        .. code-block:: pycon

            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16
            >>> param_func(1, 2)
            32
            >>> param_func(1, 2)
            32
            >>> param_func(2, 2)
            47
            >>> cache.delete_memoized('param_func', 1, 2)
            >>> param_func(1, 2)
            13
            >>> param_func(2, 2)
            47

        :param fname: Name of the memoized function, or a reference to
                      the function.
        :param \*args: A list of positional parameters used with memoized
                       function.
        :param \**kwargs: A dict of named parameters used with memoized
                          function.

        .. note::

            Flask-Cache uses inspect to order kwargs into positional args
            when the function is memoized. If you pass a function
            reference into ``fname`` instead of the function name,
            Flask-Cache will be able to place the args/kwargs in the
            proper order, and delete the positional cache.

            However, if ``delete_memozied`` is just called with the name
            of the function, be sure to pass in potential arguments in the
            same order as defined in your function as args only, otherwise
            Flask-Cache will not be able to compute the same cache key.

        .. note::

            Flask-Cache maintains an internal random version hash for the
            function. Using delete_memoized will only swap out the version
            hash, causing the memoize function to recompute results and
            put them into another key.

            This leaves any computed caches for this memoized function
            within the caching backend.

            It is recommended to use a very high timeout with memoize if
            using this function, so that when the version has is swapped,
            the old cached results would eventually be reclaimed by the
            caching backend.
        """
        if callable(fname):
            # A decorated function was passed in; recover the original.
            assert hasattr(fname, 'uncached')
            f = fname.uncached
            _fname = f.__name__
        else:
            f = None
            _fname = fname
        if not args and not kwargs:
            # No arguments: invalidate every memoized result for the
            # function by rotating its version hash.
            version_key = self._memvname(fname)
            version_data = self.memoize_make_version_hash()
            self.cache.set(version_key, version_data)
        else:
            # Delete only the key matching these exact arguments.
            cache_key = self.memoize_make_cache_key(_fname)(f, *args, **kwargs)
            self.cache.delete(cache_key)
# -*- coding: utf-8 -*- """ Utilities for interacting with the filesystem. """ import os from functools import wraps from werkzeug.contrib.cache import SimpleCache, NullCache LOG = __import__('logging').getLogger() MINUTE = 60 CACHE = SimpleCache() if os.getenv('FLASK_TESTING'): CACHE = NullCache() def cached(timeout=0, ignore=None): """Caches Result of function call. The cache key is generated from the function name any arguments. Args: timeout (int): Time in seconds to store the response in cache ignore (list(int), optional): List of values that would not be cached. Returns: function: Wrapped function """
class BaseModel(object):
    """Base class for models"""
    # Database table backing the model; set by subclasses.
    _table = None
    # Primary-key column name; set by subclasses.
    _pk = None
    # Class-level cache shared via set_cache(); NullCache by default.
    _cache = NullCache()
    _data = {}

    @classmethod
    def set_cache(cls, cache):
        """Set a class level cache instance"""
        if isinstance(cache, BaseCache):
            cls._cache = cache

    def clean_data(self, data):
        """Clean the given dict from non-savable data

        Keeps only keys that are real columns of the table (per a cached
        DESCRIBE) and drops the primary key.
        """
        cache_key = 'DESCRIBE_%s' % self._table
        desc = self.__class__._cache.get(cache_key)
        if type(desc) is not dict:
            desc = DBHelper().describe_table(self._table)
            self.__class__._cache.set(cache_key, desc)
        clean_data = {}
        for key in data.keys():
            if key == self._pk:
                continue
            elif key in desc:
                #TODO: type cast?
                clean_data[key] = data[key]
        return clean_data

    def get_table(self):
        """Return the table name of this model."""
        return self._table or self.__class__._table

    def __init__(self, data=None):
        """Initialize class instance

        :param data: optional dict of initial attribute values (copied).
        """
        # FIX: the original used a mutable default ``data={}``. The dict
        # was copied, so no state was actually shared, but ``None`` is the
        # safe idiom and keeps behaviour identical for all callers.
        self._data = {} if data is None else data.copy()
        self._init()

    def _init(self):
        """Internal constructor hook for subclasses."""
        pass

    def id(self):
        """Return primary key value of this model instance"""
        return self.get_data(self.__class__._pk)

    def get_data(self, key=None):
        """Retrieve attribute values from this model

        With no key, the whole data dict is returned; unknown keys
        yield None.
        """
        if key is None:
            return self._data
        elif key in self._data:
            return self._data[key]
        else:
            return None

    def set_data(self, key, value=None):
        """Update attribute values of this model

        A dict as ``key`` replaces the whole data dict; otherwise a single
        key/value pair is set. Returns self for chaining.
        """
        if type(key) is dict:
            self._data = key
        else:
            self._data[key] = value
        return self

    def has_data(self, key):
        """Check if this model has data for a given key"""
        return key in self._data

    def unset_data(self, key=None):
        """Remove attribute values from this model

        With no key, all data is cleared. Returns self for chaining.
        """
        # BUG FIX: the original tested ``type(key) is None``, which is
        # never true (a type object is never None), so calling
        # unset_data() with no argument silently did nothing. Test the
        # value itself, mirroring get_data(key=None).
        if key is None:
            self._data = {}
        elif key in self._data:
            del self._data[key]
        return self

    def add_data(self, data):
        """Add attribute values to this model"""
        self._data.update(data)
        return self

    def __getattr__(self, name):
        """Magic proxy for 'get_data' and 'set_data'

        ``model.attr()`` reads the value; ``model.attr(value)`` writes it.
        ``type(None)`` is used as the "no argument" sentinel so that None
        itself can be stored.
        """
        def getfnc(value=type(None)):
            if value is type(None):
                return self.get_data(name)
            else:
                return self.set_data(name, value)
        return getfnc

    def _db_select(self, key=None):
        """Return a DBSelect querying for this model"""
        if not key:
            key = self.__class__._pk
        where = '%s = ?' % (DBHelper.quote_identifier(key), )
        return DBSelect(self.get_table()).where(where, self.get_data(key)).limit(1)

    def load(self, value, key=None):
        """Load data matching 'value' into this model"""
        if not key:
            key = self.__class__._pk
        self.set_data(key, value)
        data = self._db_select(key).query().fetchone()
        if type(data) is dict:
            self.add_data(data)
        else:
            # No row found: drop the probe value again.
            self.unset_data(key)
        return self

    def save(self):
        """Store this models data"""
        data = self.clean_data(self._data)
        if self.id():
            self._db_select().query_update(data)
        else:
            ids = DBHelper().insert(self.get_table(), data)
            self.set_data(self.__class__._pk, ids[0])
        return self

    def delete(self):
        """Remove this model from the database"""
        if self.id():
            self._db_select().query_delete()
            self.unset_data(self.__class__._pk)
        return self

    @classmethod
    def _install(cls):
        """Return a list of install routines"""
        return ()

    @classmethod
    def install(cls):
        """Install model"""
        InstallHelper.install(cls._table, cls._install())

    @classmethod
    def all(cls):
        """Get a model collection"""
        return BaseModelSet(cls)
""" global _memoized def deletes(item): if item[0] == 'memoize' and item[1] in keys: cache.delete(item) return True return False _memoized[:] = [x for x in _memoized if not deletes(x)] #: the cache system factories. CACHE_SYSTEMS = { 'null': lambda: NullCache(), 'simple': lambda: SimpleCache(ctx.cfg['caching.timeout']), 'memcached': lambda: MemcachedCache([ x.strip() for x in ctx.cfg['caching.memcached_servers'].split(',') ], ctx.cfg['caching.timeout']), 'filesystem': lambda: FileSystemCache(join(ctx.cfg['caching.filesystem_cache_path']), threshold=500, default_timeout=ctx.cfg['caching.timeout']), 'database': lambda: DatabaseCache(ctx.cfg['caching.timeout']), 'gaememcached': lambda: GAEMemcachedCache(ctx.cfg['caching.timeout']) }
def github():
    """Provides the 'github' module with caching disabled.

    BUGFIX: restore the original cache in a finally block, so an
    exception raised by the consumer of the yielded module can no
    longer leave the NullCache installed permanently.
    """
    original_cache = github_module.cache
    github_module.cache = NullCache()
    try:
        yield github_module
    finally:
        github_module.cache = original_cache
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """

    def __init__(self, app=None, with_jinja2_ext=True):
        self.with_jinja2_ext = with_jinja2_ext
        self.cache = None
        if app is not None:
            self.init_app(app)
        else:
            self.app = None
        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"
        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')
        if self.with_jinja2_ext:
            setattr(app.jinja_env, JINJA_CACHE_ATTR_NAME, self)
            app.jinja_env.add_extension(CacheExtension)
        self.app = app
        self._set_cache()

    def _set_cache(self):
        # TESTING always short-circuits to a no-op cache backend.
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me
            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(
                default_timeout=self.app.config['CACHE_DEFAULT_TIMEOUT'])
            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])
            self.cache = cache_obj(self.app, cache_args, cache_options)
        if not isinstance(self.cache, BaseCache):
            raise TypeError("Cache object must subclass "
                            "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def delete_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete_many(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function.

        By default the cache key is `view/request.path`. You are able to
        use this decorator with any function by changing the `key_prefix`.
        If the token `%s` is located within the `key_prefix` then it will
        replace that with `request.path`.

        .. note::

            You MUST have a request context to actually call any functions
            that are cached.

        The decorated function has three readable/writable attributes:
        **uncached** (the original function), **cache_timeout** (set it
        before the call to change the timeout) and **make_cache_key**.

        :param timeout: Default None. If set to an integer, will cache
                        for that amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key
                           to use for the cache key. Can optionally be a
                           callable which takes no arguments but returns
                           a string used as the cache_key.
        :param unless: Default None. Cache will *always* execute the
                       caching facilities unless this callable is true.
                       This will bypass the caching entirely.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)
                cache_key = decorated_function.make_cache_key(*args, **kwargs)
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            def make_cache_key(*args, **kwargs):
                if callable(key_prefix):
                    cache_key = key_prefix()
                elif '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                else:
                    cache_key = key_prefix
                cache_key = cache_key.encode('utf-8')
                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key
            return decorated_function
        return decorator

    def _memvname(self, funcname):
        # Key under which the per-function memoize version hash is stored.
        return funcname + '_memver'

    def memoize_make_version_hash(self):
        # BUGFIX: bytes.encode('base64') is Python-2-only; uuid4().hex is a
        # portable short random token with the same purpose.
        return uuid.uuid4().hex[:6]

    def memoize_make_cache_key(self, fname, make_name=None):
        """
        Function used to create the cache_key for memoized functions.
        """
        def make_cache_key(f, *args, **kwargs):
            version_key = self._memvname(fname)
            version_data = self.cache.get(version_key)
            if version_data is None:
                version_data = self.memoize_make_version_hash()
                self.cache.set(version_key, version_data)
            cache_key = hashlib.md5()
            #: this should have to be after version_data, so that it
            #: does not break the delete_memoized functionality.
            if callable(make_name):
                altfname = make_name(fname)
            else:
                altfname = fname
            if callable(f):
                # Normalize kwargs into positional args so f(1, b=2) and
                # f(a=1, b=2) share one cache entry.
                args, kwargs = self.memoize_kwargs_to_args(f, *args, **kwargs)
            try:
                updated = "{0}{1}{2}".format(altfname, args, kwargs)
            except AttributeError:
                updated = "%s%s%s" % (altfname, args, kwargs)
            # BUGFIX: md5.update() needs bytes on Python 3, and
            # digest().encode('base64') was Python-2-only; hexdigest()
            # yields an equivalent short printable key.
            cache_key.update(updated.encode('utf-8'))
            cache_key = cache_key.hexdigest()[:16]
            cache_key += version_data
            return cache_key
        return make_cache_key

    def memoize_kwargs_to_args(self, f, *args, **kwargs):
        #: Inspect the arguments to the function
        #: This allows the memoization to be the same
        #: whether the function was called with
        #: 1, b=2 is equivilant to a=1, b=2, etc.
        new_args = []
        arg_num = 0
        # BUGFIX: inspect.getargspec was removed in Python 3.11; prefer
        # getfullargspec and fall back for very old interpreters.
        spec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
        m_args = spec(f)[0]
        for i in range(len(m_args)):
            if m_args[i] in kwargs:
                new_args.append(kwargs[m_args[i]])
            elif arg_num < len(args):
                new_args.append(args[arg_num])
                arg_num += 1
        return tuple(new_args), {}

    def memoize(self, timeout=None, make_name=None, unless=None):
        """
        Use this to cache the result of a function, taking its arguments
        into account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        The decorated function has three attributes: **uncached** (read
        only), **cache_timeout** (readable and writable; set before the
        call for a custom value) and **make_cache_key**.

        :param timeout: Default None. If set to an integer, will cache
                        for that amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that
                          accepts a single argument, the function name,
                          and returns a new string to be used as the
                          function name.
        :param unless: Default None. Cache will *always* execute the
                       caching facilities unless this callable is true.
                       This will bypass the caching entirely.
        """
        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: bypass cache
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)
                cache_key = decorated_function.make_cache_key(
                    f, *args, **kwargs)
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self.memoize_make_cache_key(
                f.__name__, make_name)
            return decorated_function
        return memoize

    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified functions caches, based by given parameters.
        If parameters are given, only the functions that were memoized
        with them will be erased. Otherwise all versions of the caches
        will be deleted (by swapping the internal version hash).

        :param fname: Name of the memoized function, or a reference to
                      the function.
        :param \*args: A list of positional parameters used with
                       memoized function.
        :param \**kwargs: A dict of named parameters used with
                          memoized function.

        .. note::

            If a function reference is passed, args/kwargs are reordered
            via inspect; with a bare name, pass arguments positionally in
            definition order or the cache key will not match.
        """
        if callable(fname):
            assert hasattr(fname, 'uncached')
            f = fname.uncached
            _fname = f.__name__
        else:
            f = None
            _fname = fname

        if not args and not kwargs:
            # BUGFIX: build the version key from the resolved name; the
            # original used 'fname' here, which crashes (str + function)
            # when a function reference is passed.
            version_key = self._memvname(_fname)
            version_data = self.memoize_make_version_hash()
            self.cache.set(version_key, version_data)
        else:
            cache_key = self.memoize_make_cache_key(_fname)(f, *args, **kwargs)
            self.cache.delete(cache_key)
from werkzeug.contrib.cache import MemcachedCache, NullCache
import os

# DEBUG=true in the environment disables caching entirely; anything else
# (including unset) selects the memcached backend.
cache = NullCache() if os.getenv("DEBUG") == 'true' else MemcachedCache()
def null(config, *args, **kwargs):
    """Cache factory: ignore every argument and return a no-op NullCache."""
    return NullCache()
def __init__(self, config):
    """Keep a reference to *config* and start with no-op cache/stats backends."""
    self._config = config
    # Both backends default to do-nothing implementations until replaced.
    self.statsd = NullStats()
    self.cache = NullCache()
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """

    def __init__(self, app=None):
        self.cache = None
        if app is not None:
            self.init_app(app)
        else:
            self.app = None
        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"
        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')
        self.app = app
        self._set_cache()

    def _set_cache(self):
        # TESTING always short-circuits to a no-op cache backend.
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me
            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(
                default_timeout=self.app.config['CACHE_DEFAULT_TIMEOUT'])
            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])
            self.cache = cache_obj(self.app, cache_args, cache_options)
        if not isinstance(self.cache, BaseCache):
            raise TypeError("Cache object must subclass "
                            "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function.

        By default the cache key is `view/request.path`. You are able to
        use this decorator with any function by changing the `key_prefix`.
        If the token `%s` is located within the `key_prefix` then it will
        replace that with `request.path`.

        .. note::

            You MUST have a request context to actually call any functions
            that are cached.

        :param timeout: Default None. If set to an integer, will cache
                        for that amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key
                           to use for the cache key.
        :param unless: Default None. Cache will *always* execute the
                       caching facilities unless this callable is true.
                       This will bypass the caching entirely.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)
                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                else:
                    cache_key = key_prefix
                cache_key = cache_key.encode('utf-8')
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                return rv
            return decorated_function
        return decorator

    def get_memoize_names(self):
        """
        Returns all function names used for memoized functions.

        This *will* include multiple function names when the memoized
        function has been called with differing arguments.

        :return: set of function names
        """
        return set([item[0] for item in self._memoized])

    def get_memoize_keys(self):
        """
        Returns all cache_keys used for memoized functions.

        :return: list of cache_keys
        """
        return [item[1] for item in self._memoized]

    def _memoize_cache_key(self, fname, args, kwargs):
        """Build the md5-based cache key for a memoized call (one place,
        shared by memoize() and delete_memoized())."""
        cache_key = hashlib.md5()
        try:
            updated = "{0}{1}{2}".format(fname, args, kwargs)
        except AttributeError:
            updated = "%s%s%s" % (fname, args, kwargs)
        # BUGFIX: md5.update() needs bytes on Python 3, and
        # digest().encode('base64') was Python-2-only; hexdigest() yields
        # an equivalent short printable key.
        cache_key.update(updated.encode('utf-8'))
        return cache_key.hexdigest()[:22]

    def memoize(self, timeout=None):
        """
        Use this to cache the result of a function, taking its arguments
        into account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        :param timeout: Default None. If set to an integer, will cache
                        for that amount of time. Unit of time is in seconds.
        """
        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                cache_key = self._memoize_cache_key(f.__name__, args, kwargs)
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                    # BUGFIX: guard against unbounded growth — a function
                    # returning None misses the cache on every call and
                    # previously appended a duplicate entry each time.
                    entry = (f.__name__, cache_key)
                    if entry not in self._memoized:
                        self._memoized.append(entry)
                return rv
            return decorated_function
        return memoize

    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified functions caches, based by given parameters.
        If parameters are given, only the functions that were memoized
        with them will be erased. Otherwise all the versions of the
        caches will be deleted.

        :param fname: Name of the memoized function.
        :param \*args: A list of positional parameters used with
                       memoized function.
        :param \**kwargs: A dict of named parameters used with
                          memoized function.
        """
        # PERF: the target key depends only on fname/args/kwargs, so
        # compute it once instead of once per _memoized entry.
        if args or kwargs:
            target_key = self._memoize_cache_key(fname, args, kwargs)
        else:
            target_key = None

        def deletes(item):
            # No parameters given: delete every memoized version of fname.
            if target_key is None:
                if item[0] == fname:
                    self.cache.delete(item[1])
                    return True
                return False
            if item[1] == target_key:
                self.cache.delete(item[1])
                return True
            return False

        self._memoized[:] = [x for x in self._memoized if not deletes(x)]
class ConfigSyncSchedules(object):
    """Sync scheduled machine roles into matchbox groups/profiles via the API."""

    __name__ = "ConfigSyncSchedules"
    sub_ips = EC.sub_ips
    range_nb_ips = EC.range_nb_ips
    skip_ips = EC.skip_ips

    def __init__(self, api_uri: str, matchbox_path: str, ignition_dict: dict,
                 extra_selector_dict=None):
        """
        :param api_uri: http://1.1.1.1:5000
        :param matchbox_path: /var/lib/matchbox
        :param ignition_dict: ignition.yaml
        :param extra_selector_dict: optional extra matchbox selectors
        """
        self.api_uri = api_uri
        os.environ["API_URI"] = self.api_uri
        self.matchbox_path = matchbox_path
        self.ignition_dict = ignition_dict
        self._reporting_ignitions()
        self.extra_selector = extra_selector_dict if extra_selector_dict else {}
        # inMemory cache for http queries
        if EC.sync_cache_ttl > 0:
            self._cache_query = SimpleCache(default_timeout=EC.sync_cache_ttl)
        else:
            self._cache_query = NullCache()

    def _reporting_ignitions(self):
        """Report each ignition file's content to the API.

        Raises IOError when a configured ignition file is missing;
        connection errors are logged but not fatal.
        """
        for k, v in self.ignition_dict.items():
            f = "%s/ignition/%s.yaml" % (self.matchbox_path, v)
            if os.path.isfile(f) is False:
                logger.error("%s:%s -> %s is not here" % (k, v, f))
                raise IOError(f)
            with open(f, 'rb') as ignition_file:
                blob = ignition_file.read()
            data = {v: blob.decode()}
            url = "%s/ignition/version/%s" % (self.api_uri, v)
            try:
                req = requests.post(url, data=json.dumps(data))
                req.close()
                response = json.loads(req.content.decode())
                logger.info("%s:%s -> %s is here content reported: %s"
                            % (k, v, f, response))
            except requests.exceptions.ConnectionError as e:
                logger.error("%s:%s -> %s is here content NOT reported: %s"
                             % (k, v, f, e))

    @staticmethod
    def get_dns_attr(fqdn: str):
        """
        Split an FQDN into shortname / dc / domain / rack / pos fields.

        TODO: Use LLDP to avoid vendor specific usage

        :param fqdn: e.g: r13-srv3.dc-1.foo.bar.cr
        :return: dict (missing pieces are left as empty strings)
        """
        d = {
            "shortname": "",
            "dc": "",
            "domain": "",
            "rack": "",
            "pos": "",
        }
        s = fqdn.split(".")
        d["shortname"] = s[0]
        try:
            d["dc"] = s[1]
        except IndexError:
            logger.error("IndexError %s[1] after split(.)" % fqdn)
            return d
        d["domain"] = ".".join(s[1:])
        try:
            rack, pos = s[0].split("-")
            d["rack"] = re.sub("[^0-9]+", "", rack)
            d["pos"] = re.sub("[^0-9]+", "", pos)
        except ValueError:
            logger.error("error during the split rack/pos %s" % s[0])
        return d

    @staticmethod
    def _cni_ipam(host_cidrv4: str, host_gateway: str):
        """
        Build a static host-local CNI ipam section.

        see: https://github.com/containernetworking/cni/blob/master/SPEC.md#ip-allocation
        see: https://github.com/containernetworking/plugins/tree/master/plugins/ipam/host-local
        With the class variables provide a way to generate a static
        host-local ipam.

        :param host_cidrv4: an host IP with its CIDR prefixlen, eg: '10.0.0.42/8'
        :param host_gateway: an host IP for the gateway, eg: '10.0.0.1'
        :return: dict
        """
        interface = IPv4Interface(host_cidrv4)
        subnet = interface.network
        try:
            assert 0 <= ConfigSyncSchedules.sub_ips <= 256
            # power-of-two check (0 passes by the first assert's bounds)
            assert (lambda x: x & (x - 1) == 0)(ConfigSyncSchedules.sub_ips)
        except AssertionError:
            raise ValueError(
                'sub_ips must be a power of two, in [0, 256] interval')
        if ConfigSyncSchedules.sub_ips > 0:
            # Spread each host's range inside the subnet by scaling the
            # last decimal field of its address.
            ip_last_decimal_field = int(str(interface.ip).split('.')[-1])
            interface = IPv4Interface(
                interface.network.network_address +
                ip_last_decimal_field * ConfigSyncSchedules.sub_ips)
        range_start = interface.ip + ConfigSyncSchedules.skip_ips
        range_end = range_start + ConfigSyncSchedules.range_nb_ips
        ipam = {
            "type": "host-local",
            "subnet": "%s" % (str(subnet)),
            "rangeStart": str(range_start),
            "rangeEnd": str(range_end),
            "gateway": host_gateway,
            "routes": [
                {
                    "dst": "%s/32" % EC.perennial_local_host_ip,
                    "gw": str(IPv4Interface(host_cidrv4).ip)
                },
                {
                    "dst": "0.0.0.0/0"
                },
            ],
            "dataDir": "/var/lib/cni/networks"
        }
        return ipam

    @staticmethod
    def get_extra_selectors(extra_selectors: dict):
        """
        Extra selectors are passed to Matchbox

        :param extra_selectors: dict
        :return: dict (empty when none configured)
        """
        if extra_selectors:
            if type(extra_selectors) is dict:
                logger.debug("extra selectors: %s" % extra_selectors)
                return extra_selectors
            logger.error("invalid extra selectors: %s" % extra_selectors)
            raise TypeError("%s %s is not type dict" % (
                extra_selectors, type(extra_selectors)))
        logger.debug("no extra selectors")
        return {}

    @property
    def etcd_member_ip_list(self):
        return self._query_ip_list(schedulerv2.ScheduleRoles.etcd_member)

    @property
    def kubernetes_control_plane_ip_list(self):
        return self._query_ip_list(
            schedulerv2.ScheduleRoles.kubernetes_control_plane)

    @property
    def kubernetes_nodes_ip_list(self):
        return self._query_ip_list(schedulerv2.ScheduleRoles.kubernetes_node)

    @staticmethod
    def order_http_uri(ips: list, ec_value: int, secure=False):
        """Return sorted 'http(s)://ip:port' URIs for the given IPs."""
        # BUGFIX: sorted() copy instead of in-place sort so the caller's
        # (possibly cached) list is not mutated as a side effect.
        ips = sorted(ips)
        e = [
            "http{}://%s:%d".format("s" if secure else "") % (k, ec_value)
            for k in ips
        ]
        return e

    @staticmethod
    def order_etcd_named(ips: list, ec_value: int, secure=False):
        """Return a sorted etcd initial-cluster string 'ip=http(s)://ip:port,...'."""
        # BUGFIX: see order_http_uri — do not mutate the argument.
        ips = sorted(ips)
        e = [
            "%s=http{}://%s:%d".format("s" if secure else "") % (k, k, ec_value)
            for k in ips
        ]
        return ",".join(e)

    @property
    def kubernetes_etcd_initial_cluster(self):
        return self.order_etcd_named(
            self.etcd_member_ip_list, EC.kubernetes_etcd_peer_port, secure=True)

    @property
    def vault_etcd_initial_cluster(self):
        return self.order_etcd_named(
            self.etcd_member_ip_list, EC.vault_etcd_peer_port, secure=True)

    @property
    def fleet_etcd_initial_cluster(self):
        return self.order_etcd_named(
            self.etcd_member_ip_list, EC.fleet_etcd_peer_port, secure=True)

    @property
    def kubernetes_etcd_member_client_uri_list(self):
        return self.order_http_uri(
            self.etcd_member_ip_list, EC.kubernetes_etcd_client_port, secure=True)

    @property
    def vault_etcd_member_client_uri_list(self):
        return self.order_http_uri(
            self.etcd_member_ip_list, EC.vault_etcd_client_port, secure=True)

    @property
    def fleet_etcd_member_client_uri_list(self):
        return self.order_http_uri(
            self.etcd_member_ip_list, EC.fleet_etcd_client_port, secure=True)

    @property
    def kubernetes_etcd_member_peer_uri_list(self):
        return self.order_http_uri(
            self.etcd_member_ip_list, EC.kubernetes_etcd_peer_port, secure=True)

    @property
    def vault_etcd_member_peer_uri_list(self):
        return self.order_http_uri(
            self.etcd_member_ip_list, EC.vault_etcd_peer_port, secure=True)

    @property
    def fleet_etcd_member_peer_uri_list(self):
        return self.order_http_uri(
            self.etcd_member_ip_list, EC.fleet_etcd_peer_port, secure=True)

    @property
    def kubernetes_control_plane(self):
        return self.order_http_uri(
            self.kubernetes_control_plane_ip_list,
            EC.kubernetes_apiserver_insecure_port)

    @staticmethod
    def compute_disks_size(disks: list):
        """Map a machine's total disk size onto a profile name from
        EC.disks_ladder_gb; machines without disks are 'inMemory'."""
        total_size_gb = 0
        if not disks:
            return "inMemory"
        for d in disks:
            total_size_gb += d["size-bytes"] >> 30
        ladder = list(EC.disks_ladder_gb.items())
        ladder.sort(key=lambda x: x[1])
        for k, v in ladder:
            if total_size_gb < v:
                return k
        return ladder[-1][0]

    def produce_matchbox_data(self, marker: str, i: int, m: dict,
                              automatic_name: str, update_extra_metadata=None):
        """Generate and dump the matchbox group/profile data for one machine.

        :param marker: profile name, also used to build the group id
        :param i: machine index within its role
        :param m: machine dict (mac, ipv4, cidrv4, gateway, disks, ...)
        :param automatic_name: fallback fqdn when the machine has none
        :param update_extra_metadata: optional dict merged into the metadata
        """
        fqdn = automatic_name
        try:
            if m["fqdn"]:
                fqdn = m["fqdn"]
        except KeyError as e:
            logger.warning("%s for %s" % (e, m["mac"]))

        etc_hosts = [k for k in EC.etc_hosts]
        dns_attr = self.get_dns_attr(fqdn)
        etc_hosts.append("127.0.1.1 %s %s" % (fqdn, dns_attr["shortname"]))
        cni_attr = self._cni_ipam(m["cidrv4"], m["gateway"])
        extra_metadata = {
            "etc_hosts": etc_hosts,
            # Etcd
            "etcd_name": m["ipv4"],
            "kubernetes_etcd_initial_cluster": self.kubernetes_etcd_initial_cluster,
            "vault_etcd_initial_cluster": self.vault_etcd_initial_cluster,
            "fleet_etcd_initial_cluster": self.fleet_etcd_initial_cluster,
            "kubernetes_etcd_initial_advertise_peer_urls": "https://%s:%d" % (
                m["ipv4"], EC.kubernetes_etcd_peer_port),
            "vault_etcd_initial_advertise_peer_urls": "https://%s:%d" % (
                m["ipv4"], EC.vault_etcd_peer_port),
            "fleet_etcd_initial_advertise_peer_urls": "https://%s:%d" % (
                m["ipv4"], EC.fleet_etcd_peer_port),
            "kubernetes_etcd_member_client_uri_list": ",".join(
                self.kubernetes_etcd_member_client_uri_list),
            "vault_etcd_member_client_uri_list": ",".join(
                self.vault_etcd_member_client_uri_list),
            "fleet_etcd_member_client_uri_list": ",".join(
                self.fleet_etcd_member_client_uri_list),
            "kubernetes_etcd_data_dir": EC.kubernetes_etcd_data_dir,
            "vault_etcd_data_dir": EC.vault_etcd_data_dir,
            "fleet_etcd_data_dir": EC.fleet_etcd_data_dir,
            "kubernetes_etcd_client_port": EC.kubernetes_etcd_client_port,
            "vault_etcd_client_port": EC.vault_etcd_client_port,
            "fleet_etcd_client_port": EC.fleet_etcd_client_port,
            "kubernetes_etcd_advertise_client_urls": "https://%s:%d" % (
                m["ipv4"], EC.kubernetes_etcd_client_port),
            "vault_etcd_advertise_client_urls": "https://%s:%d" % (
                m["ipv4"], EC.vault_etcd_client_port),
            "fleet_etcd_advertise_client_urls": "https://%s:%d" % (
                m["ipv4"], EC.fleet_etcd_client_port),
            # Kubernetes
            "kubernetes_apiserver_insecure_port":
                EC.kubernetes_apiserver_insecure_port,
            "kubernetes_node_ip": "%s" % m["ipv4"],
            "kubernetes_node_name":
                "%s" % m["ipv4"] if fqdn == automatic_name else fqdn,
            "kubernetes_service_cluster_ip_range":
                EC.kubernetes_service_cluster_ip_range,
            # Vault are located with the etcd members
            "vault_ip_list": ",".join(self.etcd_member_ip_list),
            "vault_port": EC.vault_port,
            "kubelet_healthz_port": EC.kubelet_healthz_port,
            "etcd_member_kubernetes_control_plane_ip_list": ",".join(
                self.etcd_member_ip_list),
            "etcd_member_kubernetes_control_plane_ip": self.etcd_member_ip_list,
            "hyperkube_image_url": EC.hyperkube_image_url,
            "cephtools_image_url": EC.cephtools_image_url,
            # IPAM
            "cni": json.dumps(cni_attr, sort_keys=True),
            "network": {
                "cidrv4": m["cidrv4"],
                "gateway": m["gateway"],
                "ip": m["ipv4"],
                "subnet": cni_attr["subnet"],
                "perennial_host_ip": EC.perennial_local_host_ip,
                "ip_or_fqdn": fqdn if EC.sync_replace_ip_by_fqdn else m["ipv4"],
            },
            # host
            "hostname": dns_attr["shortname"],
            "dns_attr": dns_attr,
            "nameservers": " ".join(EC.nameservers),
            "ntp": " ".join(EC.ntp),
            "fallbackntp": " ".join(EC.fallbackntp),
            "vault_polling_sec": EC.vault_polling_sec,
            "lifecycle_update_polling_sec": EC.lifecycle_update_polling_sec,
            "disk_profile": self.compute_disks_size(m["disks"]),
        }
        selector = {"mac": m["mac"]}
        selector.update(self.get_extra_selectors(self.extra_selector))
        if update_extra_metadata:
            extra_metadata.update(update_extra_metadata)
        gen = generator.Generator(
            api_uri=self.api_uri,
            group_id="%s-%d" % (marker, i),  # one per machine
            profile_id=marker,  # link to ignition
            name=marker,
            ignition_id="%s.yaml" % self.ignition_dict[marker],
            matchbox_path=self.matchbox_path,
            selector=selector,
            extra_metadata=extra_metadata,
        )
        gen.dumps()

    def etcd_member_kubernetes_control_plane(self):
        """Sync the etcd-member / control-plane machines; return their count."""
        marker = self.etcd_member_kubernetes_control_plane.__name__
        roles = schedulerv2.EtcdMemberKubernetesControlPlane.roles
        machine_roles = self._query_roles(*roles)
        for i, m in enumerate(machine_roles):
            update_md = {
                # Roles
                "roles": ",".join(roles),
                # Etcd Members
                "kubernetes_etcd_member_peer_uri_list": ",".join(
                    self.kubernetes_etcd_member_peer_uri_list),
                "vault_etcd_member_peer_uri_list": ",".join(
                    self.vault_etcd_member_peer_uri_list),
                "fleet_etcd_member_peer_uri_list": ",".join(
                    self.fleet_etcd_member_peer_uri_list),
                "kubernetes_etcd_peer_port": EC.kubernetes_etcd_peer_port,
                "vault_etcd_peer_port": EC.vault_etcd_peer_port,
                "fleet_etcd_peer_port": EC.fleet_etcd_peer_port,
                # K8s Control Plane
                "kubernetes_apiserver_count": len(machine_roles),
                "kubernetes_apiserver_insecure_bind_address":
                    EC.kubernetes_apiserver_insecure_bind_address,
            }
            self.produce_matchbox_data(
                marker=marker,
                i=i,
                m=m,
                automatic_name="cp-%d-%s" % (i, m["ipv4"].replace(".", "-")),
                update_extra_metadata=update_md,
            )
        logger.info("synced %d" % len(machine_roles))
        return len(machine_roles)

    def kubernetes_nodes(self):
        """Sync the plain kubernetes-node machines; return their count."""
        marker = self.kubernetes_nodes.__name__
        roles = schedulerv2.KubernetesNode.roles
        machine_roles = self._query_roles(*roles)
        for i, m in enumerate(machine_roles):
            update_md = {
                # Roles
                "roles": ",".join(roles),
            }
            self.produce_matchbox_data(
                marker=marker,
                i=i,
                m=m,
                automatic_name="no-%d-%s" % (i, m["ipv4"].replace(".", "-")),
                update_extra_metadata=update_md,
            )
        logger.info("synced %d" % len(machine_roles))
        return len(machine_roles)

    def notify(self):
        """
        TODO if we need to notify the API for any reason
        :return:
        """
        req = requests.post("%s/sync-notify" % self.api_uri)
        req.close()
        logger.debug("notified API")

    def apply(self, nb_try=2, seconds_sleep=0):
        """Run a full sync, retrying up to nb_try times.

        :param nb_try: number of attempts before giving up
        :param seconds_sleep: pause between attempts
        :return: number of machines synced
        """
        logger.info("start syncing...")
        for i in range(nb_try):
            try:
                nb = self.etcd_member_kubernetes_control_plane()
                nb += self.kubernetes_nodes()
                self.notify()
                return nb
            except Exception as e:
                logger.error("fail to apply the sync %s %s" % (type(e), e))
                if i + 1 == nb_try:
                    raise
            logger.warning("retry %d/%d in %d s" % (i + 1, nb_try, seconds_sleep))
            time.sleep(seconds_sleep)
        raise RuntimeError("fail to apply after %d try" % nb_try)

    def _query_roles(self, *roles):
        """Fetch (with caching) the machines scheduled for the given roles."""
        roles = "&".join(roles)
        url = "/scheduler/%s" % roles
        logger.debug("roles='%s'" % roles)
        data = self._cache_query.get(url)
        if data is None:
            # not in cache or evicted
            logger.debug("cache is empty for %s" % url)
            req = requests.get("%s%s" % (self.api_uri, url))
            data = json.loads(req.content.decode())
            req.close()
            data.sort(key=lambda k: k["mac"])
            self._cache_query.set(url, data)
        return data

    def _query_ip_list(self, role):
        """Fetch (with caching) the sorted IP list for one role."""
        logger.debug("role='%s'" % role)
        url = "/scheduler/ip-list/%s" % role
        data = self._cache_query.get(url)
        if data is None:
            # not in cache or evicted
            logger.debug("cache is empty for %s" % url)
            req = requests.get("%s%s" % (self.api_uri, url))
            data = json.loads(req.content.decode())
            req.close()
            data.sort()
            self._cache_query.set(url, data)
        return data
def _configure_null_cache(self):
    """Disable caching by installing a no-op NullCache backend."""
    # Local import keeps werkzeug's cache module off the import path for
    # configurations that never take this branch.
    from werkzeug.contrib.cache import NullCache
    # NullCache stores nothing; default_timeout is passed only for API
    # parity with the other backends.
    self.cache = NullCache(default_timeout=self.cache_timeout)
    logger.warning("cache disable")
def cache(self):
    """Return a cache backend: a no-op NullCache in DEBUG, SimpleCache otherwise."""
    if self.config['DEBUG']:
        from werkzeug.contrib.cache import NullCache
        return NullCache()
    # NOTE(review): a fresh SimpleCache is constructed on every call, so
    # cached entries are not shared across calls unless the caller holds
    # on to the returned object — confirm callers memoize this result.
    return SimpleCache()
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """

    def __init__(self, app=None):
        # Backing werkzeug cache object; created in init_app()/_set_cache().
        self.cache = None

        if app is not None:
            self.init_app(app)
        else:
            self.app = None

        # Keys registered by memoize(), so delete_memoized() can find them.
        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"
        # Fill in defaults for every cache-related config key, then build
        # the backend.
        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')

        self.app = app
        self._set_cache()

    def _set_cache(self):
        # Under TESTING use a no-op cache; otherwise import the backend
        # factory named by CACHE_TYPE and instantiate it.
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                # Bare names resolve inside the bundled backends package.
                import_me = 'flaskext.cache.backends.' + import_me

            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(default_timeout=self.app.config['CACHE_DEFAULT_TIMEOUT'])

            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])

            self.cache = cache_obj(self.app, cache_args, cache_options)

            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with
        `request.path`.

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

        .. code-block:: pycon

            >>> my_list = get_list()

        .. note::

            You MUST have a request context to actually called any
            functions that are cached.

        :param timeout: Default None. If set to an integer, will cache for
                        that amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key
                           to use for the cache key.
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable is true. This will
                       bypass the caching entirely.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                else:
                    cache_key = key_prefix

                rv = self.cache.get(cache_key)
                if rv is None:
                    # Miss: compute and store under the derived key.
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                return rv
            return decorated_function
        return decorator

    def memoize(self, timeout=None):
        """
        Use this to cache the result of a function, taking its arguments
        into account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        :param timeout: Default None. If set to an integer, will cache for
                        that amount of time. Unit of time is in seconds.
        """
        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                # Key includes id(f) so same-named functions do not collide.
                cache_key = ('memoize', f.__name__, id(f), args, str(kwargs))

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                    # NOTE(review): reconstructed nesting — registration of
                    # the key appears to happen only on a cache miss; confirm
                    # against the original file's indentation.
                    if cache_key not in self._memoized:
                        self._memoized.append(cache_key)
                return rv
            return decorated_function
        return memoize

    def delete_memoized(self, *keys):
        """
        Deletes all of the cached functions that used Memoize for caching.

        Example::

            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)

        .. code-block:: pycon

            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16

        :param \*keys: A list of function names to clear from cache.
        """
        def deletes(item):
            # Remove (and delete from the backend) keys whose function name
            # matches one of *keys*.
            if item[0] == 'memoize' and item[1] in keys:
                self.cache.delete(item)
                return True
            return False

        self._memoized[:] = [x for x in self._memoized if not deletes(x)]
# NOTE(review): this chunk opens inside a response-caching decorator whose
# enclosing "def" lines precede the chunk; indentation below is reconstructed
# from a whitespace-mangled source — confirm against the complete file.
            if use_cache and response.status_code == 200:
                # Only successful responses are frozen and stored.
                response.freeze()
                request.app.cache.set(key, response, timeout)
            response.make_conditional(request)
            return response
        # Make the wrapper look like the wrapped function.
        oncall.__name__ = f.__name__
        oncall.__module__ = f.__module__
        oncall.__doc__ = f.__doc__
        return oncall
    return decorator


#: the cache system factories.
# Maps the configured cache-system name to a factory taking the app.
systems = {
    'null': lambda app: NullCache(),
    # NOTE(review): SimpleCache's first positional parameter is the entry
    # threshold, not a timeout — passing cache_timeout here looks suspect;
    # confirm intent before changing.
    'simple': lambda app: SimpleCache(app.cfg['cache_timeout']),
    'memcached': lambda app: MemcachedCache(
        [x.strip() for x in app.cfg['memcached_servers']],
        app.cfg['cache_timeout']),
    'filesystem': lambda app: FileSystemCache(
        os.path.join(app.instance_folder, app.cfg['filesystem_cache_path']),
        500, app.cfg['cache_timeout'])
}
db = SQLAlchemy(app)

# Select the werkzeug cache backend from configuration; each backend's module
# is imported lazily so only the chosen one is loaded.
cache_type = app.config["CACHE_TYPE"].lower()
if cache_type == "memcached":
    from werkzeug.contrib.cache import MemcachedCache
    memcached_server = '{0}:{1}'.format(app.config["MEMCACHED_HOST"],
                                        app.config["MEMCACHED_PORT"])
    flask_cache = MemcachedCache([memcached_server],
                                 key_prefix=app.config["MEMCACHED_KEY_PREFIX"])
elif cache_type == "simple":
    from werkzeug.contrib.cache import SimpleCache
    flask_cache = SimpleCache()
else:
    from werkzeug.contrib.cache import NullCache
    flask_cache = NullCache()

# Trust X-Forwarded-* headers when deployed behind a reverse proxy.
if app.config["PROXY_SETUP"]:
    app.wsgi_app = ProxyFix(app.wsgi_app)

if app.config["OPENID_ENABLED"]:
    from flask_openid import OpenID
    from openid_teams import teams
    oid = OpenID(app, safe_roots=[], extension_responses=[teams.TeamsResponse])

# Register the application blueprints.
from login import login
app.register_blueprint(login)

from dumpdirs import dumpdirs
app.register_blueprint(dumpdirs, url_prefix="/dumpdirs")

from reports import reports
app.register_blueprint(reports, url_prefix="/reports")
def _null(self, **kwargs):
    """Factory for the no-op backend.

    All keyword options are accepted and ignored.

    :returns: a fresh :class:`NullCache` instance
    """
    return NullCache()
# For test purposes
from capitains_nautilus.flask_ext import WerkzeugCacheWrapper, FlaskNautilus
from flask import Flask
from werkzeug.contrib.cache import NullCache

# Wrap a no-op werkzeug cache so Nautilus re-parses its resources every time.
nautilus_cache = WerkzeugCacheWrapper(NullCache())

app = Flask("Nautilus")
nautilus = FlaskNautilus(
    app=app,
    resources=["./tests/test_data/latinLit"],
    parser_cache=nautilus_cache,
)
app.debug = True
app.run("0.0.0.0", 5000)
def __init__(self, conf):
    """Set up a no-op cache and the lock guarding access to it.

    :param conf: configuration object; not read here — presumably consumed
        by other methods or subclasses, TODO confirm.
    """
    self.cache = Cache(NullCache())
    self.lock = threading.Lock()
def null(app, config, args, kwargs):
    """Backend factory for the 'null' cache type.

    Every argument is accepted for interface compatibility and ignored.
    """
    return NullCache()
def __init__(self, cache=None, prefix='mysession:'):
    """Remember the backing cache and the session key prefix.

    :param cache: werkzeug-style cache object; a no-op :class:`NullCache`
        is substituted when omitted.
    :param prefix: string prepended to every session key.
    """
    self.cache = NullCache() if cache is None else cache
    self.prefix = prefix
class Provider(object):
    """Met Office DataPoint weather provider.

    Fetches hourly observations over HTTP, caches the decoded JSON payload
    for as long as the response's Cache-Control max-age allows, and maps the
    raw observation into a plain dict of display fields.
    """

    def __init__(self, config):
        # config supplies "location_id" and "api_key" (both interpolated
        # into the request URL in _make_request()).
        self._config = config
        # Both default to no-op implementations; callers may replace them.
        self.cache = NullCache()
        self.statsd = NullStats()

    # NOTE(review): reconstructed placement — this appears to be a class
    # attribute sitting between the methods; confirm against the full file.
    attribution = Attribution(
        licence_name="Open Government Licence",
        licence_url="http://www.nationalarchives.gov.uk/doc/open-government-licence/",
        attribution_text="Contains public sector information provided by the Met Office",
    )

    def latest_observations(self):
        """Return the most recent observation as a dict of display fields."""
        response = self.cache.get(self._CACHE_KEY.format(self._config["location_id"]))
        if not response:
            self.statsd.incr(__name__ + ".cache_miss")
            # NOTE(review): reconstructed extent — only the HTTP request
            # appears to be timed; confirm the "with" block's boundary.
            with self.statsd.timer(__name__ + ".request_time"):
                response = self._make_request()
            # Cache for as long as the server says the payload stays fresh.
            max_age = parse_cache_control_header(response.info().getparam("Cache-Control")).max_age
            response = json.load(response)
            self.cache.set(self._CACHE_KEY.format(self._config["location_id"]), response, max_age)
        else:
            self.statsd.incr(__name__ + ".cache_hit")

        # The payload carries either one period or a list of them; take the
        # latest period and its latest report.
        source_period = response["SiteRep"]["DV"]["Location"]["Period"]
        if isinstance(source_period, list):
            source_period = source_period[-1]
        source_observation = source_period["Rep"][-1]

        # "$" holds minutes since midnight of the period's date ("value").
        minutes_since_midnight = timedelta(minutes=int(source_observation["$"]))
        obs_time = datetime(*time.strptime(source_period["value"], "%Y-%m-%dZ")[:6], tzinfo=utc)
        obs_time += minutes_since_midnight

        weather_type, weather_type_id = self.WEATHER_TYPES.get(source_observation["W"])

        return {
            "type": weather_type,
            "type_id": weather_type_id,
            "temperature": u"{} °C".format(source_observation["T"]),
            "wind_speed": "{} mph".format(source_observation["S"]),
            "gust_speed": "{} mph".format(source_observation["G"]) if "G" in source_observation else "N/A",
            "wind_direction": source_observation["D"],
            "pressure": "{} mb".format(source_observation["P"]),
            "obs_location": capwords(response["SiteRep"]["DV"]["Location"]["name"]),
            "obs_time": obs_time.isoformat(),
        }

    def _make_request(self):
        # The urlopen result doubles as the header source (response.info())
        # and the JSON stream consumed by latest_observations().
        return urlopen(
            "http://datapoint.metoffice.gov.uk/public/data/val/wxobs/all"
            + "/json/{location_id}?res=hourly&key={api_key}".format(**self._config)
        )

    # Cache key template, parameterised by location id.
    _CACHE_KEY = "weather/metoffice/{}"

    # DataPoint "W" weather code -> (translated label, internal type id).
    WEATHER_TYPES = {
        "NA": (_("Not available"), ""),
        "0": (_("Clear night"), "clear_night"),
        "1": (_("Sunny day"), "sun"),
        "2": (_("Partly cloudy"), "cloud"),
        "3": (_("Partly cloudy"), "cloud"),
        "5": (_("Mist"), "fog"),
        "6": (_("Fog"), "fog"),
        "7": (_("Cloudy"), "cloud"),
        "8": (_("Overcast"), "cloud"),
        "9": (_("Light rain shower"), "rain"),
        "10": (_("Light rain shower"), "rain"),
        "11": (_("Drizzle"), "rain"),
        "12": (_("Light rain"), "rain"),
        "13": (_("Heavy rain shower"), "rain"),
        "14": (_("Heavy rain shower"), "rain"),
        "15": (_("Heavy rain"), "rain"),
        "16": (_("Sleet shower"), "rain"),
        "17": (_("Sleet shower"), "rain"),
        "18": (_("Sleet"), "rain"),
        "19": (_("Hail shower"), "rain"),
        "20": (_("Hail shower"), "rain"),
        "21": (_("Hail"), "rain"),
        "22": (_("Light snow shower"), "snow"),
        "23": (_("Light snow shower"), "snow"),
        "24": (_("Light snow"), "snow"),
        "25": (_("Heavy snow shower"), "snow"),
        "26": (_("Heavy snow shower"), "snow"),
        "27": (_("Heavy snow"), "snow"),
        "28": (_("Thunder shower"), "thunder"),
        "29": (_("Thunder shower"), "thunder"),
        "30": (_("Thunder"), "thunder"),
    }