def setUp(self):
    """Prepare a fresh SQLite-backed model environment for each test."""
    db_path = "/tmp/box.db"
    # Start every run from a clean database file.
    if os.path.isfile(db_path):
        os.unlink(db_path)
    DBHelper().set_db(db_path)
    InstallHelper.reset()
    # Back the model layer with an empty filesystem cache.
    fs_cache = FileSystemCache("/tmp/werkzeug")
    fs_cache.clear()
    BaseModel.set_cache(fs_cache)
    SampleModel.install()
def setUp(self):
    """Rebuild the test database, model cache and sample schema from scratch."""
    # Remove any database left over from a previous run.
    if os.path.isfile('/tmp/box.db'):
        os.unlink('/tmp/box.db')
    DBHelper().set_db('/tmp/box.db')
    InstallHelper.reset()
    # Hand the models a cleared file-system cache to work against.
    model_cache = FileSystemCache('/tmp/werkzeug')
    model_cache.clear()
    BaseModel.set_cache(model_cache)
    SampleModel.install()
def test_filesystemcache_clear():
    """Test that FileSystemCache.clear removes every cached entry file.

    Stores one entry in a throwaway cache directory, checks the backing
    file appears, then checks it disappears after ``clear()``.

    Fix: the original called ``shutil.rmtree`` only after the assertions,
    so a failing assert leaked the temporary directory.  The cleanup now
    runs in a ``finally`` block, pass or fail.
    """
    tmp_dir = tempfile.mkdtemp()
    try:
        cache = FileSystemCache(cache_dir=tmp_dir)
        cache.set("foo", "bar")
        # One file per cached key.
        cache_files = os.listdir(tmp_dir)
        assert len(cache_files) == 1
        cache.clear()
        cache_files = os.listdir(tmp_dir)
        assert len(cache_files) == 0
    finally:
        # Always remove the scratch directory.
        shutil.rmtree(tmp_dir)
def test_filesystemcache_clear():
    """Test that FileSystemCache.clear empties the cache directory.

    Fix: the original removed the temporary directory only when all
    assertions passed, leaking it on failure; cleanup is now guaranteed
    by a ``finally`` block.
    """
    tmp_dir = tempfile.mkdtemp()
    try:
        cache = FileSystemCache(cache_dir=tmp_dir)
        cache.set('foo', 'bar')
        # Exactly one file should back the single cached key.
        cache_files = os.listdir(tmp_dir)
        assert len(cache_files) == 1
        cache.clear()
        cache_files = os.listdir(tmp_dir)
        assert len(cache_files) == 0
    finally:
        # Remove the scratch directory whether or not the asserts held.
        shutil.rmtree(tmp_dir)
def setUp(self):
    """Build a throwaway Flask app wired to a Nautilus resolver/manager."""
    # Fresh, empty cache for the resolver.
    fs_cache = FileSystemCache("cache_dir")
    fs_cache.clear()
    flask_app = Flask("Nautilus")
    cts_resolver = NautilusCTSResolver(
        ["./tests/test_data/latinLit"], cache=fs_cache, logger=logger
    )
    nautilus_ext = FlaskNautilus(
        app=flask_app,
        resolver=cts_resolver,
        flask_caching=Cache(config={'CACHE_TYPE': 'filesystem'}),
        logger=logger,
    )
    # Silence the resolver's logger so test output stays clean.
    cts_resolver.logger.disabled = True
    self.cache_manager = fs_cache
    self.nautilus = nautilus_ext
    self.resolver = cts_resolver
    self.manager = FlaskNautilusManager(cts_resolver, nautilus_ext)
class TestCache(TestCase):
    """Exercise the resolver against a cache pre-built by a subprocess."""

    def setUp(self):
        # Build the cache out-of-process so this process only ever reads it.
        exit_code = call([python, "./tests/cts/run_cache.py"], cwd=cwd)
        if exit_code != 0:
            raise Exception("Creating cache failed")
        self.cache = FileSystemCache(subprocess_cache_dir)
        self.resolver = NautilusCTSResolver(
            resource=subprocess_repository, cache=self.cache
        )
        self.resolver.logger.disabled = True

        # Any attempt to re-parse the corpus means the cache was bypassed.
        def _fail_parse(*args, **kwargs):
            raise Exception("Parse should not be called")

        self.resolver.parse = _fail_parse

    def tearDown(self):
        self.cache.clear()

    def test_argumentless_metadata(self):
        inventory = self.resolver.getMetadata()
        self.assertIn("Divān (English)", inventory.export(Mimetypes.XML.CTS),
                      "Metadata are there")
        self.assertEqual(len(inventory.readableDescendants), 4)

    def test_first_child(self):
        first_key = list(self.resolver.getMetadata().children.keys())[0]
        inventory = self.resolver.getMetadata(first_key)
        self.assertIn("Divān (English)", inventory.export(Mimetypes.XML.CTS),
                      "Metadata are there")
        self.assertEqual(len(inventory.readableDescendants), 4)

    def test_textgroup(self):
        """Regression test: originally failed because of a different GRAPH
        constant used across modules (one from the cache vs. the world)."""
        inventory = self.resolver.getMetadata("urn:cts:farsiLit:hafez")
        self.assertIn("Divān (English)", inventory.export(Mimetypes.XML.CTS),
                      "Metadata are there")
        self.assertEqual(len(inventory.readableDescendants), 3)
class WechatCache(WechatSogouBase):
    """File-backed cache: a thin wrapper around werkzeug's FileSystemCache."""

    def __init__(self, cache_dir='cache', default_timeout=300):
        """Create the cache.

        ``cache_dir`` is the directory holding the cache files;
        ``default_timeout`` is the default entry expiry in seconds.
        """
        self.cache = FileSystemCache(cache_dir, default_timeout=default_timeout)

    def clear(self):
        """Drop every cached entry."""
        return self.cache.clear()

    def get(self, key):
        """Return the value cached under ``key``, or None if absent/expired."""
        return self.cache.get(key)

    def add(self, key, value, timeout=None):
        """Cache ``value`` under ``key`` only when the key is not cached yet.

        Returns False instead of overwriting an existing entry; the default
        expiry (300s) applies when ``timeout`` is None.
        """
        return self.cache.add(key, value, timeout)

    def set(self, key, value, timeout=None):
        """Cache ``value`` under ``key``, overwriting any previous entry."""
        return self.cache.set(key, value, timeout)

    def delete(self, key):
        """Remove the entry stored under ``key``."""
        return self.cache.delete(key)
class WechatCache(object):
    """File-based cache delegating every operation to a FileSystemCache."""

    def __init__(self, cache_dir='cache', default_timeout=300):
        """Initialise the backing store.

        ``cache_dir``: directory for the cache files.
        ``default_timeout``: default entry lifetime in seconds.
        """
        self.cache = FileSystemCache(cache_dir, default_timeout=default_timeout)

    def clear(self):
        """Empty the cache entirely."""
        return self.cache.clear()

    def get(self, key):
        """Fetch the cached value for ``key``; None when missing or expired."""
        return self.cache.get(key)

    def add(self, key, value, timeout=None):
        """Store ``value`` under ``key`` unless the key already exists.

        Never overwrites; returns False for an already-cached key.
        """
        return self.cache.add(key, value, timeout)

    def set(self, key, value, timeout=None):
        """Store ``value`` under ``key``, replacing any existing entry."""
        return self.cache.set(key, value, timeout)

    def delete(self, key):
        """Discard whatever is cached under ``key``."""
        return self.cache.delete(key)
class WechatCache:
    """Minimal cache facade over werkzeug's FileSystemCache."""

    def __init__(self, cache_dir='cache', default_timeout=300):
        # All operations below simply forward to this backing store.
        self.cache = FileSystemCache(
            cache_dir=cache_dir, default_timeout=default_timeout
        )

    def clear(self):
        """Remove all cached entries."""
        return self.cache.clear()

    def get(self, key):
        """Return the value for ``key`` or None when not cached."""
        return self.cache.get(key)

    def add(self, key, value, timeout=None):
        """Cache ``value`` under ``key`` only if the key is new."""
        return self.cache.add(key, value, timeout)

    def set(self, key, value, timeout=None):
        """Cache ``value`` under ``key``, overwriting existing data."""
        return self.cache.set(key, value, timeout)

    def delete(self, key):
        """Remove the entry stored under ``key``."""
        return self.cache.delete(key)
class Cache(object):
    """Cache module based on werkzeug.contrib.cache.

    This is a mixed version of NullCache, SimpleCache, FileSystemCache,
    MemcachedCache, and RedisCache.

    :param app: Flask app instance.
    :param config_prefix: Define a prefix for Flask app config.
    :param kwargs: Extra parameters.

    You need to configure a type of the cache, and its related
    configurations. The default ``config_prefix`` is ``AUTHLIB``, so it
    requires a config of::

        AUTHLIB_CACHE_TYPE = 'simple'

    If ``config_prefix`` is something else, like ``EXAMPLE``, it would be::

        EXAMPLE_CACHE_TYPE = 'simple'

    The available cache types are:

    * null: It will not cache anything. No configuration.

    * simple: It caches things in memory. The only configuration is
      ``threshold``::

          AUTHLIB_CACHE_THRESHOLD = 500

    * memcache: It caches things in Memcache. Available configurations::

          AUTHLIB_CACHE_MEMCACHED_SERVERS = []
          AUTHLIB_CACHE_KEY_PREFIX = None

    * redis: It caches things in Redis. Available configurations::

          AUTHLIB_CACHE_REDIS_HOST = 'localhost'
          AUTHLIB_CACHE_REDIS_PORT = 6379
          AUTHLIB_CACHE_REDIS_PASSWORD = None
          AUTHLIB_CACHE_REDIS_DB = 0
          AUTHLIB_CACHE_KEY_PREFIX = None

    * filesystem: It caches things in local filesystem. Available
      configurations::

          AUTHLIB_CACHE_DIR = ''  # required
          AUTHLIB_CACHE_THRESHOLD = 500
    """

    def __init__(self, app, config_prefix='AUTHLIB', **kwargs):
        # This wrapper is deprecated upstream; warn callers on construction.
        deprecate(DEPRECATE_MESSAGE, 0.7)
        self.config_prefix = config_prefix
        self.config = app.config

        # 'type' has no default, so _config raises if it is not configured.
        cache_type = self._config('type')
        kwargs.update(
            dict(default_timeout=self._config('DEFAULT_TIMEOUT', 100)))

        # Dispatch on the configured backend; every branch ends with
        # self.cache bound to a werkzeug cache instance.
        if cache_type == 'null':
            self.cache = NullCache()
        elif cache_type == 'simple':
            kwargs.update(dict(threshold=self._config('threshold', 500)))
            self.cache = SimpleCache(**kwargs)
        elif cache_type == 'memcache':
            kwargs.update(
                dict(
                    servers=self._config('MEMCACHED_SERVERS'),
                    key_prefix=self._config('KEY_PREFIX', None),
                ))
            self.cache = MemcachedCache(**kwargs)
        elif cache_type == 'redis':
            kwargs.update(
                dict(
                    host=self._config('REDIS_HOST', 'localhost'),
                    port=self._config('REDIS_PORT', 6379),
                    password=self._config('REDIS_PASSWORD', None),
                    db=self._config('REDIS_DB', 0),
                    key_prefix=self._config('KEY_PREFIX', None),
                ))
            self.cache = RedisCache(**kwargs)
        elif cache_type == 'filesystem':
            kwargs.update(dict(threshold=self._config('threshold', 500), ))
            self.cache = FileSystemCache(self._config('DIR'), **kwargs)
        else:
            raise RuntimeError('`%s` is not a valid cache type!' % cache_type)
        # Expose the backend on the app for other extensions to reuse.
        app.extensions[config_prefix.lower() + '_cache'] = self.cache

    def _config(self, key, default=_missing):
        """Resolve ``key`` from app config.

        Lookup order: ``<prefix>_CACHE_<KEY>``, then ``CACHE_<KEY>``,
        then ``default``.  With no default (the ``_missing`` sentinel),
        a missing key raises RuntimeError.
        """
        key = key.upper()
        prior = '%s_CACHE_%s' % (self.config_prefix, key)
        if prior in self.config:
            return self.config[prior]
        fallback = 'CACHE_%s' % key
        if fallback in self.config:
            return self.config[fallback]
        if default is _missing:
            raise RuntimeError('%s is missing.' % prior)
        return default

    def get(self, key):
        """Look up key in the cache and return the value for it.

        :param key: the key to be looked up.
        :returns: The value if it exists and is readable, else ``None``.
        """
        return self.cache.get(key)

    def delete(self, key):
        """Delete `key` from the cache.

        :param key: the key to delete.
        :returns: Whether the key existed and has been deleted.
        """
        return self.cache.delete(key)

    def get_many(self, *keys):
        """Returns a list of values for the given keys.

        For each key an item in the list is created::

            foo, bar = cache.get_many("foo", "bar")

        Has the same error handling as :meth:`get`.

        :param keys: The function accepts multiple keys as positional
                     arguments.
        """
        return [self.cache.get(k) for k in keys]

    def get_dict(self, *keys):
        """Like :meth:`get_many` but return a dict::

            d = cache.get_dict("foo", "bar")
            foo = d["foo"]
            bar = d["bar"]

        :param keys: The function accepts multiple keys as positional
                     arguments.
        """
        return self.cache.get_dict(*keys)

    def set(self, key, value, timeout=None):
        """Add a new key/value to the cache (overwrites value, if key already
        exists in the cache).

        :param key: the key to set
        :param value: the value for the key
        :param timeout: the cache timeout for the key in seconds
                        (if not specified, it uses the default timeout).
                        A timeout of 0 indicates that the cache never expires.
        :returns: ``True`` if key has been updated, ``False`` for backend
                  errors. Pickling errors, however, will raise a subclass of
                  ``pickle.PickleError``.
        """
        return self.cache.set(key, value, timeout)

    def add(self, key, value, timeout=None):
        """Works like :meth:`set` but does not overwrite the values of already
        existing keys.

        :param key: the key to set
        :param value: the value for the key
        :param timeout: the cache timeout for the key in seconds
                        (if not specified, it uses the default timeout).
                        A timeout of 0 indicates that the cache never expires.
        :returns: Same as :meth:`set`, but also ``False`` for already
                  existing keys.
        """
        return self.cache.add(key, value, timeout)

    def set_many(self, mapping, timeout=None):
        """Sets multiple keys and values from a mapping.

        :param mapping: a mapping with the keys/values to set.
        :param timeout: the cache timeout for the key in seconds
                        (if not specified, it uses the default timeout).
                        A timeout of 0 indicates that the cache never expires.
        :returns: Whether all given keys have been set.
        """
        return self.cache.set_many(mapping, timeout)

    def delete_many(self, *keys):
        """Deletes multiple keys at once.

        :param keys: The function accepts multiple keys as positional
                     arguments.
        :returns: Whether all given keys have been deleted.
        :rtype: boolean
        """
        return self.cache.delete_many(*keys)

    def has(self, key):
        """Checks if a key exists in the cache without returning it. This is a
        cheap operation that bypasses loading the actual data on the backend.

        This method is optional and may not be implemented on all caches.

        :param key: the key to check
        """
        return self.cache.has(key)

    def clear(self):
        """Clears the cache. Keep in mind that not all caches support
        completely clearing the cache.

        :returns: Whether the cache has been cleared.
        """
        return self.cache.clear()

    def inc(self, key, delta=1):
        """Increments the value of a key by `delta`. If the key does not yet
        exist it is initialized with `delta`.

        For supporting caches this is an atomic operation.

        :param key: the key to increment.
        :param delta: the delta to add.
        :returns: The new value or ``None`` for backend errors.
        """
        return self.cache.inc(key, delta=delta)

    def dec(self, key, delta=1):
        """Decrements the value of a key by `delta`. If the key does not yet
        exist it is initialized with `-delta`.

        For supporting caches this is an atomic operation.

        :param key: the key to increment.
        :param delta: the delta to subtract.
        :returns: The new value or `None` for backend errors.
        """
        return self.cache.dec(key, delta=delta)
def gzip_cache():
    """Return an empty FileSystemCache dedicated to gzipped assets."""
    gzip_store = FileSystemCache(cache_dir='.cache/gzip')
    # Start from a clean slate so stale compressed files are never served.
    gzip_store.clear()
    return gzip_store
class TestSaxonStream(TestCase):
    """End-to-end checks of the Saxon XSLT transform behind a Nemo app."""

    def setUp(self):
        self.cache = FileSystemCache("./cache")
        self.saxon = SaxonStreamTransform(
            "./jars/saxon.jar",
            "./tests/data/xsl/ciham.xsl",
            cache=self.cache,
        )
        self.nautilus = NautilusRetriever(folders=["./tests/data/repo"])
        self.nautilus.logger.setLevel(logging.ERROR)
        app = Flask("Nemo")
        app.debug = True
        nemo = Nemo(
            app=app,
            base_url="",
            retriever=self.nautilus,
            transform={"default": self.saxon.transform},
        )
        self.client = app.test_client()

    def tearDown(self):
        # Wipe the cache folder so nothing leaks from one test to the next.
        self.cache.clear()

    def test_simple_transformation(self):
        """A first request is transformed and lands in the cache."""
        response = self.client.get("/read/froLit/jns915/jns1856/ciham-fro1/1")
        body = response.data.decode()
        self.assertIn('<span class="expan">et </span>', body,
                      "Text content should be transformed")
        self.assertIn('Facsimilaire', body,
                      "Other content should be added")
        cached = self.cache.get(
            "urn:cts:froLit:jns915.jns1856.ciham-fro1:1").decode()
        self.assertIn('<aside class="text-left">', cached,
                      "Assert cache is made")

    def test_cache_retrieved(self):
        """A repeat request is served from cache without shelling out."""
        response = self.client.get("/read/froLit/jns915/jns1856/ciham-fro1/1")
        body = response.data.decode()
        self.assertIn('<span class="expan">et </span>', body,
                      "Text content should be transformed")
        self.assertIn('Facsimilaire', body,
                      "Other content should be added")
        cached = self.cache.get(
            "urn:cts:froLit:jns915.jns1856.ciham-fro1:1").decode()
        self.assertIn('<aside class="text-left">', cached,
                      "Assert cache is made")
        with mock.patch("nemo_xslttwo_plugin.shell") as shell:
            response = self.client.get(
                "/read/froLit/jns915/jns1856/ciham-fro1/1")
            cached_response = response.data.decode()
            self.assertEqual(cached_response, body,
                             "Text content should the same in cache")
            self.assertEqual(
                shell.call_count, 0,
                "Shell should not be called because we use cache")

    def test_two_transformations(self):
        """Requesting a second passage still transforms correctly."""
        response = self.client.get("/read/froLit/jns915/jns1856/ciham-fro1/1")
        response = self.client.get("/read/froLit/jns915/jns1856/ciham-fro1/2")
        body = response.data.decode()
        self.assertIn('<span class="expan">et </span>', body,
                      "Text content should be transformed")
        self.assertIn('Facsimilaire', body,
                      "Other content should be added")
        cached = self.cache.get(
            "urn:cts:froLit:jns915.jns1856.ciham-fro1:1").decode()
        self.assertIn('<aside class="text-left">', cached,
                      "Assert cache is made")
# NOTE(review): this fragment begins inside a request handler whose `def`
# header lies outside the visible chunk; the indentation below is
# reconstructed from the collapsed source — confirm against the original.
    # Reject obviously malformed sites: empty, no interior dot, spaces,
    # backslashes, or shorter than 4 characters.
    if not site or '.' not in site[1:-1] or ' ' in site or '\\' in site or len(
            site) < 4:
        return index_with_warning()
    domain = urlparse(site).netloc
    if not domain:
        # No scheme given: prepend '//' so urlparse can locate the netloc.
        site_with_protocol = '//' + site if not site[0] == '/' else '/' + site
        domain = urlparse(site_with_protocol).netloc
        if not domain:
            return index_with_warning()
    if ':' in domain:
        # Reject ports / credentials embedded in the host part.
        return index_with_warning()
    response_data = cache.get(domain)
    if not response_data:
        sb = parse_google_sb(domain)
        response_data = render_template('check.html', domain=domain, sb=sb)
        # cache response for 12 hours
        # cache.set(domain, response_data, timeout=43200)
    return response_data


@app.errorhandler(404)
def not_found(e):
    # Render the custom 404 page for unknown routes.
    return render_template('404.html'), 404


if __name__ == '__main__':
    cache.clear()
    if 'liveconsole' not in gethostname():
        # NOTE(review): app.debug is set to the string 'True', not the
        # boolean True — truthy either way, but presumably a boolean was
        # intended; confirm before relying on it.
        app.debug = 'True'
        app.run()