def test_invenio_access_permission_cache_redis(app, dynamic_permission):
    """Check the per-action needs cache when InvenioAccess uses Redis."""
    cache = RedisCache()
    InvenioAccess(app, cache=cache)
    with app.test_request_context():
        user_can_all = User(email='*****@*****.**')
        user_can_open = User(email='*****@*****.**')
        user_can_open_1 = User(email='*****@*****.**')

        # Insertion order fixes the user ids (1, 2, 3) asserted below.
        for user in (user_can_all, user_can_open, user_can_open_1):
            db.session.add(user)
        db.session.add(ActionUsers(action='open', user=user_can_all))
        db.session.flush()

        identity_open = FakeIdentity(UserNeed(user_can_open.id))
        permission_open = dynamic_permission(ActionNeed('open'))
        # Only user 1 is allowed so far; the check populates the cache.
        assert not permission_open.allows(identity_open)
        assert current_access.get_action_cache('open') == (
            {Need(method='id', value=1)},
            set(),
        )

        db.session.add(ActionUsers(action='open', user=user_can_open))
        db.session.flush()
        permission_open = dynamic_permission(ActionNeed('open'))
        # The new grant invalidates and repopulates the cached needs.
        assert permission_open.allows(identity_open)
        assert current_access.get_action_cache('open') == (
            {Need(method='id', value=1), Need(method='id', value=2)},
            set(),
        )

        db.session.add(ActionUsers(action='open', argument=1,
                                   user=user_can_open_1))
        db.session.flush()
        identity_open_1 = FakeIdentity(UserNeed(user_can_open_1.id))
        permission_open_1 = dynamic_permission(
            ParameterizedActionNeed('open', '1'))
        assert not permission_open.allows(identity_open_1)
        assert permission_open_1.allows(identity_open_1)
        # The parameterized action is cached under its own 'open::1' key...
        assert current_access.get_action_cache('open::1') == (
            {Need(method='id', value=1), Need(method='id', value=2),
             Need(method='id', value=3)},
            set(),
        )
        # ...while the plain 'open' cache entry is left untouched.
        assert current_access.get_action_cache('open') == (
            {Need(method='id', value=1), Need(method='id', value=2)},
            set(),
        )
def _redis(self, **kwargs):
    """Return a :class:`RedisCache` built from the instance configuration.

    Explicit settings always win: the configured values overwrite any
    same-named entries already present in ``kwargs``.
    """
    settings = {
        'host': self._config('REDIS_HOST', 'localhost'),
        'port': self._config('REDIS_PORT', 6379),
        'password': self._config('REDIS_PASSWORD', None),
        'db': self._config('REDIS_DB', 0),
        'key_prefix': self._config('KEY_PREFIX', None),
    }
    kwargs.update(settings)
    return RedisCache(**kwargs)
def __init__(self, host="localhost", port=6379, password=None, db=0,
             default_timeout=300, key_prefix=None, **kwargs):
    """Initialize the Redis cache backend.

    :param host: Redis server hostname (cachelib also accepts a ready
        client object here — see cachelib docs; TODO confirm callers rely
        on that).
    :param port: Redis server port.
    :param password: optional Redis password.
    :param db: Redis database number.
    :param default_timeout: default entry timeout in seconds.
    :param key_prefix: optional prefix applied to every cache key.
    :param kwargs: forwarded to the cachelib implementation.
    """
    # Both bases are initialized explicitly: BaseCache for the wrapper's
    # bookkeeping, the cachelib class for the actual Redis client setup.
    BaseCache.__init__(self, default_timeout=default_timeout)
    CachelibRedisCache.__init__(self, host=host, port=port,
                                password=password, db=db,
                                default_timeout=default_timeout,
                                key_prefix=key_prefix, **kwargs)
    # Alias read/write clients to the single client cachelib created.
    # NOTE(review): the attribute is spelled `_read_clients` (plural) —
    # flask-caching conventionally uses `_read_client`; confirm consumers
    # expect this spelling.
    self._write_client = self._read_clients = self._client
def __flask_setup():
    """Build the module-level Flask app and Redis cache singletons."""
    global app, cache
    build_dir = 'react_app/build'
    # Serve the compiled React app for both static files and templates.
    app = Flask(__name__, static_folder=build_dir, template_folder=build_dir)
    app.wsgi_app = ProxyFix(app.wsgi_app)
    for option in ('DEVELOPMENT', 'DEBUG'):
        app.config[option] = get_option('FLASK', option)
    CORS(app, resources={r"/search-iframe*": {"origins": "*"}})
    cache = RedisCache(REDIS_HOST, REDIS_PORT, REDIS_PASS)
def redis(config, *args, **kwargs):
    """Build a :class:`RedisCache` from a Flask-Caching style config dict.

    Caller-supplied keyword arguments take precedence over config values.
    If ``CACHE_REDIS_URL`` is set, a client is created from the URL and
    handed to the cache as ``host`` (cachelib accepts a client object).
    """
    option_defaults = (
        ('host', 'CACHE_REDIS_HOST', DEF_REDIS_HOST),
        ('port', 'CACHE_REDIS_PORT', DEF_REDIS_PORT),
        ('password', 'CACHE_REDIS_PASSWORD', None),
        ('key_prefix', 'CACHE_KEY_PREFIX', None),
        ('db', 'CACHE_REDIS_DB', None),
    )
    for kwarg, option, default in option_defaults:
        kwargs.setdefault(kwarg, config.get(option, default))
    redis_url = config.get('CACHE_REDIS_URL')
    if redis_url:
        # The URL wins: replace host with a ready client; `db` moves into
        # the URL-based connection and must not also reach RedisCache.
        kwargs['host'] = from_url(redis_url, db=kwargs.pop('db', None))
    return RedisCache(*args, **kwargs)
SUPERSET_RESULTS_BACKENDS = { "simple": lambda: SimpleCache(threshold=get_env( "SUPERSET_SIMPLE_RESULTS_BACKEND_THRESHOLD", default=10, cast=int), default_timeout=get_env( "SUPERSET_SIMPLE_RESULTS_BACKEND_DEFAULT_TIMEOUT", default=300, cast=float)), "redis": lambda: RedisCache( host=get_env("SUPERSET_REDIS_RESULTS_BACKEND_HOST"), port=get_env( "SUPERSET_REDIS_RESULTS_BACKEND_PORT", default=6379, cast=int), password=get_env("SUPERSET_REDIS_RESULTS_BACKEND_PASSWORD"), key_prefix=get_env("SUPERSET_REDIS_RESULTS_BACKEND_KEY_PREFIX", default="superset_results"), db=get_env("SUPERSET_REDIS_RESULTS_BACKEND_DB", default=0, cast=int), default_timeout=get_env( "SUPERSET_REDIS_RESULTS_BACKEND_DEFAULT_TIMEOUT", default=300, cast=float)), "memcached": lambda: MemcachedCache( servers=get_env("SUPERSET_MEMCACHED_RESULTS_BACKEND_SERVERS", default=[], cast=list), default_timeout=get_env( "SUPERSET_MEMCACHED_RESULTS_BACKEND_DEFAULT_TIMEOUT", default=300, cast=float), key_prefix=get_env("SUPERSET_MEMCACHED_RESULTS_BACKEND_KEY_PREFIX",
def _factory(self, *args, **kwargs):
    """Create a RedisCache on the dedicated test port with an empty db."""
    cache = RedisCache(*args, port=6360, **kwargs)
    # Start every test from a clean slate.
    cache._client.flushdb()
    return cache
def redis_connection():
    """Attach a Redis client and a cache wrapper to the request context."""
    pool = app.config['REDIS_CONNECTION_POOL']
    client = redis.Redis(connection_pool=pool)
    g.redis_client = client
    # cachelib's RedisCache accepts an existing client object as ``host``,
    # so both share the same connection pool.
    g.cache = RedisCache(host=client)
'CACHE_DEFAULT_TIMEOUT': 24 * 60 * 60, # 1 day 'CACHE_KEY_PREFIX': 'data_', 'CACHE_REDIS_URL': 'redis://%s:%s/1' % (REDIS_HOST, REDIS_PORT) } THUMBNAIL_SELENIUM_USER = "******" THUMBNAIL_CACHE_CONFIG: CacheConfig = { 'CACHE_TYPE': 'redis', 'CACHE_DEFAULT_TIMEOUT': 24 * 60 * 60 * 30, 'CACHE_KEY_PREFIX': 'thumbnail_', 'CACHE_NO_NULL_WARNING': True, 'CACHE_REDIS_URL': 'redis://%s:%s/1' % (REDIS_HOST, REDIS_PORT) } SCREENSHOT_LOCATE_WAIT = 100 SCREENSHOT_LOAD_WAIT = 600 RESULTS_BACKEND = RedisCache(host=REDIS_HOST, port=REDIS_PORT, key_prefix='superset_results') class CeleryConfig(object): BROKER_URL = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT) CELERY_IMPORTS = ( 'superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", ) CELERY_RESULT_BACKEND = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT) CELERYD_PREFETCH_MULTIPLIER = 10 CELERY_ACKS_LATE = True CELERY_ANNOTATIONS = { 'sql_lab.get_sql_results': {
from cachelib import RedisCache

MAPBOX_API_KEY = os.getenv('MAPBOX_API_KEY', '')

# Flask-Caching settings: everything lives in the local "redis" service,
# database 1, under a shared key prefix.
CACHE_CONFIG = {
    'CACHE_TYPE': 'redis',
    'CACHE_DEFAULT_TIMEOUT': 300,
    'CACHE_KEY_PREFIX': 'superset_',
    'CACHE_REDIS_HOST': 'redis',
    'CACHE_REDIS_PORT': 6379,
    'CACHE_REDIS_DB': 1,
    'CACHE_REDIS_URL': 'redis://redis:6379/1',
}

SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = 'thisISaSECRET_1234'


class CeleryConfig(object):
    """Celery broker/result settings (redis database 0)."""

    BROKER_URL = 'redis://redis:6379/0'
    CELERY_IMPORTS = ('superset.sql_lab', )
    CELERY_RESULT_BACKEND = 'redis://redis:6379/0'
    CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}


CELERY_CONFIG = CeleryConfig

# SQL Lab query results are stored in Redis under their own prefix.
RESULTS_BACKEND = RedisCache(
    host='redis',
    port=6379,
    key_prefix='superset_results',
)
def create_cache_list(request, tmpdir):
    """Attach a fresh list of cache backends to the requesting test class."""
    memcached = MemcachedCache()
    memcached._client.flush_all()
    redis_cache = RedisCache(port=6360)
    redis_cache._client.flushdb()
    # Both shared backends are wiped above so tests start clean.
    request.cls.cache_list = [
        FileSystemCache(tmpdir),
        memcached,
        redis_cache,
        SimpleCache(),
    ]
import hashlib
import logging
import os
from datetime import datetime

from cachelib import RedisCache
from flask import Flask
from flask_wtf import CSRFProtect
from werkzeug.security import safe_join

from config import config

csrf = CSRFProtect()
# default_timeout=0 disables expiry (cachelib semantics); every key
# carries the app-specific prefix so the Redis db can be shared.
cache = RedisCache(default_timeout=0, key_prefix='buzuki_cache_')


class DoesNotExist(Exception):
    """Raised when a requested object cannot be found."""

    pass


class InvalidNote(Exception):
    """Raised when a note value is not valid."""

    pass


# Based on https://gist.github.com/mfenniak/2978805
# but we don't check the file modtime for performance.
class FileHashFlask(Flask):
    """Flask subclass that keeps a per-file hash cache (see gist above)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Per-instance hash cache; no mtime check means entries are never
        # invalidated. Presumably filled by a helper method not visible in
        # this chunk — confirm.
        self._hash_cache = {}
"CACHE_TYPE": "RedisCache", "CACHE_DEFAULT_TIMEOUT": 300, "CACHE_KEY_PREFIX": "superset_", "CACHE_REDIS_HOST": "redis", "CACHE_REDIS_PORT": 6379, "CACHE_REDIS_DB": 1, "CACHE_REDIS_URL": "redis://*****:*****@db:5432/superset" SQLALCHEMY_TRACK_MODIFICATIONS = True SECRET_KEY = "thisISaSECRET_1234" class CeleryConfig(object): BROKER_URL = "redis://redis:6379/0" CELERY_IMPORTS = ("superset.sql_lab", ) CELERY_RESULT_BACKEND = "redis://redis:6379/0" CELERY_ANNOTATIONS = {"tasks.add": {"rate_limit": "10/s"}} CELERY_CONFIG = CeleryConfig RESULTS_BACKEND = RedisCache(host="redis", port=6379, key_prefix="superset_results")
from flask_login import LoginManager
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from cachelib import RedisCache

# Extension singletons — presumably bound to the app in a factory
# elsewhere (not visible in this module).
login = LoginManager()
db = SQLAlchemy()
migrate = Migrate()
# NOTE(review): host "redis" looks like a docker-compose service name —
# confirm it resolves in every deployment environment.
cache = RedisCache(host="redis")
import app.init as dbinit
from werkzeug.utils import secure_filename
from flask_wtf.csrf import CSRFProtect

app = Flask(__name__, static_url_path="")
app.secret_key = os.environ.get("SECRET_KEY")

# Connects as the MariaDB root user; dbinit.init() runs before the
# schema is selected — presumably it creates the 'od' database, confirm.
db = mariadb.connect(host="db", user="******",
                     password=os.environ.get("MYSQL_ROOT_PASSWORD"))
sql = db.cursor(buffered=True)
dbinit.init()
sql.execute("USE od")

# Per-process crypto material.
# NOTE(review): salt/key are regenerated on every start, so anything
# derived from them will not survive a restart — confirm that is intended.
salt = gensalt(12)
key = get_random_bytes(16)

app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(minutes=60)
cache = RedisCache(host='redis-cache', port=6379)

ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'txt'}
app.config['UPLOAD_FOLDER'] = 'app/files/'
csrf = CSRFProtect(app)


@app.route("/", methods=["GET"])
def index():
    """Serve the landing page."""
    return render_template("home.html")


@app.before_first_request
def prepare():
    """Seed default accounts before the first request is handled."""
    # NOTE(review): hard-coded credentials (and the 'honeypot' password)
    # suggest this is a deliberately vulnerable/demo app — confirm before
    # any real deployment.
    save_user("user", "password", "*****@*****.**")
    save_user("admin", "admin", "*****@*****.**")
    save_user("ceo", "honeypot", "*****@*****.**")
import time import warnings warnings.simplefilter(action='ignore', category=FutureWarning) from cachelib import RedisCache from flask import Flask import pandas as pd from .model import build_model, best_hour app = Flask(__name__, static_url_path='', static_folder='static') cache = RedisCache('redis') model_identifier = 'emission-intensity-model' generating_identifier = 'emission-intensity-model-generating' forecast_identifier = 'emission-intensity-forecast' @app.route('/') def root(): return app.send_static_file('index.html') @app.route('/api/v1/current-emission-intensity') def current_emission_intensity(): wait_until_not_generating() model = cache.get(model_identifier) if model: return model