Example #1
def __init__(self, app=None):
    """Initialize the cache."""
    super(ImageRedisCache, self).__init__(app=app)
    app = app or current_app
    redis_url = app.config['IIIF_CACHE_REDIS_URL']
    prefix = app.config.get('IIIF_CACHE_REDIS_PREFIX', 'iiif')
    self.cache = RedisCache(host=StrictRedis.from_url(redis_url),
                            key_prefix=prefix)
Example #2
from os import getenv

from cachelib.redis import RedisCache


class Cache:
    def __init__(self):
        host = getenv("REDIS_HOST")
        # getenv() returns strings; the int() cast (and 6379 fallback) is added
        # here so RedisCache receives a usable port number.
        port = int(getenv("REDIS_PORT", "6379"))
        self.redis = RedisCache(host=host, port=port)

    def get(self, key: str):
        return self.redis.get(key)

    def put(self, key: str, value, time: int) -> bool:
        # cachelib's add() writes the value only if the key is not cached yet;
        # `time` is the timeout in seconds.
        self.redis.add(key, value, time)
        return True

    def forget(self, key: str) -> bool:
        return self.redis.delete(key)
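A minimal usage sketch for the wrapper above, assuming REDIS_HOST and REDIS_PORT are exported and a Redis server is reachable; the key and value are invented for illustration:

cache = Cache()
cache.put("greeting", "hello", time=300)   # kept for 300 seconds; add() skips keys that already exist
print(cache.get("greeting"))               # -> "hello" (None after expiry or on a cache miss)
cache.forget("greeting")                   # True when the key was deleted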
Example #3

from cachelib.redis import RedisCache
from flask import Flask


# REDIS_HOST, REDIS_PORT and REDIS_CELERY_DB are module-level settings in the
# source config; Example #4 shows how they are read from the environment.
def init_thumbnail_cache(app: Flask) -> RedisCache:
    return RedisCache(
        host=REDIS_HOST,
        port=REDIS_PORT,
        db=REDIS_CELERY_DB,
        key_prefix="superset_thumbnails_",
        default_timeout=10000,
    )
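A short sketch of how the returned cache might be used; the chart key and payload are made up, and `app` stands for the Flask application object:

cache = init_thumbnail_cache(app)
cache.set("chart-42", b"<png bytes>")   # stored under the key "superset_thumbnails_chart-42"
thumbnail = cache.get("chart-42")       # None once the 10000-second default timeout has passed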
Example #4
    DATABASE_USER,
    DATABASE_PASSWORD,
    DATABASE_HOST,
    DATABASE_PORT,
    DATABASE_DB,
)

REDIS_HOST = get_env_variable('REDIS_HOST')
REDIS_PORT = get_env_variable('REDIS_PORT')
REDIS_CELERY_DB = get_env_variable('REDIS_CELERY_DB')
REDIS_RESULTS_DB = get_env_variable('REDIS_RESULTS_DB')

# From https://superset.apache.org/docs/installation/async-queries-celery
from cachelib.redis import RedisCache
RESULTS_BACKEND = RedisCache(host=REDIS_HOST,
                             port=REDIS_PORT,
                             key_prefix='superset_results')


class CeleryConfig(object):
    BROKER_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}'
    CELERY_IMPORTS = (
        'superset.sql_lab',
        'superset.tasks',
    )
    CELERY_RESULT_BACKEND = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}'
    CELERY_ANNOTATIONS = {
        'sql_lab.get_sql_results': {
            'rate_limit': '100/s',
        },
        'email_reports.send': {
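The snippet above relies on a get_env_variable helper that is not shown. A minimal sketch of such a helper, assuming it simply reads the environment and fails loudly when a required variable is missing (not necessarily the exact implementation used here):

import os
from typing import Optional


def get_env_variable(var_name: str, default: Optional[str] = None) -> str:
    """Return the environment variable, the given default, or raise an error."""
    try:
        return os.environ[var_name]
    except KeyError:
        if default is not None:
            return default
        raise EnvironmentError(
            f"The environment variable {var_name} is required by superset_config.py"
        )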
Example #5
def __init__(self):
    host = getenv("REDIS_HOST")
    port = getenv("REDIS_PORT")
    self.redis = RedisCache(host=host, port=port)
Example #6
from datetime import datetime

from cachelib.redis import RedisCache
from flask import current_app
from redis import StrictRedis


# ImageCache is the project's base image-cache class; it is not shown in this
# snippet and is assumed to provide `timeout` and `_last_modification_key_name()`.
class ImageRedisCache(ImageCache):
    """Redis image cache."""
    def __init__(self, app=None):
        """Initialize the cache."""
        super(ImageRedisCache, self).__init__(app=app)
        app = app or current_app
        redis_url = app.config['IIIF_CACHE_REDIS_URL']
        prefix = app.config.get('IIIF_CACHE_REDIS_PREFIX', 'iiif')
        self.cache = RedisCache(host=StrictRedis.from_url(redis_url),
                                key_prefix=prefix)

    def get(self, key):
        """Return the key value.

        :param key: the object's key
        :return: the stored object
        :rtype: `BytesIO` object
        """
        return self.cache.get(key)

    def set(self, key, value, timeout=None):
        """Cache the object.

        :param key: the object's key
        :param value: the stored object
        :type value: `BytesIO` object
        :param timeout: the cache timeout in seconds
        """
        timeout = timeout or self.timeout
        self.cache.set(key, value, timeout=timeout)
        self.set_last_modification(key, timeout=timeout)

    def get_last_modification(self, key):
        """Get last modification of cached file.

        :param key: the file object's key
        """
        return self.get(self._last_modification_key_name(key))

    def set_last_modification(self, key, last_modification=None, timeout=None):
        """Set last modification of cached file.

        :param key: the file object's key
        :param last_modification: Last modification date of
            file represented by the key
        :type last_modification: datetime.datetime
        :param timeout: the cache timeout in seconds
        """
        if not last_modification:
            last_modification = datetime.utcnow().replace(microsecond=0)
        timeout = timeout or self.timeout
        self.cache.set(self._last_modification_key_name(key),
                       last_modification, timeout)

    def delete(self, key):
        """Delete the specific key."""
        self.cache.delete(key)
        self.cache.delete(self._last_modification_key_name(key))

    def flush(self):
        """Flush the cache."""
        self.cache.clear()
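A minimal sketch of how this cache might be exercised from a Flask application; the Redis URL, key and payload are placeholders:

from io import BytesIO

from flask import Flask

app = Flask(__name__)
app.config['IIIF_CACHE_REDIS_URL'] = 'redis://localhost:6379/0'

cache = ImageRedisCache(app=app)
cache.set('image:1', BytesIO(b'<image bytes>'), timeout=60)
data = cache.get('image:1')                        # the stored BytesIO payload, or None
modified = cache.get_last_modification('image:1')  # datetime recorded by set()
cache.delete('image:1')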
Example #7
from cachelib.redis import RedisCache
from flask import Flask


def init_thumbnail_cache(app: Flask) -> RedisCache:
    return RedisCache(host="localhost",
                      key_prefix="superset_thumbnails_",
                      default_timeout=10000)
Example #8
import os

from cachelib.redis import RedisCache

MAPBOX_API_KEY = os.getenv('MAPBOX_API_KEY', '')
CACHE_CONFIG = {
    'CACHE_TYPE': 'redis',
    'CACHE_DEFAULT_TIMEOUT': 300,
    'CACHE_KEY_PREFIX': 'superset_',
    'CACHE_REDIS_HOST': 'redis',
    'CACHE_REDIS_PORT': 6379,
    'CACHE_REDIS_DB': 1,
    'CACHE_REDIS_URL': 'redis://redis:6379/1',
}
SQLALCHEMY_DATABASE_URI = 'postgresql://*****:*****@postgres:5432/superset'
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = 'thisISaSECRET_1234'


class CeleryConfig(object):
    BROKER_URL = 'redis://redis:6379/0'
    CELERY_IMPORTS = ('superset.sql_lab', )
    CELERY_RESULT_BACKEND = 'redis://redis:6379/0'
    CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}


CELERY_CONFIG = CeleryConfig
RESULTS_BACKEND = RedisCache(host='redis',
                             port=6379,
                             key_prefix='superset_results')
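RESULTS_BACKEND is an ordinary cachelib cache, so the async SQL Lab machinery just writes serialized query results under a key and reads them back later; a rough illustration with an invented key and payload:

RESULTS_BACKEND.set('query-abc123', b'<serialized result set>')
payload = RESULTS_BACKEND.get('query-abc123')   # None once the entry has expired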
Example #9
#         return 'tmp_superset_schema'
#     if database.name == 'presto_gold':
#         return user.username
#     if database.name == 'analytics':
#         if 'analytics' in [r.name for r in user.roles]:
#             return 'analytics_cta'
#         else:
#             return f'tmp_{schema}'
# Function accepts database object, user object, schema name and sql that will be run.
SQLLAB_CTAS_SCHEMA_NAME_FUNC: Optional[Callable[
    ["Database", "models.User", str, str], str]] = None

# If enabled, it can be used to store the results of long-running queries
# in SQL Lab by using the "Run Async" button/feature
#RESULTS_BACKEND: Optional[BaseCache] = None
RESULTS_BACKEND: Optional[BaseCache] = RedisCache(
    host='redis', port=6379, key_prefix='superset_results')

# Use PyArrow and MessagePack for async query results serialization,
# rather than JSON. This feature requires additional testing from the
# community before it is fully adopted, so this config option is provided
# in order to disable should breaking issues be discovered.
RESULTS_BACKEND_USE_MSGPACK = True

# The S3 bucket where you want to store your external hive tables created
# from CSV files. For example, 'companyname-superset'
CSV_TO_HIVE_UPLOAD_S3_BUCKET = None

# The directory within the bucket specified above that will
# contain all the external tables
CSV_TO_HIVE_UPLOAD_DIRECTORY = "EXTERNAL_HIVE_TABLES/"
# Function that creates upload directory dynamically based on the