def set_and_log_cache(
    cache_instance: Cache,
    cache_key: str,
    cache_value: Dict[str, Any],
    cache_timeout: Optional[int] = None,
    datasource_uid: Optional[str] = None,
) -> None:
    """Store ``cache_value`` under ``cache_key`` and record the write.

    Skips silently when the backend is a ``NullCache``. The stored payload is
    ``cache_value`` augmented with a ``dttm`` timestamp. When
    ``datasource_uid`` is given and ``STORE_CACHE_KEYS_IN_METADATA_DB`` is
    enabled, a ``CacheKey`` row is also added to the current db session.
    Cache failures are logged, never raised — caching is best-effort.
    """
    # A NullCache backend discards everything; nothing to do.
    if isinstance(cache_instance.cache, NullCache):
        return

    effective_timeout = cache_timeout
    if effective_timeout is None:
        effective_timeout = app.config["CACHE_DEFAULT_TIMEOUT"]

    try:
        # Timestamp trimmed to second precision (drop the ".ffffff" suffix).
        timestamp = datetime.utcnow().isoformat().split(".")[0]
        payload = {**cache_value, "dttm": timestamp}
        cache_instance.set(cache_key, payload, timeout=effective_timeout)
        stats_logger.incr("set_cache_key")

        # Optionally persist the key so it can be invalidated later via the
        # metadata database. Note: the *raw* cache_timeout (possibly None) is
        # recorded, not the resolved default.
        if datasource_uid and config["STORE_CACHE_KEYS_IN_METADATA_DB"]:
            record = CacheKey(
                cache_key=cache_key,
                cache_timeout=cache_timeout,
                datasource_uid=datasource_uid,
            )
            db.session.add(record)
    except Exception as ex:  # pylint: disable=broad-except
        # cache.set call can fail if the backend is down or if
        # the key is too large or whatever other reasons
        logger.warning("Could not cache key %s", cache_key)
        logger.exception(ex)
def test_invalidate_existing_cache(invalidate):
    """Invalidating a datasource removes both the cached entry and its
    CacheKey metadata row.

    Seeds one CacheKey record plus a live cache entry, invalidates the
    matching datasource uid, then verifies both are gone.
    """
    db.session.add(CacheKey(cache_key="cache_key", datasource_uid="3__table"))
    db.session.commit()
    cache_manager.cache.set("cache_key", "value")

    rv = invalidate({"datasource_uids": ["3__table"]})

    assert rv.status_code == 201
    # Use identity comparison with None (PEP 8 / E711), not `== None`.
    assert cache_manager.cache.get("cache_key") is None
    # The metadata record must have been deleted as well.
    assert (
        db.session.query(CacheKey)
        .filter(CacheKey.cache_key == "cache_key")
        .first()
        is None
    )
def test_invalidate_existing_caches(invalidate):
    """Bulk invalidation by uid and by (name, database, schema) lookup.

    Seeds several CacheKey rows and live cache entries, then invalidates a
    mix of matching uids, a resolvable dataset, and several non-matching
    dataset specs. Entries for "3__druid" and the birth_names dataset must
    be purged; the unrelated "X__table" entry must survive.
    """
    schema = get_example_default_schema() or ""
    bn = SupersetTestCase.get_birth_names_dataset()

    db.session.add(CacheKey(cache_key="cache_key1", datasource_uid="3__druid"))
    db.session.add(CacheKey(cache_key="cache_key2", datasource_uid="3__druid"))
    db.session.add(
        CacheKey(cache_key="cache_key4", datasource_uid=f"{bn.id}__table")
    )
    db.session.add(CacheKey(cache_key="cache_keyX", datasource_uid="X__table"))
    db.session.commit()

    # Populate the live cache for every seeded key.
    for key in ("cache_key1", "cache_key2", "cache_key4", "cache_keyX"):
        cache_manager.cache.set(key, "value")

    rv = invalidate(
        {
            "datasource_uids": ["3__druid", "4__druid"],
            "datasources": [
                {
                    "datasource_name": "birth_names",
                    "database_name": "examples",
                    "schema": schema,
                    "datasource_type": "table",
                },
                {  # table exists, no cache to invalidate
                    "datasource_name": "energy_usage",
                    "database_name": "examples",
                    "schema": schema,
                    "datasource_type": "table",
                },
                {  # table doesn't exist
                    "datasource_name": "does_not_exist",
                    "database_name": "examples",
                    "schema": schema,
                    "datasource_type": "table",
                },
                {  # database doesn't exist
                    "datasource_name": "birth_names",
                    "database_name": "does_not_exist",
                    "schema": schema,
                    "datasource_type": "table",
                },
                {  # schema doesn't exist (was mislabeled "database doesn't exist")
                    "datasource_name": "birth_names",
                    "database_name": "examples",
                    "schema": "does_not_exist",
                    "datasource_type": "table",
                },
            ],
        }
    )

    assert rv.status_code == 201
    # Matching entries purged from the live cache ...
    assert cache_manager.cache.get("cache_key1") is None
    assert cache_manager.cache.get("cache_key2") is None
    assert cache_manager.cache.get("cache_key4") is None
    # ... while the unrelated datasource's entry is untouched.
    assert cache_manager.cache.get("cache_keyX") == "value"
    # Metadata rows for the purged keys are deleted; the survivor remains.
    assert (
        not db.session.query(CacheKey)
        .filter(CacheKey.cache_key.in_({"cache_key1", "cache_key2", "cache_key4"}))
        .first()
    )
    assert (
        db.session.query(CacheKey)
        .filter(CacheKey.cache_key == "cache_keyX")
        .first()
        .datasource_uid
        == "X__table"
    )