def db_flush_cache(data_descriptor):
    """Delete the cached entry for *data_descriptor* from the cache service.

    Best effort: any cache-backend failure is logged and swallowed, so a
    flush error never propagates to the caller.

    :param data_descriptor: descriptor exposing getSchema()/getDataType()/
        getKeys(), used to rebuild the cache key.
    :returns: None
    """
    cached_key = make_cached_key(data_descriptor.getSchema(),
                                 data_descriptor.getDataType(),
                                 data_descriptor.getKeys())
    cache_time = get_cache_policy(data_descriptor.getSchema(),
                                  data_descriptor.getDataType())
    # Policy semantics (see db_get): None or negative disables caching;
    # 0 means "cached without timeout"; positive is a timeout in seconds.
    # BUG FIX: the original treated cache_time == 0 as "no cache" and skipped
    # the flush, yet 0 marks entries that never expire -- precisely the ones
    # that can only be removed by an explicit flush.
    if cache_time is None or cache_time < 0:
        cache = None
    else:
        cache = Cache_Service()
    try:
        if cache is not None and cache.exists(cached_key):
            cache.delete(cached_key)
    except Exception:  # was Python-2-only "except Exception, e"; e was unused
        import traceback
        logger.error("exception from redis:\n %s" % traceback.format_exc())
def db_flush_cache(data_descriptor):
    """Remove *data_descriptor*'s record from the cache (best effort).

    Cache-backend exceptions are caught and logged; the caller is never
    interrupted by a failed flush.

    :param data_descriptor: descriptor providing getSchema(), getDataType()
        and getKeys() for cache-key construction.
    :returns: None
    """
    schema = data_descriptor.getSchema()
    data_type = data_descriptor.getDataType()
    cached_key = make_cached_key(schema, data_type, data_descriptor.getKeys())
    cache_time = get_cache_policy(schema, data_type)
    # BUG FIX: a policy of 0 means "cached forever" (documented in db_get),
    # so those keys must still be flushable. The original's
    # "if not cache_time == 0" skipped exactly that case. Only None or a
    # negative policy disables caching.
    caching_enabled = cache_time is not None and cache_time >= 0
    cache = Cache_Service() if caching_enabled else None
    try:
        if cache is not None and cache.exists(cached_key):
            cache.delete(cached_key)
    except Exception:  # legacy "except Exception, e" modernized; e unused
        import traceback
        logger.error("exception from redis:\n %s" % traceback.format_exc())
def db_get(data_descriptor, session=None):
    """Return the cached record for *data_descriptor*, or None on a miss.

    Cache-policy semantics (from get_cache_policy):
      * None or negative -> caching disabled; this function returns None
      * 0                -> cached without timeout
      * > 0              -> cached with that timeout

    :param data_descriptor: descriptor exposing getSchema()/getDataType()/
        getKeys() for cache-key construction.
    :param session: accepted for interface compatibility with callers
        (e.g. db_mget); not used in the visible code path.
    :returns: the cached value, or None when absent/disabled/errored.
    """
    cached_key = make_cached_key(data_descriptor.getSchema(),
                                 data_descriptor.getDataType(),
                                 data_descriptor.getKeys())
    cache_time = get_cache_policy(data_descriptor.getSchema(),
                                  data_descriptor.getDataType())
    logger.debug("configured cache_time is %s, cached_key is %s"
                 % (cache_time, cached_key))
    # BUG FIX: get_cache_policy can return None (db_flush_cache checks for
    # it), and "None < 0" raises TypeError on Python 3. Treat None the same
    # as a negative policy: caching disabled.
    if cache_time is None or cache_time < 0:
        cache = None
    else:
        cache = Cache_Service()
    try:
        if cache is not None and cache.exists(cached_key):
            return cache.get(cached_key)
    except Exception:  # was Python-2-only "except Exception, e"; e unused
        import traceback
        logger.error("exception from redis:\n %s" % traceback.format_exc())
def db_get(data_descriptor, session=None):
    """Fetch *data_descriptor*'s value from the cache; None if not cached.

    cache_time = 0 means the cache will not timeout; a negative or None
    policy disables caching entirely.

    :param data_descriptor: descriptor providing getSchema(), getDataType()
        and getKeys().
    :param session: kept for caller compatibility; unused in the visible
        cache-lookup path.
    :returns: cached value, or None on miss / disabled cache / cache error.
    """
    schema = data_descriptor.getSchema()
    data_type = data_descriptor.getDataType()
    cached_key = make_cached_key(schema, data_type, data_descriptor.getKeys())
    cache_time = get_cache_policy(schema, data_type)
    logger.debug("configured cache_time is %s, cached_key is %s"
                 % (cache_time, cached_key))
    # BUG FIX: guard against a None policy -- "None < 0" is a TypeError on
    # Python 3. None behaves like a negative policy (caching disabled).
    caching_enabled = cache_time is not None and cache_time >= 0
    cache = Cache_Service() if caching_enabled else None
    try:
        if cache is not None and cache.exists(cached_key):
            return cache.get(cached_key)
    except Exception:  # legacy "except Exception, e" modernized; e unused
        import traceback
        logger.error("exception from redis:\n %s" % traceback.format_exc())
def db_mget(data_descriptor, session=None):
    """Get multiple records; at the testing stage.

    Tries a bulk cache read first, then falls back to db_get() for every
    key that was not cached.

    :param data_descriptor: descriptor exposing getSchema(), getDataType(),
        getmKey() (list/tuple of keys) and isCacheOnly().
    :param session: forwarded to db_get for the fallback lookups.
    :returns: None when cache-only is requested but caching is disabled;
        False when getmKey() is not a list/tuple; otherwise a list aligned
        with the requested keys (entries may be None on total misses).
    """
    cache_time = get_cache_policy(data_descriptor.getSchema(),
                                  data_descriptor.getDataType())
    # None and negative both mean "caching disabled" (matches db_get);
    # the original's "cache_time < 0" breaks on None under Python 3.
    if cache_time is None or cache_time < 0:
        cache = None
    else:
        cache = Cache_Service()
    if cache is None and data_descriptor.isCacheOnly():
        return None
    keys = data_descriptor.getmKey()
    if not isinstance(keys, (list, tuple)):
        return False
    if cache is not None:
        cached_keys = [make_cached_key(data_descriptor.getSchema(),
                                       data_descriptor.getDataType(), k)
                       for k in keys]
        collection = cache.mget(cached_keys)
    else:
        # BUG FIX: the original left collection == [] here, so no index was
        # "missing" and every key was silently dropped. With no cache, all
        # keys must fall through to db_get below.
        collection = [None] * len(keys)
    if data_descriptor.isCacheOnly():
        return collection
    missing_indexes = [i for i, item in enumerate(collection) if item is None]
    for i in missing_indexes:
        data_desc = DataDesc(data_descriptor.getSchema(),
                             data_descriptor.getDataType())
        # NOTE(review): passing the enumeration index as the first argument
        # mirrors the original; confirm setKey(position, key) is the intended
        # signature.
        data_desc.setKey(i, keys[i])
        collection[i] = db_get(data_desc, session)
    return collection
def db_mget(data_descriptor, session=None):
    """Get multiple records; at the testing stage.

    Performs one bulk cache read, then resolves each remaining miss via
    db_get().

    :param data_descriptor: descriptor exposing getSchema(), getDataType(),
        getmKey() and isCacheOnly().
    :param session: forwarded to db_get for per-key fallback lookups.
    :returns: None (cache-only requested while caching disabled), False
        (getmKey() is not a list/tuple), or a result list aligned with the
        requested keys.
    """
    schema = data_descriptor.getSchema()
    data_type = data_descriptor.getDataType()
    cache_time = get_cache_policy(schema, data_type)
    # BUG FIX: treat a None policy like a negative one instead of comparing
    # "None < 0", which raises TypeError on Python 3.
    caching_enabled = cache_time is not None and cache_time >= 0
    cache = Cache_Service() if caching_enabled else None
    if cache is None and data_descriptor.isCacheOnly():
        return None
    keys = data_descriptor.getmKey()
    if not isinstance(keys, (list, tuple)):
        return False
    if cache is None:
        # BUG FIX: original kept collection == [] when no cache was
        # configured, returning an empty list and losing every key; seed one
        # None slot per key so each falls through to db_get.
        collection = [None] * len(keys)
    else:
        collection = cache.mget(
            [make_cached_key(schema, data_type, k) for k in keys])
    if data_descriptor.isCacheOnly():
        return collection
    for idx, item in enumerate(collection):
        if item is not None:
            continue
        data_desc = DataDesc(schema, data_type)
        # NOTE(review): first argument is the enumeration index, mirroring
        # the original call; verify setKey(position, key) is the intended
        # contract.
        data_desc.setKey(idx, keys[idx])
        collection[idx] = db_get(data_desc, session)
    return collection