def _set_run_state(run_id, state):
    """Store the run's state enum value in the run's redis hash.

    Also refreshes the hash's TTL to ``config.CACHE_EXPIRY`` so active
    runs stay cached. Returns redis's ``hset`` result (number of fields
    newly added).
    """
    logger.debug(f"Setting run state to '{state.name}'", run_id=run_id)
    redis_client = connect_to_redis()
    run_key = _get_run_hash_key(run_id)
    hset_result = redis_client.hset(run_key, 'state', state.value)
    redis_client.expire(run_key, config.CACHE_EXPIRY)
    return hset_result
def get_deserialized_filter(dp_id):
    """Cached, deserialized version. """
    logger.debug("Getting filters")
    cache_key = 'clk-pkl-{}'.format(dp_id)
    redis_client = connect_to_redis(read_only=True)

    # Fast path: unpickle the filters straight out of the redis cache.
    # NOTE(review): this is our own pickled data, but pickle on any
    # shared store is worth keeping an eye on.
    if redis_client.exists(cache_key):
        logger.debug("returning filters from cache")
        return pickle.loads(redis_client.get(cache_key))

    # Slow path: look up where the serialized filters live, pull them
    # from the object store, deserialize, and repopulate the cache.
    logger.debug("Looking up popcounts and filename from database")
    with DBConn() as db:
        serialized_filters_file, encoding_size = get_filter_metadata(
            db, dp_id)

    mc = connect_to_object_store()
    logger.debug("Getting filters from object store")
    # Note this uses already calculated popcounts unlike
    # serialization.deserialize_filters()
    raw_data_response = mc.get_object(config.MINIO_BUCKET,
                                      serialized_filters_file)
    python_filters = binary_unpack_filters(
        raw_data_response.stream(encoding_size))
    set_deserialized_filter(dp_id, python_filters)
    return python_filters
def get_total_number_of_comparisons(project_id):
    """Return the project's total comparison count, caching it in redis.

    On a cache miss the count is computed from the database and written
    back with a one hour TTL.
    NOTE(review): other writers here use ``config.CACHE_EXPIRY`` for the
    TTL; this one hardcodes 60 * 60 — confirm whether that is deliberate.
    """
    key = _get_project_hash_key(project_id)
    cached = connect_to_redis(read_only=True).hget(key, 'total_comparisons')
    # hget returns None if missing key/name, and bytes if present
    if cached:
        return _convert_redis_result_to_int(cached)

    # Calculate the number of comparisons
    with db.DBConn() as conn:
        total_comparisons = db.get_total_comparisons_for_project(
            conn, project_id)
    # get a writable connection to redis
    writable_client = connect_to_redis()
    writable_client.hset(key, 'total_comparisons', total_comparisons)
    writable_client.expire(key, 60 * 60)
    return total_comparisons
def get_status():
    """Return the cached service status, or None when nothing is cached."""
    redis_client = connect_to_redis(read_only=True)
    status_key = 'entityservice-status'
    if not redis_client.exists(status_key):
        return None
    logger.debug("returning status from cache")
    return pickle.loads(redis_client.get(status_key))
def _get_run_state(run_id):
    """Load the run's state from redis and wrap it in a RunState enum."""
    run_key = _get_run_hash_key(run_id)
    # hget returns None if missing key/name, and bytes if present.
    # NOTE(review): RunState(None) / RunState(bytes) will raise ValueError
    # unless the enum values match what redis returns — presumably the
    # client decodes responses or the enum uses byte values; verify.
    maybe_state = connect_to_redis(read_only=True).hget(run_key, 'state')
    logger.debug("Loaded run state from cache",
                 run_id=run_id, state=maybe_state)
    return RunState(maybe_state)
def set_deserialized_filter(dp_id, python_filters):
    """Pickle the filters into redis, unless they exceed the size cap."""
    if len(python_filters) > config.MAX_CACHE_SIZE:
        logger.info("Skipping storing filters in redis cache due to size")
        return
    logger.debug("Pickling filters and storing in redis")
    cache_key = 'clk-pkl-{}'.format(dp_id)
    connect_to_redis().set(cache_key, pickle.dumps(python_filters))
def save_current_progress(comparisons, run_id, config=None):
    """Atomically add ``comparisons`` to the run's progress counter.

    Falls back to the module-level config when none is supplied, and
    refreshes the run hash's TTL after incrementing. A non-positive
    comparison count is a no-op.
    """
    if config is None:
        config = globalconfig
    logger.debug(f"Updating progress. Compared {comparisons} CLKS",
                 run_id=run_id)
    if comparisons <= 0:
        return
    redis_client = connect_to_redis()
    run_key = _get_run_hash_key(run_id)
    redis_client.hincrby(run_key, 'progress', comparisons)
    redis_client.expire(run_key, config.CACHE_EXPIRY)
def clear_run_state(run_id):
    """Remove the cached 'state' field from the run's redis hash."""
    connect_to_redis().hdel(_get_run_hash_key(run_id), 'state')
def remove_from_cache(dp_id):
    """Delete the pickled CLK filters for a data provider from redis."""
    logger.debug("Deleting CLKS for DP {} from redis cache".format(dp_id))
    cache_key = 'clk-pkl-{}'.format(dp_id)
    connect_to_redis().delete(cache_key)
def clear_progress(run_id):
    """Remove the cached 'progress' field from the run's redis hash."""
    connect_to_redis().hdel(_get_run_hash_key(run_id), 'progress')
def get_progress(run_id):
    """Read the run's progress counter from redis as an int."""
    run_key = _get_run_hash_key(run_id)
    raw_progress = connect_to_redis(read_only=True).hget(run_key, 'progress')
    return _convert_redis_result_to_int(raw_progress)
def set_status(status):
    """Pickle the service status into redis with a 30 second TTL."""
    logger.debug("Saving the service status to redis cache")
    connect_to_redis().setex('entityservice-status', 30,
                             pickle.dumps(status))