def wrapper(func, *args, **kwargs):
    """Decorator wrapper: serve the wrapped action's response from cache.

    Builds a beaker cache key from the call arguments (and optionally the
    request's query string), then fetches the cached bundle of
    headers/status/body, regenerating it via ``func`` on a miss.

    Free variables — ``key``, ``expire``, ``type``, ``query_args``,
    ``b_kwargs``, ``starttime``, ``cache_response`` and ``cache_headers``
    — come from the enclosing decorator scope.
    """
    pylons = get_pylons(args)
    log.debug("Wrapped with key: %s, expire: %s, type: %s, query_args: %s",
              key, expire, type, query_args)
    enabled = pylons.config.get("cache_enabled", "True")
    if not asbool(enabled):
        log.debug("Caching disabled, skipping cache lookup")
        return func(*args, **kwargs)

    if key:
        key_dict = kwargs.copy()
        key_dict.update(_make_dict_from_args(func, args))
        ## FIXME: if we can stop these variables from being passed to the
        # controller action (also the Genshi Markup/pickle problem is
        # fixed, see below) then we can use the stock beaker_cache.
        # Remove some system variables that can cause issues while
        # generating cache keys.  (Plain loop: a list comprehension run
        # only for its side effects builds a throwaway list.)
        for system_name in ("pylons", "start_response", "environ"):
            key_dict.pop(system_name, None)

        if query_args:
            key_dict.update(pylons.request.GET.mixed())

        if key != "cache_default":
            if isinstance(key, list):
                key_dict = dict((k, key_dict[k]) for k in key)
            else:
                key_dict = {key: key_dict[key]}
    else:
        key_dict = None

    # First positional arg is the controller instance when present; it
    # participates in namespacing the cache key.
    self = None
    if args:
        self = args[0]
    namespace, cache_key = create_cache_key(func, key_dict, self)

    if type:
        b_kwargs['type'] = type

    # Prefer the app_globals cache, fall back to the legacy pylons.cache.
    cache_obj = getattr(pylons.app_globals, 'cache', None)
    if not cache_obj:
        cache_obj = getattr(pylons, 'cache', None)
    if not cache_obj:
        raise Exception('No cache object found')
    my_cache = cache_obj.get_cache(namespace, **b_kwargs)

    if expire == "never":
        cache_expire = None
    else:
        cache_expire = expire

    def create_func():
        # Run the wrapped action and capture the whole HTTP response
        # (headers + status + body) so it can be replayed from cache.
        log.debug("Creating new cache copy with key: %s, type: %s",
                  cache_key, type)
        result = func(*args, **kwargs)
        # This is one of the two changes to the stock beaker_cache
        # decorator
        if hasattr(result, '__html__'):
            # Genshi Markup object, can not be pickled
            result = unicode(result.__html__())
        glob_response = pylons.response
        headers = glob_response.headerlist
        status = glob_response.status
        full_response = dict(headers=headers, status=status,
                             cookies=None, content=result)
        return full_response

    response = my_cache.get_value(cache_key, createfunc=create_func,
                                  expiretime=cache_expire,
                                  starttime=starttime)
    if cache_response:
        # Replay only the whitelisted cacheable headers onto the live
        # response, plus the cached status.
        glob_response = pylons.response
        glob_response.headerlist = [header for header in response['headers']
                                    if header[0].lower() in cache_headers]
        glob_response.status = response['status']
    return response['content']
def wrapper(func, *args, **kwargs):
    # Cache wrapper with conditional-request support: the cached bundle
    # carries a ``timestamp`` (from the enclosing ``test`` callable) and
    # the wrapper emits Last-Modified / ETag / X-CKAN-Cache headers.
    # Free variables — ``key``, ``query_args``, ``type``,
    # ``cache_kwargs``, ``cache_enabled``, ``expires``, ``test`` and
    # ``cache_headers`` — come from the enclosing decorator scope.
    pylons = get_pylons(args)
    if not cache_enabled:
        log.debug("Caching disabled, skipping cache lookup")
        return func(*args, **kwargs)

    # NOTE(review): computed but never read inside this wrapper —
    # presumably a config-key name consumed elsewhere, or dead; confirm
    # before removing (``_func_cname`` may not be side-effect free).
    cfg_expires = "%s.expires" % _func_cname(func)

    # this section copies entirely too much from beaker cache
    if key:
        if query_args:
            key_dict = pylons.request.GET.mixed()
        else:
            key_dict = kwargs.copy()
        # beaker only does this if !query_args, we do it in
        # all cases to support both query args and method args
        # in the controller
        key_dict.update(_make_dict_from_args(func, args))

        if key != "cache_default":
            if isinstance(key, list):
                key_dict = dict((k, key_dict[k]) for k in key)
            else:
                key_dict = {key: key_dict[key]}
    else:
        key_dict = None

    # First positional arg is the controller instance when present.
    self = None
    if args:
        self = args[0]
    namespace, cache_key = create_cache_key(func, key_dict, self)
    if type:
        cache_kwargs["type"] = type
    my_cache = pylons.cache.get_cache(namespace, **cache_kwargs)
    ## end copy from beaker_cache

    # Freshness reference for this request; cached copies whose stored
    # timestamp is older are evicted and rebuilt below.
    last_modified = test(*args, **kwargs)

    # ``render`` appends to this only when it actually runs, letting us
    # tell a cache MISS from a HIT afterwards.
    cache_miss = list()

    def render():
        # Run the wrapped action and bundle body + response metadata so
        # the whole HTTP response can be replayed from cache.
        log.debug("Creating new cache copy with key: %s, type: %s",
                  cache_key, type)
        result = func(*args, **kwargs)
        glob_response = pylons.response
        headers = dict(glob_response.headerlist)
        status = glob_response.status
        full_response = dict(headers=headers, status=status, cookies=None,
                             content=result, timestamp=last_modified)
        cache_miss.append(True)
        return full_response

    response = my_cache.get_value(cache_key, createfunc=render)

    # Stale entry: drop it and regenerate immediately.
    timestamp = response["timestamp"]
    if timestamp < last_modified:
        my_cache.remove(cache_key)
        response = my_cache.get_value(cache_key, createfunc=render)

    glob_response = pylons.response
    if response["status"][0] in ("4", "5"):  # do not cache 4XX, 5XX
        my_cache.remove(cache_key)
    else:
        # Overlay the whitelisted cached headers onto the live ones,
        # then add the conditional-request headers.
        headers = dict(glob_response.headerlist)
        headers.update(header for header in response["headers"].items()
                       if header[0].lower() in cache_headers)
        headers["Last-Modified"] = strftime("%a, %d %b %Y %H:%M:%S GMT",
                                            gmtime(last_modified))
        headers["ETag"] = str(last_modified)
        if cache_miss:
            headers["X-CKAN-Cache"] = "MISS"
        else:
            headers["X-CKAN-Cache"] = "HIT"

        if expires:
            # Client-side caching allowed: strip no-cache directives.
            if "Pragma" in headers:
                del headers["Pragma"]
            if "Cache-Control" in headers:
                del headers["Cache-Control"]
        else:
            # No expiry configured: forbid client-side caching.
            headers["Pragma"] = "no-cache"
            headers["Cache-Control"] = "no-cache"

        glob_response.headerlist = headers.items()

        if expires:
            # cache_expires sets max-age etc.; append must-revalidate.
            glob_response.cache_expires(seconds=expires)
            cc = glob_response.headers["Cache-Control"]
            glob_response.headers["Cache-Control"] = \
                "%s, must-revalidate" % cc

    glob_response.status = response['status']
    return response["content"]
def wrapper(func, *args, **kwargs):
    """Decorator wrapper: look up the wrapped action's response in beaker.

    The cache key is derived from the call arguments (optionally merged
    with the request's query string).  On a miss the action runs and its
    headers/status/body are stored as one bundle.  ``key``, ``expire``,
    ``type``, ``query_args``, ``b_kwargs``, ``starttime``,
    ``cache_response`` and ``cache_headers`` are free variables from the
    enclosing decorator scope.
    """
    pylons = get_pylons(args)
    log.debug("Wrapped with key: %s, expire: %s, type: %s, query_args: %s",
              key, expire, type, query_args)
    if not asbool(pylons.config.get("cache_enabled", "True")):
        log.debug("Caching disabled, skipping cache lookup")
        return func(*args, **kwargs)

    key_dict = None
    if key:
        key_dict = kwargs.copy()
        key_dict.update(_make_dict_from_args(func, args))
        ## FIXME: if we can stop these variables from being passed to the
        # controller action then we can use the stock beaker_cache.
        # Drop framework plumbing that would otherwise pollute the key.
        for reserved in ("pylons", "start_response", "environ"):
            key_dict.pop(reserved, None)
        if query_args:
            key_dict.update(pylons.request.GET.mixed())
        if key != "cache_default":
            if isinstance(key, list):
                key_dict = dict((name, key_dict[name]) for name in key)
            else:
                key_dict = {key: key_dict[key]}

    # Controller instance (first positional arg) namespaces the key.
    controller = args[0] if args else None
    namespace, cache_key = create_cache_key(func, key_dict, controller)

    if type:
        b_kwargs['type'] = type

    # Prefer app_globals.cache, fall back to the legacy pylons.cache.
    cache_obj = getattr(pylons.app_globals, 'cache', None)
    if not cache_obj:
        cache_obj = getattr(pylons, 'cache', None)
    if not cache_obj:
        raise Exception('No cache object found')
    my_cache = cache_obj.get_cache(namespace, **b_kwargs)

    cache_expire = None if expire == "never" else expire

    def create_func():
        # Run the wrapped action and capture the complete HTTP response
        # so it can be replayed straight from cache.
        log.debug("Creating new cache copy with key: %s, type: %s",
                  cache_key, type)
        body = func(*args, **kwargs)
        live = pylons.response
        return dict(headers=live.headerlist, status=live.status,
                    cookies=None, content=body)

    response = my_cache.get_value(cache_key, createfunc=create_func,
                                  expiretime=cache_expire,
                                  starttime=starttime)
    if cache_response:
        # Replay only the whitelisted cacheable headers plus the status.
        live = pylons.response
        live.headerlist = [h for h in response['headers']
                           if h[0].lower() in cache_headers]
        live.status = response['status']
    return response['content']
def wrapper(func, *args, **kwargs):
    """Cache wrapper that also manages conditional-request headers.

    Cached entries carry a timestamp from the enclosing ``test``
    callable; stale or error (4XX/5XX) entries are evicted.  Relies on
    the enclosing decorator scope for ``key``, ``query_args``, ``type``,
    ``cache_kwargs``, ``cache_enabled``, ``expires``, ``test`` and
    ``cache_headers``.
    """
    pylons = get_pylons(args)
    if not cache_enabled:
        log.debug("Caching disabled, skipping cache lookup")
        return func(*args, **kwargs)

    # Kept for parity with the original; the value is unused here.
    cfg_expires = "%s.expires" % _func_cname(func)

    # this section copies entirely too much from beaker cache
    key_dict = None
    if key:
        if query_args:
            key_dict = pylons.request.GET.mixed()
        else:
            key_dict = kwargs.copy()
        # unlike beaker we merge the method args in unconditionally, so
        # both query args and controller method args can feed the key
        key_dict.update(_make_dict_from_args(func, args))
        if key != "cache_default":
            if isinstance(key, list):
                key_dict = dict((part, key_dict[part]) for part in key)
            else:
                key_dict = {key: key_dict[key]}

    owner = args[0] if args else None
    namespace, cache_key = create_cache_key(func, key_dict, owner)
    if type:
        cache_kwargs["type"] = type
    my_cache = pylons.cache.get_cache(namespace, **cache_kwargs)
    ## end copy from beaker_cache

    last_modified = test(*args, **kwargs)
    # render() appends here only when it actually runs — MISS detector.
    cache_miss = []

    def render():
        # Run the wrapped action and bundle the complete response state.
        log.debug("Creating new cache copy with key: %s, type: %s",
                  cache_key, type)
        content = func(*args, **kwargs)
        live = pylons.response
        cache_miss.append(True)
        return dict(headers=dict(live.headerlist), status=live.status,
                    cookies=None, content=content,
                    timestamp=last_modified)

    response = my_cache.get_value(cache_key, createfunc=render)
    if response["timestamp"] < last_modified:
        # Stale copy: evict and rebuild immediately.
        my_cache.remove(cache_key)
        response = my_cache.get_value(cache_key, createfunc=render)

    glob_response = pylons.response
    if response["status"][0] in ("4", "5"):  # do not cache 4XX, 5XX
        my_cache.remove(cache_key)
    else:
        # Overlay whitelisted cached headers on the live ones, then add
        # the conditional-request headers.
        headers = dict(glob_response.headerlist)
        for hdr_name, hdr_value in response["headers"].items():
            if hdr_name.lower() in cache_headers:
                headers[hdr_name] = hdr_value
        headers["Last-Modified"] = strftime(
            "%a, %d %b %Y %H:%M:%S GMT", gmtime(last_modified))
        headers["ETag"] = str(last_modified)
        headers["X-CKAN-Cache"] = "MISS" if cache_miss else "HIT"

        if expires:
            # Client-side caching allowed: strip no-cache directives.
            for directive in ("Pragma", "Cache-Control"):
                if directive in headers:
                    del headers[directive]
        else:
            # No expiry configured: forbid client-side caching.
            headers["Pragma"] = "no-cache"
            headers["Cache-Control"] = "no-cache"

        glob_response.headerlist = headers.items()

        if expires:
            glob_response.cache_expires(seconds=expires)
            cc = glob_response.headers["Cache-Control"]
            glob_response.headers["Cache-Control"] = (
                "%s, must-revalidate" % cc)

    glob_response.status = response['status']
    return response["content"]