def prepare_response(self, request, cached):
    """Verify our vary headers match and construct a real urllib3
    HTTPResponse object.
    """
    # Special case the '*' Vary value as it means we cannot actually
    # determine if the cached response is suitable for this request.
    if "*" in cached.get("vary", {}):
        return

    # Ensure that the Vary headers for the cached response match our
    # request
    for header, value in cached.get("vary", {}).items():
        if request.headers.get(header, None) != value:
            return

    body_raw = cached["response"].pop("body")

    headers = CaseInsensitiveDict(data=cached["response"]["headers"])
    if headers.get("transfer-encoding", "") == "chunked":
        headers.pop("transfer-encoding")

    cached["response"]["headers"] = headers

    try:
        body = io.BytesIO(body_raw)
    except TypeError:
        # This can happen if cachecontrol serialized to v1 format (pickle)
        # using Python 2. A Python 2 str (byte string) will be unpickled
        # as a Python 3 str (unicode string), which will cause the above
        # to fail with:
        #
        #     TypeError: 'str' does not support the buffer interface
        body = io.BytesIO(body_raw.encode("utf8"))

    return HTTPResponse(body=body, preload_content=False, **cached["response"])
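# Illustrative sketch (not part of the module): the shape of the ``cached``
# dict that prepare_response() above consumes. The exact keys come from the
# serializer; the values here are hypothetical.
#
#     cached = {
#         "vary": {"accept-encoding": "gzip"},
#         "response": {
#             "body": b"hello world",
#             "headers": {"content-type": "text/plain"},
#             "status": 200,
#             "version": 11,
#             "reason": "OK",
#             "decode_content": False,
#         },
#     }
#
# If the live request also sent "Accept-Encoding: gzip", the Vary check
# passes and the "response" mapping is unpacked into urllib3's
# HTTPResponse; on any Vary mismatch the method returns None.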
def cache_response(self, request, response, body=None, status_codes=None):
    """
    Algorithm for caching requests.

    This assumes a requests Response object.
    """
    # From httplib2: Don't cache 206's since we aren't going to
    # handle byte range requests
    cacheable_status_codes = status_codes or self.cacheable_status_codes
    if response.status not in cacheable_status_codes:
        logger.debug(
            "Status code %s not in %s", response.status, cacheable_status_codes
        )
        return

    response_headers = CaseInsensitiveDict(response.headers)

    # If we've been given a body and the response has a valid
    # Content-Length, check whether the body we've been given matches
    # the expected size; if it doesn't, skip trying to cache it.
    if (
        body is not None
        and "content-length" in response_headers
        and response_headers["content-length"].isdigit()
        and int(response_headers["content-length"]) != len(body)
    ):
        return

    cc_req = self.parse_cache_control(request.headers)
    cc = self.parse_cache_control(response_headers)

    cache_url = self.cache_url(request.url)
    logger.debug('Updating cache with response from "%s"', cache_url)

    # Delete it from the cache if we happen to have it stored there
    no_store = False
    if "no-store" in cc:
        no_store = True
        logger.debug('Response header has "no-store"')
    if "no-store" in cc_req:
        no_store = True
        logger.debug('Request header has "no-store"')
    if no_store and self.cache.get(cache_url):
        logger.debug('Purging existing cache entry to honor "no-store"')
        self.cache.delete(cache_url)
    if no_store:
        return

    # If we've been given an etag, then keep the response
    if self.cache_etags and "etag" in response_headers:
        logger.debug("Caching due to etag")
        self.cache.set(
            cache_url, self.serializer.dumps(request, response, body=body)
        )

    # Add to the cache any 301s. We do this before looking at the
    # Date headers.
    elif response.status == 301:
        logger.debug("Caching permanent redirect")
        self.cache.set(cache_url, self.serializer.dumps(request, response))

    # Add to the cache if the response headers demand it. If there
    # is no date header then we can't do anything about expiring
    # the cache.
    elif "date" in response_headers:
        # cache when there is a max-age > 0
        if "max-age" in cc and cc["max-age"] > 0:
            logger.debug("Caching b/c date exists and max-age > 0")
            self.cache.set(
                cache_url, self.serializer.dumps(request, response, body=body)
            )

        # If the response can expire, it makes sense to cache it
        # in the meantime.
        elif "expires" in response_headers:
            if response_headers["expires"]:
                logger.debug("Caching b/c of expires header")
                self.cache.set(
                    cache_url,
                    self.serializer.dumps(request, response, body=body),
                )
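# Hedged usage note: in normal operation cache_response() is not called
# directly; the adapter invokes it as responses come back, so the
# documented session wrapper is enough to exercise the rules above.
# A minimal sketch:
#
#     import requests
#     from cachecontrol import CacheControl
#
#     sess = CacheControl(requests.Session())
#     resp = sess.get("https://example.com/")  # cached here if the
#                                              # headers permit it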
def cached_request(self, request):
    """
    Return a cached response if it exists in the cache, otherwise
    return False.
    """
    cache_url = self.cache_url(request.url)
    logger.debug('Looking up "%s" in the cache', cache_url)
    cc = self.parse_cache_control(request.headers)

    # Bail out if the request insists on fresh data
    if "no-cache" in cc:
        logger.debug('Request header has "no-cache", cache bypassed')
        return False

    if "max-age" in cc and cc["max-age"] == 0:
        logger.debug('Request header has "max-age" as 0, cache bypassed')
        return False

    # Request allows serving from the cache, let's see if we find something
    cache_data = self.cache.get(cache_url)
    if cache_data is None:
        logger.debug("No cache entry available")
        return False

    # Check whether it can be deserialized
    resp = self.serializer.loads(request, cache_data)
    if not resp:
        logger.warning("Cache entry deserialization failed, entry ignored")
        return False

    # If we have a cached 301, return it immediately. We don't need to
    # test our response for other headers b/c it is intrinsically
    # "cacheable" as it is Permanent.
    #
    # See: https://tools.ietf.org/html/rfc7231#section-6.4.2
    #
    # The client can try to refresh the value by repeating the request
    # with cache-busting headers as usual (i.e. no-cache).
    if resp.status == 301:
        msg = (
            'Returning cached "301 Moved Permanently" response '
            "(ignoring date and etag information)"
        )
        logger.debug(msg)
        return resp

    headers = CaseInsensitiveDict(resp.headers)
    if not headers or "date" not in headers:
        if "etag" not in headers:
            # Without date or etag, the cached response can never be used
            # and should be deleted.
            logger.debug("Purging cached response: no date or etag")
            self.cache.delete(cache_url)
        logger.debug("Ignoring cached response: no date")
        return False

    now = time.time()
    date = calendar.timegm(parsedate_tz(headers["date"]))
    current_age = max(0, now - date)
    logger.debug("Current age based on date: %i", current_age)

    # TODO: There is an assumption that the result will be a
    # urllib3 response object. This may not be best since we
    # could probably avoid instantiating or constructing the
    # response until we know we need it.
    resp_cc = self.parse_cache_control(headers)

    # determine freshness
    freshness_lifetime = 0

    # Check the max-age pragma in the cache control header
    if "max-age" in resp_cc:
        freshness_lifetime = resp_cc["max-age"]
        logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)

    # If there isn't a max-age, check for an expires header
    elif "expires" in headers:
        expires = parsedate_tz(headers["expires"])
        if expires is not None:
            expire_time = calendar.timegm(expires) - date
            freshness_lifetime = max(0, expire_time)
            logger.debug(
                "Freshness lifetime from expires: %i", freshness_lifetime
            )

    # Determine if we are setting freshness limit in the
    # request. Note, this overrides what was in the response.
    if "max-age" in cc:
        freshness_lifetime = cc["max-age"]
        logger.debug(
            "Freshness lifetime from request max-age: %i", freshness_lifetime
        )

    if "min-fresh" in cc:
        min_fresh = cc["min-fresh"]
        # adjust our current age by our min fresh
        current_age += min_fresh
        logger.debug("Adjusted current age from min-fresh: %i", current_age)

    # Return entry if it is fresh enough
    if freshness_lifetime > current_age:
        logger.debug('The response is "fresh", returning cached response')
        logger.debug("%i > %i", freshness_lifetime, current_age)
        return resp

    # we're not fresh. If we don't have an etag, clear it out
    if "etag" not in headers:
        logger.debug('The cached response is "stale" with no etag, purging')
        self.cache.delete(cache_url)

    # return the original handler
    return False
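# Illustrative sketch (not part of the module): the freshness arithmetic
# used above, with hypothetical header values. parsedate_tz() returns a
# 10-tuple whose first six fields are all that calendar.timegm() reads,
# which is why the controller can feed one straight into the other.
#
#     import calendar
#     import time
#     from email.utils import parsedate_tz
#
#     date = calendar.timegm(parsedate_tz("Tue, 21 May 2019 08:12:31 GMT"))
#     current_age = max(0, time.time() - date)
#     freshness_lifetime = 3600  # from "Cache-Control: max-age=3600"
#     fresh = freshness_lifetime > current_age  # cached copy usable only if True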