def finish_response(self, app_iter):
    """Finish the WSGI response, scaling the buffered image if requested.

    When no scale factor is set the upstream response is passed through
    untouched.  Otherwise the whole body is buffered, scaled with
    ImageMagick, and re-emitted with corrected Content-Length and a
    one-hour shared-cache Cache-Control header.
    """
    if not self.scale:
        # Nothing to do: forward status/headers and hand the body through.
        self.start_response(self.status, self.headers)
        self.output = app_iter
        return

    # The body length will change after scaling; drop the stale header now.
    CONTENT_LENGTH.delete(self.headers)

    buf = StringIO()
    try:
        for chunk in app_iter:
            self.logger.debug("Writing %d bytes into image buffer." % len(chunk))
            buf.write(chunk)
    finally:
        # Per WSGI, close the upstream iterable if it supports it.
        if hasattr(app_iter, 'close'):
            app_iter.close()

    blob = Blob()
    blob.data = buf.getvalue()
    buf.close()

    img = Image(blob)
    self.logger.debug("Scaling image to %s" % self.scaled_size)
    img.scale(self.scaled_size[0])
    img.write(self.outbuffer)

    # Re-announce the (new) body length and allow shared caches to keep it.
    CONTENT_LENGTH.update(self.headers, self.outbuffer.length())
    CACHE_CONTROL.update(self.headers, s_maxage=CACHE_CONTROL.ONE_HOUR)
    self.start_response(self.status, self.headers)
def __call__(self, environ, start_response):
    """WSGI middleware entry point.

    Runs the wrapped application, then — unless the client sent a
    ``no-cache`` request directive — stamps public caching headers onto
    the response headers captured by the ``start_response`` wrapper.
    """
    result = self.application(environ, start_response)
    request_cache_control = CACHE_CONTROL(environ)
    if 'no-cache' not in request_cache_control:
        # NOTE(review): reaches into the start_response wrapper's captured
        # (status, headers) pair — assumes wsgi_curr_headers is set by then.
        response_headers = start_response.__self__.wsgi_curr_headers[1]
        CACHE_CONTROL.apply(
            response_headers,
            public=True,
            max_age=self.max_age,
            s_maxage=self.s_maxage,
        )
    return result
def download_package(context, request):
    """Download package, or redirect to the download link"""
    package = request.db.fetch(context.filename)

    if package:
        if request.registry.stream_files:
            # Serve the file bytes ourselves instead of redirecting.
            with request.db.storage.open(package) as data:
                request.response.body = data.read()
            request.response.headers.update(
                CONTENT_DISPOSITION.tuples(filename=package.filename)
            )
            request.response.headers.update(
                CACHE_CONTROL.tuples(
                    public=True, max_age=request.registry.package_max_age
                )
            )
            request.response.content_type = "application/octet-stream"
            return request.response
        return request.db.download_response(package)

    # Package is not cached locally: only proceed when we act as a cache.
    if request.registry.fallback != "cache":
        return HTTPNotFound()
    if not request.access.can_update_cache():
        return request.forbid()

    # If we are caching pypi, download the package from pypi and save it
    releases = request.locator.get_releases(context.name)
    dist = next(
        (
            release
            for release in releases
            if posixpath.basename(release["url"]) == context.filename
        ),
        None,
    )
    if dist is None:
        return HTTPNotFound()

    LOG.info("Caching %s from %s", context.filename, request.fallback_simple)
    package, data = fetch_dist(
        request,
        dist["url"],
        dist["name"],
        dist["version"],
        dist["summary"],
        dist["requires_python"],
    )
    request.response.headers.update(
        CONTENT_DISPOSITION.tuples(filename=package.filename)
    )
    request.response.headers.update(
        CACHE_CONTROL.tuples(public=True, max_age=request.registry.package_max_age)
    )
    request.response.body = data
    request.response.content_type = "application/octet-stream"
    return request.response
def __init__(self, location, headers=None):
    """Build the intermediate "grab fragment" redirect page.

    :param location: target URL of the redirect.
    :param headers: optional response headers — either a Paste-style
        list of ``(name, value)`` tuples, or a WebOb-style mapping
        that gets converted below.
    """
    if headers is None:
        headers = []
    elif not isinstance(headers, list):
        # The headers were built with WebOb.
        # Drop the body size: it was zero (this was a redirect), but
        # that is no longer the case now (and Paste checks this value).
        headers.pop('Content-Length', None)
        # Convert from WebOb's format to Paste's format.
        headers = headers.items()
    # Drop the redirect header itself.
    remove_header(headers, 'location')
    # Prevent caching of the intermediate page.
    CACHE_CONTROL.apply(headers, no_cache=True, no_store=True)
    super(HTTPFoundGrabFragment, self).__init__(location, headers)
def cache_forever():
    """Mark the current response as publicly cacheable for one year."""
    # Start from the existing headers minus any prior caching directives.
    retained = [
        (name, value)
        for name, value in response.headers.items()
        if name.lower() not in ("pragma", "cache-control")
    ]
    one_year = 60 * 60 * 24 * 365
    delta = CACHE_CONTROL.apply(retained, public=True, max_age=one_year)
    EXPIRES.update(retained, delta=delta)
    # Remove the stale directives from the live response, then write back
    # the rebuilt header set (including the new Cache-Control/Expires).
    response.headers.pop("cache-control", None)
    response.headers.pop("pragma", None)
    response.headers.update(retained)
def download_package(context, request):
    """Download package, or redirect to the download link.

    If the package is missing locally and this server acts as a pypi
    cache, fetch it from the upstream locator, store it, and serve it.

    Fix: the streamed-file branch set the content type to the misspelled
    ``application/octect-stream``; it now uses the valid
    ``application/octet-stream`` MIME type.
    """
    package = request.db.fetch(context.filename)
    if not package:
        if request.registry.fallback != "cache":
            return HTTPNotFound()
        if not request.access.can_update_cache():
            return request.forbid()

        # If we are caching pypi, download the package from pypi and save it
        dists = request.locator.get_project(context.name)
        dist = None
        source_url = None
        for version, url_set in six.iteritems(dists.get("urls", {})):
            if dist is not None:
                break
            for url in url_set:
                if posixpath.basename(url) == context.filename:
                    source_url = url
                    dist = dists[version]
                    break
        if dist is None:
            return HTTPNotFound()
        LOG.info("Caching %s from %s", context.filename, request.fallback_simple)
        package, data = fetch_dist(request, dist.name, source_url)
        disp = CONTENT_DISPOSITION.tuples(filename=package.filename)
        request.response.headers.update(disp)
        cache_control = CACHE_CONTROL.tuples(
            public=True, max_age=request.registry.package_max_age
        )
        request.response.headers.update(cache_control)
        request.response.body = data
        request.response.content_type = "application/octet-stream"
        return request.response
    if request.registry.stream_files:
        with request.db.storage.open(package) as data:
            request.response.body = data.read()
        disp = CONTENT_DISPOSITION.tuples(filename=package.filename)
        request.response.headers.update(disp)
        cache = CACHE_CONTROL.tuples(
            public=True, max_age=request.registry.package_max_age
        )
        request.response.headers.update(cache)
        # FIX: was the invalid "application/octect-stream"
        request.response.content_type = "application/octet-stream"
        return request.response
    response = request.db.download_response(package)
    return response
def download_package(context, request):
    """Download package, or redirect to the download link.

    Falls back to caching the package from pypi when it is not stored
    locally and the registry is configured as a cache.

    Fix: the streamed-file branch set the content type to the misspelled
    ``application/octect-stream``; it now uses the valid
    ``application/octet-stream`` MIME type.
    """
    package = request.db.fetch(context.filename)
    if not package:
        if request.registry.fallback != "cache":
            return HTTPNotFound()
        if not request.access.can_update_cache():
            return request.forbid()

        # If we are caching pypi, download the package from pypi and save it
        dists = request.locator.get_project(context.name)
        dist = None
        source_url = None
        for version, url_set in six.iteritems(dists.get("urls", {})):
            if dist is not None:
                break
            for url in url_set:
                if posixpath.basename(url) == context.filename:
                    source_url = url
                    dist = dists[version]
                    break
        if dist is None:
            return HTTPNotFound()
        LOG.info("Caching %s from %s", context.filename, request.fallback_simple)
        package, data = fetch_dist(request, dist.name, source_url)
        disp = CONTENT_DISPOSITION.tuples(filename=package.filename)
        request.response.headers.update(disp)
        cache_control = CACHE_CONTROL.tuples(
            public=True, max_age=request.registry.package_max_age)
        request.response.headers.update(cache_control)
        request.response.body = data
        request.response.content_type = "application/octet-stream"
        return request.response
    if request.registry.stream_files:
        with request.db.storage.open(package) as data:
            request.response.body = data.read()
        disp = CONTENT_DISPOSITION.tuples(filename=package.filename)
        request.response.headers.update(disp)
        cache = CACHE_CONTROL.tuples(public=True,
                                     max_age=request.registry.package_max_age)
        request.response.headers.update(cache)
        # FIX: was the invalid "application/octect-stream"
        request.response.content_type = "application/octet-stream"
        return request.response
    response = request.db.download_response(package)
    return response
def cache_forever():
    """Force the response to be cached publicly for a full year."""
    stripped = ('pragma', 'cache-control')
    fresh = []
    # Keep every header except previous caching directives.
    for key, val in response.headers.items():
        if key.lower() not in stripped:
            fresh.append((key, val))
    # One year, in seconds; CACHE_CONTROL.apply returns the delta used
    # so the Expires header can be kept consistent with it.
    delta = CACHE_CONTROL.apply(fresh, public=True, max_age=60 * 60 * 24 * 365)
    EXPIRES.update(fresh, delta=delta)
    response.headers.pop('cache-control', None)
    response.headers.pop('pragma', None)
    response.headers.update(fresh)
def local_start_response(stat_str, headers=None):
    """``start_response`` wrapper that injects public caching headers.

    :param stat_str: WSGI status string, e.g. ``"200 OK"``.
    :param headers: list of ``(name, value)`` response headers; a fresh
        list is used when omitted.

    Fix: the original used a mutable default argument (``headers=[]``).
    Because ``CACHE_CONTROL.apply`` mutates the list in place, the shared
    default would accumulate Cache-Control headers across calls.  Using a
    ``None`` sentinel gives each call its own list; callers that pass
    ``headers`` explicitly are unaffected.
    """
    if headers is None:
        headers = []
    CACHE_CONTROL.apply(headers, public=True,
                        max_age=self.max_age,
                        s_maxage=self.s_maxage)
    return start_response(stat_str, headers)
def get(self, environ, start_response):
    """Serve the resource, refreshing the underlying data first.

    A request carrying ``max-age=0`` forces a refresh, per
    RFC 2616 section 13.2.6 (end-to-end revalidation).
    """
    wants_fresh = 'max-age=0' in CACHE_CONTROL(environ).lower()
    if wants_fresh:
        self.update(force=True)  # RFC 2616 13.2.6
    else:
        self.update()
    return DataApp.get(self, environ, start_response)