def serve_image(request, leecher=None, category=None, image=None):
    """Serve an image (or one of its derived variants) from disk.

    Looks the image up by primary key (no extension) or by filename
    within *category* (plain, ``.thumbnail`` or ``.resized`` variant),
    enforces abuse/adult-content policy, answers conditional requests
    with 304, and otherwise streams the file in chunks.

    :raises NotFound: when no matching image exists.
    :raises ImageAbuseConfirmed, ImageAbuseReported, AdultContentException:
        policy violations for non-admin users.
    """
    category = Category.query.filter(
        or_(Category.name == category, Category.secret == category)).first()
    filename, extension = splitext(image)
    if not extension:
        # No extension: treat the path component as a primary-key lookup.
        loaded = Image.query.get(image)
    else:
        # Match the plain filename or a derived variant name
        # ("name.thumbnail.ext" / "name.resized.ext") within the category.
        loaded = Image.query.filter(or_(
            and_(Image.filename == filename + extension,
                 Image.category == category),
            and_(Image.filename == filename[:-len('.thumbnail')] + extension,
                 Image.category == category),
            and_(Image.filename == filename[:-len('.resized')] + extension,
                 Image.category == category))).first()
    if not loaded:
        raise NotFound("Requested image was not found")

    if not request.user.is_admin:
        if loaded.abuse and loaded.abuse.confirmed:
            raise ImageAbuseConfirmed
        elif loaded.abuse:
            raise ImageAbuseReported
    if not request.user.show_adult_content and loaded.adult_content:
        raise AdultContentException

    content_type = loaded.mimetype
    # The endpoint name selects which stored variant to serve
    # (e.g. a "thumbnail" endpoint reads ``loaded.thumbnail_path``).
    picture_path = getattr(loaded, "%s_path" % request.endpoint)
    size = getsize(picture_path)

    # This image won't change, allow caching it for a year
    expiry = loaded.stamp + timedelta(days=365)
    headers = [
        # If the image is private, don't allow cache systems to cache it
        # only the requesting user can cache it
        ('Cache-Control', 'private' if loaded.private else 'public'),
        # The rest of the headers
        ('Content-Length', str(size)),
        # RFC 7231 IMF-fixdate ("Sun, 06 Nov 1994 08:49:37 GMT"); the
        # previous asctime-like format is only a legacy fallback that
        # many caches mishandle.
        ('Expires', expiry.strftime("%a, %d %b %Y %H:%M:%S GMT")),
        ('ETag', loaded.etag)
    ]

    # Answer conditional requests *before* opening the file so the file
    # handle is never leaked on the 304 path (previously the file was
    # opened first and never closed when returning 304).
    if request.if_none_match.contains(loaded.etag):
        remove_entity_headers(headers)
        return Response('', 304, headers=headers)

    picture = open(picture_path, 'rb')

    def stream():
        # Stream in small chunks; the finally clause guarantees the
        # file is closed even if the client disconnects mid-transfer.
        try:
            while True:
                data = picture.read(2048)
                if not data:
                    break
                yield data
        finally:
            picture.close()

    return Response(stream(), content_type=content_type, headers=headers)
def serve_path(file):
    """Serve a static file, searching the site hierarchy upwards.

    Walks from the current site towards its root until a ``StaticFile``
    matching *file* is found; the final ``NoData`` is re-raised when no
    ancestor has the file.  Answers conditional requests with 304.
    """
    site = current_site
    while site:
        try:
            sf = StaticFile.q.get_by(site=site, path=file)
        except NoData:
            # Not on this site: fall back to the parent site.
            site = site.parent
            if not site:
                raise
        else:
            break
    if parse_etags(request.environ.get('HTTP_IF_NONE_MATCH')).contains(
            sf.hash):
        # Client cache is current; a 304 must not carry entity headers.
        r = Response("", mimetype=str(sf.mimetype))
        r.status_code = 304
        remove_entity_headers(r.headers)
    else:
        r = Response(sf.content, mimetype=str(sf.mimetype))
    r.set_etag(sf.hash)
    # Native str keys work on Python 2 (where b'' == '') and Python 3;
    # bytes keys break under Python 3's Werkzeug.
    r.headers['Cache-Control'] = 'public'
    # The standard HTTP header is "Expires" -- "Expiry" is not a real
    # header and was silently ignored by caches.
    r.headers['Expires'] = http_date(
        datetime.utcnow() + timedelta(0, current_app.config.STATIC_EXPIRE))
    r.headers['Last-Modified'] = http_date(sf.modified)
    return r
def test_remove_entity_headers(self):
    """Entity headers are stripped from both plain lists and Headers."""
    now = http.http_date()
    as_list = [('Date', now), ('Content-Type', 'text/html'),
               ('Content-Length', '0')]
    as_headers = datastructures.Headers(as_list)

    http.remove_entity_headers(as_list)
    assert as_list == [('Date', now)]

    http.remove_entity_headers(as_headers)
    self.assert_equal(as_headers, datastructures.Headers([(u'Date', now)]))
def test_remove_entity_headers(self):
    """remove_entity_headers drops entity headers but keeps the rest."""
    now = http.http_date()
    plain = [('Date', now), ('Content-Type', 'text/html'),
             ('Content-Length', '0')]
    wrapped = datastructures.Headers(plain)

    http.remove_entity_headers(plain)
    assert plain == [('Date', now)]

    http.remove_entity_headers(wrapped)
    assert wrapped == datastructures.Headers([(u'Date', now)])
def test_remove_entity_headers(self):
    """Both list-based and Headers-based inputs lose entity headers."""
    now = http.http_date()
    raw_headers = [("Date", now), ("Content-Type", "text/html"),
                   ("Content-Length", "0")]
    header_obj = datastructures.Headers(raw_headers)

    http.remove_entity_headers(raw_headers)
    assert raw_headers == [("Date", now)]

    http.remove_entity_headers(header_obj)
    self.assert_equal(header_obj, datastructures.Headers([(u"Date", now)]))
def test_remove_entity_headers(self):
    """Only the non-entity Date header survives removal."""
    now = http.http_date()
    header_list = [
        ("Date", now),
        ("Content-Type", "text/html"),
        ("Content-Length", "0"),
    ]
    header_map = datastructures.Headers(header_list)

    http.remove_entity_headers(header_list)
    assert header_list == [("Date", now)]

    http.remove_entity_headers(header_map)
    assert header_map == datastructures.Headers([("Date", now)])
def fix_headers(self, environ):
    """This is automatically called right before the response is started
    and should fix common mistakes in headers.  For example location
    headers are joined with the root URL here.
    """
    # Make a relative Location header absolute against the request root.
    if 'Location' in self.headers:
        self.headers['Location'] = urlparse.urljoin(
            get_current_url(environ, root_only=True),
            self.headers['Location']
        )
    if 100 <= self.status_code < 200 or self.status_code == 204:
        # 1xx and 204 responses never carry a body.  Header values must
        # be strings, not ints (matches get_wsgi_headers' '0').
        self.headers['Content-Length'] = '0'
    elif self.status_code == 304:
        # 304 responses must not include entity headers (RFC 7232).
        remove_entity_headers(self.headers)
def download(request, oid, name=None):
    """Serve a stored object, honouring If-None-Match conditionals.

    The initial eager ``Response(obj.content, ...)`` was dead code --
    always overwritten below -- and forced ``obj.content`` to be loaded
    even for 304 responses; it has been removed.
    """
    obj = Object.by_oid(oid)
    if name:
        # Sanity-check that the URL's filename matches the stored name.
        # NOTE(review): assert is stripped under ``python -O``; consider
        # raising an explicit HTTP error instead.
        n = obj.name
        if obj.mime.ext:
            n += "." + obj.mime.ext
        assert n == name
    if parse_etags(request.environ.get('HTTP_IF_NONE_MATCH')).contains(obj.hash):
        # Client cache is current; a 304 must not carry entity headers.
        r = Response("", mimetype=obj.mimetype)
        r.status_code = 304
        remove_entity_headers(r.headers)
    else:
        r = Response(obj.content, mimetype=obj.mimetype)
    r.set_etag(obj.hash)
    r.headers['Cache-Control'] = 'public'
    # The standard HTTP header is "Expires"; "Expiry" is not a real
    # header and was silently ignored by caches.
    r.headers['Expires'] = http_date(datetime.utcnow() + timedelta(999))
    r.headers['Last-Modified'] = http_date(obj.timestamp)
    return r
def download(request, oid, name=None):
    """Serve a stored object, honouring If-None-Match conditionals.

    The initial eager ``Response(obj.content, ...)`` was dead code --
    always overwritten below -- and forced ``obj.content`` to be loaded
    even for 304 responses; it has been removed.
    """
    obj = Object.by_oid(oid)
    if name:
        # Sanity-check that the URL's filename matches the stored name.
        # NOTE(review): assert is stripped under ``python -O``; consider
        # raising an explicit HTTP error instead.
        n = obj.name
        if obj.mime.ext:
            n += "." + obj.mime.ext
        assert n == name
    if parse_etags(request.environ.get('HTTP_IF_NONE_MATCH')).contains(
            obj.hash):
        # Client cache is current; a 304 must not carry entity headers.
        r = Response("", mimetype=obj.mimetype)
        r.status_code = 304
        remove_entity_headers(r.headers)
    else:
        r = Response(obj.content, mimetype=obj.mimetype)
    r.set_etag(obj.hash)
    r.headers['Cache-Control'] = 'public'
    # The standard HTTP header is "Expires"; "Expiry" is not a real
    # header and was silently ignored by caches.
    r.headers['Expires'] = http_date(datetime.utcnow() + timedelta(999))
    r.headers['Last-Modified'] = http_date(obj.timestamp)
    return r
def serve_path(file):
    """Serve a static file, searching the site hierarchy upwards.

    Walks from the current site towards its root until a ``StaticFile``
    matching *file* is found; the final ``NoData`` is re-raised when no
    ancestor has the file.  Answers conditional requests with 304.
    """
    site = current_site
    while site:
        try:
            sf = StaticFile.q.get_by(site=site, path=file)
        except NoData:
            # Not on this site: fall back to the parent site.
            site = site.parent
            if not site:
                raise
        else:
            break
    if parse_etags(request.environ.get('HTTP_IF_NONE_MATCH')).contains(sf.hash):
        # Client cache is current; a 304 must not carry entity headers.
        r = Response("", mimetype=str(sf.mimetype))
        r.status_code = 304
        remove_entity_headers(r.headers)
    else:
        r = Response(sf.content, mimetype=str(sf.mimetype))
    r.set_etag(sf.hash)
    # Native str keys work on Python 2 (where b'' == '') and Python 3;
    # bytes keys break under Python 3's Werkzeug.
    r.headers['Cache-Control'] = 'public'
    # The standard HTTP header is "Expires" -- "Expiry" is not a real
    # header and was silently ignored by caches.
    r.headers['Expires'] = http_date(
        datetime.utcnow() + timedelta(0, current_app.config.STATIC_EXPIRE))
    r.headers['Last-Modified'] = http_date(sf.modified)
    return r
def get_wsgi_headers(self, environ):
    """Return a new ``Headers`` object patched for the WSGI transport.

    Fixes the ``Location`` / ``Content-Location`` headers (IRI -> URI,
    relative -> absolute) and the ``Content-Length`` bookkeeping for
    bodyless status codes.  ``self.headers`` itself is left untouched;
    only the returned copy is modified.
    """
    headers = Headers(self.headers)
    location = headers.get('location')
    if location is not None:
        if isinstance(location, unicode):
            # IRIs may not appear on the wire; percent-encode to a URI.
            location = iri_to_uri(location)
        # Join a relative redirect target against the request root URL.
        headers['Location'] = urlparse.urljoin(get_current_url(environ, root_only=True), location)
    content_location = headers.get('content-location')
    if content_location is not None and isinstance(content_location, unicode):
        headers['Content-Location'] = iri_to_uri(content_location)
    if 100 <= self.status_code < 200 or self.status_code == 204:
        # 1xx and 204 responses never carry a body.
        headers['Content-Length'] = '0'
    elif self.status_code == 304:
        # Entity headers are forbidden on 304 responses.
        remove_entity_headers(headers)
    if self.is_sequence and 'content-length' not in self.headers:
        # Only a materialized (non-streamed) body has a knowable length.
        # NOTE(review): this checks self.headers, not the patched copy,
        # so a Content-Length removed above for a 304 may be re-added
        # here -- confirm whether that is intended.
        try:
            content_length = sum((len(str(x)) for x in self.response))
        except UnicodeError:
            # str() failed on a non-ASCII unicode chunk; leave the
            # length unset rather than guess.
            pass
        else:
            headers['Content-Length'] = str(content_length)
    return headers