def test_is_resource_modified_for_range_requests(self):
    """If-Range is only honored when a Range header is present."""
    environ = create_environ()
    environ["HTTP_IF_MODIFIED_SINCE"] = http.http_date(datetime(2008, 1, 1, 12, 30))
    environ["HTTP_IF_RANGE"] = http.generate_etag(b"awesome_if_range")
    # No Range header yet, so If-Range must be ignored and the
    # If-Modified-Since comparison wins (resource unmodified).
    assert not http.is_resource_modified(
        environ,
        data=b"not_the_same",
        ignore_if_range=False,
        last_modified=datetime(2008, 1, 1, 12, 30),
    )

    # With Range present, the etag in If-Range is compared against ``data``.
    environ["HTTP_RANGE"] = ""
    assert not http.is_resource_modified(
        environ, data=b"awesome_if_range", ignore_if_range=False
    )
    assert http.is_resource_modified(
        environ, data=b"not_the_same", ignore_if_range=False
    )

    # A date-valued If-Range is compared against last_modified instead.
    environ["HTTP_IF_RANGE"] = http.http_date(datetime(2008, 1, 1, 13, 30))
    assert http.is_resource_modified(
        environ, last_modified=datetime(2008, 1, 1, 14, 00), ignore_if_range=False
    )
    assert not http.is_resource_modified(
        environ, last_modified=datetime(2008, 1, 1, 13, 30), ignore_if_range=False
    )
    # ignore_if_range=True falls back to plain conditional handling.
    assert http.is_resource_modified(
        environ, last_modified=datetime(2008, 1, 1, 13, 30), ignore_if_range=True
    )
def test_send_file_range_request(self, app, client):
    """send_file(conditional=True) honors Range and If-Range headers."""
    @app.route("/")
    def index():
        return flask.send_file("static/index.html", conditional=True)

    with app.open_resource("static/index.html") as f:
        content = f.read()

    # (Range header, expected body slice)
    partial_cases = [
        ("bytes=4-15", content[4:16]),
        ("bytes=4-", content[4:]),
        ("bytes=4-1000", content[4:]),  # end clamped to the file size
        ("bytes=-10", content[-10:]),
    ]
    for range_header, expected in partial_cases:
        rv = client.get("/", headers={"Range": range_header})
        assert rv.status_code == 206
        assert rv.data == expected
        rv.close()

    # Unsatisfiable or malformed ranges yield 416.
    for bad_range in ("bytes=1000-", "bytes=-", "somethingsomething"):
        rv = client.get("/", headers={"Range": bad_range})
        assert rv.status_code == 416
        rv.close()

    last_modified = datetime.datetime.utcfromtimestamp(
        os.path.getmtime(os.path.join(app.root_path, "static/index.html"))
    ).replace(microsecond=0)

    # Matching If-Range: partial content is served.
    rv = client.get(
        "/", headers={"Range": "bytes=4-15", "If-Range": http_date(last_modified)}
    )
    assert rv.status_code == 206
    rv.close()

    # Stale If-Range: the full body is served instead.
    rv = client.get(
        "/",
        headers={
            "Range": "bytes=4-15",
            "If-Range": http_date(datetime.datetime(1999, 1, 1)),
        },
    )
    assert rv.status_code == 200
    rv.close()
def response_formated_records(records, collection, of, **kwargs):
    """Return formatted records wrapped in a response.

    The response carries correct Cache-Control/Expires/Last-Modified
    headers, driven by ``CFG_WEBSEARCH_SEARCH_CACHE_TIMEOUT``.
    """
    body = format_records(records, collection=collection, of=of, **kwargs)
    response = make_response(body)
    response.mimetype = get_output_format_content_type(of)

    now = datetime.datetime.now()
    response.headers['Last-Modified'] = http_date(time.mktime(now.timetuple()))

    expires = current_app.config.get('CFG_WEBSEARCH_SEARCH_CACHE_TIMEOUT', None)
    if expires is None:
        # Caching disabled: forbid stores and mark the response stale.
        response.headers['Cache-Control'] = (
            'no-store, no-cache, must-revalidate, '
            'post-check=0, pre-check=0, max-age=0'
        )
        response.headers['Expires'] = '-1'
    else:
        # Guests may share a public cache; logged-in users get private.
        expires_at = now + datetime.timedelta(seconds=expires)
        response.headers['Vary'] = 'Accept'
        response.headers['Cache-Control'] = (
            'public' if current_user.is_guest else 'private'
        )
        response.headers['Expires'] = http_date(time.mktime(
            expires_at.timetuple()
        ))
    return response
def default(self, o):
    """Implement this method in a subclass such that it returns a
    serializable object for ``o``, or calls the base implementation (to
    raise a :exc:`TypeError`).

    For example, to support arbitrary iterators, you could implement
    default like this::

        def default(self, o):
            try:
                iterable = iter(o)
            except TypeError:
                pass
            else:
                return list(iterable)
            return JSONEncoder.default(self, o)
    """
    # datetime must be tested before date: datetime is a date subclass.
    if isinstance(o, datetime):
        return http_date(o.utctimetuple())
    elif isinstance(o, date):
        return http_date(o.timetuple())
    elif isinstance(o, uuid.UUID):
        return str(o)
    elif hasattr(o, '__html__'):
        return text_type(o.__html__())
    return _json.JSONEncoder.default(self, o)
def timemap_link_response(app, mementos, uri_r):
    """Return a 200 TimeMap response in application/link-format.

    :param mementos: A sorted (ascending by date) list of
        (uri_str, datetime_obj) tuples representing a TimeMap.
    :param uri_r: The URI-R of the original resource.
    :return: The ``Response`` object.
    """
    assert len(mementos) >= 1

    # Adds Original, TimeGate and TimeMap links.
    # NOTE(review): ``url_for`` receives the values as a positional dict —
    # presumably a project-local helper, not Flask's url_for (which takes
    # **values); confirm against its definition.
    original_link = Link(uri_r, rel='original')
    timegate_link = Link(
        url_for('timegate', dict(uri_r=uri_r), force_external=True),
        rel='timegate',
    )
    link_self = Link(
        url_for('timemap', dict(
            response_type='link',
            uri_r=uri_r
        ), force_external=True),
        rel='self',
        type='application/link-format',
    )
    json_self = Link(
        url_for('timemap', dict(
            response_type='json',
            uri_r=uri_r
        ), force_external=True),
        rel='timemap',
        type='application/json',
    )

    # Sets up first and last relations
    if len(mementos) == 1:
        # A single Memento is simultaneously the first and last one.
        mementos_links = [Link(mementos[0][0], rel='first last memento',
                               datetime=http_date(mementos[0][1]))]
    else:
        # Browse through Mementos to generate the TimeMap links list
        mementos_links = [
            Link(mementos[0][0], rel='first memento',
                 datetime=http_date(mementos[0][1]))
        ] + [
            Link(uri, rel='memento', datetime=http_date(date))
            for (uri, date) in mementos[1:-1]
        ] + [
            Link(mementos[-1][0], rel='last memento',
                 datetime=http_date(mementos[-1][1]))
        ]

    # Aggregates all link strings and constructs the TimeMap body
    links = [original_link, timegate_link, link_self, json_self]
    links.extend(mementos_links)
    body = ',\n'.join([str(l) for l in links]) + '\n'

    # Builds HTTP Response and WSGI return
    headers = [
        ('Date', http_date(datetime.utcnow())),
        ('Content-Length', str(len(body))),
        ('Content-Type', 'application/link-format'),
        ('Connection', 'close'),
    ]
    return Response(body, headers=headers)
def test_send_file_range_request(self):
    """send_file with conditional=True serves partial content for Range
    requests and honors If-Range."""
    app = flask.Flask(__name__)

    @app.route('/')
    def index():
        return flask.send_file('static/index.html', conditional=True)

    c = app.test_client()

    with app.open_resource('static/index.html') as f:
        content = f.read()

    # (Range header, expected body slice)
    partial_cases = [
        ('bytes=4-15', content[4:16]),
        ('bytes=4-', content[4:]),
        ('bytes=4-1000', content[4:]),  # end clamped to the file size
        ('bytes=-10', content[-10:]),
    ]
    for range_header, expected in partial_cases:
        rv = c.get('/', headers={'Range': range_header})
        assert rv.status_code == 206
        assert rv.data == expected
        rv.close()

    # Unsatisfiable or malformed ranges yield 416.
    for bad_range in ('bytes=1000-', 'bytes=-', 'somethingsomething'):
        rv = c.get('/', headers={'Range': bad_range})
        assert rv.status_code == 416
        rv.close()

    last_modified = datetime.datetime.fromtimestamp(os.path.getmtime(
        os.path.join(app.root_path, 'static/index.html'))).replace(
        microsecond=0)

    # Matching If-Range: the partial response is served.
    rv = c.get('/', headers={'Range': 'bytes=4-15',
                             'If-Range': http_date(last_modified)})
    assert rv.status_code == 206
    rv.close()

    # Stale If-Range: the full body is served instead.
    rv = c.get('/', headers={'Range': 'bytes=4-15', 'If-Range': http_date(
        datetime.datetime(1999, 1, 1))})
    assert rv.status_code == 200
    rv.close()
def timemap_json_response(app, mementos, uri_r):
    """Creates and sends a TimeMap response in JSON format.

    :param mementos: A sorted list of (uri_str, datetime_obj) tuples
        representing a timemap.
    :param uri_r: The URI-R of the original resource.
    :return: The ``Response`` object.
    """
    assert len(mementos) >= 1

    # Prepares the JSON response by building a dict.
    # NOTE(review): ``url_for`` receives the values as a positional dict —
    # presumably a project-local helper rather than Flask's url_for;
    # confirm against its definition.
    response_dict = {}
    response_dict['original_uri'] = uri_r
    response_dict['timegate_uri'] = url_for(
        'timegate', dict(uri_r=uri_r), force_external=True
    )

    # Browse through Mementos to generate TimeMap links dict list
    mementos_links = [
        {'uri': urlstr, 'datetime': http_date(date)}
        for (urlstr, date) in mementos
    ]

    # Builds up first and last links dict
    firstlink = {'uri': mementos[0][0], 'datetime': http_date(mementos[0][1])}
    lastlink = {'uri': mementos[-1][0], 'datetime': http_date(mementos[-1][1])}

    response_dict['mementos'] = {
        'last': lastlink,
        'first': firstlink,
        'list': mementos_links,
    }

    # Builds self (TimeMap) links dict
    response_dict['timemap_uri'] = {
        'json_format': url_for('timemap', dict(
            response_type='json',
            uri_r=uri_r
        ), force_external=True),
        'link_format': url_for('timemap', dict(
            response_type='link',
            uri_r=uri_r
        ), force_external=True),
    }

    # Creates the JSON str from the dict
    response_json = json.dumps(response_dict)

    # Builds HTTP Response and WSGI return
    headers = [
        ('Date', http_date(datetime.utcnow())),
        ('Content-Length', str(len(response_json))),
        ('Content-Type', 'application/json'),
    ]
    return Response(response_json, headers=headers)
def __call__(self, environ, start_response):
    """WSGI entry point: serve a matching exported static file, or
    delegate to the wrapped application when no export matches.
    """
    cleaned_path = get_path_info(environ)
    if PY2:
        cleaned_path = cleaned_path.encode(sys.getfilesystemencoding())
    # sanitize the path for non unix systems
    cleaned_path = cleaned_path.strip('/')
    for sep in os.sep, os.altsep:
        if sep and sep != '/':
            cleaned_path = cleaned_path.replace(sep, '/')
    # drop empty segments and '..' to block directory traversal
    path = '/' + '/'.join(x for x in cleaned_path.split('/')
                          if x and x != '..')
    file_loader = None
    for search_path, loader in iteritems(self.exports):
        # exact match serves the export root itself
        if search_path == path:
            real_filename, file_loader = loader(None)
            if file_loader is not None:
                break
        if not search_path.endswith('/'):
            search_path += '/'
        # prefix match serves a file below the export root
        if path.startswith(search_path):
            real_filename, file_loader = loader(path[len(search_path):])
            if file_loader is not None:
                break
    if file_loader is None or not self.is_allowed(real_filename):
        # no export matched (or file type not allowed): pass through
        return self.app(environ, start_response)
    guessed_type = mimetypes.guess_type(real_filename)
    mime_type = guessed_type[0] or self.fallback_mimetype
    f, mtime, file_size = file_loader()
    headers = [('Date', http_date())]
    if self.cache:
        timeout = self.cache_timeout
        etag = self.generate_etag(mtime, file_size, real_filename)
        headers += [
            ('Etag', '"%s"' % etag),
            ('Cache-Control', 'max-age=%d, public' % timeout)
        ]
        if not is_resource_modified(environ, etag, last_modified=mtime):
            # client copy is current: answer 304 and release the file
            f.close()
            start_response('304 Not Modified', headers)
            return []
        headers.append(('Expires', http_date(time() + timeout)))
    else:
        headers.append(('Cache-Control', 'public'))
    headers.extend((
        ('Content-Type', mime_type),
        ('Content-Length', str(file_size)),
        ('Last-Modified', http_date(mtime))
    ))
    start_response('200 OK', headers)
    return wrap_file(environ, f)
def view_thread(board_name, thread_refno):
    """Render a single thread page with a Last-Modified header."""
    valid_id_range(thread_refno)

    board: BoardModel = board_service.find_board(board_name)
    if not board:
        abort(404)

    thread = posts_service.find_thread_by_board_thread_refno_with_posts(board, thread_refno)
    if not thread:
        abort(404)

    additional_page_details = {'threadRefno': thread.refno}
    if thread.locked:
        additional_page_details['locked'] = True
    if thread.sticky:
        additional_page_details['sticky'] = True

    # TODO: don't use the board id
    show_mod_buttons = show_moderator_buttons(thread.board.id)

    page = render_template(
        'thread.html',
        thread=thread,
        board=thread.board,
        show_moderator_buttons=show_mod_buttons,
        **get_board_view_params(board.config, 'thread', board_name,
                                additional_page_details))
    r: Response = app.make_response(page)
    # last_modified is stored in milliseconds; http_date expects seconds.
    r.headers['Last-Modified'] = http_date(thread.last_modified / 1000)
    return r
def cache():
    """Returns a 304 if an If-Modified-Since header or If-None-Match is present. Returns the same as a GET otherwise.
    ---
    tags:
      - Response inspection
    parameters:
      - in: header
        name: If-Modified-Since
      - in: header
        name: If-None-Match
    produces:
      - application/json
    responses:
      200:
        description: Cached response
      304:
        description: Modified
    """
    is_conditional = request.headers.get('If-Modified-Since') or request.headers.get('If-None-Match')
    # Conditional requests are answered 304 without a body.
    if is_conditional is not None:
        return status_code(304)
    response = view_get()
    response.headers['Last-Modified'] = http_date()
    response.headers['ETag'] = uuid.uuid4().hex
    return response
def home():
    """Return all feeds as JSON, newest first."""
    feeds = Feed.query.order_by(Feed.id.desc()).all()
    serialized = [
        {
            'id': feed.id,
            'tid': feed.tid,
            'text': feed.text,
            'created_at': http_date(feed.created_at),
            'synced': feed.synced,
        }
        for feed in feeds
    ]
    return jsonify(count=len(serialized), feeds=serialized)
def compute_etag(files=None, lastmodified=None, additional=None):
    """Compute an ETag for the UI from its asset files, their last
    modification time, the server version/API key and any additional
    values; a plugin-provided ``custom_etag`` callable takes precedence.
    """
    if callable(custom_etag):
        try:
            etag = custom_etag()
            if etag:
                return etag
        # Fixed: was a bare ``except:``, which also swallowed SystemExit
        # and KeyboardInterrupt. A failing plugin hook is an ordinary
        # error and must not mask interpreter exits.
        except Exception:
            _logger.exception("Error while trying to retrieve custom ETag value for plugin {}".format(key))

    if files is None:
        files = collect_files()
    if lastmodified is None:
        lastmodified = compute_lastmodified(files)
    if lastmodified and not isinstance(lastmodified, basestring):
        from werkzeug.http import http_date
        lastmodified = http_date(lastmodified)
    if additional is None:
        additional = []

    import hashlib
    hash = hashlib.sha1()
    hash.update(octoprint.__version__)
    hash.update(octoprint.server.UI_API_KEY)
    hash.update(",".join(sorted(files)))
    if lastmodified:
        hash.update(lastmodified)
    for add in additional:
        hash.update(str(add))

    return hash.hexdigest()
def _tag(value):
    """Convert *value* into a JSON-safe tagged representation.

    Special types become single-key dicts whose key starts with a space
    (" t", " u", ...) so they cannot collide with ordinary string keys;
    containers are processed recursively. Branch order matters (e.g.
    tuple is checked before list).
    """
    if isinstance(value, tuple):
        return {" t": [_tag(x) for x in value]}
    elif isinstance(value, uuid.UUID):
        return {" u": value.hex}
    elif isinstance(value, bytes):
        return {" b": b64encode(value).decode("ascii")}
    elif callable(getattr(value, "__html__", None)):
        # Markup-like objects serialize via their __html__() text.
        return {" m": text_type(value.__html__())}
    elif isinstance(value, list):
        return [_tag(x) for x in value]
    elif isinstance(value, datetime):
        return {" d": http_date(value)}
    elif isinstance(value, dict):
        return dict((k, _tag(v)) for k, v in iteritems(value))
    elif isinstance(value, str):
        # Python 2: ``str`` is bytes here; only ASCII-safe data is allowed.
        try:
            return text_type(value)
        except UnicodeError:
            raise UnexpectedUnicodeError(
                u"A byte string with "
                u"non-ASCII data was passed to the session system "
                u"which can only store unicode strings. Consider "
                u"base64 encoding your string (String was %r)" % value
            )
    return value
def default(self, obj):
    """Serialize datetimes as HTTP dates and ObjectIds as strings."""
    if isinstance(obj, datetime):
        return http_date(obj)
    if isinstance(obj, ObjectId):
        return str(obj)
    return json.JSONEncoder.default(self, obj)
def graphite():
    """Proxy a request to the configured Graphite server.

    The optional ``_cache`` query parameter enables private caching for
    that many seconds. On failure an SVG image containing the error text
    is returned with status 500.
    """
    params = {key: request.values.getlist(key) for key in request.values.keys()}
    try:
        remote = fetch_remote(
            current_app.config['GRAPHITE_SERVER'],
            method=request.method,
            data=request.data,
            accept=request.headers.get('Accept'),
            params=params,
            timeout=current_app.config['GRAPHITE_TIMEOUT'],
        )
        headers = {'Content-Type': remote.headers.get('Content-Type')}
        cache_for = int(request.values.get('_cache', 0))
        if cache_for > 0:
            headers['Cache-Control'] = 'private, max-age=' + str(cache_for)
            headers['Expires'] = http_date(
                datetime.now() + timedelta(seconds=cache_for))
        return make_response((remote.content, remote.status_code, headers))
    except Exception as e:
        # Render the failure as an inline SVG so dashboards still show
        # something useful.
        image = IMAGE_TPL.format(
            error=str(e),
            width=request.values.get('width', '400'),
            height=request.values.get('height', '200'),
        )
        return make_response((image, 500, {'Content-Type': 'image/svg+xml'}))
def fetch_resource(resource):
    '''
    Gets the resource using the requests library and sets the times of
    last successful update based on the status code.

    :param resource: model object with url/etag/timestamps/document fields
    :return: the (mutated) resource, added to the db session
    '''
    # Build conditional-request headers from the last successful fetch.
    headers = {}
    if resource.last_succ:
        headers['If-Modified-Since'] = http_date(resource.last_succ)
    if resource.etag:
        # NOTE(review): the encode/decode dance on header values suggests
        # Python 2 era code — under Python 3 ``resp.headers.get(...)``
        # returns str, which has no ``.decode``; confirm before porting.
        headers["If-None-Match"] = resource.etag.encode('ascii')
    resp = requests.get(resource.url, headers=headers)
    resource.last_status_code = resp.status_code
    resource.last_fetch = datetime.datetime.utcnow()
    if resp.status_code == 200:
        # Fresh content: store the body and reset parse state.
        resource.document = resp.content
        if "etag" in resp.headers:
            resource.etag = resp.headers.get('etag').decode('ascii')
        else:
            resource.etag = None
        resource.last_succ = datetime.datetime.utcnow()
        resource.last_parsed = None
        resource.last_parse_error = None
    if resp.status_code == 304:
        # Not modified: only refresh the success timestamp.
        resource.last_succ = datetime.datetime.utcnow()
    db.session.add(resource)
    return resource
def _tag(value):
    """Convert *value* into a JSON-safe tagged representation.

    Special types become single-key dicts whose key starts with a space
    (' t', ' u', ...) so they cannot collide with real string keys;
    containers are processed recursively. Branch order matters (tuple
    before list).
    """
    if isinstance(value, tuple):
        return {' t': [_tag(x) for x in value]}
    elif isinstance(value, uuid.UUID):
        return {' u': value.hex}
    elif isinstance(value, bytes):
        return {' b': b64encode(value).decode('ascii')}
    elif callable(getattr(value, '__html__', None)):
        # Markup-like objects serialize via their __html__() text.
        return {' m': text_type(value.__html__())}
    elif isinstance(value, list):
        return [_tag(x) for x in value]
    elif isinstance(value, datetime):
        return {' d': http_date(value)}
    elif isinstance(value, dict):
        return dict((k, _tag(v)) for k, v in iteritems(value))
    elif isinstance(value, str):
        # Python 2 byte strings must be ASCII-safe to store.
        try:
            return text_type(value)
        except UnicodeError:
            # imported lazily to avoid a circular import
            from keyes.debughelpers import UnexpectedUnicodeError
            raise UnexpectedUnicodeError(u'A byte string with '
                u'non-ASCII data was passed to the session system '
                u'which can only store unicode strings. Consider '
                u'base64 encoding your string (String was %r)' % value)
    return value
def decorated_controller(*args, **kwargs):
    """Invoke the wrapped controller and defeat all client caching."""
    response = make_response(controller(*args, **kwargs))
    no_cache_headers = {
        'Last-Modified': http_date(datetime.now()),
        'Cache-Control': ('no-store, no-cache, must-revalidate, '
                          'post-check=0, pre-check=0, max-age=0'),
        # a date firmly in the past
        'Expires': 'Sat, 26 Jul 1997 05:00:00 GMT',
        'Pragma': 'no-cache',
    }
    for header_name, header_value in no_cache_headers.items():
        response.headers[header_name] = header_value
    return response
def __call__(self, environ, start_response):
    """WSGI entry point: serve a matching exported static file, or
    delegate to the wrapped application when no export matches.
    """
    cleaned_path = get_path_info(environ)
    if PY2:
        cleaned_path = cleaned_path.encode(get_filesystem_encoding())
    # sanitize the path for non unix systems
    cleaned_path = cleaned_path.strip("/")
    for sep in os.sep, os.altsep:
        if sep and sep != "/":
            cleaned_path = cleaned_path.replace(sep, "/")
    # drop empty segments and '..' to block directory traversal
    path = "/" + "/".join(x for x in cleaned_path.split("/")
                          if x and x != "..")
    file_loader = None
    for search_path, loader in iteritems(self.exports):
        # exact match serves the export root itself
        if search_path == path:
            real_filename, file_loader = loader(None)
            if file_loader is not None:
                break
        if not search_path.endswith("/"):
            search_path += "/"
        # prefix match serves a file below the export root
        if path.startswith(search_path):
            real_filename, file_loader = loader(path[len(search_path) :])
            if file_loader is not None:
                break
    if file_loader is None or not self.is_allowed(real_filename):
        # no export matched (or file type not allowed): pass through
        return self.app(environ, start_response)
    guessed_type = mimetypes.guess_type(real_filename)
    mime_type = guessed_type[0] or self.fallback_mimetype
    f, mtime, file_size = file_loader()
    headers = [("Date", http_date())]
    if self.cache:
        timeout = self.cache_timeout
        etag = self.generate_etag(mtime, file_size, real_filename)
        headers += [("Etag", '"%s"' % etag),
                    ("Cache-Control", "max-age=%d, public" % timeout)]
        if not is_resource_modified(environ, etag, last_modified=mtime):
            # client copy is current: answer 304 and release the file
            f.close()
            start_response("304 Not Modified", headers)
            return []
        headers.append(("Expires", http_date(time() + timeout)))
    else:
        headers.append(("Cache-Control", "public"))
    headers.extend(
        (("Content-Type", mime_type),
         ("Content-Length", str(file_size)),
         ("Last-Modified", http_date(mtime)))
    )
    start_response("200 OK", headers)
    return wrap_file(environ, f)
def default(self, o):
    """Serialize dates, UUIDs and markup-capable objects for JSON."""
    if isinstance(o, datetime.date):
        return http_date(o.timetuple())
    elif isinstance(o, uuid.UUID):
        return str(o)
    elif hasattr(o, '__html__'):
        return text_type(o.__html__())
    else:
        return _json.JSONEncoder.default(self, o)
def _conditions(self, full_path, environ):
    """Return (Etag, Last-Modified) values; falls back to the file's
    mtime for both when no magic handler matches."""
    magic = self._match_magic(full_path)
    if magic is None:
        mtime = stat(full_path).st_mtime
        return str(mtime), http_date(mtime)
    return magic.conditions(full_path, environ)
def default(self, o):
    """Serialize datetimes as HTTP dates, UUIDs as strings, and
    markup-capable objects via __html__."""
    if isinstance(o, datetime):
        return http_date(o)
    elif isinstance(o, uuid.UUID):
        return str(o)
    elif hasattr(o, '__html__'):
        return unicode(o.__html__())
    else:
        return _json.JSONEncoder.default(self, o)
def _expires_headers(self, response):
    """Apply Expires and Cache-Control headers.

    ``self.expires`` may be a ``timedelta`` (relative expiry) or a
    ``datetime`` (absolute expiry); anything falsy disables the headers.

    :param response: response to extend
    :return: response
    """
    h = {}
    if self.expires:
        if isinstance(self.expires, timedelta):
            # Relative expiry: now + delta.
            expires_at = datetime.utcnow() + self.expires
            h['Expires'] = http_date(expires_at)
            h['Cache-Control'] = 'max-age={seconds}'.format(
                seconds=int(self.expires.total_seconds()))
        elif isinstance(self.expires, dt.datetime):
            # Absolute expiry: max-age is the remaining time.
            difference = self.expires - datetime.utcnow()
            h['Expires'] = http_date(self.expires)
            # Bug fix: ``difference.seconds`` is only the seconds
            # *component* (0-86399) and silently drops whole days;
            # total_seconds() is the actual remaining interval (and
            # matches the timedelta branch above).
            h['Cache-Control'] = 'max-age={seconds}'.format(
                seconds=int(difference.total_seconds()))
    response.headers.extend(h)
    return response
def test_static_file_default_expires(self):
    """Static files get an Expires header derived from the configured
    '2d 3h' expiration."""
    response = self.client.get("/favicon.ico")
    self.assertEqual(response.status_code, httplib.OK)
    with open(static_path("test_statics/favicon.ico")) as f:
        self.assertEqual(response.data, f.read())
    expired_time = FAKE_CURRENT_TIME + datetime.timedelta(
        seconds=appinfo.ParseExpiration("2d 3h"))
    self.assertEqual(response.headers["Expires"], http.http_date(expired_time))
def to_dict(self):
    """Serialize the user, including owned charts and dashboards."""
    return {
        "id": str(self.id),
        "email": self.email,
        "username": self.username,
        "nickname": self.nickname,
        "is_valid": self.is_valid,
        "is_superadmin": self.is_superadmin,
        # Timestamps are rendered as RFC 1123 HTTP dates.
        "created_at": http_date(self.created_at),
        "charts": [chart.to_dict() for chart in self.charts],
        "dashboards": [dashboard.to_dict() for dashboard in self.dashboards],
    }
def test_remove_entity_headers(self):
    """remove_entity_headers strips entity headers but keeps Date."""
    now = http.http_date()
    raw_headers = [('Date', now), ('Content-Type', 'text/html'),
                   ('Content-Length', '0')]
    wrapped_headers = datastructures.Headers(raw_headers)
    http.remove_entity_headers(raw_headers)
    assert raw_headers == [('Date', now)]
    http.remove_entity_headers(wrapped_headers)
    self.assert_equal(wrapped_headers, datastructures.Headers([(u'Date', now)]))
def default(self, o):  # pylint: disable=method-hidden
    """JSON fallbacks: undefined -> None, datetime -> HTTP date,
    UUID -> str, markup objects -> their __html__() text."""
    if is_undefined(o):
        result = None
    elif isinstance(o, datetime):
        result = http_date(o)
    elif isinstance(o, uuid.UUID):
        result = str(o)
    elif hasattr(o, '__html__'):
        result = text_type(o.__html__())
    else:
        result = json.JSONEncoder.default(self, o)
    return result
def test_remove_entity_headers(self):
    """Entity headers are removed from both plain lists and Headers
    objects; Date survives."""
    now = http.http_date()
    plain = [("Date", now), ("Content-Type", "text/html"),
             ("Content-Length", "0")]
    wrapped = datastructures.Headers(plain)
    http.remove_entity_headers(plain)
    assert plain == [("Date", now)]
    http.remove_entity_headers(wrapped)
    self.assert_equal(wrapped, datastructures.Headers([(u"Date", now)]))
def default(self, o):
    """JSON fallbacks: undefined -> None, datetime -> HTTP date,
    UUID -> str, markup objects -> unicode of __html__()."""
    if is_undefined(o):
        result = None
    elif isinstance(o, datetime):
        result = http_date(o)
    elif isinstance(o, uuid.UUID):
        result = str(o)
    elif hasattr(o, "__html__"):
        result = unicode(o.__html__())
    else:
        result = json.JSONEncoder.default(self, o)
    return result
def get_response(self, environ=None):
    """Build the response, attaching ETag/Last-Modified when known."""
    response = super(SameContentException, self).get_response(
        environ=environ
    )
    etag, last_modified = self.etag, self.last_modified
    if etag is not None:
        response.set_etag(etag)
    if last_modified is not None:
        response.headers['Last-Modified'] = http_date(last_modified)
    return response
def cache():
    """Returns a 304 if an If-Modified-Since header or If-None-Match is present. Returns the same as a GET otherwise."""
    is_conditional = (request.headers.get('If-Modified-Since')
                      or request.headers.get('If-None-Match'))
    # Conditional requests are answered 304 without a body.
    if is_conditional is not None:
        return status_code(304)
    response = view_get()
    response.headers['Last-Modified'] = http_date()
    response.headers['ETag'] = uuid.uuid4().hex
    return response
def get(self, model_id):
    """Fetch a single instance, honoring conditional requests."""
    instance = self.model.find(self.get_id(model_id))
    self.check_get(instance)
    modified = self.get_last_modified(instance)
    headers = {'Last-Modified': http_date(modified)}
    if is_resource_modified(request.environ, last_modified=modified):
        return self.serialize_list(self.model, [instance]), 200, headers
    return 'Not modified', 304, headers
def test_static_file_default_expires(self):
    """The Expires header reflects the configured '2d 3h' expiration."""
    response = self.client.get('/favicon.ico')
    self.assertEqual(response.status_code, httplib.OK)
    with open(static_path('test_statics/favicon.ico')) as f:
        self.assertEqual(response.data, f.read())
    expired_time = FAKE_CURRENT_TIME + datetime.timedelta(
        seconds=appinfo.ParseExpiration('2d 3h'))
    self.assertEqual(response.headers['Expires'],
                     http.http_date(expired_time))
def retry_after(self, value: t.Optional[t.Union[datetime, int, str]]) -> None:
    """Set the Retry-After header; ``None`` clears it."""
    if value is None:
        if "retry-after" in self.headers:
            del self.headers["retry-after"]
        return
    # datetimes become RFC 1123 HTTP dates; ints/strings pass through.
    header_value = http_date(value) if isinstance(value, datetime) else str(value)
    self.headers["Retry-After"] = header_value
def test_jsonify_date_types(self, app, client):
    """Test jsonify with datetime.date and datetime.datetime types."""
    test_dates = (
        datetime.datetime(1973, 3, 11, 6, 30, 45),
        datetime.date(1975, 1, 5),
    )
    for index, value in enumerate(test_dates):
        url = '/datetest{0}'.format(index)
        # bind the current value as a default argument
        app.add_url_rule(url, str(index), lambda val=value: flask.jsonify(x=val))
        rv = client.get(url)
        assert rv.mimetype == 'application/json'
        assert flask.json.loads(rv.data)['x'] == http_date(value.timetuple())
def test_remove_entity_headers(self):
    """Only the Date header remains after remove_entity_headers."""
    now = http.http_date()
    as_list = [('Date', now), ('Content-Type', 'text/html'),
               ('Content-Length', '0')]
    as_headers = datastructures.Headers(as_list)
    http.remove_entity_headers(as_list)
    assert as_list == [('Date', now)]
    http.remove_entity_headers(as_headers)
    assert as_headers == datastructures.Headers([('Date', now)])
def application(environ, start_response):
    """
    Small WSGI application as helper for development.
    Always send the index.html without any caching.

    This is only used for Crossbar's built-in web service
    (WSGI Host Service) to serve the webclient.
    """
    index_path = os.path.join(os.path.dirname(__file__), 'index.html')
    fh = open(index_path, 'rb')
    mtime = datetime.utcfromtimestamp(os.path.getmtime(index_path))
    size = int(os.path.getsize(index_path))
    start_response('200 OK', [
        ('Date', http_date()),
        ('Cache-Control', 'public'),
        ('Content-Type', 'text/html'),
        ('Content-Length', str(size)),
        ('Last-Modified', http_date(mtime)),
    ])
    # wrap_file takes ownership of fh and closes it after streaming.
    return wrap_file(environ, fh)
def _tag(value):
    """Recursively convert *value* into a JSON-safe tagged structure.

    Tagged wrappers are single-key dicts whose key begins with a space
    (' t', ' m', ' d') to avoid clashing with real dict keys. Branch
    order matters (tuple before list).
    """
    if isinstance(value, tuple):
        return {' t': [_tag(x) for x in value]}
    elif callable(getattr(value, '__html__', None)):
        # Markup-like objects serialize via their __html__() text.
        return {' m': unicode(value.__html__())}
    elif isinstance(value, list):
        return [_tag(x) for x in value]
    elif isinstance(value, datetime):
        return {' d': http_date(value)}
    elif isinstance(value, dict):
        # Python 2 idiom (iteritems/unicode); keys are kept as-is.
        return dict((k, _tag(v)) for k, v in value.iteritems())
    return value
def to_dict(self, summary=False):
    """Serialize the entry; ``summary=True`` omits the body text."""
    rv = {
        'source': self.source,
        'source_url': self.source_url,
        'date': http_date(self.pub_date),
        'developer': self.developer.to_dict(summary=True),
    }
    if summary:
        return rv
    rv['html_text'] = unicode(self.html_text)
    if self.source == 'twitter':
        rv['twitter_text'] = self.text
    return rv
def default(self, o):
    """JSON serialization fallbacks for mapping-like objects, datetimes,
    dates, UUIDs and dataclasses.

    :raises ServerError: for any unhandled type.
    """
    if hasattr(o, 'keys') and hasattr(o, '__getitem__'):
        # mapping-like duck type: serialize as a plain dict
        return dict(o)
    # Bug fix: ``datetime`` is a subclass of ``date``, so the datetime
    # check must come first; previously datetimes hit the date branch
    # and silently lost their time component.
    if isinstance(o, datetime):
        return http_date(o.utctimetuple())
    if isinstance(o, date):
        return o.strftime('%Y-%m-%d')
    if isinstance(o, uuid.UUID):
        return str(o)
    if dataclasses and dataclasses.is_dataclass(o):
        return dataclasses.asdict(o)
    raise ServerError()
def __init__(self):
    # Ordered list of (de)serialization rules for session values.
    # Each entry provides:
    #   check -- predicate deciding whether the rule applies
    #   tag   -- convert the value into a JSON-safe representation
    #   untag -- inverse of ``tag`` (absent for containers, which are
    #            handled recursively in place)
    #   key   -- tag key, prefixed with a space so it cannot collide
    #            with ordinary string keys
    # Order matters: e.g. the tuple rule must run before the list rule,
    # and the special-dict rule before the generic dict rule.
    self.conversions = [
        {
            'check': lambda value: self._is_dict_with_used_key(value),
            'tag': lambda value: self._tag_dict_used_with_key(value),
            'untag': lambda value: self._untag_dict_used_with_key(value),
            'key': ' di',
        },
        {
            'check': lambda value: isinstance(value, tuple),
            'tag': lambda value: [self._tag(x) for x in value],
            'untag': lambda value: tuple(value),
            'key': ' t',
        },
        {
            'check': lambda value: isinstance(value, uuid.UUID),
            'tag': lambda value: value.hex,
            'untag': lambda value: uuid.UUID(value),
            'key': ' u',
        },
        {
            'check': lambda value: isinstance(value, bytes),
            'tag': lambda value: b64encode(value).decode('ascii'),
            'untag': lambda value: b64decode(value),
            'key': ' b',
        },
        {
            'check': lambda value: callable(getattr(value, '__html__', None)),
            'tag': lambda value: text_type(value.__html__()),
            'untag': lambda value: Markup(value),
            'key': ' m',
        },
        {
            # plain lists: tagged recursively, no wrapper key
            'check': lambda value: isinstance(value, list),
            'tag': lambda value: [self._tag(x) for x in value],
        },
        {
            'check': lambda value: isinstance(value, datetime),
            'tag': lambda value: http_date(value),
            'untag': lambda value: parse_date(value),
            'key': ' d',
        },
        {
            # plain dicts: values tagged recursively, no wrapper key
            'check': lambda value: isinstance(value, dict),
            'tag': lambda value: dict((k, self._tag(v)) for k, v in iteritems(value)),
        },
        {
            'check': lambda value: isinstance(value, str),
            'tag': lambda value: self._tag_string(value),
        }
    ]
def handle_subarchive_path(archivefile, subarchivepath, mimetype=None,
                           list_directory=True):
    """Show content of a path in a zip file.

    :param archivefile: filesystem path of the ZIP archive
    :param subarchivepath: path of the member inside the archive
    :param mimetype: explicit mimetype for the response, if any
    :param list_directory: whether a missing member may be served as a
        directory listing instead of a 404
    """
    if not os.access(archivefile, os.R_OK):
        return http_error(
            403, "You do not have permission to access this file.")

    try:
        zip = zipfile.ZipFile(archivefile)
    # Bug fix: was a bare ``except:``, which also caught SystemExit and
    # KeyboardInterrupt. Corrupt/unreadable archives raise ordinary
    # exceptions (zipfile.BadZipFile, OSError), covered by Exception.
    except Exception:
        return http_error(500, "Unable to open the ZIP file.")

    try:
        # KeyError is raised if subarchivepath does not exist
        info = zip.getinfo(subarchivepath)
    except KeyError:
        # subarchivepath does not exist
        # possibility a missing directory entry?
        if not list_directory:
            zip.close()
            return http_error(404)
        return handle_zip_directory_listing(zip, archivefile, subarchivepath)

    fh = zip.open(info, 'r')

    # ZIP stores a local date_time tuple; convert to a Unix timestamp.
    lm = info.date_time
    lm = int(
        time.mktime((lm[0], lm[1], lm[2], lm[3], lm[4], lm[5], 0, 0, -1)))
    last_modified = http_date(lm)

    etag = "%s-%s-%s" % (
        lm,
        info.file_size,
        adler32(archivefile.encode("utf-8")) & 0xFFFFFFFF,
    )

    headers = {
        'Accept-Ranges': 'bytes',
        'Cache-Control': 'no-cache',
        'Last-Modified': last_modified,
        'ETag': etag,
    }

    response = Response(fh, headers=headers, mimetype=mimetype)
    response.make_conditional(request.environ, accept_ranges=True,
                              complete_length=info.file_size)
    return response
def cache(request):
    """Returns a 304 if an If-Modified-Since header or If-None-Match is present. Returns the same as a GET otherwise."""
    has_validator = ("If-Modified-Since" in request.headers
                     or "If-None-Match" in request.headers)
    if not has_validator:
        response = view_method(request, 'get')
        response.headers['Last-Modified'] = http_date()
        response.headers['ETag'] = uuid.uuid4().hex
        return response
    return status_code(304)
def handle_zip_directory_listing(zip, archivefile, subarchivepath): """List contents in a directory. """ # ensure directory has trailing '/' if not request.path.endswith('/'): parts = urlsplit(request.url) new_parts = (parts[0], parts[1], parts[2] + '/', parts[3], parts[4]) new_url = urlunsplit(new_parts) return redirect(new_url) stats = os.lstat(archivefile) last_modified = http_date(stats.st_mtime) etag = "%s-%s-%s" % ( stats.st_mtime, stats.st_size, adler32(archivefile.encode("utf-8")) & 0xFFFFFFFF, ) headers = { 'Cache-Control': 'no-cache', } if not is_resource_modified( request.environ, etag=etag, last_modified=last_modified): return http_response(status=304, headers=headers) headers.update({ 'Last-Modified': last_modified, 'ETag': etag, }) subentries = util.zip_listdir(zip, subarchivepath) try: body = render_template( 'index.html', sitename=runtime['name'], is_local=is_local_access(), base=request.script_root, path=request.path, subarchivepath=subarchivepath, subentries=subentries, ) zip.close() return http_response(body, headers=headers) except util.ZipDirNotFoundError: # zip may not close automatically in such case # (due to a raise in a generator?) zip.close() return http_error(404, "File does not exist.")
def __inject_headers(self, response):
    """Attach rate-limit headers (limit/remaining/reset/retry-after) to
    the outgoing response, falling back to in-memory storage when the
    rate-limit storage is unreachable.
    """
    self.__check_conditional_deductions(response)
    current_limit = getattr(g, 'view_rate_limit', None)
    if self.enabled and self._headers_enabled and current_limit:
        try:
            window_stats = self.limiter.get_window_stats(*current_limit)
            reset_in = 1 + window_stats[0]
            response.headers.add(self._header_mapping[HEADERS.LIMIT],
                                 str(current_limit[0].amount))
            response.headers.add(self._header_mapping[HEADERS.REMAINING],
                                 window_stats[1])
            response.headers.add(self._header_mapping[HEADERS.RESET],
                                 reset_in)

            # response may have an existing retry after
            existing_retry_after_header = response.headers.get(
                'Retry-After')

            if existing_retry_after_header is not None:
                # might be in http-date format
                retry_after = parse_date(existing_retry_after_header)

                # parse_date failure returns None
                if retry_after is None:
                    retry_after = time.time() + int(
                        existing_retry_after_header)

                if isinstance(retry_after, datetime.datetime):
                    retry_after = time.mktime(retry_after.timetuple())

                # keep the later (stricter) of the two values
                reset_in = max(retry_after, reset_in)

            # set the header instead of using add
            response.headers.set(
                self._header_mapping[HEADERS.RETRY_AFTER],
                self._retry_after == 'http-date' and http_date(reset_in)
                or int(reset_in - time.time()))
        except:  # noqa: E722
            # Broad on purpose: any storage failure triggers the
            # in-memory fallback (retrying header injection once) or is
            # swallowed/re-raised per configuration.
            if self._in_memory_fallback_enabled and not self._storage_dead:
                self.logger.warning(
                    "Rate limit storage unreachable - falling back to"
                    " in-memory storage")
                self._storage_dead = True
                response = self.__inject_headers(response)
            else:
                if self._swallow_errors:
                    self.logger.exception(
                        "Failed to update rate limit headers. "
                        "Swallowing error")
                else:
                    six.reraise(*sys.exc_info())
    return response
def test_jsonifytypes(app, client):
    """Test jsonify with datetime.date and datetime.datetime types."""
    samples = (
        datetime.datetime(1973, 3, 11, 6, 30, 45),
        datetime.date(1975, 1, 5),
    )
    for index, value in enumerate(samples):
        url = f"/datetest{index}"
        # Bind `value` as a default argument so each view keeps its own date.
        app.add_url_rule(url, str(index), lambda val=value: flask.jsonify(x=val))
        rv = client.get(url)
        assert rv.mimetype == "application/json"
        assert flask.json.loads(rv.data)["x"] == http_date(value.timetuple())
def download(request, oid, name=None):
    """Serve the stored object ``oid`` as an HTTP response.

    Supports conditional requests via the object's hash as ETag: a
    matching ``If-None-Match`` yields a 304 with entity headers removed,
    otherwise the full content is sent with public-cache headers.

    :param request: incoming request (only ``environ`` is read).
    :param oid: object identifier resolved via ``Object.by_oid``.
    :param name: optional expected file name; asserted to equal the
        object's canonical name (plus extension) when given.
    """
    obj = Object.by_oid(oid)
    # Fix: the original built a full-content Response up front and then
    # rebuilt it in both branches below — wasted work, now removed.
    if name:
        n = obj.name
        if obj.mime.ext:
            n += "." + obj.mime.ext
        assert n == name
    if parse_etags(request.environ.get('HTTP_IF_NONE_MATCH')).contains(
            obj.hash):
        # Client already holds this exact content: 304, no entity headers.
        r = Response("", mimetype=obj.mimetype)
        r.status_code = 304
        remove_entity_headers(r.headers)
    else:
        r = Response(obj.content, mimetype=obj.mimetype)
        r.set_etag(obj.hash)
        r.headers['Cache-Control'] = 'public'
        # Fix: the standard HTTP header is "Expires"; "Expiry" is ignored
        # by clients and caches.
        r.headers['Expires'] = http_date(datetime.utcnow() + timedelta(999))
        r.headers['Last-Modified'] = http_date(obj.timestamp)
    return r
def test_no_cache_conditional_default():
    """A conditional send_file defaults to no-cache and answers an
    If-Modified-Since newer than the resource with a 304."""
    environ = EnvironBuilder(
        headers={"If-Modified-Since": http_date(datetime.datetime(2020, 7, 12))}
    ).get_environ()
    rv = send_file(
        txt_path,
        environ,
        last_modified=datetime.datetime(2020, 7, 11),
    )
    rv.close()
    assert rv.status_code == 304
    assert "no-cache" in rv.headers["Cache-Control"]
    assert not rv.cache_control.public
    assert not rv.cache_control.max_age
    assert not rv.expires
def test_group_server_time_limits(client, auth, test_admin, test_group):
    """The per-group server time-limits endpoint reports the configured
    token life and an expiry one year from today."""
    uid, uname, upass = test_admin
    response, csrf_cookie = auth.login(uname, upass)

    url = f"/admin/groups/{test_group.id}/servers/1/time_limits"
    response = client.get(url, headers={"X-CSRF-Token": csrf_cookie})
    assert response.status_code == 200

    payload = response.get_json()
    assert payload["longest_token_life"] == 2
    expected_expiry = http_date(
        (datetime.datetime.now().date() + datetime.timedelta(days=365)).timetuple()
    )
    assert payload["latest_token_expiry"] == expected_expiry
def test_remove_entity_headers(self):
    """remove_entity_headers strips entity headers from both a plain
    list of pairs and a Headers instance, keeping general headers."""
    now = http.http_date()
    as_list = [
        ("Date", now),
        ("Content-Type", "text/html"),
        ("Content-Length", "0"),
    ]
    as_headers = datastructures.Headers(as_list)

    http.remove_entity_headers(as_list)
    assert as_list == [("Date", now)]

    http.remove_entity_headers(as_headers)
    assert as_headers == datastructures.Headers([("Date", now)])
def decorated_function(*args, **kwargs):
    """Call the wrapped view and stamp a Last-Modified header on its
    response.

    ``date`` and ``f`` come from the enclosing decorator; ``date`` may be
    a plain value or a callable taking the response, and non-string
    results are formatted with ``http_date``. An existing Last-Modified
    header is left untouched.
    """
    rv = f(*args, **kwargs)
    # Fix: use the idiomatic `not in` membership test instead of
    # `not "..." in ...`.
    if "Last-Modified" not in rv.headers:
        result = date
        if callable(result):
            result = result(rv)
        if not isinstance(result, basestring):
            from werkzeug.http import http_date
            result = http_date(result)
        if result:
            rv.headers["Last-Modified"] = result
    return rv
def test_is_resource_modified_for_range_requests(self):
    """If-Range must only take effect when a Range header is present."""
    env = create_environ()
    env["HTTP_IF_MODIFIED_SINCE"] = http.http_date(datetime(2008, 1, 1, 12, 30))
    env["HTTP_IF_RANGE"] = http.generate_etag(b"awesome_if_range")

    # Range header not present, so If-Range should be ignored
    assert not http.is_resource_modified(
        env,
        data=b"not_the_same",
        ignore_if_range=False,
        last_modified=datetime(2008, 1, 1, 12, 30),
    )

    # With a Range header, the If-Range etag is compared against the data.
    env["HTTP_RANGE"] = ""
    assert not http.is_resource_modified(
        env, data=b"awesome_if_range", ignore_if_range=False
    )
    assert http.is_resource_modified(env, data=b"not_the_same", ignore_if_range=False)

    # A date-valued If-Range is compared against last_modified.
    env["HTTP_IF_RANGE"] = http.http_date(datetime(2008, 1, 1, 13, 30))
    assert http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 14, 00), ignore_if_range=False
    )
    assert not http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 13, 30), ignore_if_range=False
    )
    assert http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 13, 30), ignore_if_range=True
    )
def get_attachment_thumbnail(pagename, filename, size=80):
    """Serve a ``size``x``size`` thumbnail of an image attachment.

    Returns 404 when the attachment does not exist. Sets Date,
    Expires and Last-Modified headers from the storage metadata.

    :param pagename: wiki page the attachment belongs to.
    :param filename: attachment file name.
    :param size: bounding-box edge length in pixels (default 80).
    """
    fn = get_attachment_filename(pagename, filename)
    ffn = get_attachment_full_filename(pagename, filename)
    if not storage.exists(fn):
        abort(404)

    # TODO fetch from cache
    data = None

    from timeit import default_timer as timer
    if data is None:
        t_start = timer()
        mimetype, encoding = mimetypes.guess_type(fn)
        if mimetype.startswith('image'):
            # read image
            image = Image.open(BytesIO(storage.load(fn, mode='rb')))
            # create thumbnail (in place, preserving aspect ratio)
            image.thumbnail((size, size), resample=Image.ANTIALIAS)
            options = {
                'format': image.format,
                'quality': 80,
            }
            data = BytesIO()
            image.save(data, **options)
            data.seek(0)
        app.logger.info(
            "Thumbnail generation took {:.3f} seconds.".format(timer() - t_start))

    response = make_response(send_file(data, mimetype=mimetype))
    metadata = storage.metadata(fn)

    # set header, caching, etc
    modified = metadata['datetime']
    expires = modified + timedelta(hours=1)
    response.headers['Date'] = http_date(modified.utctimetuple())
    # Fix: the Expires header previously reused the modification time,
    # making the response expire immediately; use the computed one-hour
    # expiry instead.
    response.headers['Expires'] = http_date(expires.utctimetuple())
    response.headers['Last-Modified'] = http_date(modified.utctimetuple())
    return response
def get_info(self, request, ident, base_uri):
    """Serve the IIIF info.json for ``ident``.

    Honours If-Modified-Since (304), optional JSONP via the ``callback``
    query argument, and content negotiation for application/ld+json.

    Fix: the original returned ``r`` from a ``finally`` block, which
    overrode the ``NotFoundResponse`` / ``ServerSideErrorResponse``
    returns from the ``except`` clauses (and silently swallowed any
    unexpected exception). The success-path return now happens at the
    end of the function instead.
    """
    r = LorisResponse()
    r.set_acao(request, self.cors_regex)
    try:
        info, last_mod = self._get_info(ident, request, base_uri)
    except ResolverException as exc:  # renamed: "re" shadowed the re module
        return NotFoundResponse(exc.message)
    except ImageInfoException as exc:
        return ServerSideErrorResponse(exc.message)
    except IOError as exc:
        # 500
        msg = '%s \n(This is likely a permissions problem)' % (str(exc), )
        return ServerSideErrorResponse(msg)

    ims_hdr = request.headers.get('If-Modified-Since')
    ims = parse_date(ims_hdr)
    # Round-trip through http_date so both sides have second precision;
    # see note under get_img.
    last_mod = parse_date(http_date(last_mod))
    if ims and ims >= last_mod:
        logger.debug('Sent 304 for %s ' % (ident, ))
        r.status_code = 304
    else:
        if last_mod:
            r.last_modified = last_mod
        # r.automatically_set_content_length
        callback = request.args.get('callback', None)
        if callback:
            # JSONP: wrap the JSON body in the requested callback.
            r.mimetype = 'application/javascript'
            r.data = '%s(%s);' % (callback, info.to_json())
        else:
            if request.headers.get('accept') == 'application/ld+json':
                r.content_type = 'application/ld+json'
            else:
                r.content_type = 'application/json'
            link = '<http://iiif.io/api/image/2/context.json>;rel="http://www.w3.org/ns/json-ld#context";type="application/ld+json"'
            r.headers['Link'] = '%s,%s' % (r.headers['Link'], link)
            # If interpolation is not allowed, we have to remove this
            # value from info.json - but only if exists (cached ImageInfo
            # might miss this)
            if self.max_size_above_full <= 100:
                try:
                    info.profile[1]['supports'].remove('sizeAboveFull')
                except ValueError:
                    pass
            r.data = info.to_json()
    return r
def default(self, o: t.Any) -> t.Any:
    """Convert ``o`` to a JSON serializable type, deferring to
    :meth:`json.JSONEncoder.default` (which raises ``TypeError``) for
    anything unrecognised.

    Dates become HTTP date strings, decimals and UUIDs become strings,
    dataclasses become dicts, and objects exposing ``__html__`` are
    rendered to their markup string. Basic types like ``str`` or
    ``list`` never reach this method; Python serializes them first.
    """
    if isinstance(o, date):
        return http_date(o)
    if isinstance(o, decimal.Decimal) or isinstance(o, uuid.UUID):
        return str(o)
    # `dataclasses` may be None on interpreters without the module.
    if dataclasses is not None and dataclasses.is_dataclass(o):
        return dataclasses.asdict(o)
    markup = getattr(o, "__html__", None)
    if markup is not None:
        return str(markup())
    return super().default(o)
def __inject_headers(self, response):
    """Add rate-limit headers (limit, remaining, reset, retry-after) to
    ``response`` when limiting is enabled and a limit was recorded for
    this view on ``g``."""
    limit_info = getattr(g, 'view_rate_limit', None)
    if not (self.enabled and self._headers_enabled and limit_info):
        return response

    stats = self.limiter.get_window_stats(*limit_info)
    reset_at = stats[0]       # epoch second the window resets at
    remaining = stats[1]      # requests left in the current window

    add_header = response.headers.add
    add_header(self._header_mapping[HEADERS.LIMIT], str(limit_info[0].amount))
    add_header(self._header_mapping[HEADERS.REMAINING], remaining)
    add_header(self._header_mapping[HEADERS.RESET], reset_at)

    # Retry-After is either an absolute http-date or delta-seconds,
    # depending on configuration.
    if self._retry_after == 'http-date':
        retry_value = http_date(reset_at)
    else:
        retry_value = int(reset_at - time.time())
    add_header(self._header_mapping[HEADERS.RETRY_AFTER], retry_value)
    return response
def ep_file(self, request, _urls, path=''): """Read a file via WSGI""" # Check for unmodified requests etag = ('%s-%08x' % (self.etag, zlib.adler32(path.encode()))) headers = [ ('Date', http_date()), ('Etag', ('"%s"' % etag)), ('Cache-Control', ('max-age=%d, public' % CACHE_MAX_AGE)), ] if not is_resource_modified(request.environ, etag, last_modified=self.mtime): return BaseResponse(status=304, headers=headers) # Check for nonexistent files and for directories isostat = self.isostat(path) if isostat is None: raise NotFound() if isostat['is_dir']: raise Forbidden() # Construct file-like object start = (isostat['LSN'] * pycdio.ISO_BLOCKSIZE) size = isostat['size'] filelike = IsoFile(self.fh, start, size, self.lock) wrapped = wrap_file(request.environ, filelike) # Construct WSGI response mimetype = (mimetypes.guess_type(path)[0] or 'text/plain') headers.extend(( ('Content-Length', str(size)), ('Content-Type', mimetype), ('Last-Modified', http_date(self.mtime)), ('Expires', http_date(time() + CACHE_MAX_AGE)), )) return BaseResponse(wrapped, headers=headers, direct_passthrough=True)
def default(self, o):
    """Return a JSON-serializable representation of ``o``, or defer to
    the base implementation (which raises :exc:`TypeError`).

    datetimes and dates become HTTP date strings, UUIDs become plain
    strings, and objects exposing ``__html__`` are rendered to their
    markup text.

    To support arbitrary iterators, for example, you could implement
    ``default`` like this::

        def default(self, o):
            try:
                iterable = iter(o)
            except TypeError:
                pass
            else:
                return list(iterable)
            return JSONEncoder.default(self, o)
    """
    # datetime must be tested before date: datetime is a date subclass.
    if isinstance(o, datetime):
        return http_date(o.utctimetuple())
    elif isinstance(o, date):
        return http_date(o.timetuple())
    elif isinstance(o, uuid.UUID):
        return str(o)
    markup = getattr(o, '__html__', None)
    if markup is not None:
        return text_type(markup())
    return _json.JSONEncoder.default(self, o)
def test_get_server_time_limits(client, auth, app):
    """The admin server time-limits endpoint reports the configured token
    life and an expiry one year from today."""
    # Log in first
    response, csrf_cookie = auth.login("TEST_ADMIN", "DUMMY_PASSWORD")
    response = client.get(
        "/admin/servers/1/time_limits", headers={"X-CSRF-Token": csrf_cookie}
    )
    assert response.status_code == 200  # Should get an OK
    expiry_date = datetime.datetime.now().date() + datetime.timedelta(days=365)
    expected = {
        "longest_token_life": 2,
        "latest_token_expiry": http_date(expiry_date.timetuple()),
    }
    assert response.get_json() == expected
def _compute_etag_for_i18n(locale, domain, files=None, lastmodified=None):
    """Build a stable ETag for a locale/domain translation bundle.

    The tag is the SHA-1 of the sorted file list plus (when known) the
    HTTP-formatted last-modification date, so it changes whenever the
    set of translation files or their freshness changes.

    :param locale: locale identifier, used to discover files when
        ``files`` is not given.
    :param domain: translation domain, likewise used for discovery.
    :param files: optional explicit list of translation file paths.
    :param lastmodified: optional modification timestamp (date or string).
    :return: hex SHA-1 digest string.
    """
    if files is None:
        files = _get_all_translationfiles(locale, domain)
    if lastmodified is None:
        lastmodified = _compute_date(files)
    if lastmodified and not isinstance(lastmodified, basestring):
        from werkzeug.http import http_date
        lastmodified = http_date(lastmodified)
    import hashlib
    # Fix: the accumulator was named "hash", shadowing the builtin.
    digest = hashlib.sha1()
    digest.update(",".join(sorted(files)))
    if lastmodified:
        digest.update(lastmodified)
    return digest.hexdigest()