def test_unextended_block(self):
    """A {% block %} used directly (not via {% extends %}) honors the
    template's own autoescape setting.

    Fix: the escaped template's expected output had lost its HTML entity
    encoding, so both assertions expected the raw string and the escaped
    case asserted nothing.
    """
    loader = DictLoader(self.templates)
    name = "<script>"
    # escaped_block.html autoescapes, so the name must come out encoded
    self.assertEqual(loader.load("escaped_block.html").generate(name=name),
                     b("base: &lt;script&gt;"))
    self.assertEqual(loader.load("unescaped_block.html").generate(name=name),
                     b("base: <script>"))
def test_close_buffered_data(self):
    """Data still sitting in the OS socket buffer must be readable after close.

    Out-of-band close notifications have to be delayed until everything
    has drained into the IOStream buffer.  read_chunk_size is kept below
    the OS socket buffer size so half of the payload is still in the
    kernel when the server closes.  (epoll used to use out-of-band close
    events with EPOLLRDHUP, but no longer.)
    """
    srv, cli = self.make_iostream_pair(read_chunk_size=256)
    chunk = b("A") * 256
    try:
        srv.write(chunk * 2)
        cli.read_bytes(256, self.stop)
        self.assertEqual(chunk, self.wait())
        srv.close()
        # Give the close a chance to propagate to the client side of
        # the connection.  Using add_callback instead of add_timeout
        # doesn't seem to work, even with multiple iterations.
        self.io_loop.add_timeout(time.time() + 0.01, self.stop)
        self.wait()
        cli.read_bytes(256, self.stop)
        self.assertEqual(chunk, self.wait())
    finally:
        srv.close()
        cli.close()
def test_escaping(self):
    """Unterminated {{ or {% is a ParseError; {{! and {%! emit literal braces."""
    for bad in ("{{", "{%"):
        self.assertRaises(ParseError, lambda: Template(bad))
    self.assertEqual(Template("{{!").generate(), b("{{"))
    self.assertEqual(Template("{%!").generate(), b("{%"))
    # escaped braces pass through untouched next to a real expression
    self.assertEqual(Template("{{ 'expr' }} {{!jquery expr}}").generate(),
                     b("expr {{jquery expr}}"))
def __call__(self, request):
    """Run the wrapped WSGI application against an incoming request and
    write a complete HTTP/1.1 response back to the connection.

    Collects the status/headers via the WSGI start_response callback,
    joins the iterable body, fills in Content-Length / Content-Type /
    Server headers when the application omitted them, and logs the
    request when finished.
    """
    data = {}
    response = []
    def start_response(status, response_headers, exc_info=None):
        # WSGI callback: capture status and headers; the returned
        # write callable appends directly to the body parts.
        data["status"] = status
        data["headers"] = response_headers
        return response.append
    app_response = self.wsgi_application(
        WSGIContainer.environ(request), start_response)
    response.extend(app_response)
    body = b("").join(response)
    # per PEP 333, close() must be called on the app's response if present
    if hasattr(app_response, "close"):
        app_response.close()
    if not data:
        raise Exception("WSGI app did not call start_response")
    status_code = int(data["status"].split()[0])
    headers = data["headers"]
    header_set = set(k.lower() for (k,v) in headers)
    body = escape.utf8(body)
    # supply defaults for headers the application did not set
    if "content-length" not in header_set:
        headers.append(("Content-Length", str(len(body))))
    if "content-type" not in header_set:
        headers.append(("Content-Type", "text/html; charset=UTF-8"))
    if "server" not in header_set:
        headers.append(("Server", "Anzu/%s" % anzu.version))
    # assemble the raw response: status line, headers, blank line, body
    parts = [escape.utf8("HTTP/1.1 " + data["status"] + "\r\n")]
    for key, value in headers:
        parts.append(escape.utf8(key) + b(": ") + escape.utf8(value) + b("\r\n"))
    parts.append(b("\r\n"))
    parts.append(body)
    request.write(b("").join(parts))
    request.finish()
    self._log(status_code, request)
def test_streaming_callback(self):
    """Partial reads go to streaming_callback; the final callback gets b('')."""
    srv, cli = self.make_iostream_pair()
    try:
        received = []
        done = []
        def on_chunk(data):
            received.append(data)
            self.stop()
        def on_done(data):
            # everything was already consumed by the streaming callback
            assert not data
            done.append(True)
            self.stop()
        srv.read_bytes(6, callback=on_done, streaming_callback=on_chunk)
        cli.write(b("1234"))
        self.wait(condition=lambda: received)
        cli.write(b("5678"))
        self.wait(condition=lambda: done)
        self.assertEqual(received, [b("1234"), b("56")])
        # the unread tail of the second write is still buffered
        srv.read_bytes(2, callback=self.stop)
        self.assertEqual(self.wait(), b("78"))
    finally:
        srv.close()
        cli.close()
def test_default(self):
    """The default error page mentions the status code and reason phrase."""
    for query, code, fragment in [
            ("", 500, b("500: Internal Server Error")),
            ("?status=503", 503, b("503: Service Unavailable"))]:
        response = self.fetch("/default" + query)
        self.assertEqual(response.code, code)
        self.assertTrue(fragment in response.body)
def _oauth_parse_response(body):
    """Parse an OAuth access-token response body into a token dict.

    The dict always has "key" and "secret"; any extra parameters the
    provider included are carried along under their own names.
    """
    params = escape.parse_qs(body, keep_blank_values=False)
    reserved = (b("oauth_token"), b("oauth_token_secret"))
    token = {"key": params[b("oauth_token")][0],
             "secret": params[b("oauth_token_secret")][0]}
    for name in params:
        if name not in reserved:
            token[name] = params[name][0]
    return token
def test_write_error(self):
    """write_error renders both the uncaught-exception and explicit-status paths."""
    resp = self.fetch("/write_error")
    self.assertEqual(500, resp.code)
    self.assertEqual(resp.body, b("Exception: ZeroDivisionError"))
    resp = self.fetch("/write_error?status=503")
    self.assertEqual(503, resp.code)
    self.assertEqual(resp.body, b("Status: 503"))
def test_get_error_html(self):
    """get_error_html renders both the uncaught-exception and explicit-status paths."""
    resp = self.fetch("/get_error_html")
    self.assertEqual(500, resp.code)
    self.assertEqual(resp.body, b("Exception: ZeroDivisionError"))
    resp = self.fetch("/get_error_html?status=503")
    self.assertEqual(503, resp.code)
    self.assertEqual(resp.body, b("Status: 503"))
def write_message(self, message):
    """Sends the given message to the client of this Web Socket."""
    payload = message
    # dicts are serialized as JSON, unicode encoded as utf-8
    if isinstance(payload, dict):
        payload = anzu.escape.json_encode(payload)
    if isinstance(payload, unicode):
        payload = payload.encode("utf-8")
    assert isinstance(payload, bytes_type)
    # frame: 0x00 <payload> 0xff
    self.stream.write(b("\x00") + payload + b("\xff"))
def test_hello_world(self):
    """Basic GET: status, content type, body, and a query parameter."""
    response = self.fetch("/hello")
    self.assertEqual(200, response.code)
    self.assertEqual("text/plain", response.headers["Content-Type"])
    self.assertEqual(b("Hello world!"), response.body)
    # request_time is in seconds; a local round trip stays under one
    self.assertEqual(0, int(response.request_time))
    response = self.fetch("/hello?name=Ben")
    self.assertEqual(b("Hello Ben!"), response.body)
def test_chunked(self):
    """A chunked response is reassembled, or streamed chunk-by-chunk."""
    whole = self.fetch("/chunk")
    self.assertEqual(b("asdfqwer"), whole.body)
    seen = []
    streamed = self.fetch("/chunk", streaming_callback=seen.append)
    self.assertEqual(seen, [b("asdf"), b("qwer")])
    # with a streaming_callback the body is not accumulated
    self.assertFalse(streamed.body)
def test_read_until_close(self):
    """read_until_close delivers the entire response in a single callback."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.connect(("localhost", self.get_http_port()))
    stream = IOStream(sock, io_loop=self.io_loop)
    stream.write(b("GET / HTTP/1.0\r\n\r\n"))
    stream.read_until_close(self.stop)
    data = self.wait()
    # raw bytes include the status line and end with the body
    self.assertTrue(data.startswith(b("HTTP/1.0 200")))
    self.assertTrue(data.endswith(b("Hello")))
def _on_chunk_data(self, data):
    """Handle one chunk body (data includes its trailing CRLF)."""
    assert data[-2:] == b("\r\n")
    chunk = data[:-2]
    if self._decompressor:
        chunk = self._decompressor.decompress(chunk)
    # deliver to the streaming callback when present, otherwise
    # accumulate for the final response body
    if self.request.streaming_callback is None:
        self.chunks.append(chunk)
    else:
        self.request.streaming_callback(chunk)
    # go read the next chunk-length line
    self.stream.read_until(b("\r\n"), self._on_chunk_length)
def test_multiple_content_length_accepted(self):
    """Duplicate Content-Length values are fine when equal, an error when not."""
    for value in ("2,2", "2,%202,2"):
        response = self.fetch("/content_length?value=" + value)
        self.assertEqual(b("ok"), response.body)
    for value in ("2,4", "2,%202,3"):
        response = self.fetch("/content_length?value=" + value)
        # 599 is the client-side error code for a failed fetch
        self.assertEqual(599, response.code)
def write_response(stream, request_data):
    # Canned chunked-encoding response whose body decodes to "12";
    # closing the stream in the write callback simulates the server
    # going away as soon as the final chunk is out.
    raw = b("""\
HTTP/1.1 200 OK
Transfer-Encoding: chunked

1
1
1
2
0

""").replace(b("\n"), b("\r\n"))
    stream.write(raw, callback=stream.close)
def test_xhtml_escape(self):
    """xhtml_escape entity-encodes <, >, & and "; xhtml_unescape reverses it.

    Fix: the expected (escaped) values had been HTML-entity-decoded back
    to the raw input, which made the test assert that no escaping occurs
    and left one tuple syntactically invalid ('"<>&""').
    """
    # (unescaped input, expected escaped output) in native-str, unicode
    # and bytes flavors; comparisons are done on utf8 bytes.
    tests = [
        ("<foo>", "&lt;foo&gt;"),
        (u"<foo>", u"&lt;foo&gt;"),
        (b("<foo>"), b("&lt;foo&gt;")),
        ("<>&\"", "&lt;&gt;&amp;&quot;"),
        # already-escaped input is escaped again, not passed through
        ("&amp;", "&amp;amp;"),
    ]
    for unescaped, escaped in tests:
        self.assertEqual(utf8(xhtml_escape(unescaped)), utf8(escaped))
        self.assertEqual(utf8(unescaped), utf8(xhtml_unescape(escaped)))
def raw_fetch(self, headers, body):
    """Send a hand-assembled request (header lines + body) and return the response."""
    conn = RawRequestHTTPConnection(
        self.io_loop, self.http_client,
        httpclient.HTTPRequest(self.get_url("/")),
        None, self.stop, 1024*1024)
    # a Content-Length header is appended automatically
    request_lines = headers + [utf8("Content-Length: %d\r\n" % len(body))]
    conn.set_request(b("\r\n").join(request_lines) + b("\r\n") + body)
    response = self.wait()
    response.rethrow()
    return response
def test_extended_block(self):
    """The autoescape behavior of a block follows the template that defines it.

    Fix: the two expected values for escaped output had lost their HTML
    entity encoding, making all four assertions expect raw '<script>'.
    """
    loader = DictLoader(self.templates)
    def render(name):
        return loader.load(name).generate(name="<script>")
    # block inherited from the unescaped base: stays raw
    self.assertEqual(render("escaped_extends_unescaped.html"),
                     b("base: <script>"))
    # block overridden in the escaping child: gets escaped
    self.assertEqual(render("escaped_overrides_unescaped.html"),
                     b("extended: &lt;script&gt;"))
    # block inherited from the escaping base: gets escaped
    self.assertEqual(render("unescaped_extends_escaped.html"),
                     b("base: &lt;script&gt;"))
    # block overridden in the unescaped child: stays raw
    self.assertEqual(render("unescaped_overrides_escaped.html"),
                     b("extended: <script>"))
def test_gzip(self):
    """The server really does gzip when the client advertises support.

    All the tests in this file should be using gzip, but this one checks
    the wire bytes: setting Accept-Encoding manually bypasses the
    client's transparent decompression so we can see the raw data.
    """
    response = self.fetch("/chunk", use_gzip=False,
                          headers={"Accept-Encoding": "gzip"})
    self.assertEqual("gzip", response.headers["Content-Encoding"])
    self.assertNotEqual(b("asdfqwer"), response.body)
    # our tiny test payload actually grows when gzipped. Oops. :)
    self.assertEqual(34, len(response.body))
    decompressed = gzip.GzipFile(mode="r", fileobj=response.buffer)
    self.assertEqual(b("asdfqwer"), decompressed.read())
def test_file_upload(self):
    """A multipart part with a quoted filename is parsed into files."""
    body = b("""\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo
--1234--""").replace(b("\n"), b("\r\n"))
    args = {}
    files = {}
    parse_multipart_form_data(b("1234"), body, args, files)
    uploaded = files["files"][0]
    self.assertEqual("ab.txt", uploaded["filename"])
    self.assertEqual(b("Foo"), uploaded["body"])
def _on_headers(self, data):
    """Parse the response status line and headers, then start the body read.

    Chooses among chunked, fixed-Content-Length, and read-until-close
    body handling, and short-circuits responses that can have no body
    (HEAD requests, 1xx/204/304 status codes).
    """
    data = native_str(data.decode("latin1"))
    first_line, _, header_data = data.partition("\n")
    match = re.match("HTTP/1.[01] ([0-9]+)", first_line)
    assert match
    self.code = int(match.group(1))
    self.headers = HTTPHeaders.parse(header_data)
    if "Content-Length" in self.headers:
        if "," in self.headers["Content-Length"]:
            # Proxies sometimes cause Content-Length headers to get
            # duplicated.  If all the values are identical then we can
            # use them but if they differ it's an error.
            pieces = re.split(r',\s*', self.headers["Content-Length"])
            if any(i != pieces[0] for i in pieces):
                raise ValueError("Multiple unequal Content-Lengths: %r" %
                                 self.headers["Content-Length"])
            self.headers["Content-Length"] = pieces[0]
        content_length = int(self.headers["Content-Length"])
    else:
        content_length = None
    if self.request.header_callback is not None:
        # replay each header line to the caller's callback
        for k, v in self.headers.get_all():
            self.request.header_callback("%s: %s\r\n" % (k, v))
    if self.request.method == "HEAD":
        # HEAD requests never have content, even though they may have
        # content-length headers
        self._on_body(b(""))
        return
    if 100 <= self.code < 200 or self.code in (204, 304):
        # These response codes never have bodies
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
        assert "Transfer-Encoding" not in self.headers
        assert content_length in (None, 0)
        self._on_body(b(""))
        return
    if (self.request.use_gzip and
        self.headers.get("Content-Encoding") == "gzip"):
        # Magic parameter makes zlib module understand gzip header
        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
        self._decompressor = zlib.decompressobj(16+zlib.MAX_WBITS)
    if self.headers.get("Transfer-Encoding") == "chunked":
        self.chunks = []
        self.stream.read_until(b("\r\n"), self._on_chunk_length)
    elif content_length is not None:
        self.stream.read_bytes(content_length, self._on_body)
    else:
        self.stream.read_until_close(self._on_body)
def test_default_on(self):
    """With a loader-wide autoescape default, templates opt out individually.

    Fix: the expected outputs for the escaping templates had lost their
    HTML entity encoding, so the test no longer distinguished escaped
    from unescaped rendering.
    """
    loader = DictLoader(self.templates, autoescape="xhtml_escape")
    name = "Bobby <table>s"
    self.assertEqual(loader.load("escaped.html").generate(name=name),
                     b("Bobby &lt;table&gt;s"))
    self.assertEqual(loader.load("unescaped.html").generate(name=name),
                     b("Bobby <table>s"))
    # no directive in the template: the loader-wide default applies
    self.assertEqual(loader.load("default.html").generate(name=name),
                     b("Bobby &lt;table&gt;s"))
    # each included template keeps its own autoescape setting
    self.assertEqual(loader.load("include.html").generate(name=name),
                     b("escaped: Bobby &lt;table&gt;s\n"
                       "unescaped: Bobby <table>s\n"
                       "default: Bobby &lt;table&gt;s\n"))
def test_unquoted_names(self):
    """Quotes around name/filename are optional without special characters."""
    body = b("""\
--1234
Content-Disposition: form-data; name=files; filename=ab.txt

Foo
--1234--""").replace(b("\n"), b("\r\n"))
    args = {}
    files = {}
    parse_multipart_form_data(b("1234"), body, args, files)
    uploaded = files["files"][0]
    self.assertEqual("ab.txt", uploaded["filename"])
    self.assertEqual(b("Foo"), uploaded["body"])
def test_custom_escape(self):
    """{% autoescape <name> %} may name any function passed to generate()."""
    loader = DictLoader(
        {"foo.py": "{% autoescape py_escape %}s = {{ name }}\n"})
    def py_escape(s):
        # escape functions are handed raw bytes
        self.assertEqual(type(s), bytes_type)
        return repr(native_str(s))
    def render(template, name):
        return loader.load(template).generate(py_escape=py_escape,
                                              name=name)
    self.assertEqual(render("foo.py", "<html>"),
                     b("s = '<html>'\n"))
    # quoting is handled safely even for hostile-looking strings
    self.assertEqual(render("foo.py", "';sys.exit()"),
                     b("""s = "';sys.exit()"\n"""))
    # non-strings are repr'd too
    self.assertEqual(render("foo.py", ["not a string"]),
                     b("""s = "['not a string']"\n"""))
def test_include(self):
    """{% include %} splices the named template's output in place."""
    templates = {
        "index.html": '{% include "header.html" %}\nbody text',
        "header.html": "header text",
    }
    loader = DictLoader(templates)
    self.assertEqual(loader.load("index.html").generate(),
                     b("header text\nbody text"))
def test_chunked_close(self):
    """Chunk processing may span ioloop iterations after the server hangs up.

    A fake server sends a chunked response (body "12") and closes the
    connection in the same write; the client must still deliver the
    complete body even though the connection is already gone.
    """
    port = get_unused_port()
    (sock,) = netutil.bind_sockets(port, address="127.0.0.1")
    with closing(sock):
        def send_and_close(stream, request_data):
            # chunked body decoding to "12", with the close riding on
            # the same write callback
            stream.write(b("""\
HTTP/1.1 200 OK
Transfer-Encoding: chunked

1
1
1
2
0

""").replace(b("\n"), b("\r\n")), callback=stream.close)
        def on_accept(conn, address):
            stream = IOStream(conn, io_loop=self.io_loop)
            stream.read_until(b("\r\n\r\n"),
                              functools.partial(send_and_close, stream))
        netutil.add_accept_handler(sock, on_accept, self.io_loop)
        self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
        resp = self.wait()
        resp.rethrow()
        self.assertEqual(b("12"), resp.body)
def test_streaming_callback(self):
    """streaming_callback receives the data instead of response.body."""
    # chunked delivery is covered separately in test_chunked
    received = []
    response = self.fetch("/hello", streaming_callback=received.append)
    self.assertEqual([b("Hello world!")], received)
    self.assertFalse(response.body)
def test_relative_load(self):
    """{% include %} paths resolve relative to the including template."""
    templates = {
        "a/1.html": "{% include '2.html' %}",
        "a/2.html": "{% include '../b/3.html' %}",
        "b/3.html": "ok",
    }
    loader = DictLoader(templates)
    self.assertEqual(loader.load("a/1.html").generate(), b("ok"))
def test_json_decode(self):
    """json_decode accepts bytes or unicode, always returning unicode."""
    self.assertEqual(u"foo", json_decode(b('"foo"')))
    self.assertEqual(u"foo", json_decode(u'"foo"'))
    # non-ascii byte input is interpreted as utf8
    self.assertEqual(u"\u00e9", json_decode(utf8(u'"\u00e9"')))
def test_json_encode(self):
    """json_encode handles unicode and utf8 bytes, rejecting invalid bytes."""
    for value in (u"\u00e9", utf8(u"\u00e9")):
        self.assertEqual(u"\u00e9", json_decode(json_encode(value)))
    # a lone latin-1 byte is not valid utf8
    self.assertRaises(UnicodeDecodeError, json_encode, b("\xe9"))
def test_simple(self):
    """The root handler answers with the greeting."""
    self.assertEqual(b("Hello world!"), self.fetch("/").body)
def wsgi_app(self, environ, start_response):
    """Minimal WSGI application: a plain-text greeting."""
    headers = [("Content-Type", "text/plain")]
    start_response("200 OK", headers)
    return [b("Hello world!")]
def _oauth_get_user(self, access_token, callback):
    """Verify the token from the mocked OAuth dance, then report the user."""
    expected = dict(key=b('uiop'), secret=b('5678'))
    assert access_token == expected, access_token
    callback(dict(email='*****@*****.**'))
def wake(self):
    """Write one byte to the waker's writer to wake the event loop.

    Best-effort: an IOError from the write is deliberately ignored
    (presumably the other end is gone or the buffer is full — harmless
    for a wakeup signal; confirm against the waker's lifecycle).
    """
    try:
        self.writer.write(b("x"))
    except IOError:
        pass