def test_multipart_form(self):
    # Encodings here are tricky: Headers are latin1, bodies can be
    # anything (we use utf8 by default).
    response = self.raw_fetch([
        b("POST /multipart HTTP/1.0"),
        b("Content-Type: multipart/form-data; boundary=1234567890"),
        b("X-Header-encoding-test: \xe9"),
    ],
        b("\r\n").join([
            b("Content-Disposition: form-data; name=argument"),
            b(""),
            u"\u00e1".encode("utf-8"),
            b("--1234567890"),
            u'Content-Disposition: form-data; name="files"; filename="\u00f3"'.encode("utf8"),
            b(""),
            u"\u00fa".encode("utf-8"),
            b("--1234567890--"),
            b(""),
        ]))
    data = json_decode(response.body)
    self.assertEqual(u"\u00e9", data["header"])
    self.assertEqual(u"\u00e1", data["argument"])
    self.assertEqual(u"\u00f3", data["filename"])
    self.assertEqual(u"\u00fa", data["filebody"])
def _on_headers(self, data):
    data = native_str(data.decode("latin1"))
    first_line, _, header_data = data.partition("\n")
    match = re.match("HTTP/1.[01] ([0-9]+) ([^\r]*)", first_line)
    assert match
    code = int(match.group(1))
    if 100 <= code < 200:
        self.stream.read_until_regex(b("\r?\n\r?\n"), self._on_headers)
        return
    else:
        self.code = code
        self.reason = match.group(2)
    self.headers = HTTPHeaders.parse(header_data)

    if "Content-Length" in self.headers:
        if "," in self.headers["Content-Length"]:
            # Proxies sometimes cause Content-Length headers to get
            # duplicated.  If all the values are identical then we can
            # use them but if they differ it's an error.
            pieces = re.split(r',\s*', self.headers["Content-Length"])
            if any(i != pieces[0] for i in pieces):
                raise ValueError("Multiple unequal Content-Lengths: %r" %
                                 self.headers["Content-Length"])
            self.headers["Content-Length"] = pieces[0]
        content_length = int(self.headers["Content-Length"])
    else:
        content_length = None

    if self.request.header_callback is not None:
        # re-attach the newline we split on earlier
        self.request.header_callback(first_line + _)
        for k, v in self.headers.get_all():
            self.request.header_callback("%s: %s\r\n" % (k, v))
        self.request.header_callback('\r\n')

    if self.request.method == "HEAD" or self.code == 304:
        # HEAD requests and 304 responses never have content, even
        # though they may have content-length headers
        self._on_body(b(""))
        return
    if 100 <= self.code < 200 or self.code == 204:
        # These response codes never have bodies
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
        if ("Transfer-Encoding" in self.headers or
                content_length not in (None, 0)):
            raise ValueError("Response with code %d should not have body" %
                             self.code)
        self._on_body(b(""))
        return

    if (self.request.use_gzip and
            self.headers.get("Content-Encoding") == "gzip"):
        self._decompressor = GzipDecompressor()
    if self.headers.get("Transfer-Encoding") == "chunked":
        self.chunks = []
        self.stream.read_until(b("\r\n"), self._on_chunk_length)
    elif content_length is not None:
        self.stream.read_bytes(content_length, self._on_body)
    else:
        self.stream.read_until_close(self._on_body)
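The duplicated Content-Length rule above is easy to get wrong, so here is a minimal standalone sketch of just that normalization step (normalize_content_length is a hypothetical helper for illustration, not part of Tornado):

import re

def normalize_content_length(value):
    # Proxies sometimes duplicate the header, e.g. "42, 42".  Identical
    # copies collapse to a single value; unequal copies are an error.
    pieces = re.split(r',\s*', value)
    if any(i != pieces[0] for i in pieces):
        raise ValueError("Multiple unequal Content-Lengths: %r" % value)
    return pieces[0]

assert normalize_content_length("42, 42") == "42"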
def test_get_argument(self):
    response = self.fetch("/get_argument?foo=bar")
    self.assertEqual(response.body, b("bar"))
    response = self.fetch("/get_argument?foo=")
    self.assertEqual(response.body, b(""))
    response = self.fetch("/get_argument")
    self.assertEqual(response.body, b("default"))
def test_two_requests(self):
    self.connect()
    self.stream.write(b("GET / HTTP/1.1\r\n\r\n"))
    self.read_response()
    self.stream.write(b("GET / HTTP/1.1\r\n\r\n"))
    self.read_response()
    self.close()
def test_delayed_close_callback(self):
    # The scenario:  Server closes the connection while there is a pending
    # read that can be served out of buffered data.  The client does not
    # run the close_callback as soon as it detects the close, but rather
    # defers it until after the buffered read has finished.
    server, client = self.make_iostream_pair()
    try:
        client.set_close_callback(self.stop)
        server.write(b("12"))
        chunks = []

        def callback1(data):
            chunks.append(data)
            client.read_bytes(1, callback2)
            server.close()

        def callback2(data):
            chunks.append(data)
        client.read_bytes(1, callback1)
        self.wait()  # stopped by close_callback
        self.assertEqual(chunks, [b("1"), b("2")])
    finally:
        server.close()
        client.close()
def test_cookie_tampering_future_timestamp(self):
    handler = CookieTestRequestHandler()
    # this string base64-encodes to '12345678'
    handler.set_secure_cookie('foo', binascii.a2b_hex(b('d76df8e7aefc')))
    cookie = handler._cookies['foo']
    match = re.match(b(r'12345678\|([0-9]+)\|([0-9a-f]+)'), cookie)
    self.assertTrue(match)
    timestamp = match.group(1)
    sig = match.group(2)
    self.assertEqual(
        _create_signature(handler.application.settings["cookie_secret"],
                          'foo', '12345678', timestamp),
        sig)
    # shifting digits from payload to timestamp doesn't alter signature
    # (this is not desirable behavior, just confirming that that's how it
    # works)
    self.assertEqual(
        _create_signature(handler.application.settings["cookie_secret"],
                          'foo', '1234', b('5678') + timestamp),
        sig)
    # tamper with the cookie
    handler._cookies['foo'] = utf8('1234|5678%s|%s' % (
        to_basestring(timestamp), to_basestring(sig)))
    # it gets rejected
    with ExpectLog(gen_log, "Cookie timestamp in future"):
        self.assertTrue(handler.get_secure_cookie('foo') is None)
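The digit-shifting quirk this test documents follows from how the signature is computed. A sketch of the scheme, assuming (as the value|timestamp|hexdigest cookie format suggests) that _create_signature is an HMAC-SHA1 over the concatenated parts; sign_parts here is a hypothetical stand-in, not the real function:

import hashlib
import hmac

def sign_parts(secret, *parts):
    # Parts are concatenated with no delimiter, which is why moving bytes
    # between the payload and the timestamp leaves the digest unchanged.
    h = hmac.new(secret.encode("utf-8"), digestmod=hashlib.sha1)
    for part in parts:
        h.update(part if isinstance(part, bytes) else part.encode("utf-8"))
    return h.hexdigest()

# same digest for both splits of the same byte stream:
assert (sign_parts("secret", "foo", "12345678", "1350000000") ==
        sign_parts("secret", "foo", "1234", "56781350000000"))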
def test_escaping(self):
    self.assertRaises(ParseError, lambda: Template("{{"))
    self.assertRaises(ParseError, lambda: Template("{%"))
    self.assertEqual(Template("{{!").generate(), b("{{"))
    self.assertEqual(Template("{%!").generate(), b("{%"))
    self.assertEqual(Template("{{ 'expr' }} {{!jquery expr}}").generate(),
                     b("expr {{jquery expr}}"))
def test_unextended_block(self):
    loader = DictLoader(self.templates)
    name = "<script>"
    self.assertEqual(loader.load("escaped_block.html").generate(name=name),
                     b("base: &lt;script&gt;"))
    self.assertEqual(loader.load("unescaped_block.html").generate(name=name),
                     b("base: <script>"))
def test_streaming_callback(self):
    server, client = self.make_iostream_pair()
    try:
        chunks = []
        final_called = []

        def streaming_callback(data):
            chunks.append(data)
            self.stop()

        def final_callback(data):
            self.assertFalse(data)
            final_called.append(True)
            self.stop()
        server.read_bytes(6, callback=final_callback,
                          streaming_callback=streaming_callback)
        client.write(b("1234"))
        self.wait(condition=lambda: chunks)
        client.write(b("5678"))
        self.wait(condition=lambda: final_called)
        self.assertEqual(chunks, [b("1234"), b("56")])

        # the rest of the last chunk is still in the buffer
        server.read_bytes(2, callback=self.stop)
        data = self.wait()
        self.assertEqual(data, b("78"))
    finally:
        server.close()
        client.close()
def __call__(self, request):
    data = {}
    response = []

    def start_response(status, response_headers, exc_info=None):
        data["status"] = status
        data["headers"] = response_headers
        return response.append
    app_response = self.wsgi_application(
        WSGIContainer.environ(request), start_response)
    response.extend(app_response)
    body = b("").join(response)
    if hasattr(app_response, "close"):
        app_response.close()
    if not data:
        raise Exception("WSGI app did not call start_response")

    status_code = int(data["status"].split()[0])
    headers = data["headers"]
    header_set = set(k.lower() for (k, v) in headers)
    body = escape.utf8(body)
    if "content-length" not in header_set:
        headers.append(("Content-Length", str(len(body))))
    if "content-type" not in header_set:
        headers.append(("Content-Type", "text/html; charset=UTF-8"))
    if "server" not in header_set:
        headers.append(("Server", "TornadoServer/%s" % tornado.version))

    parts = [escape.utf8("HTTP/1.1 " + data["status"] + "\r\n")]
    for key, value in headers:
        parts.append(escape.utf8(key) + b(": ") + escape.utf8(value) +
                     b("\r\n"))
    parts.append(b("\r\n"))
    parts.append(body)
    request.write(b("").join(parts))
    request.finish()
    self._log(status_code, request)
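For context, this is the usual way to put the container in front of a WSGI app, following Tornado's documented API (a minimal sketch; simple_wsgi_app and the port are illustrative):

import tornado.httpserver
import tornado.ioloop
import tornado.wsgi

def simple_wsgi_app(environ, start_response):
    # the smallest possible WSGI app for the container to wrap
    start_response("200 OK", [("Content-Type", "text/plain")])
    return ["Hello from WSGI\n"]

container = tornado.wsgi.WSGIContainer(simple_wsgi_app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(8888)
tornado.ioloop.IOLoop.instance().start()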
def test_close_buffered_data(self):
    # Similar to the previous test, but with data stored in the OS's
    # socket buffers instead of the IOStream's read buffer.  Out-of-band
    # close notifications must be delayed until all data has been
    # drained into the IOStream buffer.  (epoll used to use out-of-band
    # close events with EPOLLRDHUP, but no longer)
    #
    # This depends on the read_chunk_size being smaller than the
    # OS socket buffer, so make it small.
    server, client = self.make_iostream_pair(read_chunk_size=256)
    try:
        server.write(b("A") * 512)
        client.read_bytes(256, self.stop)
        data = self.wait()
        self.assertEqual(b("A") * 256, data)
        server.close()
        # Allow the close to propagate to the client side of the
        # connection.  Using add_callback instead of add_timeout
        # doesn't seem to work, even with multiple iterations
        self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
        self.wait()
        client.read_bytes(256, self.stop)
        data = self.wait()
        self.assertEqual(b("A") * 256, data)
    finally:
        server.close()
        client.close()
def test_large_read_until(self):
    # Performance test: read_until used to have a quadratic component
    # so a read_until of 4MB would take 8 seconds; now it takes 0.25
    # seconds.
    server, client = self.make_iostream_pair()
    try:
        try:
            # This test fails on pypy with ssl.  I think it's because
            # pypy's gc moves objects, breaking the
            # "frozen write buffer" assumption.
            if (isinstance(server, SSLIOStream) and
                    platform.python_implementation() == 'PyPy'):
                raise unittest.SkipTest(
                    "pypy gc causes problems with openssl")
        except AttributeError:
            # python 2.5 didn't have platform.python_implementation,
            # but there was no pypy for 2.5
            pass
        NUM_KB = 4096
        for i in xrange(NUM_KB):
            client.write(b("A") * 1024)
        client.write(b("\r\n"))
        server.read_until(b("\r\n"), self.stop)
        data = self.wait()
        self.assertEqual(len(data), NUM_KB * 1024 + 2)
    finally:
        server.close()
        client.close()
def test_write_while_connecting(self):
    stream = self._make_client_iostream()
    connected = [False]

    def connected_callback():
        connected[0] = True
        self.stop()
    stream.connect(("localhost", self.get_http_port()),
                   callback=connected_callback)
    # unlike the previous tests, try to write before the connection
    # is complete.
    written = [False]

    def write_callback():
        written[0] = True
        self.stop()
    stream.write(b("GET / HTTP/1.0\r\nConnection: close\r\n\r\n"),
                 callback=write_callback)
    self.assertTrue(not connected[0])
    # by the time the write has flushed, the connection callback has
    # also run
    try:
        self.wait(lambda: connected[0] and written[0])
    finally:
        logging.debug((connected, written))

    stream.read_until_close(self.stop)
    data = self.wait()
    self.assertTrue(data.endswith(b("Hello")))

    stream.close()
def _on_headers(self, data):
    data = native_str(data.decode("latin1"))
    first_line, _, header_data = data.partition("\n")
    match = re.match("HTTP/1.[01] ([0-9]+)", first_line)
    assert match
    self.code = int(match.group(1))
    self.headers = HTTPHeaders.parse(header_data)

    if self.code == 100:
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html#sec8.2.3
        # support HTTP/1.1 100 Continue
        if self.request.body is not None:
            self.stream.write(self.request.body)
        self.stream.read_until_regex(b("\r?\n\r?\n"), self._on_headers)
        return

    if "Content-Length" in self.headers:
        if "," in self.headers["Content-Length"]:
            # Proxies sometimes cause Content-Length headers to get
            # duplicated.  If all the values are identical then we can
            # use them but if they differ it's an error.
            pieces = re.split(r',\s*', self.headers["Content-Length"])
            if any(i != pieces[0] for i in pieces):
                raise ValueError("Multiple unequal Content-Lengths: %r" %
                                 self.headers["Content-Length"])
            self.headers["Content-Length"] = pieces[0]
        content_length = int(self.headers["Content-Length"])
    else:
        content_length = None

    if self.request.header_callback is not None:
        for k, v in self.headers.get_all():
            self.request.header_callback("%s: %s\r\n" % (k, v))

    if self.request.method == "HEAD":
        # HEAD requests never have content, even though they may have
        # content-length headers
        self._on_body(b(""))
        return
    if 100 <= self.code < 200 or self.code in (204, 304):
        # These response codes never have bodies
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
        assert "Transfer-Encoding" not in self.headers
        assert content_length in (None, 0)
        self._on_body(b(""))
        return

    if (self.request.use_gzip and
            self.headers.get("Content-Encoding") == "gzip"):
        # Magic parameter makes zlib module understand gzip header
        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
        self._decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
    if self.headers.get("Transfer-Encoding") == "chunked":
        self.chunks = []
        self.stream.read_until(b("\r\n"), self._on_chunk_length)
    elif content_length is not None:
        self.stream.read_bytes(content_length, self._on_body)
    else:
        self.stream.read_until_close(self._on_body)
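The "magic parameter" in that comment is worth spelling out: adding 16 to MAX_WBITS tells zlib to expect (or emit) a gzip wrapper instead of a raw zlib stream. A quick round-trip, using the b() bytes helper from tornado.util that these snippets rely on:

import zlib

from tornado.util import b  # bytes-literal helper used throughout

co = zlib.compressobj(9, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
gzipped = co.compress(b("asdfqwer")) + co.flush()
do = zlib.decompressobj(16 + zlib.MAX_WBITS)
assert do.decompress(gzipped) == b("asdfqwer")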
def _oauth_parse_response(body):
    p = escape.parse_qs(body, keep_blank_values=False)
    token = dict(key=p[b("oauth_token")][0],
                 secret=p[b("oauth_token_secret")][0])

    # Add the extra parameters the Provider included to the token
    special = (b("oauth_token"), b("oauth_token_secret"))
    token.update((k, p[k][0]) for k in p if k not in special)
    return token
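For example, a provider response body like the following (token values made up) would parse into a dict with str keys for the token pair and bytes keys for the extras:

body = b("oauth_token=abc&oauth_token_secret=def&user_id=42")
token = _oauth_parse_response(body)
# token == {"key": b("abc"), "secret": b("def"), b("user_id"): b("42")}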
def make_request_headers(self):
    req_path = "/"
    request_lines = [utf8("%s %s HTTP/1.1" % (self.method, req_path))]
    for k, v in self.headers.items():
        line = utf8(k) + b(": ") + utf8(v)
        request_lines.append(line)
    toreturn = b("\r\n").join(request_lines) + b("\r\n\r\n")
    return toreturn
def test_chunked(self):
    response = self.fetch("/chunk")
    self.assertEqual(response.body, b("asdfqwer"))

    chunks = []
    response = self.fetch("/chunk",
                          streaming_callback=chunks.append)
    self.assertEqual(chunks, [b("asdf"), b("qwer")])
    self.assertFalse(response.body)
def generate_headers(self):
    request_lines = [utf8("%s %s HTTP/1.1" % (self.method, self.uri))]
    for k, v in self.headers.items():
        line = utf8(k) + b(": ") + utf8(v)
        request_lines.append(line)
    toreturn = b("\r\n").join(request_lines) + b("\r\n\r\n")
    return toreturn
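So with method "GET", uri "/hello", and a single Host header (illustrative values), the serialized request head would be:

    b("GET /hello HTTP/1.1\r\nHost: example.com\r\n\r\n")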
def write_message(self, message):
    """Sends the given message to the client of this Web Socket."""
    if isinstance(message, dict):
        message = tornado.escape.json_encode(message)
    if isinstance(message, unicode):
        message = message.encode("utf-8")
    assert isinstance(message, bytes_type)
    self.stream.write(b("\x00") + message + b("\xff"))
def test_hello_world(self):
    response = self.fetch("/hello")
    self.assertEqual(response.code, 200)
    self.assertEqual(response.headers["Content-Type"], "text/plain")
    self.assertEqual(response.body, b("Hello world!"))

    response = self.fetch("/hello?name=Ben")
    self.assertEqual(response.body, b("Hello Ben!"))
def test_get_error_html(self):
    response = self.fetch("/get_error_html")
    self.assertEqual(response.code, 500)
    self.assertEqual(b("Exception: ZeroDivisionError"), response.body)

    response = self.fetch("/get_error_html?status=503")
    self.assertEqual(response.code, 503)
    self.assertEqual(b("Status: 503"), response.body)
def write_message(self, message, binary=False):
    """Sends the given message to the client of this Web Socket."""
    if binary:
        raise ValueError(
            "Binary messages not supported by this version of websockets")
    if isinstance(message, unicode):
        message = message.encode("utf-8")
    assert isinstance(message, bytes_type)
    self.stream.write(b("\x00") + message + b("\xff"))
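Both write_message variants above use the old hixie-76 framing: a 0x00 byte, the UTF-8 payload, then a 0xFF terminator. A sketch of the matching read side, assuming an IOStream-style read_until and the b() helper used in these snippets (read_hixie_message is hypothetical, and a real receiver would also have to handle the closing-handshake frame, which this ignores):

def read_hixie_message(stream, callback):
    # each text frame is \x00 <utf-8 payload> \xff; read through the
    # terminator and strip one framing byte from each end
    def on_frame(data):
        callback(data[1:-1].decode("utf-8"))
    stream.read_until(b("\xff"), on_frame)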
def test_default(self):
    response = self.fetch("/default")
    self.assertEqual(response.code, 500)
    self.assertTrue(b("500: Internal Server Error") in response.body)

    response = self.fetch("/default?status=503")
    self.assertEqual(response.code, 503)
    self.assertTrue(b("503: Service Unavailable") in response.body)
def read_headers(self):
    self.stream.read_until(b("\r\n"), self.stop)
    first_line = self.wait()
    self.assertTrue(first_line.startswith(self.http_version + b(" 200")),
                    first_line)
    self.stream.read_until(b("\r\n\r\n"), self.stop)
    header_bytes = self.wait()
    headers = HTTPHeaders.parse(header_bytes.decode("latin1"))
    return headers
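HTTPHeaders.parse, used here, builds a case-insensitive multi-dict from the raw header block; roughly:

from tornado.httputil import HTTPHeaders

h = HTTPHeaders.parse(
    "Content-Type: text/html\r\nSet-Cookie: a=b\r\nSet-Cookie: c=d\r\n")
assert h["content-type"] == "text/html"            # lookup is case-insensitive
assert h.get_list("Set-Cookie") == ["a=b", "c=d"]  # repeated headers are kept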
def test_max_redirects(self):
    response = self.fetch("/countdown/5", max_redirects=3)
    self.assertEqual(302, response.code)
    # We requested 5, followed three redirects for 4, 3, 2, then the last
    # unfollowed redirect is to 1.
    self.assertTrue(response.request.url.endswith("/countdown/5"))
    self.assertTrue(response.effective_url.endswith("/countdown/2"))
    self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
def test_http10_keepalive(self):
    self.http_version = b("HTTP/1.0")
    self.connect()
    self.stream.write(b("GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n"))
    self.read_response()
    self.stream.write(b("GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n"))
    self.read_response()
    self.close()
def test_write_error(self):
    response = self.fetch("/write_error")
    self.assertEqual(response.code, 500)
    self.assertEqual(b("Exception: ZeroDivisionError"), response.body)

    response = self.fetch("/write_error?status=503")
    self.assertEqual(response.code, 503)
    self.assertEqual(b("Status: 503"), response.body)
def test_get_error_html(self):
    with ExpectLog(app_log, "Uncaught exception"):
        response = self.fetch("/get_error_html")
    self.assertEqual(response.code, 500)
    self.assertEqual(b("Exception: ZeroDivisionError"), response.body)

    response = self.fetch("/get_error_html?status=503")
    self.assertEqual(response.code, 503)
    self.assertEqual(b("Status: 503"), response.body)
def test_get_cookie(self):
    response = self.fetch("/get", headers={"Cookie": "foo=bar"})
    self.assertEqual(response.body, b("bar"))

    response = self.fetch("/get", headers={"Cookie": 'foo="bar"'})
    self.assertEqual(response.body, b("bar"))

    response = self.fetch("/get", headers={"Cookie": "/=exception;"})
    self.assertEqual(response.body, b("default"))
def test_default(self):
    with ExpectLog(app_log, "Uncaught exception"):
        response = self.fetch("/default")
    self.assertEqual(response.code, 500)
    self.assertTrue(b("500: Internal Server Error") in response.body)

    response = self.fetch("/default?status=503")
    self.assertEqual(response.code, 503)
    self.assertTrue(b("503: Service Unavailable") in response.body)
def accept_callback(conn, address):
    # fake an HTTP server using chunked encoding where the final chunks
    # and connection close all happen at once
    stream = IOStream(conn, io_loop=self.io_loop)
    stream.read_until(b("\r\n\r\n"),
                      functools.partial(write_response, stream))
def test_empty_flush(self):
    response = self.fetch("/empty_flush")
    self.assertEqual(response.body, b("ok"))
def test_basic_auth(self):
    self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
                                auth_password="open sesame").body,
                     b("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="))
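The expected header is just the classic RFC 2617 example credential pair, base64-encoded, which is easy to verify directly:

import base64

from tornado.util import b  # bytes-literal helper used throughout

assert (base64.b64encode(b("Aladdin:open sesame")) ==
        b("QWxhZGRpbjpvcGVuIHNlc2FtZQ=="))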
def test_arbitrary_bytes(self):
    # Secure cookies accept arbitrary data (which is base64 encoded).
    # Note that normal cookies accept only a subset of ascii.
    handler = CookieTestRequestHandler()
    handler.set_secure_cookie('foo', b('\xe9'))
    self.assertEqual(handler.get_secure_cookie('foo'), b('\xe9'))
def test_round_trip(self):
    handler = CookieTestRequestHandler()
    handler.set_secure_cookie('foo', b('bar'))
    self.assertEqual(handler.get_secure_cookie('foo'), b('bar'))
def test_yield_exception_handler(self):
    response = self.fetch('/yield_exception')
    self.assertEqual(response.body, b('ok'))
def test_sequence_handler(self):
    response = self.fetch('/sequence')
    self.assertEqual(response.body, b("123"))
def test_failed_write_error(self):
    response = self.fetch("/failed_write_error")
    self.assertEqual(response.code, 500)
    self.assertEqual(b(""), response.body)
def test_credentials_in_url(self):
    url = self.get_url("/auth").replace("http://", "http://me:secret@")
    self.http_client.fetch(url, self.stop)
    response = self.wait()
    self.assertEqual(b("Basic ") + base64.b64encode(b("me:secret")),
                     response.body)
def test_post(self):
    response = self.fetch("/post", method="POST",
                          body="arg1=foo&arg2=bar")
    self.assertEqual(response.code, 200)
    self.assertEqual(response.body, b("Post arg1: foo, arg2: bar"))
def test_flow_control(self):
    self.assertEqual(self.fetch("/flow_control").body, b("123"))
def test_static_files(self):
    response = self.fetch('/robots.txt')
    self.assertTrue(b("Disallow: /") in response.body)

    response = self.fetch('/static/robots.txt')
    self.assertTrue(b("Disallow: /") in response.body)
def test_header_injection(self):
    response = self.fetch("/header_injection")
    self.assertEqual(response.body, b("ok"))
class SimpleHTTPClientTestCase(AsyncHTTPTestCase, LogTrapTestCase):
    def setUp(self):
        super(SimpleHTTPClientTestCase, self).setUp()
        self.http_client = SimpleAsyncHTTPClient(self.io_loop)

    def get_app(self):
        # callable objects to finish pending /trigger requests
        self.triggers = collections.deque()
        return Application([
            url("/trigger", TriggerHandler, dict(queue=self.triggers,
                                                 wake_callback=self.stop)),
            url("/chunk", ChunkHandler),
            url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
            url("/hang", HangHandler),
            url("/hello", HelloWorldHandler),
            url("/content_length", ContentLengthHandler),
            url("/head", HeadHandler),
            url("/options", OptionsHandler),
            url("/no_content", NoContentHandler),
            url("/303_post", SeeOther303PostHandler),
            url("/303_get", SeeOther303GetHandler),
            url("/host_echo", HostEchoHandler),
        ], gzip=True)

    def test_singleton(self):
        # Class "constructor" reuses objects on the same IOLoop
        self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is
                        SimpleAsyncHTTPClient(self.io_loop))
        # unless force_instance is used
        self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
                        SimpleAsyncHTTPClient(self.io_loop,
                                              force_instance=True))
        # different IOLoops use different objects
        io_loop2 = IOLoop()
        self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
                        SimpleAsyncHTTPClient(io_loop2))

    def test_connection_limit(self):
        client = SimpleAsyncHTTPClient(self.io_loop, max_clients=2,
                                       force_instance=True)
        self.assertEqual(client.max_clients, 2)
        seen = []
        # Send 4 requests.  Two can be sent immediately, while the others
        # will be queued
        for i in range(4):
            client.fetch(self.get_url("/trigger"),
                         lambda response, i=i: (seen.append(i), self.stop()))
        self.wait(condition=lambda: len(self.triggers) == 2)
        self.assertEqual(len(client.queue), 2)

        # Finish the first two requests and let the next two through
        self.triggers.popleft()()
        self.triggers.popleft()()
        self.wait(condition=lambda: (len(self.triggers) == 2 and
                                     len(seen) == 2))
        self.assertEqual(set(seen), set([0, 1]))
        self.assertEqual(len(client.queue), 0)

        # Finish all the pending requests
        self.triggers.popleft()()
        self.triggers.popleft()()
        self.wait(condition=lambda: len(seen) == 4)
        self.assertEqual(set(seen), set([0, 1, 2, 3]))
        self.assertEqual(len(self.triggers), 0)

    def test_redirect_connection_limit(self):
        # following redirects should not consume additional connections
        client = SimpleAsyncHTTPClient(self.io_loop, max_clients=1,
                                       force_instance=True)
        client.fetch(self.get_url('/countdown/3'), self.stop,
                     max_redirects=3)
        response = self.wait()
        response.rethrow()

    def test_default_certificates_exist(self):
        open(_DEFAULT_CA_CERTS).close()

    def test_gzip(self):
        # All the tests in this file should be using gzip, but this test
        # ensures that it is in fact getting compressed.
        # Setting Accept-Encoding manually bypasses the client's
        # decompression so we can see the raw data.
        response = self.fetch("/chunk", use_gzip=False,
                              headers={"Accept-Encoding": "gzip"})
        self.assertEqual(response.headers["Content-Encoding"], "gzip")
        self.assertNotEqual(response.body, b("asdfqwer"))
        # Our test data gets bigger when gzipped.  Oops.  :)
        self.assertEqual(len(response.body), 34)
        f = gzip.GzipFile(mode="r", fileobj=response.buffer)
        self.assertEqual(f.read(), b("asdfqwer"))

    def test_max_redirects(self):
        response = self.fetch("/countdown/5", max_redirects=3)
        self.assertEqual(302, response.code)
        # We requested 5, followed three redirects for 4, 3, 2, then the
        # last unfollowed redirect is to 1.
        self.assertTrue(response.request.url.endswith("/countdown/5"))
        self.assertTrue(response.effective_url.endswith("/countdown/2"))
        self.assertTrue(response.headers["Location"].endswith("/countdown/1"))

    def test_header_reuse(self):
        # Apps may reuse a headers object if they are only passing in
        # constant headers like user-agent.  The header object should
        # not be modified.
        headers = HTTPHeaders({'User-Agent': 'Foo'})
        self.fetch("/hello", headers=headers)
        self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])

    def test_303_redirect(self):
        response = self.fetch("/303_post", method="POST", body="blah")
        self.assertEqual(200, response.code)
        self.assertTrue(response.request.url.endswith("/303_post"))
        self.assertTrue(response.effective_url.endswith("/303_get"))
        # request is the original request; it is still a POST
        self.assertEqual("POST", response.request.method)

    def test_request_timeout(self):
        response = self.fetch('/trigger?wake=false', request_timeout=0.1)
        self.assertEqual(response.code, 599)
        self.assertTrue(0.099 < response.request_time < 0.11,
                        response.request_time)
        self.assertEqual(str(response.error), "HTTP 599: Timeout")
        # trigger the hanging request to let it clean up after itself
        self.triggers.popleft()()

    def test_ipv6(self):
        if not socket.has_ipv6:
            # python compiled without ipv6 support, so skip this test
            return
        try:
            self.http_server.listen(self.get_http_port(), address='::1')
        except socket.gaierror, e:
            if e.args[0] == socket.EAI_ADDRFAMILY:
                # python supports ipv6, but it's not configured on the
                # network interface, so skip this test.
                return
            raise
        url = self.get_url("/hello").replace("localhost", "[::1]")

        # ipv6 is currently disabled by default and must be explicitly
        # requested
        self.http_client.fetch(url, self.stop)
        response = self.wait()
        self.assertEqual(response.code, 599)

        self.http_client.fetch(url, self.stop, allow_ipv6=True)
        response = self.wait()
        self.assertEqual(response.body, b("Hello world!"))
def test_json_encode(self):
    # json deals with strings, not bytes, but our encoding function
    # should accept bytes as well, as long as they are utf8.
    self.assertEqual(json_decode(json_encode(u"\u00e9")), u"\u00e9")
    self.assertEqual(json_decode(json_encode(utf8(u"\u00e9"))), u"\u00e9")
    self.assertRaises(UnicodeDecodeError, json_encode, b("\xe9"))
def test_options_request(self):
    response = self.fetch("/options", method="OPTIONS")
    self.assertEqual(response.code, 200)
    self.assertEqual(response.headers["content-length"], "2")
    self.assertEqual(response.headers["access-control-allow-origin"], "*")
    self.assertEqual(response.body, b("ok"))
@asynchronous
@gen.engine
def get(self):
    io_loop = self.request.connection.stream.io_loop
    client = AsyncHTTPClient(io_loop=io_loop)
    response = yield gen.Task(client.fetch, self.get_argument('url'))
    response.rethrow()
    self.finish(b("got response: ") + response.body)
def test_connection_close(self):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    s.connect(("localhost", self.get_http_port()))
    self.stream = IOStream(s, io_loop=self.io_loop)
    self.stream.write(b("GET / HTTP/1.0\r\n\r\n"))
    self.wait()
def test_task_handler(self):
    response = self.fetch('/task?url=%s' %
                          url_escape(self.get_url('/sequence')))
    self.assertEqual(response.body, b("got response: 123"))
def _challenge_response(self):
    sha1 = hashlib.sha1()
    sha1.update(tornado.escape.utf8(
        self.request.headers.get("Sec-Websocket-Key")))
    sha1.update(b("258EAFA5-E914-47DA-95CA-C5AB0DC85B11"))  # Magic value
    return tornado.escape.native_str(base64.b64encode(sha1.digest()))
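This is the RFC 6455 accept computation: SHA-1 of the client's Sec-WebSocket-Key concatenated with the fixed GUID, then base64. The RFC's own sample key checks out:

import base64
import hashlib

from tornado.util import b  # bytes-literal helper used throughout

key = b("dGhlIHNhbXBsZSBub25jZQ==")  # sample key from RFC 6455, section 1.3
accept = base64.b64encode(
    hashlib.sha1(key + b("258EAFA5-E914-47DA-95CA-C5AB0DC85B11")).digest())
assert accept == b("s3pPLMBiTxaQ9kYGzzhZRbK+xOo=")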
def _oauth_get_user(self, access_token, callback):
    if access_token != dict(key=b('uiop'), secret=b('5678')):
        raise Exception("incorrect access token %r" % access_token)
    callback(dict(email='*****@*****.**'))
def post(self):
    assert self.request.body == b("blah")
    self.set_header("Location", "/303_get")
    self.set_status(303)
def close(self):
    """Closes the WebSocket connection."""
    self._write_frame(True, 0x8, b(""))
    self._started_closing_handshake = True
    self._waiting = tornado.ioloop.IOLoop.instance().add_timeout(
        time.time() + 5, self._abort)
def test_static_url(self):
    response = self.fetch("/static_url/foo.txt")
    self.assertEqual(response.body, b("/static/foo.42.txt"))
def _on_connect(self, parsed, parsed_hostname):
    if self._timeout is not None:
        self.io_loop.remove_timeout(self._timeout)
        self._timeout = None
    if self.request.request_timeout:
        self._timeout = self.io_loop.add_timeout(
            self.start_time + self.request.request_timeout,
            self._on_timeout,
            monotonic=True)
    if (self.request.validate_cert and
            isinstance(self.stream, SSLIOStream)):
        match_hostname(self.stream.socket.getpeercert(),
                       # ipv6 addresses are broken (in parsed.hostname)
                       # until python 2.7; parsed_hostname is the
                       # correctly parsed value calculated in __init__
                       parsed_hostname)
    if (self.request.method not in self._SUPPORTED_METHODS and
            not self.request.allow_nonstandard_methods):
        raise KeyError("unknown method %s" % self.request.method)
    for key in ('network_interface',
                'proxy_host', 'proxy_port',
                'proxy_username', 'proxy_password'):
        if getattr(self.request, key, None):
            raise NotImplementedError('%s not supported' % key)
    if "Connection" not in self.request.headers:
        self.request.headers["Connection"] = "close"
    if "Host" not in self.request.headers:
        if '@' in parsed.netloc:
            self.request.headers["Host"] = parsed.netloc.rpartition('@')[-1]
        else:
            self.request.headers["Host"] = parsed.netloc
    username, password = None, None
    if parsed.username is not None:
        username, password = parsed.username, parsed.password
    elif self.request.auth_username is not None:
        username = self.request.auth_username
        password = self.request.auth_password or ''
    if username is not None:
        auth = utf8(username) + b(":") + utf8(password)
        self.request.headers["Authorization"] = (b("Basic ") +
                                                 base64.b64encode(auth))
    if self.request.user_agent:
        self.request.headers["User-Agent"] = self.request.user_agent
    if not self.request.allow_nonstandard_methods:
        if self.request.method in ("POST", "PATCH", "PUT"):
            assert self.request.body is not None
        else:
            assert self.request.body is None
    if self.request.body is not None:
        self.request.headers["Content-Length"] = str(len(
            self.request.body))
    if (self.request.method == "POST" and
            "Content-Type" not in self.request.headers):
        self.request.headers["Content-Type"] = \
            "application/x-www-form-urlencoded"
    if self.request.use_gzip:
        self.request.headers["Accept-Encoding"] = "gzip"
    req_path = ((parsed.path or '/') +
                (('?' + parsed.query) if parsed.query else ''))
    request_lines = [utf8("%s %s HTTP/1.1" % (self.request.method,
                                              req_path))]
    for k, v in self.request.headers.get_all():
        line = utf8(k) + b(": ") + utf8(v)
        if b('\n') in line:
            raise ValueError('Newline in header: ' + repr(line))
        request_lines.append(line)
    self.stream.write(b("\r\n").join(request_lines) + b("\r\n\r\n"))
    if self.request.body is not None:
        self.stream.write(self.request.body)
    self.stream.read_until_regex(b("\r?\n\r?\n"), self._on_headers)
def test_named_urlspec_groups(self):
    response = self.fetch("/str/foo")
    self.assertEqual(response.body, b("foo"))

    response = self.fetch("/unicode/bar")
    self.assertEqual(response.body, b("bar"))
def get(self):
    # Try setting cookies with different argument types
    # to ensure that everything gets encoded correctly
    self.set_cookie("str", "asdf")
    self.set_cookie("unicode", u"qwer")
    self.set_cookie("bytes", b("zxcv"))
def test_serve(self):
    response = self.fetch("/static/foo.42.txt")
    self.assertEqual(response.body, b("bar"))
def test_uimodule_unescaped(self):
    response = self.fetch("/linkify")
    self.assertEqual(response.body,
                     b("<a href=\"http://example.com\">http://example.com</a>"))
def test_static_url(self):
    response = self.fetch("/static_url/robots.txt")
    self.assertEqual(response.body, b("/static/robots.txt?v=f71d2"))