def test_large_body_buffered_chunked(self):
    """A chunked PUT body that exceeds the limit is rejected with 400."""
    # This test is flaky on windows for unknown reasons.
    with ExpectLog(gen_log, '.*chunked body too large'):
        resp = self.fetch('/buffered', method='PUT',
                          body_producer=lambda write: write(b'a' * 10240))
    self.assertEqual(resp.code, 400)
def test_twitter_show_user_error(self):
    """An upstream error from twitter surfaces as a 500 and is logged."""
    with ExpectLog(gen_log, 'Error response HTTP 500'):
        resp = self.fetch('/twitter/client/show_user?name=error')
    self.assertEqual(resp.code, 500)
    self.assertEqual(resp.body, b'error from twitter request')
def test_exception_logging(self):
    """Uncaught exceptions get logged by the IOLoop."""
    # The 1/0 callback raises; the self.stop callback lets wait() return.
    self.io_loop.add_callback(lambda: 1 / 0)
    self.io_loop.add_callback(self.stop)
    with ExpectLog(app_log, "Exception in callback"):
        self.wait()
def test_coroutine_exception_handler(self):
    """An exception raised in a coroutine handler produces a 500, not a hang."""
    # Make sure we get an error and not a timeout
    with ExpectLog(app_log, "Uncaught exception GET /coroutine_exception"):
        resp = self.fetch('/coroutine_exception')
    self.assertEqual(500, resp.code)
def test_malformed_headers(self):
    # A header line with no colon is malformed; the server should log and
    # drop the connection.  The short timeout gives the server a chance to
    # process the bad request before wait() returns.
    with ExpectLog(gen_log, '.*Malformed HTTP headers'):
        self.stream.write(b'GET / HTTP/1.0\r\nasdf\r\n\r\n')
        self.io_loop.add_timeout(datetime.timedelta(seconds=0.05),
                                 self.stop)
        self.wait()
def test_ssl_context_handshake_fail(self):
    """Requiring certificate verification without CA certs fails the handshake."""
    with ExpectLog(gen_log, "SSL Error|Uncaught exception"):
        ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ctx.verify_mode = ssl.CERT_REQUIRED
        resp = self.fetch("/hello", ssl_options=ctx)
    self.assertRaises(ssl.SSLError, resp.rethrow)
def test_large_body(self):
    """An oversized Content-Length is rejected; the fetch errors out (599)."""
    with ExpectLog(gen_log,
                   "Malformed HTTP message from None: Content-Length too long"):
        resp = self.fetch('/large')
    self.assertEqual(resp.code, 599)
def test_large_headers(self):
    """Oversized response headers raise UnsatisfiableReadError and are logged."""
    # Combine both context managers in a single with-statement.
    with ExpectLog(gen_log, "Unsatisfiable read"), \
            self.assertRaises(UnsatisfiableReadError):
        self.fetch('/large', raise_error=True)
def test_large_body(self):
    """An oversized Content-Length closes the stream; fetch raises."""
    expected = "Malformed HTTP message from None: Content-Length too long"
    with ExpectLog(gen_log, expected), \
            self.assertRaises(HTTPStreamClosedError):
        self.fetch('/large', raise_error=True)
def test_handler_deleted(self):
    # A warning is logged if the referred handler has been deleted.
    del self.handler
    with ExpectLog('', self.expected_log, required=True):
        self.wrapped('hello')
    # The deleted handler received nothing.
    self.assertEqual([], self.messages)
def test_large_headers(self):
    """A request with very large headers fails the fetch with code 599."""
    with ExpectLog(gen_log, "Unsatisfiable read"):
        resp = self.fetch("/", headers={'X-Filler': 'a' * 1000})
    self.assertEqual(resp.code, 599)
def test_invalid_type(self):
    # If the resulting object is not a dict-like object, a warning is
    # logged and None is returned.
    want_log = 'JSON decoder: message is not a dict: \'"not-a-dict"\''
    with ExpectLog('', want_log, required=True):
        self.assertIsNone(utils.json_decode_dict('"not-a-dict"'))
def test_invalid_json(self):
    # If the message is not a valid JSON string, a warning is logged and
    # None is returned.
    want_log = "JSON decoder: message is not valid JSON: 'not-json'"
    with ExpectLog('', want_log, required=True):
        self.assertIsNone(utils.json_decode_dict('not-json'))
def test_large_body_streaming_chunked(self):
    """A chunked PUT body over the limit to a streaming handler gets a 400."""
    with ExpectLog(gen_log, '.*chunked body too large'):
        resp = self.fetch('/streaming', method='PUT',
                          body_producer=lambda write: write(b'a' * 10240))
    self.assertEqual(resp.code, 400)
def test_large_body_buffered(self):
    """An oversized buffered PUT body fails the fetch with code 599."""
    with ExpectLog(gen_log, '.*Content-Length too long'):
        resp = self.fetch('/buffered', method='PUT', body=b'a' * 10240)
    self.assertEqual(resp.code, 599)
def test_http_error(self):
    """An HTTPError from a handler is logged as a warning, no stack trace."""
    # TODO: extend ExpectLog to test this more precisely
    with ExpectLog(gen_log, '.*no longer here'):
        resp = self.fetch('/?exc=http')
    self.assertEqual(resp.code, 410)
def test_ssl_options_handshake_fail(self):
    """cert_reqs=CERT_REQUIRED without CA certs makes the handshake fail."""
    with ExpectLog(gen_log, "SSL Error|Uncaught exception",
                   required=False):
        resp = self.fetch(
            "/hello", ssl_options=dict(cert_reqs=ssl.CERT_REQUIRED))
    self.assertRaises(ssl.SSLError, resp.rethrow)
def test_unknown_error(self):
    """Unknown errors are logged as errors with a stack trace."""
    with ExpectLog(app_log, 'Uncaught exception'):
        resp = self.fetch('/?exc=zero')
    self.assertEqual(resp.code, 500)
def test_large_headers(self):
    """Oversized response headers fail the fetch with code 599."""
    with ExpectLog(gen_log, "Unsatisfiable read"):
        resp = self.fetch('/large')
    self.assertEqual(resp.code, 599)
def test_main(self, mock_app):
    """Start up the server with the default options."""
    with ExpectLog('', 'Starting server on localhost:8080'):
        main(self.io_loop)
    # The (mocked) application was constructed with the default options.
    mock_app.assert_called_with(**self.get_app_defaults())
def test_chunked_with_content_length(self):
    """A response with both Transfer-Encoding and Content-Length is invalid."""
    # Make sure the invalid headers are detected
    expected = ("Malformed HTTP message from None: Response "
                "with both Transfer-Encoding and Content-Length")
    with ExpectLog(gen_log, expected):
        resp = self.fetch('/chunkwithcl')
    self.assertEqual(resp.code, 599)
def test_failed_write_error(self):
    """A write failure in the handler yields a 500 with an empty body."""
    with ExpectLog(app_log, "Uncaught exception"):
        resp = self.fetch("/failed_write_error")
    self.assertEqual(resp.code, 500)
    self.assertEqual(b(""), resp.body)
def test_large_body_streaming(self):
    """An oversized PUT body to a streaming handler is rejected with 400."""
    with ExpectLog(gen_log, '.*Content-Length too long'):
        resp = self.fetch('/streaming', method='PUT', body=b'a' * 10240)
    self.assertEqual(resp.code, 400)
def test_static_url(self):
    """static_url() embeds a version hash in the generated path."""
    with ExpectLog(gen_log, "Could not open static file", required=False):
        resp = self.fetch("/static_url/foo.txt")
    self.assertEqual(resp.body, b("/static/foo.42.txt"))
def test_multi_process(self):
    """End-to-end test of fork_processes: restarts, clean exits, shutdown.

    Forks three server/client processes sharing one listening socket;
    children 0 and 1 run HTTP servers, child 2 drives them with an HTTP
    client, killing and restarting servers to exercise max_restarts.
    """
    # This test can't work on twisted because we use the global reactor
    # and have no way to get it back into a sane state after the fork.
    skip_if_twisted()
    with ExpectLog(
            gen_log,
            "(Starting .* processes|child .* exited|uncaught exception)"):
        self.assertFalse(IOLoop.initialized())
        sock, port = bind_unused_port()

        def get_url(path):
            return "http://127.0.0.1:%d%s" % (port, path)
        # ensure that none of these processes live too long
        signal.alarm(5)  # master process
        try:
            id = fork_processes(3, max_restarts=3)
            self.assertTrue(id is not None)
            signal.alarm(5)  # child processes
        except SystemExit as e:
            # if we exit cleanly from fork_processes, all the child
            # processes finished with status 0
            self.assertEqual(e.code, 0)
            self.assertTrue(task_id() is None)
            sock.close()
            return
        try:
            if id in (0, 1):
                # Children 0 and 1 serve HTTP on the shared socket.
                self.assertEqual(id, task_id())
                server = HTTPServer(self.get_app())
                server.add_sockets([sock])
                IOLoop.current().start()
            elif id == 2:
                # Child 2 is the client driving the other two.
                self.assertEqual(id, task_id())
                sock.close()
                # Always use SimpleAsyncHTTPClient here; the curl
                # version appears to get confused sometimes if the
                # connection gets closed before it's had a chance to
                # switch from writing mode to reading mode.
                client = HTTPClient(SimpleAsyncHTTPClient)

                def fetch(url, fail_ok=False):
                    # fail_ok tolerates 599 (connection closed) when the
                    # target server is expected to die mid-request.
                    try:
                        return client.fetch(get_url(url))
                    except HTTPError as e:
                        if not (fail_ok and e.code == 599):
                            raise

                # Make two processes exit abnormally
                fetch("/?exit=2", fail_ok=True)
                fetch("/?exit=3", fail_ok=True)
                # They've been restarted, so a new fetch will work
                int(fetch("/").body)
                # Now the same with signals
                # Disabled because on the mac a process dying with a signal
                # can trigger an "Application exited abnormally; send error
                # report to Apple?" prompt.
                # fetch("/?signal=%d" % signal.SIGTERM, fail_ok=True)
                # fetch("/?signal=%d" % signal.SIGABRT, fail_ok=True)
                # int(fetch("/").body)
                # Now kill them normally so they won't be restarted
                fetch("/?exit=0", fail_ok=True)
                # One process left; watch its pid change
                pid = int(fetch("/").body)
                fetch("/?exit=4", fail_ok=True)
                pid2 = int(fetch("/").body)
                self.assertNotEqual(pid, pid2)
                # Kill the last one so we shut down cleanly
                fetch("/?exit=0", fail_ok=True)
            os._exit(0)
        except Exception:
            logging.error("exception in child process %d", id,
                          exc_info=True)
            raise
def test_error_in_on_message(self):
    # NOTE(review): the body uses ``yield`` so this is a generator-based
    # test; presumably decorated with @gen_test in the original file —
    # the decorator is not visible here, confirm against the source.
    ws = yield self.ws_connect("/error_in_on_message")
    ws.write_message("hello")
    with ExpectLog(app_log, "Uncaught exception"):
        # The server-side exception closes the connection; read_message
        # then resolves to None.
        response = yield ws.read_message()
    self.assertIs(response, None)
def test_large_body_streaming(self):
    """An oversized streaming PUT body is rejected with 400 (logged at INFO)."""
    with ExpectLog(gen_log, ".*Content-Length too long",
                   level=logging.INFO):
        resp = self.fetch("/streaming", method="PUT", body=b"a" * 10240)
    self.assertEqual(resp.code, 400)
def test_error_in_async_open(self):
    """An exception in an async open() is logged and closes the connection."""
    with ExpectLog(app_log, "Uncaught exception"):
        ws = yield self.ws_connect("/error_in_async_open")
        res = yield ws.read_message()
    self.assertIsNone(res)
def test_stack_context(self):
    """An exception raised through a stack context still produces a 500."""
    with ExpectLog(app_log, "Uncaught exception GET /"):
        self.http_client.fetch(self.get_url('/'), self.handle_response)
        self.wait()
    self.assertEqual(self.response.code, 500)
    self.assertTrue(b'got expected exception' in self.response.body)
class SimpleHTTPClientTestCase(AsyncHTTPTestCase):
    """Tests for SimpleAsyncHTTPClient against a local test Application."""

    def setUp(self):
        super(SimpleHTTPClientTestCase, self).setUp()
        self.http_client = SimpleAsyncHTTPClient(self.io_loop)

    def get_app(self):
        # callable objects to finish pending /trigger requests
        self.triggers = collections.deque()
        return Application([
            url("/trigger", TriggerHandler, dict(queue=self.triggers,
                                                 wake_callback=self.stop)),
            url("/chunk", ChunkHandler),
            url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
            url("/hang", HangHandler),
            url("/hello", HelloWorldHandler),
            url("/content_length", ContentLengthHandler),
            url("/head", HeadHandler),
            url("/options", OptionsHandler),
            url("/no_content", NoContentHandler),
            url("/303_post", SeeOther303PostHandler),
            url("/303_get", SeeOther303GetHandler),
            url("/host_echo", HostEchoHandler),
        ], gzip=True)

    def test_singleton(self):
        # Class "constructor" reuses objects on the same IOLoop
        self.assertTrue(
            SimpleAsyncHTTPClient(self.io_loop) is
            SimpleAsyncHTTPClient(self.io_loop))
        # unless force_instance is used
        self.assertTrue(
            SimpleAsyncHTTPClient(self.io_loop) is not
            SimpleAsyncHTTPClient(self.io_loop, force_instance=True))
        # different IOLoops use different objects
        io_loop2 = IOLoop()
        self.assertTrue(
            SimpleAsyncHTTPClient(self.io_loop) is not
            SimpleAsyncHTTPClient(io_loop2))

    def test_connection_limit(self):
        client = SimpleAsyncHTTPClient(self.io_loop, max_clients=2,
                                       force_instance=True)
        self.assertEqual(client.max_clients, 2)
        seen = []
        # Send 4 requests.  Two can be sent immediately, while the others
        # will be queued
        for i in range(4):
            client.fetch(self.get_url("/trigger"),
                         lambda response, i=i: (seen.append(i), self.stop()))
        self.wait(condition=lambda: len(self.triggers) == 2)
        self.assertEqual(len(client.queue), 2)
        # Finish the first two requests and let the next two through
        self.triggers.popleft()()
        self.triggers.popleft()()
        self.wait(condition=lambda: (len(self.triggers) == 2 and
                                     len(seen) == 2))
        self.assertEqual(set(seen), set([0, 1]))
        self.assertEqual(len(client.queue), 0)
        # Finish all the pending requests
        self.triggers.popleft()()
        self.triggers.popleft()()
        self.wait(condition=lambda: len(seen) == 4)
        self.assertEqual(set(seen), set([0, 1, 2, 3]))
        self.assertEqual(len(self.triggers), 0)

    def test_redirect_connection_limit(self):
        # following redirects should not consume additional connections
        client = SimpleAsyncHTTPClient(self.io_loop, max_clients=1,
                                       force_instance=True)
        client.fetch(self.get_url('/countdown/3'), self.stop,
                     max_redirects=3)
        response = self.wait()
        response.rethrow()

    def test_default_certificates_exist(self):
        open(_DEFAULT_CA_CERTS).close()

    def test_gzip(self):
        # All the tests in this file should be using gzip, but this test
        # ensures that it is in fact getting compressed.
        # Setting Accept-Encoding manually bypasses the client's
        # decompression so we can see the raw data.
        response = self.fetch("/chunk", use_gzip=False,
                              headers={"Accept-Encoding": "gzip"})
        self.assertEqual(response.headers["Content-Encoding"], "gzip")
        self.assertNotEqual(response.body, b("asdfqwer"))
        # Our test data gets bigger when gzipped.  Oops.  :)
        self.assertEqual(len(response.body), 34)
        f = gzip.GzipFile(mode="r", fileobj=response.buffer)
        self.assertEqual(f.read(), b("asdfqwer"))

    def test_max_redirects(self):
        response = self.fetch("/countdown/5", max_redirects=3)
        self.assertEqual(302, response.code)
        # We requested 5, followed three redirects for 4, 3, 2, then the last
        # unfollowed redirect is to 1.
        self.assertTrue(response.request.url.endswith("/countdown/5"))
        self.assertTrue(response.effective_url.endswith("/countdown/2"))
        self.assertTrue(response.headers["Location"].endswith("/countdown/1"))

    def test_header_reuse(self):
        # Apps may reuse a headers object if they are only passing in constant
        # headers like user-agent.  The header object should not be modified.
        headers = HTTPHeaders({'User-Agent': 'Foo'})
        self.fetch("/hello", headers=headers)
        self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])

    def test_303_redirect(self):
        response = self.fetch("/303_post", method="POST", body="blah")
        self.assertEqual(200, response.code)
        self.assertTrue(response.request.url.endswith("/303_post"))
        self.assertTrue(response.effective_url.endswith("/303_get"))
        # request is the original request, is a POST still
        self.assertEqual("POST", response.request.method)

    def test_request_timeout(self):
        with ExpectLog(gen_log, "uncaught exception"):
            response = self.fetch('/trigger?wake=false', request_timeout=0.1)
        self.assertEqual(response.code, 599)
        self.assertTrue(0.099 < response.request_time < 0.12,
                        response.request_time)
        self.assertEqual(str(response.error), "HTTP 599: Timeout")
        # trigger the hanging request to let it clean up after itself
        self.triggers.popleft()()

    @unittest.skipIf(not socket.has_ipv6, 'ipv6 support not present')
    def test_ipv6(self):
        try:
            self.http_server.listen(self.get_http_port(), address='::1')
        # Fixed: was the Python-2-only "except socket.gaierror, e:" form,
        # which is a SyntaxError on Python 3; "as e" works on 2.6+ and 3.x.
        except socket.gaierror as e:
            if e.args[0] == socket.EAI_ADDRFAMILY:
                # python supports ipv6, but it's not configured on the
                # network interface, so skip this test.
                return
            raise
        url = self.get_url("/hello").replace("localhost", "[::1]")

        # ipv6 is currently disabled by default and must be explicitly
        # requested
        with ExpectLog(gen_log, "uncaught exception"):
            self.http_client.fetch(url, self.stop)
            response = self.wait()
        self.assertEqual(response.code, 599)

        self.http_client.fetch(url, self.stop, allow_ipv6=True)
        response = self.wait()
        self.assertEqual(response.body, b("Hello world!"))