def test_setdefault(self):
    headers = HTTPHeaders()
    headers['foo'] = 'bar'
    # If a value is present, setdefault returns it without changes.
    self.assertEqual(headers.setdefault('foo', 'baz'), 'bar')
    self.assertEqual(headers['foo'], 'bar')
    # If a value is not present, setdefault sets it for future use.
    self.assertEqual(headers.setdefault('quux', 'xyzzy'), 'xyzzy')
    self.assertEqual(headers['quux'], 'xyzzy')
    self.assertEqual(sorted(headers.get_all()),
                     [('Foo', 'bar'), ('Quux', 'xyzzy')])

def finish(self): response_body = b"OK" self.connection.write_headers( ResponseStartLine("HTTP/1.1", 200, "OK"), HTTPHeaders({"Content-Length": str(len(response_body))})) self.connection.write(response_body) self.connection.finish()
def finish(self):
    response_body = utf8(json_encode(self.chunk_lengths))
    self.connection.write_headers(
        ResponseStartLine('HTTP/1.1', 200, 'OK'),
        HTTPHeaders({'Content-Length': str(len(response_body))}))
    self.connection.write(response_body)
    self.connection.finish()

def test_100_continue(self):
    # Run through a 100-continue interaction by hand:
    # When given Expect: 100-continue, we get a 100 response after the
    # headers, and then the real response after the body.
    stream = IOStream(socket.socket(), io_loop=self.io_loop)
    stream.connect(("127.0.0.1", self.get_http_port()), callback=self.stop)
    self.wait()
    stream.write(b"\r\n".join([
        b"POST /hello HTTP/1.1",
        b"Content-Length: 1024",
        b"Expect: 100-continue",
        b"Connection: close",
        b"\r\n"]), callback=self.stop)
    self.wait()
    stream.read_until(b"\r\n\r\n", self.stop)
    data = self.wait()
    self.assertTrue(data.startswith(b"HTTP/1.1 100 "), data)
    stream.write(b"a" * 1024)
    stream.read_until(b"\r\n", self.stop)
    first_line = self.wait()
    self.assertTrue(first_line.startswith(b"HTTP/1.1 200"), first_line)
    stream.read_until(b"\r\n\r\n", self.stop)
    header_data = self.wait()
    headers = HTTPHeaders.parse(native_str(header_data.decode('latin1')))
    stream.read_bytes(int(headers["Content-Length"]), self.stop)
    body = self.wait()
    self.assertEqual(body, b"Got 1024 bytes in POST")
    stream.close()

def read_headers(self):
    self.stream.read_until(b'\r\n', self.stop)
    first_line = self.wait()
    self.assertTrue(first_line.startswith(b'HTTP/1.1 200'), first_line)
    self.stream.read_until(b'\r\n\r\n', self.stop)
    header_bytes = self.wait()
    headers = HTTPHeaders.parse(header_bytes.decode('latin1'))
    return headers

def test_pickle_roundtrip(self):
    headers = HTTPHeaders()
    headers.add('Set-Cookie', 'a=b')
    headers.add('Set-Cookie', 'c=d')
    headers.add('Content-Type', 'text/html')
    pickled = pickle.dumps(headers)
    unpickled = pickle.loads(pickled)
    self.assertEqual(sorted(headers.get_all()), sorted(unpickled.get_all()))
    self.assertEqual(sorted(headers.items()), sorted(unpickled.items()))

def test_string(self):
    headers = HTTPHeaders()
    headers.add("Foo", "1")
    headers.add("Foo", "2")
    headers.add("Foo", "3")
    headers2 = HTTPHeaders.parse(str(headers))
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(headers, headers2)

def respond_100(self, request):
    self.http1 = request.version.startswith('HTTP/1.')
    if not self.http1:
        request.connection.write_headers(ResponseStartLine('', 200, 'OK'),
                                         HTTPHeaders())
        request.connection.finish()
        return
    self.request = request
    self.request.connection.stream.write(
        b"HTTP/1.1 100 CONTINUE\r\n\r\n", self.respond_200)

def test_unix_socket(self):
    self.stream.write(b"GET /hello HTTP/1.0\r\n\r\n")
    self.stream.read_until(b"\r\n", self.stop)
    response = self.wait()
    self.assertEqual(response, b"HTTP/1.1 200 OK\r\n")
    self.stream.read_until(b"\r\n\r\n", self.stop)
    headers = HTTPHeaders.parse(self.wait().decode('latin1'))
    self.stream.read_bytes(int(headers["Content-Length"]), self.stop)
    body = self.wait()
    self.assertEqual(body, b"Hello world")

def test_optional_cr(self):
    # Both CRLF and LF should be accepted as separators. CR should not be
    # part of the data when followed by LF, but it is a normal char
    # otherwise (or should bare CR be an error?)
    headers = HTTPHeaders.parse(
        'CRLF: crlf\r\nLF: lf\nCR: cr\rMore: more\r\n')
    self.assertEqual(sorted(headers.get_all()),
                     [('Cr', 'cr\rMore: more'),
                      ('Crlf', 'crlf'),
                      ('Lf', 'lf'),
                      ])

def handle_request(request):
    self.http1 = request.version.startswith("HTTP/1.")
    if not self.http1:
        # This test will be skipped if we're using HTTP/2,
        # so just close it out cleanly using the modern interface.
        request.connection.write_headers(
            ResponseStartLine('', 200, 'OK'), HTTPHeaders())
        request.connection.finish()
        return
    message = b"Hello world"
    request.write(utf8("HTTP/1.1 200 OK\r\n"
                       "Content-Length: %d\r\n\r\n" % len(message)))
    request.write(message)
    request.finish()

# Assumed decorator: a generator-based test like this needs
# tornado.testing.gen_test to drive the yields.
@gen_test
def test_future_interface(self):
    """Basic test of IOStream's ability to return Futures."""
    stream = self._make_client_iostream()
    connect_result = yield stream.connect(
        ("127.0.0.1", self.get_http_port()))
    self.assertIs(connect_result, stream)
    yield stream.write(b"GET / HTTP/1.0\r\n\r\n")
    first_line = yield stream.read_until(b"\r\n")
    self.assertEqual(first_line, b"HTTP/1.1 200 OK\r\n")
    # callback=None is equivalent to no callback.
    header_data = yield stream.read_until(b"\r\n\r\n", callback=None)
    headers = HTTPHeaders.parse(header_data.decode('latin1'))
    content_length = int(headers['Content-Length'])
    body = yield stream.read_bytes(content_length)
    self.assertEqual(body, b'Hello')
    stream.close()

def test_copy(self):
    all_pairs = [('A', '1'), ('A', '2'), ('B', 'c')]
    h1 = HTTPHeaders()
    for k, v in all_pairs:
        h1.add(k, v)
    h2 = h1.copy()
    h3 = copy.copy(h1)
    h4 = copy.deepcopy(h1)
    for headers in [h1, h2, h3, h4]:
        # All the copies are identical, no matter how they were
        # constructed.
        self.assertEqual(list(sorted(headers.get_all())), all_pairs)
    for headers in [h2, h3, h4]:
        # Neither the dict nor its member lists are reused.
        self.assertIsNot(headers, h1)
        self.assertIsNot(headers.get_list('A'), h1.get_list('A'))

def test_unicode_newlines(self):
    # Ensure that only \r\n is recognized as a header separator, and not
    # the other newline-like unicode characters.
    # Characters that are likely to be problematic can be found in
    # http://unicode.org/standard/reports/tr13/tr13-5.html
    # and cpython's unicodeobject.c (which defines the implementation
    # of unicode_type.splitlines(), and uses a different list than TR13).
    newlines = [
        u'\u000b',  # VERTICAL TAB (was mislabeled as u'\u001b', ESCAPE)
        u'\u001c',  # FILE SEPARATOR
        u'\u001d',  # GROUP SEPARATOR
        u'\u001e',  # RECORD SEPARATOR
        u'\u0085',  # NEXT LINE
        u'\u2028',  # LINE SEPARATOR
        u'\u2029',  # PARAGRAPH SEPARATOR
    ]
    for newline in newlines:
        # Try the utf8 and latin1 representations of each newline
        for encoding in ['utf8', 'latin1']:
            try:
                try:
                    encoded = newline.encode(encoding)
                except UnicodeEncodeError:
                    # Some chars cannot be represented in latin1
                    continue
                data = b'Cookie: foo=' + encoded + b'bar'
                # parse() wants a native_str, so decode through latin1
                # in the same way the real parser does.
                headers = HTTPHeaders.parse(
                    native_str(data.decode('latin1')))
                expected = [('Cookie', 'foo=' +
                             native_str(encoded.decode('latin1')) + 'bar')]
                self.assertEqual(expected, list(headers.get_all()))
            except Exception:
                gen_log.warning("failed while trying %r in %s",
                                newline, encoding)
                raise

def respond_204(self, request):
    self.http1 = request.version.startswith('HTTP/1.')
    if not self.http1:
        # Close the request cleanly in HTTP/2; it will be skipped anyway.
        request.connection.write_headers(ResponseStartLine('', 200, 'OK'),
                                         HTTPHeaders())
        request.connection.finish()
        return
    # A 204 response never has a body, even if it doesn't have a
    # content-length (which would otherwise mean read-until-close).
    # Here we simulate a server that sends no content length and does
    # not close the connection.
    #
    # Tests of a 204 response with no Content-Length header are included
    # in SimpleHTTPClientTestMixin.
    stream = request.connection.detach()
    stream.write(b"HTTP/1.1 204 No content\r\n")
    if request.arguments.get("error", [False])[-1]:
        stream.write(b"Content-Length: 5\r\n")
    else:
        stream.write(b"Content-Length: 0\r\n")
    stream.write(b"\r\n")
    stream.close()

def test_multi_line(self):
    # Lines beginning with whitespace are appended to the previous line
    # with any leading whitespace replaced by a single space.
    # Note that while multi-line headers are a part of the HTTP spec,
    # their use is strongly discouraged.
    data = """\
Foo: bar
 baz
Asdf: qwer
\tzxcv
Foo: even
     more
     lines
""".replace("\n", "\r\n")
    headers = HTTPHeaders.parse(data)
    self.assertEqual(headers["asdf"], "qwer zxcv")
    self.assertEqual(headers.get_list("asdf"), ["qwer zxcv"])
    self.assertEqual(headers["Foo"], "bar baz,even more lines")
    self.assertEqual(headers.get_list("foo"),
                     ["bar baz", "even more lines"])
    self.assertEqual(sorted(list(headers.get_all())),
                     [("Asdf", "qwer zxcv"),
                      ("Foo", "bar baz"),
                      ("Foo", "even more lines")])

def finish(self):
    self.connection.write_headers(
        ResponseStartLine("HTTP/1.1", 200, "OK"),
        HTTPHeaders({"Content-Length": "2"}),
        b"OK")
    self.connection.finish()

def test_header_reuse(self):
    # Apps may reuse a headers object if they are only passing in constant
    # headers like user-agent. The header object should not be modified.
    headers = HTTPHeaders({'User-Agent': 'Foo'})
    self.fetch("/hello", headers=headers)
    self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])