class SocketConnection:
    """One-shot TCP client: connect to localhost:SOCKET_PORT, send a payload,
    then deliver the stripped reply to ``callback`` and close.

    ``SOCKET_PORT``, ``socket`` and ``IOStream`` come from module scope.
    """

    def __init__(self, send_str, callback):
        # Stream is created lazily in _get_stream(); nothing connects until
        # start_work() is called.
        self._stream = None
        self._host = 'localhost'
        self._port = SOCKET_PORT
        self._send_str = send_str  # payload forwarded verbatim to the server
        self._callback = callback  # called once with the stripped response

    def _get_stream(self):
        # Fresh TCP socket wrapped in a Tornado IOStream.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        self._stream = IOStream(sock)

    def start_work(self):
        """Open the connection; _send fires once connected."""
        self._get_stream()
        self._stream.connect((self._host, self._port), self._send)

    def _send(self):
        # Write the payload, then collect everything the server sends until
        # it closes the connection (the server is expected to close).
        self._stream.write(self._send_str)
        self._stream.read_until_close(self._finish)

    def _finish(self, data):
        # Connection is done; hand the trimmed response to the caller.
        self._stream.close()
        self._callback(data.strip())
def test_100_continue(self):
    # Run through a 100-continue interaction by hand:
    # When given Expect: 100-continue, we get a 100 response after the
    # headers, and then the real response after the body.
    stream = IOStream(socket.socket())
    stream.connect(("127.0.0.1", self.get_http_port()), callback=self.stop)
    self.wait()
    # Send only the request headers first; body follows after the 100.
    stream.write(b"\r\n".join([b"POST /hello HTTP/1.1",
                               b"Content-Length: 1024",
                               b"Expect: 100-continue",
                               b"Connection: close",
                               b"\r\n"]), callback=self.stop)
    self.wait()
    stream.read_until(b"\r\n\r\n", self.stop)
    data = self.wait()
    # Interim response: the server invites us to send the body.
    self.assertTrue(data.startswith(b"HTTP/1.1 100 "), data)
    stream.write(b"a" * 1024)
    stream.read_until(b"\r\n", self.stop)
    first_line = self.wait()
    # Final response arrives only after the full body was sent.
    self.assertTrue(first_line.startswith(b"HTTP/1.1 200"), first_line)
    stream.read_until(b"\r\n\r\n", self.stop)
    header_data = self.wait()
    headers = HTTPHeaders.parse(native_str(header_data.decode('latin1')))
    stream.read_bytes(int(headers["Content-Length"]), self.stop)
    body = self.wait()
    self.assertEqual(body, b"Got 1024 bytes in POST")
    stream.close()
def test_empty_request(self):
    """A connection opened and closed without sending a byte must be harmless."""
    conn = IOStream(socket.socket(), io_loop=self.io_loop)
    conn.connect(("localhost", self.get_http_port()), self.stop)
    self.wait()
    conn.close()
    # Give the server-side close handling a moment to run.
    self.io_loop.add_timeout(datetime.timedelta(milliseconds=1), self.stop)
    self.wait()
def test_message_response(self):
    # handle_stream may be a coroutine and any exception in its
    # Future will be logged.
    server = client = None
    try:
        sock, port = bind_unused_port()
        sock2, port2 = bind_unused_port()
        with NullContext():
            # Wire a NotifyServer into the StatusServer under test.
            server = StatusServer()
            notify_server = NotifyServer()
            notify_server.add_socket(sock2)
            server.notify_server = notify_server
            server.add_socket(sock)
            client = IOStream(socket.socket())
            yield client.connect(('localhost', port))
            # msg1 is a module-level fixture message — the expected 4-byte
            # reply below encodes its acknowledgement.
            yield client.write(msg1)
            results = yield client.read_bytes(4)
            assert results == b'\x11\x00\x01\x10'
    finally:
        # Always tear down, even if an assertion fails mid-test.
        if server is not None:
            server.stop()
        if client is not None:
            client.close()
class ForwardConnection(object):
    """One-way HTTP forwarder: replays captured ``headers`` to a remote
    server and pipes the full response back to the originating stream."""

    def __init__(self, remote_address, stream, address, headers):
        self.remote_address = remote_address
        self.stream = stream          # client-facing stream
        self.address = address        # client address (for logging)
        self.headers = headers        # raw request bytes to replay
        sock = socket.socket()
        self.remote_stream = IOStream(sock)
        self.remote_stream.connect(self.remote_address, self._on_remote_connected)
        self.remote_stream.set_close_callback(self._on_close)

    def _on_remote_write_complete(self):
        # Request fully sent; now collect the entire response.
        logging.info('send request to %s', self.remote_address)
        self.remote_stream.read_until_close(self._on_remote_read_close)

    def _on_remote_connected(self):
        logging.info('forward %r to %r', self.address, self.remote_address)
        self.remote_stream.write(self.headers, self._on_remote_write_complete)

    def _on_remote_read_close(self, data):
        # Relay the buffered response, then close the client connection.
        self.stream.write(data, self.stream.close)

    def _on_close(self):
        logging.info('remote quit %s', self.remote_address)
        self.remote_stream.close()
class _UDPConnection(object):
    """Single-shot connection: send ``request.data`` and wait for a reply
    terminated by ``}}`` (end of a JSON-ish payload).

    Despite the name, the address is resolved for SOCK_DGRAM and the data is
    funneled through a Tornado IOStream on the resolved socket type.
    """

    def __init__(self, io_loop, client, request, release_callback,
                 final_callback, max_buffer_size):
        self.start_time = time.time()
        self.io_loop = io_loop
        self.client = client
        self.request = request
        self.release_callback = release_callback
        self.final_callback = final_callback
        # Resolve to an IPv4 datagram address and use the first result.
        address_info = socket.getaddrinfo(request.address, request.port,
                                          socket.AF_INET, socket.SOCK_DGRAM,
                                          0, 0)
        af, socket_type, proto, _, socket_address = address_info[0]
        self.stream = IOStream(socket.socket(af, socket_type, proto),
                               io_loop=self.io_loop,
                               max_buffer_size=max_buffer_size)
        self.stream.connect(socket_address, self._on_connect)

    def _on_connect(self):
        self.stream.write(self.request.data)
        # Fix: IOStream delimiters must be bytes; the original passed the
        # str '}}', which raises TypeError under Python 3 Tornado.
        self.stream.read_until(b'}}', self._on_response)

    def _on_response(self, data):
        # Release the connection slot before invoking final_callback so the
        # client can reuse it even if final_callback raises.
        if self.release_callback is not None:
            release_callback = self.release_callback
            self.release_callback = None
            release_callback()
        self.stream.close()
        # Both callbacks are one-shot: cleared before being invoked.
        if self.final_callback is not None:
            final_callback = self.final_callback
            self.final_callback = None
            final_callback(data)
def test_handle_stream_coroutine_logging(self):
    # handle_stream may be a coroutine and any exception in its
    # Future will be logged.
    class TestServer(TCPServer):
        @gen.coroutine
        def handle_stream(self, stream, address):
            yield gen.moment
            stream.close()
            1 / 0  # deliberate error: must end up in the app log, not vanish

    server = client = None
    try:
        sock, port = bind_unused_port()
        with NullContext():
            server = TestServer()
            server.add_socket(sock)
            client = IOStream(socket.socket())
            # The server closes immediately; we only care that the ZeroDivisionError
            # from its coroutine is logged.
            with ExpectLog(app_log, "Exception in callback"):
                yield client.connect(('localhost', port))
                yield client.read_until_close()
                yield gen.moment
    finally:
        if server is not None:
            server.stop()
        if client is not None:
            client.close()
def test_100_continue(self):
    # Run through a 100-continue interaction by hand:
    # When given Expect: 100-continue, we get a 100 response after the
    # headers, and then the real response after the body.
    stream = IOStream(socket.socket())
    yield stream.connect(("127.0.0.1", self.get_http_port()))
    # Send only the request headers first; the body follows after the 100.
    yield stream.write(
        b"\r\n".join(
            [
                b"POST /hello HTTP/1.1",
                b"Content-Length: 1024",
                b"Expect: 100-continue",
                b"Connection: close",
                b"\r\n",
            ]
        )
    )
    data = yield stream.read_until(b"\r\n\r\n")
    # Interim response: server invites us to send the body.
    self.assertTrue(data.startswith(b"HTTP/1.1 100 "), data)
    # Fix: yield the body write like every other I/O call in this test so
    # flow control applies and the write is flushed before we read.
    yield stream.write(b"a" * 1024)
    first_line = yield stream.read_until(b"\r\n")
    self.assertTrue(first_line.startswith(b"HTTP/1.1 200"), first_line)
    header_data = yield stream.read_until(b"\r\n\r\n")
    headers = HTTPHeaders.parse(native_str(header_data.decode("latin1")))
    body = yield stream.read_bytes(int(headers["Content-Length"]))
    self.assertEqual(body, b"Got 1024 bytes in POST")
    stream.close()
def test_empty_request(self):
    """Connecting and disconnecting without sending data must not crash the server."""
    conn = IOStream(socket.socket(), io_loop=self.io_loop)
    conn.connect(('localhost', self.get_http_port()), self.stop)
    self.wait()
    conn.close()
    # Let the server observe the disconnect before the test ends.
    self.io_loop.add_timeout(datetime.timedelta(milliseconds=1), self.stop)
    self.wait()
async def _send_tplink_command(self, command):
    """Send an encrypted command to a TP-Link smart plug and return the
    decoded JSON reply.

    Supported commands: "on", "off" (relay state) and "info" (sysinfo).
    Raises a server error for unknown commands or on any network failure.
    """
    out_cmd = {}
    if command in ["on", "off"]:
        out_cmd = {'system': {'set_relay_state': {'state': int(command == "on")}}}
    elif command == "info":
        out_cmd = {'system': {'get_sysinfo': {}}}
    else:
        raise self.server.error(f"Invalid tplink command: {command}")
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    stream = IOStream(s)
    try:
        await stream.connect((self.addr, self.port))
        await stream.write(self._encrypt(out_cmd))
        data = await stream.read_bytes(2048, partial=True)
        # First 4 bytes are a big-endian length prefix for the payload.
        length = struct.unpack(">I", data[:4])[0]
        data = data[4:]
        retries = 5
        remaining = length - len(data)
        while remaining and retries:
            data += await stream.read_bytes(remaining)
            remaining = length - len(data)
            retries -= 1
        # Fix: fail on an incomplete payload. The original tested
        # `not retries`, which raised a spurious error when the data
        # completed exactly as the retry budget ran out.
        if remaining:
            raise self.server.error("Unable to read tplink packet")
    except Exception:
        msg = f"Error sending tplink command: {command}"
        logging.exception(msg)
        raise self.server.error(msg)
    finally:
        stream.close()
    return json.loads(self._decrypt(data))
def test_100_continue(self):
    # Run through a 100-continue interaction by hand:
    # When given Expect: 100-continue, we get a 100 response after the
    # headers, and then the real response after the body.
    # (b(...) is the pre-Tornado-3 bytes-literal helper.)
    stream = IOStream(socket.socket(), io_loop=self.io_loop)
    stream.connect(("localhost", self.get_http_port()), callback=self.stop)
    self.wait()
    # Headers only; body is withheld until the 100 arrives.
    stream.write(b("\r\n").join([
        b("POST /hello HTTP/1.1"),
        b("Content-Length: 1024"),
        b("Expect: 100-continue"),
        b("Connection: close"),
        b("\r\n")
    ]), callback=self.stop)
    self.wait()
    stream.read_until(b("\r\n\r\n"), self.stop)
    data = self.wait()
    self.assertTrue(data.startswith(b("HTTP/1.1 100 ")), data)
    stream.write(b("a") * 1024)
    stream.read_until(b("\r\n"), self.stop)
    first_line = self.wait()
    self.assertTrue(first_line.startswith(b("HTTP/1.1 200")), first_line)
    stream.read_until(b("\r\n\r\n"), self.stop)
    header_data = self.wait()
    headers = HTTPHeaders.parse(native_str(header_data.decode('latin1')))
    stream.read_bytes(int(headers["Content-Length"]), self.stop)
    body = self.wait()
    self.assertEqual(body, b("Got 1024 bytes in POST"))
    stream.close()
async def aiter_content(self):
    """
    Async generator yielding payload chunks read from ``self._url``.

    Ref:
    - https://stackoverflow.com/questions/32310951/how-to-get-the-underlying-socket-when-using-python-requests
    - https://www.tornadoweb.org/en/stable/iostream.html
    - https://realpython.com/async-io-python/#other-features-async-for-and-async-generators-comprehensions
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    stream = IOStream(s)
    try:
        url = urllib.request.urlparse(self._url)
        # NOTE(review): assumes the URL always has an explicit "host:port"
        # netloc; a port-less URL would make this split raise — confirm.
        host, port = url.netloc.split(":")
        port = int(port)
        path = url.path or "/"
        await stream.connect((host, port))
        await stream.write(
            "GET {path} HTTP/1.0\r\nHost: {netloc}\r\n\r\n".format(
                path=path, netloc=url.netloc).encode('utf-8'))
        # Initial response header block is consumed and discarded here.
        header_data = await stream.read_until(b"\r\n\r\n")
        while True:
            # NOTE(review): this scans for "Content-Length"-framed records
            # *after* the header block, i.e. inside the body stream —
            # presumably the server repeatedly emits
            # "Content-Length: N\r\n\r\n<N bytes>" frames; confirm against
            # the server implementation. The loop ends only when the stream
            # closes (StreamClosedError propagates to the consumer).
            line = await stream.read_until(b'\r\n')
            if not line.startswith(b"Content-Length"):
                continue
            length = int(line.decode('utf-8').split(": ")[1])
            # Skip the blank separator line before the payload.
            await stream.read_until(b"\r\n")
            yield await stream.read_bytes(length)
    finally:
        stream.close()
class ForwardConnection(object):
    """Bidirectional TCP proxy between an accepted client ``stream`` and a
    freshly opened connection to ``remote_address``."""

    def __init__(self, remote_address, stream, address):
        self.remote_address = remote_address
        self.stream = stream      # client side
        self.address = address    # client address (for logging)
        sock = socket.socket()
        self.remote_stream = IOStream(sock)
        self.remote_stream.connect(self.remote_address, self._on_remote_connected)

    def _on_remote_connected(self):
        logging.info("forward %r to %r", self.address, self.remote_address)
        # Pump both directions: the streaming callback relays bytes as they
        # arrive; the final callback flushes leftovers and closes the peer.
        self.remote_stream.read_until_close(self._on_remote_read_close, self.stream.write)
        self.stream.read_until_close(self._on_read_close, self.remote_stream.write)

    def _on_remote_read_close(self, data):
        # If a relay write is still in flight, chain the close after it so
        # buffered data is not dropped.
        if self.stream.writing():
            self.stream.write(data, self.stream.close)
        else:
            self.stream.close()

    def _on_read_close(self, data):
        if self.remote_stream.writing():
            self.remote_stream.write(data, self.remote_stream.close)
        else:
            self.remote_stream.close()
class HTTPServerRawTest(AsyncHTTPTestCase):
    """Tests that speak raw bytes to HTTPServer over a hand-managed IOStream,
    covering inputs AsyncHTTPClient cannot produce (empty/malformed requests)."""

    def get_app(self):
        return Application([("/echo", EchoHandler)])

    def setUp(self):
        super(HTTPServerRawTest, self).setUp()
        # Every test starts with an already-connected raw stream.
        self.stream = IOStream(socket.socket())
        self.stream.connect(("localhost", self.get_http_port()), self.stop)
        self.wait()

    def tearDown(self):
        self.stream.close()
        super(HTTPServerRawTest, self).tearDown()

    def test_empty_request(self):
        # Closing without sending anything must not crash the server.
        self.stream.close()
        self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
        self.wait()

    def test_malformed_first_line(self):
        with ExpectLog(gen_log, ".*Malformed HTTP request line"):
            self.stream.write(b"asdf\r\n\r\n")
            # TODO: need an async version of ExpectLog so we don't need
            # hard-coded timeouts here.
            self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), self.stop)
            self.wait()

    def test_malformed_headers(self):
        with ExpectLog(gen_log, ".*Malformed HTTP headers"):
            self.stream.write(b"GET / HTTP/1.0\r\nasdf\r\n\r\n")
            self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), self.stop)
            self.wait()
class ManualCapClient(BaseCapClient):
    """Cap client that drives the socket with explicit callbacks, returning
    a Future that resolves to the processed response."""

    def capitalize(self, request_data, callback=None):
        logging.debug("capitalize")
        self.request_data = request_data
        self.stream = IOStream(socket.socket())
        self.stream.connect(('127.0.0.1', self.port),
                            callback=self.handle_connect)
        self.future = Future()
        if callback is not None:
            # Bridge Future style back to callback style for older callers.
            self.future.add_done_callback(
                stack_context.wrap(lambda future: callback(future.result())))
        return self.future

    def handle_connect(self):
        logging.debug("handle_connect")
        # Line-oriented protocol: send one line, read one line back.
        self.stream.write(utf8(self.request_data + "\n"))
        self.stream.read_until(b'\n', callback=self.handle_read)

    def handle_read(self, data):
        logging.debug("handle_read")
        self.stream.close()
        try:
            self.future.set_result(self.process_response(data))
        except CapError as e:
            # Protocol-level failure is surfaced through the Future.
            self.future.set_exception(e)
async def handle_stream(self, stream: IOStream, address: Tuple[str, str]) -> None:
    """
    :param stream: Client IOStream to read telemetry data from
    :type stream: IOStream
    :param address: The IP address and port on which a client connects to the server
    :type address: Tuple[str,str]
    :return: None
    """
    try:
        self.log.info(f"Got Connection from {address[0]}:{address[1]}")
        while not stream.closed():
            # Fixed-size header precedes each telemetry message.
            header_data: bytes = await stream.read_bytes(self._header_size)
            (
                msg_type,
                encode_type,
                msg_version,
                flags,
                msg_length,
            ) = self._header_struct.unpack(header_data)
            # encoding = {1: "gpb", 2: "json"}[encode_type]  # implement json encoding
            # Accumulate exactly msg_length payload bytes (reads may be short).
            msg_data: bytes = b""
            while len(msg_data) < msg_length:
                packet: bytes = await stream.read_bytes(msg_length - len(msg_data))
                msg_data += packet
            # Hand off to the processing pipeline; never blocks.
            self.data_queue.put_nowait(
                ("ems", msg_data, None, None, address[0]))
    except StreamClosedError as error:
        # Normal client disconnect lands here too.
        self.log.error(f'{address[0]}:{address[1]} {error}')
        stream.close()
class ManualCapClient(BaseCapClient):
    """Cap client variant using explicit callbacks and info-level logging;
    capitalize() returns a Future with the processed response."""

    def capitalize(self, request_data, callback=None):
        logging.info("capitalize")
        self.request_data = request_data
        self.stream = IOStream(socket.socket())
        self.stream.connect(('127.0.0.1', self.port),
                            callback=self.handle_connect)
        self.future = Future()
        if callback is not None:
            # Support legacy callback style alongside the returned Future.
            self.future.add_done_callback(
                stack_context.wrap(lambda future: callback(future.result())))
        return self.future

    def handle_connect(self):
        logging.info("handle_connect")
        # One request line out, one response line in.
        self.stream.write(utf8(self.request_data + "\n"))
        self.stream.read_until(b'\n', callback=self.handle_read)

    def handle_read(self, data):
        logging.info("handle_read")
        self.stream.close()
        try:
            self.future.set_result(self.process_response(data))
        except CapError as e:
            self.future.set_exception(e)
class TecoRouteHttpClient:
    """Minimal HTTP/1.1 client over a raw socket that preserves header case
    (required by the TecoRoute service, which Tornado's writer would break)."""

    def __init__(self, host, port):
        # Blocking connect; the stream/connection wrap the connected socket.
        self._sock = socket()
        self._sock.connect((host, port))
        self._stream = IOStream(self._sock)
        self._connection = HTTP1Connection(self._stream, True)

    def send_request(self, request, headers_raw=None):
        """Serialize and send ``request``; ``headers_raw`` entries are
        appended verbatim (case preserved)."""
        url_tuple = urlparse(request.url)
        # Keep only path+query+fragment for the request line.
        url_from_path = urlunparse(('', ) * 2 + url_tuple[2:]) or '/'
        request.headers.setdefault('Host', 'NT_Host')
        if request.body is not None:
            request.headers.setdefault('Content-Length', str(len(request.body)))
        request.headers.setdefault(
            'User-Agent',
            'tecoroute/1.0 (https://github.com/czetech/tecoroute)')
        # Tornado's method write_headers doesn't support case-sensitive headers,
        # so the start line and headers must be formatted here and written
        # directly to the stream.
        raw = '{method} {path} HTTP/1.1\r\n'.format(method=request.method,
                                                    path=url_from_path)
        for key, value in chain(
                request.headers.get_all(),
                (headers_raw if headers_raw is not None else {}).items()):
            raw += '{key}: {value}\r\n'.format(key=key, value=value)
        raw += '\r\n'
        self._connection.stream.write(raw.encode('ascii'))
        if request.body is not None:
            self._connection.write(request.body)
        self._connection.finish()

    # NOTE(review): mutable default HTTPRequest('') is evaluated once at
    # definition time and shared across calls — presumably only used as a
    # placeholder for the response object; confirm it is never mutated.
    async def receive_response(self, request=HTTPRequest('')):
        """Read one response from the connection and wrap it as HTTPResponse."""
        http_message = _HttpMessage()
        await self._connection.read_response(http_message)
        return HTTPResponse(request, http_message.start_line.code,
                            headers=http_message.headers,
                            buffer=http_message.buffer,
                            reason=http_message.start_line.reason)

    async def request(self, request, headers_raw=None):
        """Send ``request`` and await its response."""
        self.send_request(request, headers_raw=headers_raw)
        return await self.receive_response(request=request)

    def close(self):
        # Best-effort teardown: any of these may not exist if __init__
        # failed partway through, hence the AttributeError guards.
        try:
            self._connection.detach()
        except AttributeError:
            pass
        try:
            self._stream.close()
        except AttributeError:
            pass
        try:
            self._sock.close()
        except AttributeError:
            pass
async def _pipe_stream(self, _in: IOStream, out: IOStream):
    """Copy bytes from ``_in`` to ``out`` until either side closes.

    Reads up to 10 KiB at a time (partial reads allowed) and forwards each
    chunk. On StreamClosedError from either end, both streams are closed
    exactly once and the pump stops — the original re-entered the loop and
    could invoke close twice (as its own comment noted).
    """
    while not _in.closed():
        try:
            data = await _in.read_bytes(10240, partial=True)
            await out.write(data)
        except StreamClosedError:
            # Tear down both directions once, then stop pumping.
            _in.close()
            out.close()
            break
class HTTPServerRawTest(AsyncHTTPTestCase):
    """Raw-socket HTTPServer tests for inputs AsyncHTTPClient cannot send:
    empty requests, malformed data, and chunked request bodies."""

    def get_app(self):
        return Application([
            ('/echo', EchoHandler),
        ])

    def setUp(self):
        super(HTTPServerRawTest, self).setUp()
        # Every test starts with a connected raw stream to the server.
        self.stream = IOStream(socket.socket())
        self.stream.connect(('localhost', self.get_http_port()), self.stop)
        self.wait()

    def tearDown(self):
        self.stream.close()
        super(HTTPServerRawTest, self).tearDown()

    def test_empty_request(self):
        # Connect-then-close with no data must be handled gracefully.
        self.stream.close()
        self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
        self.wait()

    def test_malformed_first_line(self):
        with ExpectLog(gen_log, '.*Malformed HTTP request line'):
            self.stream.write(b'asdf\r\n\r\n')
            # TODO: need an async version of ExpectLog so we don't need
            # hard-coded timeouts here.
            self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), self.stop)
            self.wait()

    def test_malformed_headers(self):
        with ExpectLog(gen_log, '.*Malformed HTTP headers'):
            self.stream.write(b'GET / HTTP/1.0\r\nasdf\r\n\r\n')
            self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), self.stop)
            self.wait()

    def test_chunked_request_body(self):
        # Chunked requests are not widely supported and we don't have a way
        # to generate them in AsyncHTTPClient, but HTTPServer will read them.
        self.stream.write(b"""\
POST /echo HTTP/1.1
Transfer-Encoding: chunked
Content-Type: application/x-www-form-urlencoded

4
foo=
3
bar
0

""".replace(b"\n", b"\r\n"))
        read_stream_body(self.stream, self.stop)
        headers, response = self.wait()
        self.assertEqual(json_decode(response), {u('foo'): [u('bar')]})
class HTTPServerRawTest(AsyncHTTPTestCase):
    """Raw-socket HTTPServer tests (127.0.0.1 variant): empty requests,
    malformed data, and chunked request bodies sent byte-by-byte."""

    def get_app(self):
        return Application([
            ('/echo', EchoHandler),
        ])

    def setUp(self):
        super(HTTPServerRawTest, self).setUp()
        # All tests share one pre-connected raw stream.
        self.stream = IOStream(socket.socket())
        self.stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
        self.wait()

    def tearDown(self):
        self.stream.close()
        super(HTTPServerRawTest, self).tearDown()

    def test_empty_request(self):
        # A silent connect/close must not crash the server.
        self.stream.close()
        self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
        self.wait()

    def test_malformed_first_line(self):
        with ExpectLog(gen_log, '.*Malformed HTTP request line'):
            self.stream.write(b'asdf\r\n\r\n')
            # TODO: need an async version of ExpectLog so we don't need
            # hard-coded timeouts here.
            self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), self.stop)
            self.wait()

    def test_malformed_headers(self):
        with ExpectLog(gen_log, '.*Malformed HTTP headers'):
            self.stream.write(b'GET / HTTP/1.0\r\nasdf\r\n\r\n')
            self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), self.stop)
            self.wait()

    def test_chunked_request_body(self):
        # Chunked requests are not widely supported and we don't have a way
        # to generate them in AsyncHTTPClient, but HTTPServer will read them.
        self.stream.write(b"""\
POST /echo HTTP/1.1
Transfer-Encoding: chunked
Content-Type: application/x-www-form-urlencoded

4
foo=
3
bar
0

""".replace(b"\n", b"\r\n"))
        read_stream_body(self.stream, self.stop)
        headers, response = self.wait()
        self.assertEqual(json_decode(response), {u('foo'): [u('bar')]})
def capitalize(self, request_data):
    """Send *request_data* to the cap server and return the processed reply."""
    logging.debug('capitalize')
    conn = IOStream(socket.socket())
    logging.debug('connecting')
    yield conn.connect(('127.0.0.1', self.port))
    # Line-oriented protocol: one line out, one line back.
    conn.write(utf8(request_data + '\n'))
    logging.debug('reading')
    reply = yield conn.read_until(b'\n')
    logging.debug('returning')
    conn.close()
    raise gen.Return(self.process_response(reply))
def capitalize(self, request_data, callback):
    """Send *request_data* to the cap server; deliver the reply via *callback*."""
    logging.info('capitalize')
    conn = IOStream(socket.socket())
    logging.info('connecting')
    yield gen.Task(conn.connect, ('127.0.0.1', self.port))
    # One request line out, one response line back.
    conn.write(utf8(request_data + '\n'))
    logging.info('reading')
    reply = yield gen.Task(conn.read_until, b'\n')
    logging.info('returning')
    conn.close()
    callback(self.process_response(reply))
def capitalize(self, request_data):
    """Send *request_data* to the cap server at 10.0.0.7 and return the reply."""
    logging.debug("capitalize")
    conn = IOStream(socket.socket())
    logging.debug("connecting")
    yield conn.connect(("10.0.0.7", self.port))
    # Newline-delimited request/response exchange.
    conn.write(utf8(request_data + "\n"))
    logging.debug("reading")
    reply = yield conn.read_until(b"\n")
    logging.debug("returning")
    conn.close()
    raise gen.Return(self.process_response(reply))
def capitalize(self, request_data, callback):
    """Send *request_data* to the cap server; invoke *callback* with the reply."""
    logging.info('capitalize')
    conn = IOStream(socket.socket(), io_loop=self.io_loop)
    logging.info('connecting')
    yield gen.Task(conn.connect, ('127.0.0.1', self.port))
    # Write one request line, wait for one response line.
    conn.write(utf8(request_data + '\n'))
    logging.info('reading')
    reply = yield gen.Task(conn.read_until, b('\n'))
    logging.info('returning')
    conn.close()
    callback(self.process_response(reply))
def accept_callback(conn, address):
    # Minimal hand-rolled HTTP server for one connection: read the request,
    # then reply with a header that includes a folded continuation line
    # (leading tab) to exercise the client's header-folding handling.
    stream = IOStream(conn)
    request_data = yield stream.read_until(b"\r\n\r\n")
    if b"HTTP/1." not in request_data:
        self.skipTest("requires HTTP/1.x")
    yield stream.write(b"""\
HTTP/1.1 200 OK
X-XSS-Protection: 1;
\tmode=block

""".replace(b"\n", b"\r\n"))
    stream.close()
def test_timeout(self):
    stream = IOStream(socket.socket())
    try:
        yield stream.connect(("127.0.0.1", self.get_http_port()))
        # Use a raw stream because AsyncHTTPClient won't let us read a
        # response without finishing a body.
        stream.write(b"PUT /streaming?body_timeout=0.1 HTTP/1.0\r\n"
                     b"Content-Length: 42\r\n\r\n")
        # The promised 42-byte body never arrives, so the server should hit
        # its body timeout and close without sending any response bytes.
        with ExpectLog(gen_log, "Timeout reading body"):
            response = yield stream.read_until_close()
        self.assertEqual(response, b"")
    finally:
        stream.close()
class Flash(object):
    """Client that connects to a command server and executes binary-framed
    commands: each frame is a 1-byte type plus a 2-byte big-endian payload
    length, followed by the payload."""

    def __init__(self, close_callback=None):
        self._iostream = None
        self._close_callback = close_callback

    def connect(self, host='127.0.0.1', port=9999):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._iostream = IOStream(sock)
        self._iostream.set_close_callback(self._on_connection_close)
        # Connect, then start listening for commands.
        self._iostream.connect((host, port), self._read_head)

    def close(self):
        self._on_connection_close()

    def _on_connection_close(self):
        self._iostream.close()
        if self._close_callback:
            self._close_callback()

    def _read_head(self):
        # Read the fixed-size frame header before anything else.
        self._iostream.read_bytes(BaseCommand.meta_size, self._on_read_head)

    def _on_read_head(self, data):
        # ">BH": unsigned command type, then big-endian payload length.
        ctype, length = struct.unpack(">BH", data)
        if length:
            self._iostream.read_bytes(length, partial(self.execute_command, ctype))
        else:
            self.execute_command(ctype)

    def execute_command(self, ctype, value=None):
        command = CommandsRegistry.get_by_type(ctype)
        if command is not None:
            command.execute(value)
        # Unknown command types are silently ignored; keep reading frames.
        self._read_head()

    @classmethod
    def start(cls, host, port):
        """Run a Flash client on the singleton IOLoop until interrupted."""
        flash = cls(close_callback=IOLoop.instance().stop)
        flash.connect(host, port)
        # NOTE(review): signal handlers are called with (signum, frame) but
        # flash.close takes no extra arguments — this looks like it would
        # raise TypeError on SIGINT; confirm.
        signal.signal(signal.SIGINT, flash.close)
        IOLoop.instance().start()
        IOLoop.instance().close()
async def notify(socket_name):
    """Tell systemd via its notification socket that we are up (READY=1)."""
    if socket_name[0] == '@':
        # Abstract-namespace sockets are addressed with a leading NUL byte.
        socket_name = '\0' + socket_name[1:]
    log.pcsd.info("Notifying systemd we are running (socket '%s')", socket_name)
    try:
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
        stream = IOStream(sock)
        await stream.connect(socket_name)
        await stream.write(b'READY=1')
        stream.close()
    except Exception as e:
        # Best effort only: failure to notify must not take pcsd down.
        log.pcsd.error("Unable to notify systemd on '%s': %s", socket_name, e)
def test_timeout(self):
    stream = IOStream(socket.socket())
    try:
        yield stream.connect(("10.0.0.7", self.get_http_port()))
        # Use a raw stream because AsyncHTTPClient won't let us read a
        # response without finishing a body.
        stream.write(b"PUT /streaming?body_timeout=0.1 HTTP/1.0\r\n"
                     b"Content-Length: 42\r\n\r\n")
        # Body never arrives; the server should time out reading it and
        # close the connection without emitting a response.
        with ExpectLog(gen_log, "Timeout reading body"):
            response = yield stream.read_until_close()
        self.assertEqual(response, b"")
    finally:
        stream.close()
class HTTP1ConnectionTest(AsyncTestCase): code = None # type: typing.Optional[int] def setUp(self): super().setUp() self.asyncSetUp() @gen_test def asyncSetUp(self): listener, port = bind_unused_port() event = Event() def accept_callback(conn, addr): self.server_stream = IOStream(conn) self.addCleanup(self.server_stream.close) event.set() add_accept_handler(listener, accept_callback) self.client_stream = IOStream(socket.socket()) self.addCleanup(self.client_stream.close) yield [self.client_stream.connect(("127.0.0.1", port)), event.wait()] self.io_loop.remove_handler(listener) listener.close() @gen_test def test_http10_no_content_length(self): # Regression test for a bug in which can_keep_alive would crash # for an HTTP/1.0 (not 1.1) response with no content-length. conn = HTTP1Connection(self.client_stream, True) self.server_stream.write(b"HTTP/1.0 200 Not Modified\r\n\r\nhello") self.server_stream.close() event = Event() test = self body = [] class Delegate(HTTPMessageDelegate): def headers_received(self, start_line, headers): test.code = start_line.code def data_received(self, data): body.append(data) def finish(self): event.set() yield conn.read_response(Delegate()) yield event.wait() self.assertEqual(self.code, 200) self.assertEqual(b"".join(body), b"hello")
async def notify(socket_name):
    """Report readiness (READY=1) to systemd over its notification socket."""
    if socket_name[0] == "@":
        # '@' marks an abstract-namespace socket; the real address starts
        # with a NUL byte instead.
        socket_name = "\0" + socket_name[1:]
    log.pcsd.info("Notifying systemd we are running (socket '%s')", socket_name)
    try:
        stream = IOStream(socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM))
        await stream.connect(socket_name)
        await stream.write(b"READY=1")
        stream.close()
    # pylint: disable=broad-except
    except Exception as e:
        # Notification is best-effort; log and carry on.
        log.pcsd.error("Unable to notify systemd on '%s': %s", socket_name, e)
async def notify(socket_name):
    """Send systemd the READY=1 notification over *socket_name*."""
    if socket_name[0] == '@':
        # Abstract namespace socket: replace '@' with the NUL prefix.
        socket_name = '\0' + socket_name[1:]
    log.pcsd.info("Notifying systemd we are running (socket '%s')", socket_name)
    try:
        sd_sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
        stream = IOStream(sd_sock)
        await stream.connect(socket_name)
        await stream.write(b'READY=1')
        stream.close()
    # pylint: disable=broad-except
    except Exception as e:
        # Never let a failed notification break startup.
        log.pcsd.error("Unable to notify systemd on '%s': %s", socket_name, e)
class RemoteUpstream(Upstream):
    """
    Most methods are the same as in LocalUpstream, but they may need to
    differ in the future.
    """

    def initialize(self):
        self.socket = socket.socket(self._address_type, socket.SOCK_STREAM)
        self.stream = IOStream(self.socket)
        self.stream.set_close_callback(self.on_close)

    def do_connect(self):
        self.stream.connect(self.dest, self.on_connect)

    @property
    def address(self):
        # Local (host, port) of the outbound socket.
        return self.socket.getsockname()

    @property
    def address_type(self):
        return self._address_type

    def on_connect(self):
        self.connection_callback(self)
        # Final chunk is delivered with finished=True; intermediate chunks
        # go through the streaming path.
        on_finish = functools.partial(self.on_streaming_data, finished=True)
        self.stream.read_until_close(on_finish, self.on_streaming_data)

    def on_close(self):
        # Distinguish error close from orderly close for the owner.
        if self.stream.error:
            self.error_callback(self, self.stream.error)
        else:
            self.close_callback(self)

    def on_streaming_data(self, data, finished=False):
        # NOTE(review): `finished` is accepted but not forwarded to
        # streaming_callback here, while a sibling variant of this class
        # passes it through — confirm which signature streaming_callback
        # actually expects.
        if len(data):
            self.streaming_callback(self, data)

    def do_write(self, data):
        try:
            self.stream.write(data)
        except IOError as e:
            # Treat write failure as connection loss.
            self.close()

    def do_close(self):
        if self.socket:
            logger.info("close upstream: %s:%s" % self.address)
            self.stream.close()
class HTTP1ConnectionTest(AsyncTestCase):
    """Drives HTTP1Connection directly over a connected socket pair
    (Python-2-compatible variant)."""

    def setUp(self):
        super(HTTP1ConnectionTest, self).setUp()
        self.asyncSetUp()

    @gen_test
    def asyncSetUp(self):
        # Set up a connected (server_stream, client_stream) pair on an
        # ephemeral port, then stop listening.
        listener, port = bind_unused_port()
        event = Event()

        def accept_callback(conn, addr):
            self.server_stream = IOStream(conn)
            self.addCleanup(self.server_stream.close)
            event.set()

        add_accept_handler(listener, accept_callback)
        self.client_stream = IOStream(socket.socket())
        self.addCleanup(self.client_stream.close)
        # Both sides must be ready before any test runs.
        yield [self.client_stream.connect(('127.0.0.1', port)), event.wait()]
        self.io_loop.remove_handler(listener)
        listener.close()

    @gen_test
    def test_http10_no_content_length(self):
        # Regression test for a bug in which can_keep_alive would crash
        # for an HTTP/1.0 (not 1.1) response with no content-length.
        conn = HTTP1Connection(self.client_stream, True)
        self.server_stream.write(b"HTTP/1.0 200 Not Modified\r\n\r\nhello")
        self.server_stream.close()
        event = Event()
        test = self
        body = []

        class Delegate(HTTPMessageDelegate):
            def headers_received(self, start_line, headers):
                test.code = start_line.code

            def data_received(self, data):
                body.append(data)

            def finish(self):
                event.set()

        yield conn.read_response(Delegate())
        yield event.wait()
        self.assertEqual(self.code, 200)
        self.assertEqual(b''.join(body), b'hello')
def io_stream(request, io_loop):
    """Yield a no-delay `tornado.iostream.IOStream`.

    The stream runs on the current `tornado.ioloop.IOLoop` provided by the
    `io_loop` fixture. The no-delay flag asks that data be written as soon
    as possible, even at the cost of extra bandwidth; the stream is closed
    during fixture teardown.
    """
    raw_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    io_stream_ = IOStream(raw_socket)
    io_stream_.set_nodelay(True)
    yield io_stream_
    io_stream_.close()
def io_stream(request, io_loop):
    '''Yield a `tornado.iostream.IOStream` with the no-delay flag set.

    The current `tornado.ioloop.IOLoop` (from the `io_loop` fixture) backs
    the stream. No-delay requests that writes go out as soon as possible,
    even if that consumes additional bandwidth. The stream is closed on
    fixture teardown.
    '''
    raw_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    io_stream_ = IOStream(raw_socket)
    io_stream_.set_nodelay(True)
    yield io_stream_
    io_stream_.close()
class ForwardConnection(object):
    """Server side of a configured port-forward: bridges an accepted client
    stream with a new connection to the remote address mapped for the local
    listener the client hit."""

    def __init__(self, server, stream, address):
        self._close_callback = None
        self.server = server
        self.stream = stream                      # client side
        self.reverse_address = address            # client (ip, port)
        # The local listening address determines the forward target.
        self.address = stream.socket.getsockname()
        self.remote_address = server.conf[self.address]
        sock = socket.socket()
        self.remote_stream = IOStream(sock)
        self.remote_stream.connect(self.remote_address, self._on_remote_connected)

    def close(self):
        self.remote_stream.close()

    def set_close_callback(self, callback):
        # Called once with self when either direction fully shuts down.
        self._close_callback = callback

    def _on_remote_connected(self):
        ip_from = self.reverse_address[0]
        fwd_str = get_forwarding_str(self.address[0], self.address[1],
                                     self.remote_address[0],
                                     self.remote_address[1])
        logging.info('Connected ip: %s, forward %s', ip_from, fwd_str)
        # Pump both directions; streaming callbacks relay live data and the
        # final callbacks flush leftovers and close the peer.
        self.remote_stream.read_until_close(self._on_remote_read_close, self.stream.write)
        self.stream.read_until_close(self._on_read_close, self.remote_stream.write)

    def _on_remote_read_close(self, data):
        # Chain the close behind any in-flight write so data isn't dropped.
        if self.stream.writing():
            self.stream.write(data, self.stream.close)
        else:
            if self.stream.closed():
                self._on_closed()
            else:
                self.stream.close()

    def _on_read_close(self, data):
        if self.remote_stream.writing():
            self.remote_stream.write(data, self.remote_stream.close)
        else:
            if self.remote_stream.closed():
                self._on_closed()
            else:
                self.remote_stream.close()

    def _on_closed(self):
        logging.info('Disconnected ip: %s', self.reverse_address[0])
        if self._close_callback:
            self._close_callback(self)
class ESME(DeliverMixin, BaseESME):
    """Tornado IOStream transport for the ESME base class: connects,
    pumps received bytes into the protocol, and bridges responses through
    Futures."""

    def __init__(self, **kwargs):
        BaseESME.__init__(self, **kwargs)
        self.running = False   # True while readloop is active
        self.closed = False    # set once on_close fires

    @coroutine
    def connect(self, host, port):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        self.ioloop = IOLoop.current()
        self.stream = IOStream(s)
        yield self.stream.connect((host, port))

    def on_send(self, data):
        # Returns the write Future so callers may await the flush.
        return self.stream.write(data)

    def on_close(self):
        self.closed = True
        self.stream.close()

    @coroutine
    def readloop(self, future):
        # Feed incoming bytes to the protocol until the connection closes
        # or the awaited response future resolves.
        while not self.closed and (not future or not future.done()):
            try:
                data = yield self.stream.read_bytes(1024, partial=True)
            except StreamClosedError:  # pragma: no cover
                break
            else:
                self.feed(data)

    def wait_for(self, response):
        """Return a Future resolving to the response; starts the read loop
        if it is not already running."""
        future = Future()
        response.callback = lambda resp: future.set_result(resp.response)
        if self.running:
            return future
        else:
            return self.run(future)

    @coroutine
    def run(self, future=None):
        self.running = True
        try:
            yield self.readloop(future)
        finally:
            self.running = False
        # Propagate the awaited result through the coroutine return channel.
        if future and future.done():
            raise Return(future.result())
class UnixSocketTest(AsyncTestCase):
    """HTTPServers can listen on Unix sockets too.

    Why would you want to do this?  Nginx can proxy to backends listening
    on unix sockets, for one thing (and managing a namespace for unix
    sockets can be easier than managing a bunch of TCP port numbers).

    Unfortunately, there's no way to specify a unix socket in a url for
    an HTTP client, so we have to test this by hand.
    """

    def setUp(self):
        super(UnixSocketTest, self).setUp()
        # Socket file lives in a throwaway temp dir removed in tearDown.
        self.tmpdir = tempfile.mkdtemp()
        self.sockfile = os.path.join(self.tmpdir, "test.sock")
        sock = netutil.bind_unix_socket(self.sockfile)
        app = Application([("/hello", HelloWorldRequestHandler)])
        self.server = HTTPServer(app)
        self.server.add_socket(sock)
        self.stream = IOStream(socket.socket(socket.AF_UNIX))
        self.stream.connect(self.sockfile, self.stop)
        self.wait()

    def tearDown(self):
        self.stream.close()
        self.io_loop.run_sync(self.server.close_all_connections)
        self.server.stop()
        shutil.rmtree(self.tmpdir)
        super(UnixSocketTest, self).tearDown()

    def test_unix_socket(self):
        self.stream.write(b"GET /hello HTTP/1.0\r\n\r\n")
        self.stream.read_until(b"\r\n", self.stop)
        response = self.wait()
        self.assertEqual(response, b"HTTP/1.1 200 OK\r\n")
        self.stream.read_until(b"\r\n\r\n", self.stop)
        headers = HTTPHeaders.parse(self.wait().decode('latin1'))
        self.stream.read_bytes(int(headers["Content-Length"]), self.stop)
        body = self.wait()
        self.assertEqual(body, b"Hello world")

    def test_unix_socket_bad_request(self):
        # Unix sockets don't have remote addresses so they just return an
        # empty string.
        with ExpectLog(gen_log, "Malformed HTTP message from"):
            self.stream.write(b"garbage\r\n\r\n")
            self.stream.read_until_close(self.stop)
            response = self.wait()
        self.assertEqual(response, b"HTTP/1.1 400 Bad Request\r\n\r\n")
class RemoteUpstream(Upstream):
    """Upstream connection to a remote destination.

    Most methods are the same as in LocalUpstream, but the
    implementations may need to diverge in the future.
    """

    def initialize(self):
        # Create the socket/stream pair and get notified on remote close.
        self.socket = socket.socket(self._address_type, socket.SOCK_STREAM)
        self.stream = IOStream(self.socket)
        self.stream.set_close_callback(self.on_close)

    def do_connect(self):
        self.stream.connect(self.dest, self.on_connect)

    @property
    def address(self):
        # Local (bound) address of the upstream socket.
        return self.socket.getsockname()

    @property
    def address_type(self):
        return self._address_type

    def on_connect(self):
        # Report the established connection, then stream all incoming
        # data to on_streaming_data; the final chunk is flagged finished.
        self.connection_callback(self)
        on_finish = functools.partial(self.on_streaming_data, finished=True)
        self.stream.read_until_close(on_finish, self.on_streaming_data)

    def on_close(self):
        # Distinguish an error close from a clean close for the owner.
        if self.stream.error:
            self.error_callback(self, self.stream.error)
        else:
            self.close_callback(self)

    def on_streaming_data(self, data, finished=False):
        # Skip empty chunks (the close callback may deliver b"" at EOF).
        if data:
            self.streaming_callback(self, data, finished)

    def do_write(self, data):
        try:
            self.stream.write(data)
        except IOError:
            # Writing to a dead stream: tear the upstream down.
            # NOTE(review): close() is presumably provided by Upstream
            # and routes to do_close() — confirm against the base class.
            self.close()

    def do_close(self):
        if self.socket:
            # Lazy %-args so the repr is only built when DEBUG is on.
            logger.debug("close upstream: %s", self.socket)
            self.stream.close()
class UnixSocketTest(AsyncTestCase):
    """HTTPServers can listen on Unix sockets too.

    Why would you want to do this? Nginx can proxy to backends listening
    on unix sockets, for one thing (and managing a namespace for unix
    sockets can be easier than managing a bunch of TCP port numbers).

    Unfortunately, there's no way to specify a unix socket in a url for
    an HTTP client, so we have to test this by hand.
    """
    def setUp(self):
        super(UnixSocketTest, self).setUp()
        # Unix sockets live in the filesystem; use a private temp dir.
        self.tmpdir = tempfile.mkdtemp()
        self.sockfile = os.path.join(self.tmpdir, "test.sock")
        sock = netutil.bind_unix_socket(self.sockfile)
        app = Application([("/hello", HelloWorldRequestHandler)])
        self.server = HTTPServer(app, io_loop=self.io_loop)
        self.server.add_socket(sock)
        self.stream = IOStream(socket.socket(socket.AF_UNIX),
                               io_loop=self.io_loop)
        self.stream.connect(self.sockfile, self.stop)
        self.wait()

    def tearDown(self):
        self.stream.close()
        self.server.stop()
        shutil.rmtree(self.tmpdir)
        super(UnixSocketTest, self).tearDown()

    def test_unix_socket(self):
        # Drive the HTTP exchange by hand: status line, headers, body.
        self.stream.write(b"GET /hello HTTP/1.0\r\n\r\n")
        self.stream.read_until(b"\r\n", self.stop)
        response = self.wait()
        self.assertEqual(response, b"HTTP/1.0 200 OK\r\n")
        self.stream.read_until(b"\r\n\r\n", self.stop)
        headers = HTTPHeaders.parse(self.wait().decode('latin1'))
        self.stream.read_bytes(int(headers["Content-Length"]), self.stop)
        body = self.wait()
        self.assertEqual(body, b"Hello world")

    def test_unix_socket_bad_request(self):
        # Unix sockets don't have remote addresses so they just return an
        # empty string.
        with ExpectLog(gen_log, "Malformed HTTP message from"):
            self.stream.write(b"garbage\r\n\r\n")
            self.stream.read_until_close(self.stop)
            response = self.wait()
        # In this server version the connection is dropped without
        # writing any error response, hence the empty read.
        self.assertEqual(response, b"")
async def handle_stream(self, stream: IOStream, address: tuple[str, int]) -> None:
    """Per-connection entry point: negotiate protocol version, then run the connection."""
    try:
        # First 4 bytes from the client: requested protocol version
        # (big/little endianness determined by as_uint — see its docs).
        protocol_version = as_uint(await stream.read_bytes(4))
        if not self.protocols[0] <= protocol_version <= self.protocols[1]:
            # Unsupported version: reply with our highest supported
            # version so the client can downgrade, then drop the link.
            await stream.write(to_uint(self.protocols[1], 4))
            stream.close(CatsError('Unsupported protocol version'))
            return

        # Accepted: 4 zero bytes acknowledge the handshake.
        await stream.write(bytes(4))
        async with self.create_connection(stream, address, protocol_version) as conn:
            conn: ServerConnection
            conn.debug(f'[INIT {address}]')
            await conn.init()
            await conn.start()
            conn.debug(f'[STOP {address}]')
    except self.app.config.stream_errors:
        # App-configured tuple of expected stream-level errors
        # (e.g. client disconnects); these are intentionally swallowed.
        pass
def test_handle_stream_native_coroutine(self):
    """handle_stream may be implemented as a native coroutine."""
    class TestServer(TCPServer):
        async def handle_stream(self, stream, address):
            stream.write(b"data")
            stream.close()

    listener, port = bind_unused_port()
    server = TestServer()
    server.add_socket(listener)
    client = IOStream(socket.socket())
    yield client.connect(("localhost", port))
    received = yield client.read_until_close()
    self.assertEqual(received, b"data")
    server.stop()
    client.close()
def test_handle_stream_native_coroutine(self):
    """handle_stream may be a native coroutine."""
    class TestServer(TCPServer):
        async def handle_stream(self, stream, address):
            stream.write(b"data")
            stream.close()

    sock, port = bind_unused_port()
    server = TestServer()
    server.add_socket(sock)
    client = IOStream(socket.socket())
    # Connect to the locally bound test port.  The previous hard-coded
    # external address ("10.0.0.7") could never reach this server, which
    # listens on an unused local port, so the test would hang or error
    # (sibling variants of this test use "localhost").
    yield client.connect(("localhost", port))
    result = yield client.read_until_close()
    self.assertEqual(result, b"data")
    server.stop()
    client.close()
def test_body_size_override_reset(self):
    # The max_body_size override is reset between requests.
    stream = IOStream(socket.socket())
    try:
        yield stream.connect(("127.0.0.1", self.get_http_port()))
        # Use a raw stream so we can make sure it's all on one connection.
        # First request: handler raises the limit via ?expected_size.
        stream.write(b"PUT /streaming?expected_size=10240 HTTP/1.1\r\n"
                     b"Content-Length: 10240\r\n\r\n")
        stream.write(b"a" * 10240)
        # read_stream_body is a test helper defined elsewhere in this file.
        headers, response = yield gen.Task(read_stream_body, stream)
        self.assertEqual(response, b"10240")
        # Without the ?expected_size parameter, we get the old default value:
        # the same 10240-byte Content-Length must now be rejected.
        stream.write(b"PUT /streaming HTTP/1.1\r\n"
                     b"Content-Length: 10240\r\n\r\n")
        with ExpectLog(gen_log, ".*Content-Length too long"):
            data = yield stream.read_until_close()
        # Connection is dropped without a response body.
        self.assertEqual(data, b"")
    finally:
        stream.close()
def handle_stream(self, ssl_stream, address):
    """Accept a connection; if the TLS handshake fails because the client
    sent a plaintext HTTP request, answer it over plain HTTP instead."""
    try:
        yield ssl_stream.wait_for_handshake()
    except SSLErrorHTTPRequest:
        # Client spoke cleartext HTTP to the TLS port.
        # NOTE(review): reaches into the private ._sock of the wrapped
        # socket — confirm this attribute survives ssl/tornado upgrades.
        stream = IOStream(ssl_stream.socket._sock)
        conn = HTTP1Connection(stream, is_client=False)
        try:
            yield self.handle_http_connection(conn)
        except Exception:
            logger.exception("Failed to process HTTP request:")
        finally:
            stream.close()
    except StreamClosedError:
        logger.debug("Stream closed by client during handshake. Skipping.")
        return
    else:
        # Handshake succeeded: proceed with normal HTTPS handling.
        super(AutoHTTPSServer, self).handle_stream(ssl_stream, address)
class ConnectionCloseTest(WebTestCase):
    """Checks that the server-side handler notices the client dropping
    its connection mid-request."""

    def get_handlers(self):
        return [('/', ConnectionCloseHandler, dict(test=self))]

    def test_connection_close(self):
        # Use a raw socket so the client side can be severed on demand.
        raw_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        raw_sock.connect(("localhost", self.get_http_port()))
        self.stream = IOStream(raw_sock, io_loop=self.io_loop)
        self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
        self.wait()

    def on_handler_waiting(self):
        # Handler reached its wait point; drop the client side now.
        logging.debug('handler waiting')
        self.stream.close()

    def on_connection_close(self):
        # Server observed the close; unblock self.wait().
        logging.debug('connection closed')
        self.stop()
class ConnectionCloseTest(AsyncHTTPTestCase, LogTrapTestCase):
    # Verifies the server notices the client dropping its connection;
    # the handler hands control back via on_handler_waiting /
    # on_connection_close (presumably invoked by ConnectionCloseHandler,
    # which receives test=self — confirm against the handler).

    def get_app(self):
        return Application([('/', ConnectionCloseHandler, dict(test=self))])

    def test_connection_close(self):
        # Raw socket so the client side can be severed on demand.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        s.connect(("localhost", self.get_http_port()))
        self.stream = IOStream(s, io_loop=self.io_loop)
        # b() is the py2/py3 bytes-literal compatibility helper.
        self.stream.write(b("GET / HTTP/1.0\r\n\r\n"))
        self.wait()

    def on_handler_waiting(self):
        logging.info('handler waiting')
        self.stream.close()

    def on_connection_close(self):
        # Unblocks self.wait() in test_connection_close.
        logging.info('connection closed')
        self.stop()
class ConnectionCloseTest(AsyncHTTPTestCase, LogTrapTestCase):
    """Verifies the server notices the client dropping its connection."""

    def get_app(self):
        return Application([('/', ConnectionCloseHandler, dict(test=self))])

    def test_connection_close(self):
        # Raw socket so the client side can be severed on demand.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        s.connect(("localhost", self.get_http_port()))
        self.stream = IOStream(s, io_loop=self.io_loop)
        # IOStream.write requires bytes; the previous str literal raises
        # on Python 3.  Sibling variants of this test use b"..." as well.
        self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
        self.wait()

    def on_handler_waiting(self):
        logging.info('handler waiting')
        self.stream.close()

    def on_connection_close(self):
        # Unblocks self.wait() in test_connection_close.
        logging.info('connection closed')
        self.stop()
def accept_callback(conn, address):
    # fake an HTTP server using chunked encoding where the final chunks
    # and connection close all happen at once
    stream = IOStream(conn)
    request_data = yield stream.read_until(b"\r\n\r\n")
    if b"HTTP/1." not in request_data:
        self.skipTest("requires HTTP/1.x")
    # Two one-byte chunks ("1" then "2"), the zero-length terminator,
    # and the close are all delivered in a single write.
    yield stream.write(b"""\
HTTP/1.1 200 OK
Transfer-Encoding: chunked

1
1
1
2
0

""".replace(b"\n", b"\r\n"))
    stream.close()
def test_handle_stream_native_coroutine(self):
    """handle_stream may be a native coroutine (defined via exec_test)."""
    ns = exec_test(globals(), locals(), """
    class TestServer(TCPServer):
        async def handle_stream(self, stream, address):
            stream.write(b'data')
            stream.close()
    """)
    listener, port = bind_unused_port()
    server = ns['TestServer']()
    server.add_socket(listener)
    client = IOStream(socket.socket())
    yield client.connect(('localhost', port))
    payload = yield client.read_until_close()
    self.assertEqual(payload, b'data')
    server.stop()
    client.close()
class DecoratorCapClient(BaseCapClient):
    # Callback-style client whose capitalize() is turned into a
    # Future-returning method by the @future_wrap decorator.

    @future_wrap
    def capitalize(self, request_data, callback):
        """Send request_data to the cap server; callback receives the reply."""
        logging.info("capitalize")
        self.request_data = request_data
        self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
        self.stream.connect(('127.0.0.1', self.port),
                            callback=self.handle_connect)
        self.callback = callback

    def handle_connect(self):
        # Wire protocol: newline-terminated request, newline-terminated
        # reply.  b() is the py2/py3 bytes-literal compatibility helper.
        logging.info("handle_connect")
        self.stream.write(utf8(self.request_data + "\n"))
        self.stream.read_until(b('\n'), callback=self.handle_read)

    def handle_read(self, data):
        logging.info("handle_read")
        self.stream.close()
        self.callback(self.process_response(data))
class DecoratorCapClient(BaseCapClient):
    # Callback-style client whose capitalize() is turned into a
    # Future-returning method by @return_future.  return_future is
    # deprecated, so the decoration itself happens inside an
    # ignore_deprecation() block executed at class-body time.
    with ignore_deprecation():
        @return_future
        def capitalize(self, request_data, callback):
            """Send request_data to the cap server; callback receives the reply."""
            logging.debug("capitalize")
            self.request_data = request_data
            self.stream = IOStream(socket.socket())
            self.stream.connect(('127.0.0.1', self.port),
                                callback=self.handle_connect)
            self.callback = callback

    def handle_connect(self):
        # Wire protocol: newline-terminated request, newline-terminated reply.
        logging.debug("handle_connect")
        self.stream.write(utf8(self.request_data + "\n"))
        self.stream.read_until(b'\n', callback=self.handle_read)

    def handle_read(self, data):
        logging.debug("handle_read")
        self.stream.close()
        self.callback(self.process_response(data))
def test_unix_socket(self):
    """Serve over a Unix socket and drive the HTTP exchange by hand."""
    socket_path = os.path.join(self.tmpdir, "test.sock")
    listener = netutil.bind_unix_socket(socket_path)
    application = Application([("/hello", HelloWorldRequestHandler)])
    server = HTTPServer(application, io_loop=self.io_loop)
    server.add_socket(listener)
    client = IOStream(socket.socket(socket.AF_UNIX), io_loop=self.io_loop)
    client.connect(socket_path, self.stop)
    self.wait()
    # Request, then read status line, headers, and body in turn.
    client.write(b("GET /hello HTTP/1.0\r\n\r\n"))
    client.read_until(b("\r\n"), self.stop)
    status_line = self.wait()
    self.assertEqual(status_line, b("HTTP/1.0 200 OK\r\n"))
    client.read_until(b("\r\n\r\n"), self.stop)
    parsed_headers = HTTPHeaders.parse(self.wait().decode('latin1'))
    client.read_bytes(int(parsed_headers["Content-Length"]), self.stop)
    payload = self.wait()
    self.assertEqual(payload, b("Hello world"))
    client.close()
    server.stop()