def test_fcm_emits_difference_when_blocked(self):
    b = FlowControlManager(1500)

    # Move the window size down from the base.
    b.window_size = 1000

    assert b._blocked() == 500
    assert b.window_size == 1500
def test_fcm_emits_when_window_drops_below_1k(self):
    b = FlowControlManager(1500)

    # Receive a frame slightly smaller than 500 bytes.
    assert b._handle_frame(499) == 0
    assert b.window_size == 1001

    # Now push the window below the 1kB threshold.
    assert b._handle_frame(2) == 501
    assert b.window_size == 1500
def test_fcm_emits_when_window_drops_below_one_quarter(self):
    b = FlowControlManager(65535)

    # Receive a frame slightly smaller than 3/4 of the window size.
    assert b._handle_frame(49000) == 0
    assert b.window_size == 65535 - 49000

    # Now push the window below one quarter of its initial size.
    assert b._handle_frame(1000) == 50000
    assert b.window_size == 65535
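# The three tests above pin down the window-update heuristic: no WINDOW_UPDATE
# is emitted until the receive window would drop below 1kB or below one
# quarter of its initial size, and a BLOCKED frame re-opens the window fully.
# The class below is an illustrative sketch that satisfies exactly those
# assertions; it is NOT the real FlowControlManager (whose two-argument
# constructor is exercised elsewhere in this suite and may behave differently).
class SketchFlowControlManager(object):

    def __init__(self, initial_window_size):
        self.initial_window_size = initial_window_size
        self.window_size = initial_window_size

    def _handle_frame(self, frame_size):
        # Shrink the window by the received frame, then emit an increment
        # only once the window falls below 1kB or a quarter of its base size.
        self.window_size -= frame_size
        if (self.window_size < 1000 or
                self.window_size < self.initial_window_size / 4):
            increment = self.initial_window_size - self.window_size
            self.window_size = self.initial_window_size
            return increment
        return 0

    def _blocked(self):
        # The peer reports it is blocked: re-open the window completely.
        increment = self.initial_window_size - self.window_size
        self.window_size = self.initial_window_size
        return increment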
def test_can_read_multiple_frames_from_streams(self):
    out_frames = []
    in_frames = []

    def send_cb(frame):
        out_frames.append(frame)

    def recv_cb(s):
        def inner():
            s.receive_frame(in_frames.pop(0))
        return inner

    s = Stream(1, send_cb, None, None, None, None, FlowControlManager(65535))
    s._recv_cb = recv_cb(s)
    s.state = STATE_HALF_CLOSED_LOCAL

    # Provide two data frames to read.
    f = DataFrame(1)
    f.data = b'hi there!'
    in_frames.append(f)

    f = DataFrame(1)
    f.data = b'hi there again!'
    f.flags.add('END_STREAM')
    in_frames.append(f)

    data = s._read()
    assert data == b'hi there!hi there again!'
    assert len(out_frames) == 1
    assert isinstance(out_frames[0], WindowUpdateFrame)
    assert out_frames[0].window_increment == len(b'hi there!')
def test_stream_reading_works(self):
    out_frames = []
    in_frames = []

    def send_cb(frame, tolerate_peer_gone=False):
        out_frames.append(frame)

    def recv_cb(s):
        def inner():
            s.receive_frame(in_frames.pop(0))
        return inner

    s = Stream(1, send_cb, None, None, None, None, FlowControlManager(65535))
    s._recv_cb = recv_cb(s)
    s.state = STATE_HALF_CLOSED_LOCAL

    # Provide a data frame to read.
    f = DataFrame(1)
    f.data = b'hi there!'
    f.flags.add('END_STREAM')
    in_frames.append(f)

    data = s._read()
    assert data == b'hi there!'
    assert len(out_frames) == 0
def test_flow_control_manager_update_includes_padding(self):
    out_frames = []
    in_frames = []

    def send_cb(frame):
        out_frames.append(frame)

    def recv_cb(s):
        def inner():
            s.receive_frame(in_frames.pop(0))
        return inner

    start_window = 65535
    s = Stream(
        1, send_cb, None, None, None, None, FlowControlManager(start_window)
    )
    s._recv_cb = recv_cb(s)
    s.state = STATE_HALF_CLOSED_LOCAL

    # Provide a padded data frame to read.
    f = DataFrame(1)
    f.data = b'hi there!'
    f.pad_length = 10
    f.flags.add('END_STREAM')
    in_frames.append(f)

    data = s._read()
    assert data == b'hi there!'
    assert s._in_window_manager.window_size == (
        start_window - f.pad_length - len(data) - 1
    )
def test_can_receive_continuation_frame_after_end_stream(self):
    s = Stream(1, None, None, None, None, None, FlowControlManager(65535))

    f = HeadersFrame(1)
    f.data = b'hi there'
    # Note: set('END_STREAM') would build a set of characters, not the flag.
    f.flags = {'END_STREAM'}

    f2 = ContinuationFrame(1)
    f2.data = b' sir'
    f2.flags = {'END_HEADERS'}

    s.receive_frame(f)
    s.receive_frame(f2)
def test_reading_trailers_early_reads_all_data(self):
    in_frames = []
    headers = [('a', 'b'), ('c', 'd'), (':status', '200')]
    trailers = [('e', 'f'), ('g', 'h')]

    def recv_cb(s):
        def inner():
            s.receive_frame(in_frames.pop(0))
        return inner

    s = Stream(
        1, None, None, None, None, FixedDecoder(headers),
        FlowControlManager(65535)
    )
    s._recv_cb = recv_cb(s)
    s.state = STATE_HALF_CLOSED_LOCAL

    # Provide the first HEADERS frame.
    f = HeadersFrame(1)
    f.data = b'hi there!'
    f.flags.add('END_HEADERS')
    in_frames.append(f)

    # Provide some data.
    f = DataFrame(1)
    f.data = b'testdata'
    in_frames.append(f)

    # Provide the trailers.
    f = HeadersFrame(1)
    f.data = b'hi there again!'
    f.flags.add('END_STREAM')
    f.flags.add('END_HEADERS')
    in_frames.append(f)

    # Begin by reading the first headers.
    assert s.getheaders() == HTTPHeaderMap(headers)

    # Now, replace the dummy decoder to ensure we get a new header block.
    s._decoder = FixedDecoder(trailers)

    # Ask for the trailers. This should also read the data frames.
    assert s.gettrailers() == HTTPHeaderMap(trailers)
    assert s.data == [b'testdata']
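# FixedDecoder is used by the trailer-reading test above but is not defined
# in this excerpt. A minimal stub consistent with that usage might look like
# the following (an assumption: the Stream only calls decode() on the header
# block and expects back a fixed list of header tuples).
class FixedDecoder(object):

    def __init__(self, result):
        self.result = result

    def decode(self, *args, **kwargs):
        # Ignore the encoded header block and return the canned headers.
        return self.result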
def test_blocked_frames_cause_window_updates(self):
    out_frames = []

    def send_cb(frame, *args):
        out_frames.append(frame)

    start_window = 65535
    s = Stream(
        1, send_cb, None, None, None, None, FlowControlManager(start_window)
    )
    s._data_cb = send_cb
    s.state = STATE_HALF_CLOSED_LOCAL

    # Change the window size.
    s._in_window_manager.window_size = 60000

    # Provide a BLOCKED frame.
    f = BlockedFrame(1)
    s.receive_frame(f)

    assert len(out_frames) == 1
    assert out_frames[0].type == WindowUpdateFrame.type
    assert out_frames[0].window_increment == 5535
def test_partial_reads_from_streams(self):
    out_frames = []
    in_frames = []

    def send_cb(frame):
        out_frames.append(frame)

    def recv_cb(s):
        def inner():
            s.receive_frame(in_frames.pop(0))
        return inner

    s = Stream(1, send_cb, None, None, None, None, FlowControlManager(65535))
    s._recv_cb = recv_cb(s)
    s.state = STATE_HALF_CLOSED_LOCAL

    # Provide two data frames to read.
    f = DataFrame(1)
    f.data = b'hi there!'
    in_frames.append(f)

    f = DataFrame(1)
    f.data = b'hi there again!'
    f.flags.add('END_STREAM')
    in_frames.append(f)

    # We'll get the entire first frame.
    data = s._read(4)
    assert data == b'hi there!'
    assert len(out_frames) == 1

    # Now we'll get the entirety of the second frame.
    data = s._read(4)
    assert data == b'hi there again!'
    assert len(out_frames) == 1
    assert s.state == STATE_CLOSED
def test_can_read_single_frames_from_streams(self):
    out_frames = []
    in_frames = []

    def send_cb(frame, tolerate_peer_gone=False):
        out_frames.append(frame)

    def recv_cb(s):
        def inner():
            s.receive_frame(in_frames.pop(0))
        return inner

    s = Stream(1, send_cb, None, None, None, None, FlowControlManager(800))
    s._recv_cb = recv_cb(s)
    s.state = STATE_HALF_CLOSED_LOCAL

    # Provide two data frames to read.
    f = DataFrame(1)
    f.data = b'hi there!'
    in_frames.append(f)

    f = DataFrame(1)
    f.data = b'hi there again!'
    f.flags.add('END_STREAM')
    in_frames.append(f)

    data = s._read_one_frame()
    assert data == b'hi there!'

    data = s._read_one_frame()
    assert data == b'hi there again!'

    data = s._read_one_frame()
    assert data is None

    data = s._read()
    assert data == b''
def test_fcm_returns_whats_given(self):
    b = FlowControlManager(100, 100)

    assert b._handle_frame(10) == 10
    assert b._handle_frame(30) == 30
    assert b.window_size == 60
class H2Stream(object):
    def __init__(self, request, stream_id, io_loop, h2conn,
                 callback_response, send_outstanding_data_cb, close_cb):
        self.stream_id = stream_id
        self.io_loop = io_loop
        self.h2conn = h2conn
        self.callback_response = callback_response
        self.send_outstanding_data_cb = send_outstanding_data_cb
        self.close_cb = close_cb
        self.data = []
        self.remote_closed = False
        self.local_closed = False
        self.request = request
        self.flow_control_manager = FlowControlManager(DEFAULT_WINDOW_SIZE)
        self.response_headers = None
        self.response_trailers = None
        self._finished = False
        self._timeout_handle = None

    def start(self):
        """Send the http request to the remote"""
        timeout = self.request.request_timeout
        if not timeout:
            timeout = 30
        self._timeout_handle = self.io_loop.add_timeout(
            self.io_loop.time() + timeout, self._on_timeout
        )

        parsed = urlsplit(to_unicode(self.request.url))

        if 'Host' not in self.request.headers:
            if not parsed.netloc:
                self.request.headers['Host'] = self.connection.host
            elif '@' in parsed.netloc:
                self.request.headers['Host'] = parsed.netloc.rpartition('@')[-1]
            else:
                self.request.headers['Host'] = parsed.netloc

        if self.request.user_agent:
            self.request.headers['User-Agent'] = self.request.user_agent

        if self.request.body is not None:
            self.request.headers['Content-Length'] = str(len(self.request.body))

        if (
            self.request.method == 'POST'
            and 'Content-Type' not in self.request.headers
        ):
            self.request.headers['Content-Type'] = (
                'application/x-www-form-urlencoded'
            )

        self.request.url = (
            (parsed.path or '/')
            + (('?' + parsed.query) if parsed.query else '')
        )
        self.scheme = parsed.scheme

        http2_headers = [
            (':authority', self.request.headers.pop('Host')),
            (':path', self.request.url),
            (':scheme', self.scheme),
            (':method', self.request.method),
        ] + self.request.headers.items()

        self.h2conn.send_headers(
            self.stream_id, http2_headers, end_stream=not self.request.body
        )
        self.send_outstanding_data_cb()

        # send body, if any
        if self.request.body:
            self.send_body()

    def send_body(self):
        """Send the body of the http request"""
        self.total = len(self.request.body)
        self.sent = 0
        if not self._finished:
            self._send_data()

    def _send_data(self, sent_bytes=0, *args):
        """Send the data in chunks according to flow control

        :param sent_bytes: how many bytes were sent on the last call
        :param *args: this function can be called with a future from
            add_done_callback.
""" self.sent += sent_bytes if self._finished: return if self.sent < self.total: to_send = min( self.h2conn.local_flow_control_window(self.stream_id), self.h2conn.max_outbound_frame_size) end_stream = False if self.sent + to_send >= self.total: end_stream = True data_chunk = self.request.body[self.sent:self.sent + to_send] self.h2conn.send_data( self.stream_id, data_chunk, end_stream=end_stream ) future = self.send_outstanding_data_cb() # Have the callback yield to the IOLoop, so that we can process # events in the case the stream dies future.add_done_callback(partial( self.io_loop.add_callback, partial(self._send_data, to_send))) else: self.local_closed = True def handle_event(self, event): """Handle any http2 events""" if isinstance(event, h2.events.DataReceived): size = event.flow_controlled_length increment = self.flow_control_manager._handle_frame(size) self.data.append(event.data) if increment: try: self.h2conn.increment_flow_control_window( increment, stream_id=self.stream_id ) except h2.exceptions.StreamClosedError: pass else: self.send_outstanding_data_cb() elif isinstance(event, h2.events.ResponseReceived): self.response_headers = event.headers elif isinstance(event, h2.events.TrailersReceived): self.response_trailers = event.headers elif isinstance(event, h2.events.StreamEnded): self.remote_closed = True self.finish() elif isinstance(event, h2.events.StreamReset): self.remote_closed = True self.finish(exc=StreamResetException("Stream reset")) else: logger.info( "Got unhandled event on stream %s: %s", self.stream_id, event) def _on_timeout(self): """Handle a request timeout, sends RST to the remote for this stream""" self.io_loop.remove_timeout(self._timeout_handle) self._timeout_handle = None # Let the other end know we're cancelling this stream try: self.h2conn.reset_stream(stream_id=self.stream_id, error_code=8) self.send_outstanding_data_cb() except Exception as e: pass self.finish(exc=None, timed_out=True) def finish(self, exc=None, timed_out=False): """Finish the request. :param exc: exception encountered while processing this request, if any :param timed_out: whether this request timed out or not """ if self._finished: return self._finished = True if self._timeout_handle: self.io_loop.remove_timeout(self._timeout_handle) self._timeout_handle = None headers = {} data = io.BytesIO() # Stream reset if exc: code = 500 reason = exc.message if hasattr(exc, "message") else str(exc) elif not timed_out: data = io.BytesIO(b''.join(self.data)) headers = {} for header, value in self.response_headers: headers[header] = value code = int(headers.pop(':status')) reason = httplib.responses.get(code, 'Unknown') else: code = 599 reason = "CLIENT_SIDE_TIMEOUT" response = HTTPResponse( self.request, code, reason=reason, headers=httputil.HTTPHeaders(headers), buffer=data, request_time=self.io_loop.time() - self.request.start_time, effective_url=self.request.url ) response.request.response = response self.close_cb(stream_id=self.stream_id) self.callback_response(response)
class H2Connection(object):
    def __init__(self, host, port, io_loop, ssl_options,
                 max_connect_backoff=MAX_CONNECT_BACKOFF,
                 initial_window_size=DEFAULT_WINDOW_SIZE,
                 connect_timeout=5,
                 connect_callback=None):
        self.host = host
        self.port = port
        self.io_loop = io_loop
        self.tcp_client = TCPClient()

        self.h2conn = None
        self.io_stream = None
        self.window_manager = None
        self.connect_timeout = connect_timeout
        self._connect_timeout_handle = None
        self._connect_future = None
        self._streams = {}

        self.ssl_context = None
        self.ssl_options = ssl_options
        self.parse_ssl_opts()

        self.initial_window_size = initial_window_size
        self.max_connect_backoff = max_connect_backoff
        self.consecutive_connect_fails = 0
        self.connect_callback = connect_callback
        self._closed = False

    @property
    def drained(self):
        """Whether this connection has no inflight streams left"""
        return len(self._streams) <= 0

    @property
    def ready(self):
        """Whether this connection is capable of serving more requests"""
        return self.is_connected and self.has_outbound_capacity

    @property
    def has_outbound_capacity(self):
        """Whether this connection has more outbound streams available.

        This is negotiated via http2 settings updates with the remote.
        """
        if not self.h2conn:
            return True

        has_outbound_capacity = (
            self.h2conn.open_outbound_streams + 1
            <= self.h2conn.remote_settings.max_concurrent_streams
        )
        return has_outbound_capacity

    @property
    def has_available_streams(self):
        """Whether there are more client stream ids available.

        See https://http2.github.io/http2-spec/#StreamIdentifiers
        for more information.
        """
        if not self.h2conn:
            return True

        has_available_stream_ids = (
            self.h2conn.highest_outbound_stream_id is None
            or self.h2conn.highest_outbound_stream_id + 2
            <= self.h2conn.HIGHEST_ALLOWED_STREAM_ID
        )
        return has_available_stream_ids

    @property
    def is_connected(self):
        """Whether this connection is connected to the remote, or not"""
        if self._connect_future.done():
            return self._connect_future.result() and self.h2conn
        return False

    def request(self, request):
        """Make a new stream on this connection and send the request"""
        future = Future()

        def callback(result):
            if isinstance(result, Exception):
                future.set_exception(result)
            else:
                future.set_result(result)

        stream_id = self.h2conn.get_next_available_stream_id()
        self._streams[stream_id] = H2Stream(
            request, stream_id, self.io_loop, self.h2conn, callback,
            self.flush, self._close_stream_callback)
        self._streams[stream_id].start()

        return future

    def _close_stream_callback(self, stream_id):
        """Called by H2Stream objects to clean up their state after they
        are finished.
        """
        del self._streams[stream_id]

    def parse_ssl_opts(self):
        """Parse the ssl_options passed into this connection.

        If self.ssl_options is an ssl.SSLContext, it is used directly.
        Otherwise it's assumed to be a dictionary and an ssl.SSLContext is
        created according to the options.
""" if isinstance(self.ssl_options, ssl.SSLContext): self.ssl_context = ssl_options return ssl_context = ssl.create_default_context( ssl.Purpose.SERVER_AUTH, cafile=self.ssl_options.get('ca_certs')) ssl_context.options |= ssl.OP_NO_TLSv1 ssl_context.options |= ssl.OP_NO_TLSv1_1 if not self.ssl_options.get('validate_cert', True): ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE if self.ssl_options.get('client_key') or self.ssl_options.get( 'client_cert'): ssl_context.load_cert_chain( self.ssl_options.get('client_cert'), keyfile=self.ssl_options.get('client_key')) ssl_context.set_ciphers('ECDHE+AESGCM') ssl_context.set_alpn_protocols(ALPN_PROTOCOLS) self.ssl_context = ssl_context def connect(self): """Connect to the remote""" if self._connect_timeout_handle: return self._connect_future = Future() # Shared context to cleanly cancel inflight operations cancelled = CancelContext() start_time = self.io_loop.time() self._connect_timeout_handle = self.io_loop.add_timeout( start_time + self.connect_timeout, partial(self.on_connect_timeout, cancelled)) def _on_tcp_client_connected(f): exc = f.exc_info() if exc is not None: self._connect_future.set_result(False) if not cancelled(): self.consecutive_connect_fails += 1 self.on_error('during connection', cancelled, *exc) else: if not cancelled(): self.on_connect(cancelled, f.result()) ft = self.tcp_client.connect( self.host, self.port, af=socket.AF_UNSPEC, ssl_options=self.ssl_context, ) ft.add_done_callback(_on_tcp_client_connected) return self._connect_future def _backoff_reconnect(self): """Try to reconnect to the remote, backoff as appropriate""" self.io_loop.add_timeout( IOLoop.current().time() + min(self.max_connect_backoff, self.consecutive_connect_fails**1.5), self.connect) def on_connect_timeout(self, cancelled): """Handle a connection timeout. :param cancelled: A CancelContext to cancel inflight operations associated with this connection or connection attempt. """ self.consecutive_connect_fails += 1 cancelled.cancel() exc = ConnectionError('Connection could not be established in time!') self.close(exc) def on_error(self, phase, cancelled, typ, val, tb): """An unrecoverable connection error happened, close the connection. :param phase: phase we encountered the error in :param cancelled: A CancelContext to cancel inflight operations associated with this connection or connection attempt. :param typ: type of error :param val: error :param tb: traceback information """ cancelled.cancel() self.close(val) def on_close(self, cancelled): """Handle the underlying iostream getting closed. :param cancelled: A CancelContext to cancel inflight operations associated with this connection or connection attempt. """ # Already closed, so no cleanup needed if cancelled(): return err = None if self.io_stream: err = self.io_stream.error if not err: err = ConnectionError("Error closed by remote end") cancelled.cancel() self.close(err) @coroutine def graceful_close(self, max_backoff=MAX_CLOSE_BACKOFF): """Gracefully close this connection by waiting for inflight operations to finish. :param max_backoff: maximum backoff time for polling this connection for inflight operations and deciding to call close. """ i = 0 while True: if self._closed: return if self.drained: self.close("Graceful close called", reconnect=False) return yield sleep(min(max_backoff, i**1.5)) i += 1 def close(self, reason, reconnect=True): """Closes the connection, sending a GOAWAY frame. 
        :param reason: why this connection was closed
        :param reconnect: whether to try to reconnect
        """
        logger.debug('Closing HTTP2Connection with reason %s', reason)

        if self._connect_timeout_handle:
            self.io_loop.remove_timeout(self._connect_timeout_handle)
            self._connect_timeout_handle = None

        if self.h2conn:
            try:
                self.h2conn.close_connection()
                self.flush()
            except Exception:
                logging.error(
                    'Could not send GOAWAY frame, connection terminated!',
                    exc_info=True)
            finally:
                self.h2conn = None

        if self.io_stream:
            try:
                self.io_stream.close()
            except Exception:
                logging.error('Could not close IOStream!', exc_info=True)
            finally:
                self.io_stream = None

        self.window_manager = None
        self.end_all_streams(reason)
        if reconnect:
            self._backoff_reconnect()
        else:
            self._closed = True

    def end_all_streams(self, exc=None):
        """End all inflight streams with the given exception.

        :param exc: exception for why we're ending all streams.
        """
        for _, stream in self._streams.iteritems():
            self.io_loop.add_callback(partial(stream.finish, exc))

    def on_connect(self, cancelled, io_stream):
        """Initialize this connection's state after we're connected.

        :param cancelled: A CancelContext to cancel inflight operations
            associated with this connection or connection attempt.
        :param io_stream: IOStream object that was successfully connected.
        """
        try:
            if cancelled():
                io_stream.close()
                raise ConnectionError("Connection timed out!")

            self.consecutive_connect_fails = 0
            if io_stream.socket.selected_alpn_protocol() not in ALPN_PROTOCOLS:
                logging.error(
                    'Negotiated protocols mismatch, got %s, expected one of %s',
                    io_stream.socket.selected_alpn_protocol(), ALPN_PROTOCOLS)
                raise ConnectionError(
                    'Negotiated protocols mismatch, got %s but not in '
                    'supported protos %s',
                    io_stream.socket.selected_alpn_protocol(), ALPN_PROTOCOLS)

            # remove the connection timeout
            self.io_loop.remove_timeout(self._connect_timeout_handle)
            self._connect_timeout_handle = None

            self.io_stream = io_stream
            self.io_stream.set_nodelay(True)

            # set the close callback
            self.io_stream.set_close_callback(
                partial(self.on_close, cancelled))

            # initialize the connection
            self.h2conn = h2.connection.H2Connection(
                h2.config.H2Configuration(client_side=True))

            # initiate the h2 connection
            self.h2conn.initiate_connection()

            # disable server push
            self.h2conn.update_settings({
                h2.settings.SettingCodes.ENABLE_PUSH: 0,
                h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:
                    self.initial_window_size
            })

            self.window_manager = FlowControlManager(self.initial_window_size)

            # set the stream reading callback. We don't care what's
            # passed into this function, so prepare to get called
            # with anything from the iostream callback (should be
            # an empty data frame)
            def read_until_cancelled(*args, **kwargs):
                if cancelled():
                    return

                with stack_context.ExceptionStackContext(
                        partial(self.on_error, 'during read', cancelled)):
                    self.io_stream.read_bytes(
                        num_bytes=65535,
                        streaming_callback=partial(
                            self.receive_data_until_cancelled, cancelled),
                        callback=read_until_cancelled)

            read_until_cancelled()
            self.flush()
        except Exception as e:
            self._connect_future.set_exception(e)
        else:
            self._connect_future.set_result(True)
            self.io_loop.add_callback(self.connect_callback)

    def _adjust_window(self, frame_len):
        """Adjust the connection-level flow control window"""
        increment = self.window_manager._handle_frame(frame_len)
        if increment:
            self.h2conn.increment_flow_control_window(increment)
        self.flush()

    def receive_data_until_cancelled(self, cancelled, data):
        """Handle the data received over the wire.
        :param cancelled: A CancelContext to cancel inflight operations
            associated with this connection or connection attempt.
        :param data: data received over the wire from the remote
        """
        # If we got cancelled, that means the connection died and
        # we're making a new one. Don't process any events from the
        # (now) old connection
        if cancelled():
            return

        try:
            events = self.h2conn.receive_data(data)
            for event in events:
                try:
                    if isinstance(event, h2.events.DataReceived):
                        self._adjust_window(event.flow_controlled_length)
                        self._streams[event.stream_id].handle_event(event)
                    elif isinstance(event, h2.events.PushedStreamReceived):
                        # We don't handle server push, and we say so in the
                        # settings configuration on connect, so reset the
                        # pushed stream (7 == REFUSED_STREAM) and continue
                        # with our business
                        self.h2conn.reset_stream(event.stream_id, error_code=7)
                        self.flush()
                    elif isinstance(event, (
                        h2.events.ResponseReceived,
                        h2.events.TrailersReceived,
                        h2.events.StreamEnded,
                        h2.events.StreamReset,
                    )):
                        self._streams[event.stream_id].handle_event(event)
                    elif isinstance(event, h2.events.ConnectionTerminated):
                        cancelled.cancel()
                        error_string = "Connection closed by remote end"
                        if event.error_code != 0:
                            try:
                                name, number, description = get_data_errors(
                                    event.error_code)
                            except ValueError:
                                error_string = (
                                    "Encountered error code %d" %
                                    event.error_code)
                            else:
                                error_string = (
                                    "Encountered error %s %s: %s" %
                                    (name, number, description))
                        self.close(ConnectionError(error_string))
                    else:
                        logger.info("Received unhandled event %s", event)
                except Exception:
                    logger.exception("Error while handling event %s", event)
        except h2.exceptions.StreamClosedError:
            logger.info("Got stream closed on connection, reconnecting...")
            cancelled.cancel()
            self.close(ConnectionError("Stream closed by remote peer"))
        except h2.exceptions.ProtocolError as e:
            logger.exception(
                "Exception while receiving data from %s, closing connection",
                (self.host, self.port))
            self.close(ConnectionError(str(e)))
        except Exception:
            logger.exception(
                "Unhandled exception while receiving data from %s",
                (self.host, self.port))

    def flush(self):
        """Send any outstanding data to the remote"""
        future = Future()
        if self._closed:
            future.set_result(None)
            return future

        data_to_send = self.h2conn.data_to_send()
        if data_to_send:
            try:
                future = self.io_stream.write(data_to_send)
            except Exception as e:
                future.set_exception(e)
        else:
            # Since the data we're sending can come from multiple streams,
            # it doesn't make sense to return the number of bytes written
            # when any individual stream could call this
            future.set_result(None)

        return future
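# Illustrative usage only: a minimal sketch of wiring H2Connection and
# H2Stream together via connect() and request(). The ssl_options dictionary
# and the start_time attribute set on the request are assumptions made for
# this example, not part of a documented API.
from tornado.httpclient import HTTPRequest
from tornado.ioloop import IOLoop


def example_fetch(io_loop):
    conn = H2Connection(
        host='example.com',
        port=443,
        io_loop=io_loop,
        ssl_options={'validate_cert': True},
    )

    def on_response(response_future):
        response = response_future.result()
        print("%s %d" % (response.code, len(response.body)))
        # Wait for inflight streams to drain, then close without reconnecting.
        io_loop.add_callback(conn.graceful_close)

    def on_connected(connect_future):
        connect_future.result()  # surfaces connection errors, if any
        # request() picks a stream id, sends HEADERS (and DATA, if a body is
        # set), and resolves the returned future with an HTTPResponse.
        request = HTTPRequest('https://example.com/', method='GET')
        request.start_time = io_loop.time()  # used for request_time accounting
        conn.request(request).add_done_callback(on_response)

    conn.connect().add_done_callback(on_connected)


# example_fetch(IOLoop.current()); IOLoop.current().start()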