def send_data(self) -> None:
    """Called immediately after the headers are sent. Here we send all
    the data as part of the request.

    If the content length is 0 initially, we end the stream immediately and
    wait for response data.

    Warning: Only call this method when the stream has not been closed on the
    client side and the request has already been initiated by sending a
    HEADERS frame. Otherwise the h2 state machine will raise a ProtocolError.
    """
    if self.metadata['stream_closed_local']:
        raise StreamClosedError(self.stream_id)

    # Firstly, check what the flow control window is for the current stream.
    window_size = self._protocol.conn.local_flow_control_window(
        stream_id=self.stream_id)

    # Next, check what the maximum frame size is.
    max_frame_size = self._protocol.conn.max_outbound_frame_size

    # We will send no more than the window size or the remaining content
    # length in this call, whichever is smaller.
    bytes_to_send_size = min(window_size,
                             self.metadata['remaining_content_length'])

    # We now need to send a number of data frames.
    while bytes_to_send_size > 0:
        chunk_size = min(bytes_to_send_size, max_frame_size)

        data_chunk_start_id = self.metadata[
            'request_content_length'] - self.metadata[
            'remaining_content_length']
        data_chunk = self._request.body[
            data_chunk_start_id:data_chunk_start_id + chunk_size]

        self._protocol.conn.send_data(self.stream_id, data_chunk,
                                      end_stream=False)

        bytes_to_send_size -= chunk_size
        self.metadata['remaining_content_length'] -= chunk_size

    self.metadata['remaining_content_length'] = max(
        0, self.metadata['remaining_content_length'])

    # End the stream if no more data needs to be sent
    if self.metadata['remaining_content_length'] == 0:
        self._protocol.conn.end_stream(self.stream_id)

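# A minimal, self-contained sketch of the same chunking pattern using the h2
# library directly rather than this Stream class. `conn` is assumed to be an
# h2.connection.H2Connection whose stream was already opened with
# send_headers(); the helper name `send_body_chunked` is hypothetical. The
# caller is expected to flush conn.data_to_send() to the socket afterwards and
# to call this again after a WindowUpdated event until the whole body is sent.
from h2.connection import H2Connection


def send_body_chunked(conn: H2Connection, stream_id: int,
                      body: bytes, sent_so_far: int) -> int:
    """Send as much of `body` as the current flow-control window allows.

    Returns the updated count of bytes sent so far.
    """
    window = conn.local_flow_control_window(stream_id)
    max_frame = conn.max_outbound_frame_size
    budget = min(window, len(body) - sent_so_far)

    while budget > 0:
        # Never exceed the peer's maximum frame size per DATA frame.
        chunk = body[sent_so_far:sent_so_far + min(budget, max_frame)]
        conn.send_data(stream_id, chunk, end_stream=False)
        sent_so_far += len(chunk)
        budget -= len(chunk)

    if sent_so_far >= len(body):
        conn.end_stream(stream_id)
    return sent_so_far
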
def reset_stream(self, reason: StreamCloseReason = StreamCloseReason.RESET
                 ) -> None:
    """Close this stream by sending a RST_STREAM frame to the remote peer"""
    if self.metadata['stream_closed_local']:
        raise StreamClosedError(self.stream_id)

    # Clear the buffer early to avoid keeping data in memory for a long time
    self._response['body'].truncate(0)

    self.metadata['stream_closed_local'] = True
    self._protocol.conn.reset_stream(self.stream_id, ErrorCodes.REFUSED_STREAM)
    self.close(reason)

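# Hedged sketch of the underlying h2 call used above: REFUSED_STREAM signals
# that the stream was not processed, so the peer may safely retry it on a new
# stream. `conn` and `transport` are assumed to already exist; the helper name
# is illustrative.
from h2.errors import ErrorCodes


def refuse_stream(conn, transport, stream_id: int) -> None:
    conn.reset_stream(stream_id, error_code=ErrorCodes.REFUSED_STREAM)
    # Flush the queued RST_STREAM frame out to the peer.
    transport.write(conn.data_to_send())
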
async def send_headers(self, headers, end_stream=False):
    assert self.id is not None

    if not self._connection.write_ready.is_set():
        await self._connection.write_ready.wait()

    # Workaround for the H2Connection.send_headers method, which will try
    # to create a new stream if it was removed earlier from
    # H2Connection.streams, and therefore will raise StreamIDTooLowError
    if self.id not in self._h2_connection.streams:
        raise StreamClosedError(self.id)

    self._h2_connection.send_headers(self.id, headers, end_stream=end_stream)
    self._transport.write(self._h2_connection.data_to_send())

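# An alternative (hedged) way to express the same guard: instead of peeking at
# the H2Connection.streams dict, catch the StreamIDTooLowError that, per the
# comment above, h2 raises when asked to re-create an already-removed stream.
# Everything except the h2 exceptions is an illustrative name.
from h2.exceptions import StreamClosedError, StreamIDTooLowError


def safe_send_headers(h2_conn, transport, stream_id, headers, end_stream=False):
    try:
        h2_conn.send_headers(stream_id, headers, end_stream=end_stream)
    except StreamIDTooLowError:
        # The stream was already closed and purged from the connection state.
        raise StreamClosedError(stream_id)
    transport.write(h2_conn.data_to_send())
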
async def send_data(
    self,
    stream_id: int,
    data: bytes,
    end_stream: bool = False,
):
    """Send data, respecting the receiver's flow control instructions.

    If the provided data is larger than the connection's maximum outbound
    frame size, it will be broken into several frames as appropriate.
    """
    if self.closed:
        raise ConnectionClosedError
    stream = self._get_stream(stream_id)
    if stream.closed:
        raise StreamClosedError(stream_id)
    remaining = data
    while len(remaining) > 0:
        await asyncio.gather(
            self._writable.wait(),
            self._window_open(stream.id),
        )
        remaining_size = len(remaining)
        window_size = self._h2.local_flow_control_window(stream.id)
        max_frame_size = self._h2.max_outbound_frame_size
        send_size = min(remaining_size, window_size, max_frame_size)
        if send_size == 0:
            continue
        logger.debug(
            f'[{stream.id}] Sending {send_size} of {remaining_size} '
            f'bytes (window {window_size}, frame max {max_frame_size})')
        to_send = remaining[:send_size]
        remaining = remaining[send_size:]
        end = (end_stream is True and len(remaining) == 0)
        self._h2.send_data(stream.id, to_send, end_stream=end)
        self._flush()
        if self._h2.local_flow_control_window(stream.id) == 0:
            stream.window_open.clear()

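# The loop above blocks on stream.window_open, an asyncio.Event cleared when
# the stream's flow-control window drops to zero. A hedged sketch of the other
# half: re-opening windows when the peer sends WINDOW_UPDATE. The handler name
# and the `streams` mapping are assumptions; h2's WindowUpdated event is real.
from h2.events import WindowUpdated


def handle_window_updated(event: WindowUpdated, streams: dict) -> None:
    if event.stream_id == 0:
        # Connection-level update: every stream may be able to send again.
        for stream in streams.values():
            stream.window_open.set()
    elif event.stream_id in streams:
        streams[event.stream_id].window_open.set()
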
def close(
    self,
    reason: StreamCloseReason,
    errors: Optional[List[BaseException]] = None,
    from_protocol: bool = False,
) -> None:
    """Handle each close reason accordingly."""
    if self.metadata['stream_closed_server']:
        raise StreamClosedError(self.stream_id)

    if not isinstance(reason, StreamCloseReason):
        raise TypeError(
            f'Expected StreamCloseReason, received {reason.__class__.__qualname__}'
        )

    # Default errors to an empty list as some cases append a list of exceptions
    errors = errors or []

    if not from_protocol:
        self._protocol.pop_stream(self.stream_id)

    self.metadata['stream_closed_server'] = True

    # We do not check for Content-Length or Transfer-Encoding in the response
    # headers and add a `partial` flag as in HTTP/1.1, since 'A request or
    # response that includes a payload body can include a content-length
    # header field' (RFC 7540, Section 8.1.2.6)

    # NOTE: The order in which the events are handled is important here, as we
    # immediately cancel the request when maxsize is exceeded while receiving
    # DATA frames once the headers have been received (even without a
    # Content-Length header)
    if reason is StreamCloseReason.MAXSIZE_EXCEEDED:
        expected_size = int(self._response['headers'].get(
            b'Content-Length', self._response['flow_controlled_size']))
        error_msg = (
            f'Cancelling download of {self._request.url}: received response '
            f'size ({expected_size}) larger than download max size ({self._download_maxsize})'
        )
        logger.error(error_msg)
        self._deferred_response.errback(CancelledError(error_msg))

    elif reason is StreamCloseReason.ENDED:
        self._fire_response_deferred()

    # Stream was abruptly ended here
    elif reason is StreamCloseReason.CANCELLED:
        # Client has cancelled the request. Remove all the data
        # received and fire the response deferred with no flags set

        # NOTE: The data is already flushed in Stream.reset_stream(), which is
        # called immediately when the stream needs to be cancelled

        # There may be no :status in the headers, so we set
        # HTTP Status Code 499 - Client Closed Request
        self._response['headers'][':status'] = '499'
        self._fire_response_deferred()

    elif reason is StreamCloseReason.RESET:
        self._deferred_response.errback(ResponseFailed([
            Failure(
                f'Remote peer {self._protocol.metadata["ip_address"]} sent RST_STREAM',
                ProtocolError)
        ]))

    elif reason is StreamCloseReason.CONNECTION_LOST:
        self._deferred_response.errback(ResponseFailed(errors))

    elif reason is StreamCloseReason.INACTIVE:
        errors.insert(0, InactiveStreamClosed(self._request))
        self._deferred_response.errback(ResponseFailed(errors))

    else:
        assert reason is StreamCloseReason.INVALID_HOSTNAME
        self._deferred_response.errback(InvalidHostname(
            self._request,
            str(self._protocol.metadata['uri'].host, 'utf-8'),
            f'{self._protocol.metadata["ip_address"]}:{self._protocol.metadata["uri"].port}'
        ))

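# For reference, a minimal sketch of the StreamCloseReason enum that close()
# dispatches on. Member names are taken from the branches above; the use of
# auto() values and the per-member descriptions are assumptions.
from enum import Enum, auto


class StreamCloseReason(Enum):
    ENDED = auto()             # peer ended the stream cleanly
    RESET = auto()             # remote peer sent RST_STREAM
    CONNECTION_LOST = auto()   # underlying connection was lost
    MAXSIZE_EXCEEDED = auto()  # response grew past the download max size
    CANCELLED = auto()         # client cancelled the request
    INACTIVE = auto()          # stream closed without ever becoming active
    INVALID_HOSTNAME = auto()  # request hostname not valid for this connection
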