def _receive_data_on_session_stream(self, data: bytes, fin: bool) -> None:
    self._capsule_decoder_for_session_stream.append(data)
    if fin:
        self._capsule_decoder_for_session_stream.final()
    for capsule in self._capsule_decoder_for_session_stream:
        if capsule.type in {CapsuleType.DATAGRAM,
                            CapsuleType.REGISTER_DATAGRAM_CONTEXT,
                            CapsuleType.CLOSE_DATAGRAM_CONTEXT}:
            raise ProtocolError(
                f"Unimplemented capsule type: {capsule.type}")
        if capsule.type in {CapsuleType.REGISTER_DATAGRAM_NO_CONTEXT,
                            CapsuleType.CLOSE_WEBTRANSPORT_SESSION}:
            # We'll handle this case below.
            pass
        else:
            # We should ignore unknown capsules.
            continue

        if self._close_info is not None:
            raise ProtocolError(
                ("Receiving a capsule with type = {} after receiving " +
                 "CLOSE_WEBTRANSPORT_SESSION").format(capsule.type))

        if capsule.type == CapsuleType.REGISTER_DATAGRAM_NO_CONTEXT:
            buffer = Buffer(data=capsule.data)
            format_type = buffer.pull_uint_var()
            # https://ietf-wg-webtrans.github.io/draft-ietf-webtrans-http3/draft-ietf-webtrans-http3.html#name-datagram-format-type
            WEBTRANSPORT_FORMAT_TYPE = 0xff7c00
            if format_type != WEBTRANSPORT_FORMAT_TYPE:
                raise ProtocolError(
                    "Unexpected datagram format type: {}".format(format_type))
            self._allow_datagrams = True
        elif capsule.type == CapsuleType.CLOSE_WEBTRANSPORT_SESSION:
            buffer = Buffer(data=capsule.data)
            code = buffer.pull_uint32()  # 4 bytes for the uint32.
            reason = buffer.pull_bytes(len(capsule.data) - 4)
            # TODO(yutakahirano): Make sure `reason` is a UTF-8 text.
            self._close_info = (code, reason)
            if fin:
                self._call_session_closed(self._close_info, abruptly=False)
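For reference, the CLOSE_WEBTRANSPORT_SESSION payload parsed in the last branch above is just a 32-bit application error code followed by a UTF-8 reason string. A minimal sketch of building and reading such a payload, with made-up values and assuming aioquic's Buffer import path:

# Sketch only (not part of the original source); the import path and the
# error code / reason values are assumptions for illustration.
from aioquic.buffer import Buffer

reason = "session done".encode("utf-8")
writer = Buffer(capacity=4 + len(reason))
writer.push_uint32(42)         # 4-byte application error code
writer.push_bytes(reason)      # the rest of the payload is the reason text
payload = writer.data

reader = Buffer(data=payload)
assert reader.pull_uint32() == 42
assert reader.pull_bytes(len(payload) - 4) == reason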
class H3CapsuleDecoder:
    """
    A decoder of H3Capsule.

    This is a streaming decoder and can handle multiple capsules.
    """

    def __init__(self) -> None:
        self._buffer: Optional[Buffer] = None
        self._type: Optional[int] = None
        self._length: Optional[int] = None
        self._final: bool = False

    def append(self, data: bytes) -> None:
        """
        Appends the given bytes to this decoder.
        """
        assert not self._final
        if len(data) == 0:
            return
        if self._buffer:
            remaining = self._buffer.pull_bytes(
                self._buffer.capacity - self._buffer.tell())
            self._buffer = Buffer(data=(remaining + data))
        else:
            self._buffer = Buffer(data=data)

    def final(self) -> None:
        """
        Pushes the end-of-stream mark to this decoder. After calling this,
        calling append() will be invalid.
        """
        self._final = True

    def __iter__(self) -> Iterator[H3Capsule]:
        """
        Yields decoded capsules.
        """
        try:
            while self._buffer is not None:
                if self._type is None:
                    self._type = self._buffer.pull_uint_var()
                if self._length is None:
                    self._length = self._buffer.pull_uint_var()
                if self._buffer.capacity - self._buffer.tell() < self._length:
                    if self._final:
                        raise ValueError('insufficient buffer')
                    return
                capsule = H3Capsule(
                    self._type, self._buffer.pull_bytes(self._length))
                self._type = None
                self._length = None
                if self._buffer.tell() == self._buffer.capacity:
                    self._buffer = None
                yield capsule
        except BufferReadError as e:
            if self._final:
                raise e
            if not self._buffer:
                return
            size = self._buffer.capacity - self._buffer.tell()
            if size >= UINT_VAR_MAX_SIZE:
                raise e
            # Ignore the error because there may not be sufficient input.
            return
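A minimal usage sketch of the decoder with chunked input. The capsule type value is arbitrary and the Buffer import path is an assumption; a capsule on the wire is a varint type, a varint length, and the payload:

# Sketch only (not part of the original source); the import path and the
# capsule type 0x2843 are assumptions for illustration.
from aioquic.buffer import Buffer

# Encode one capsule by hand: varint type, varint length, then the payload.
payload = b"hello"
writer = Buffer(capacity=16)
writer.push_uint_var(0x2843)          # arbitrary example capsule type
writer.push_uint_var(len(payload))
writer.push_bytes(payload)
wire = writer.data

decoder = H3CapsuleDecoder()
decoder.append(wire[:3])              # type and length only, no payload yet
assert list(decoder) == []            # insufficient input, nothing is yielded
decoder.append(wire[3:])              # the payload arrives in a later chunk
decoder.final()
capsules = list(decoder)
assert capsules[0].type == 0x2843
assert capsules[0].data == payload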
def _receive_stream_data_uni(self, stream: H3Stream, data: bytes,
                             stream_ended: bool) -> List[H3Event]:
    http_events: List[H3Event] = []

    stream.buffer += data
    if stream_ended:
        stream.ended = True

    buf = Buffer(data=stream.buffer)
    consumed = 0
    unblocked_streams: Set[int] = set()

    while stream.stream_type == StreamType.PUSH or not buf.eof():
        # fetch stream type for unidirectional streams
        if stream.stream_type is None:
            try:
                stream.stream_type = buf.pull_uint_var()
            except BufferReadError:
                break
            consumed = buf.tell()

            # check unicity
            if stream.stream_type == StreamType.CONTROL:
                if self._peer_control_stream_id is not None:
                    raise StreamCreationError(
                        "Only one control stream is allowed")
                self._peer_control_stream_id = stream.stream_id
            elif stream.stream_type == StreamType.QPACK_DECODER:
                if self._peer_decoder_stream_id is not None:
                    raise StreamCreationError(
                        "Only one QPACK decoder stream is allowed")
                self._peer_decoder_stream_id = stream.stream_id
            elif stream.stream_type == StreamType.QPACK_ENCODER:
                if self._peer_encoder_stream_id is not None:
                    raise StreamCreationError(
                        "Only one QPACK encoder stream is allowed")
                self._peer_encoder_stream_id = stream.stream_id

        if stream.stream_type == StreamType.CONTROL:
            # fetch next frame
            try:
                frame_type = buf.pull_uint_var()
                frame_length = buf.pull_uint_var()
                frame_data = buf.pull_bytes(frame_length)
            except BufferReadError:
                break
            consumed = buf.tell()

            self._handle_control_frame(frame_type, frame_data)
        elif stream.stream_type == StreamType.PUSH:
            # fetch push id
            if stream.push_id is None:
                try:
                    stream.push_id = buf.pull_uint_var()
                except BufferReadError:
                    break
                consumed = buf.tell()

            # remove processed data from buffer
            stream.buffer = stream.buffer[consumed:]

            return self._receive_request_or_push_data(
                stream, b"", stream_ended)
        elif stream.stream_type == StreamType.QPACK_DECODER:
            # feed unframed data to decoder
            data = buf.pull_bytes(buf.capacity - buf.tell())
            consumed = buf.tell()
            try:
                self._encoder.feed_decoder(data)
            except pylsqpack.DecoderStreamError as exc:
                raise QpackDecoderStreamError() from exc
            self._decoder_bytes_received += len(data)
        elif stream.stream_type == StreamType.QPACK_ENCODER:
            # feed unframed data to encoder
            data = buf.pull_bytes(buf.capacity - buf.tell())
            consumed = buf.tell()
            try:
                unblocked_streams.update(self._decoder.feed_encoder(data))
            except pylsqpack.EncoderStreamError as exc:
                raise QpackEncoderStreamError() from exc
            self._encoder_bytes_received += len(data)
        else:
            # unknown stream type, discard data
            buf.seek(buf.capacity)
            consumed = buf.tell()

    # remove processed data from buffer
    stream.buffer = stream.buffer[consumed:]

    # process unblocked streams
    for stream_id in unblocked_streams:
        stream = self._stream[stream_id]

        # resume headers
        http_events.extend(
            self._handle_request_or_push_frame(
                frame_type=FrameType.HEADERS,
                frame_data=None,
                stream=stream,
                stream_ended=stream.ended and not stream.buffer,
            ))
        stream.blocked = False
        stream.blocked_frame_size = None

        # resume processing
        if stream.buffer:
            http_events.extend(
                self._receive_request_or_push_data(stream, b"", stream.ended))

    return http_events
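For orientation, the loop above first pulls a varint stream type and then, on the control stream, varint-framed frames. A minimal sketch of such a chunk, assuming aioquic's Buffer import path and the RFC 9114 code points (control stream 0x00, SETTINGS 0x04):

# Sketch only (not part of the original source); import path and numeric
# code points are assumptions based on RFC 9114.
from aioquic.buffer import Buffer

writer = Buffer(capacity=16)
writer.push_uint_var(0x00)   # stream type prefix: control stream
writer.push_uint_var(0x04)   # frame type: SETTINGS
writer.push_uint_var(0)      # frame length: an empty SETTINGS frame is legal
chunk = writer.data          # bytes that would arrive as `data` above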
def _receive_request_or_push_data(self, stream: H3Stream, data: bytes,
                                  stream_ended: bool) -> List[H3Event]:
    """
    Handle data received on a request or push stream.
    """
    http_events: List[H3Event] = []

    stream.buffer += data
    if stream_ended:
        stream.ended = True
    if stream.blocked:
        return http_events

    # shortcut for DATA frame fragments
    if (stream.frame_type == FrameType.DATA
            and stream.frame_size is not None
            and len(stream.buffer) < stream.frame_size):
        http_events.append(
            DataReceived(
                data=stream.buffer,
                push_id=stream.push_id,
                stream_id=stream.stream_id,
                stream_ended=False,
            ))
        stream.frame_size -= len(stream.buffer)
        stream.buffer = b""
        return http_events

    # handle lone FIN
    if stream_ended and not stream.buffer:
        http_events.append(
            DataReceived(
                data=b"",
                push_id=stream.push_id,
                stream_id=stream.stream_id,
                stream_ended=True,
            ))
        return http_events

    buf = Buffer(data=stream.buffer)
    consumed = 0

    while not buf.eof():
        # fetch next frame header
        if stream.frame_size is None:
            try:
                stream.frame_type = buf.pull_uint_var()
                stream.frame_size = buf.pull_uint_var()
            except BufferReadError:
                break
            consumed = buf.tell()

            # log frame
            if (self._quic_logger is not None
                    and stream.frame_type == FrameType.DATA):
                self._quic_logger.log_event(
                    category="http",
                    event="frame_parsed",
                    data=qlog_encode_data_frame(
                        byte_length=stream.frame_size,
                        stream_id=stream.stream_id),
                )

        # check how much data is available
        chunk_size = min(stream.frame_size, buf.capacity - consumed)
        if (stream.frame_type != FrameType.DATA
                and chunk_size < stream.frame_size):
            break

        # read available data
        frame_data = buf.pull_bytes(chunk_size)
        consumed = buf.tell()

        # detect end of frame
        stream.frame_size -= chunk_size
        if not stream.frame_size:
            stream.frame_size = None

        try:
            http_events.extend(
                self._handle_request_or_push_frame(
                    frame_type=stream.frame_type,
                    frame_data=frame_data,
                    stream=stream,
                    stream_ended=stream.ended and buf.eof(),
                ))
        except pylsqpack.StreamBlocked:
            stream.blocked = True
            stream.blocked_frame_size = len(frame_data)
            break

    # remove processed data from buffer
    stream.buffer = stream.buffer[consumed:]

    return http_events
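The "shortcut for DATA frame fragments" above covers the case where a DATA frame header has already been parsed but its payload keeps arriving in later chunks, which can then be surfaced as DataReceived events without re-parsing. A minimal sketch of such a split, assuming aioquic's Buffer import path and the RFC 9114 DATA frame code point 0x00:

# Sketch only (not part of the original source); import path and the DATA
# code point are assumptions based on RFC 9114.
from aioquic.buffer import Buffer

body = b"0123456789"
writer = Buffer(capacity=32)
writer.push_uint_var(0x00)          # frame type: DATA
writer.push_uint_var(len(body))     # frame length: 10
writer.push_bytes(body[:4])         # only part of the payload in this chunk
first_chunk = writer.data
second_chunk = body[4:]             # later chunks carry bare payload bytes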
def test_pull_bytes_zero(self):
    buf = Buffer(data=b"\x08\x07\x06\x05\x04\x03\x02\x01")
    self.assertEqual(buf.pull_bytes(0), b"")
def test_pull_bytes_truncated(self):
    buf = Buffer(capacity=0)
    with self.assertRaises(BufferReadError):
        buf.pull_bytes(2)
    self.assertEqual(buf.tell(), 0)
def test_pull_bytes_negative(self):
    buf = Buffer(data=b"\x08\x07\x06\x05\x04\x03\x02\x01")
    with self.assertRaises(BufferReadError):
        buf.pull_bytes(-1)
def _receive_stream_data(
    self, stream_id: int, data: bytes, stream_ended: bool
) -> List[Event]:
    http_events: List[Event] = []

    if stream_id in self._stream_buffers:
        self._stream_buffers[stream_id] += data
    else:
        self._stream_buffers[stream_id] = data
    consumed = 0

    buf = Buffer(data=self._stream_buffers[stream_id])
    while not buf.eof():
        # fetch stream type for unidirectional streams
        if (
            stream_is_unidirectional(stream_id)
            and stream_id not in self._stream_types
        ):
            try:
                stream_type = buf.pull_uint_var()
            except BufferReadError:
                break
            consumed = buf.tell()

            if stream_type == StreamType.CONTROL:
                assert self._peer_control_stream_id is None
                self._peer_control_stream_id = stream_id
            elif stream_type == StreamType.QPACK_DECODER:
                assert self._peer_decoder_stream_id is None
                self._peer_decoder_stream_id = stream_id
            elif stream_type == StreamType.QPACK_ENCODER:
                assert self._peer_encoder_stream_id is None
                self._peer_encoder_stream_id = stream_id
            self._stream_types[stream_id] = stream_type

        # fetch next frame
        try:
            frame_type = buf.pull_uint_var()
            frame_length = buf.pull_uint_var()
            frame_data = buf.pull_bytes(frame_length)
        except BufferReadError:
            break
        consumed = buf.tell()

        if (stream_id % 4) == 0:
            # client-initiated bidirectional streams carry requests and responses
            if frame_type == FrameType.DATA:
                http_events.append(
                    DataReceived(
                        data=frame_data,
                        stream_id=stream_id,
                        stream_ended=stream_ended and buf.eof(),
                    )
                )
            elif frame_type == FrameType.HEADERS:
                control, headers = self._decoder.feed_header(stream_id, frame_data)
                cls = ResponseReceived if self._is_client else RequestReceived
                http_events.append(
                    cls(
                        headers=headers,
                        stream_id=stream_id,
                        stream_ended=stream_ended and buf.eof(),
                    )
                )
        elif stream_id == self._peer_control_stream_id:
            # unidirectional control stream
            if frame_type == FrameType.SETTINGS:
                settings = parse_settings(frame_data)
                self._encoder.apply_settings(
                    max_table_capacity=settings.get(
                        Setting.QPACK_MAX_TABLE_CAPACITY, 0
                    ),
                    blocked_streams=settings.get(Setting.QPACK_BLOCKED_STREAMS, 0),
                )

    # remove processed data from buffer
    self._stream_buffers[stream_id] = self._stream_buffers[stream_id][consumed:]

    return http_events
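The SETTINGS frame payload handled above is a flat sequence of (identifier, value) varint pairs. A minimal sketch, assuming aioquic's Buffer import path and the RFC 9204 QPACK setting identifiers; parse_settings_sketch is a hypothetical stand-in shown only to illustrate the layout, not the real parse_settings:

# Sketch only (not part of the original source); import path and identifier
# values are assumptions based on RFC 9204.
from aioquic.buffer import Buffer

writer = Buffer(capacity=16)
writer.push_uint_var(0x01)      # SETTINGS_QPACK_MAX_TABLE_CAPACITY
writer.push_uint_var(4096)
writer.push_uint_var(0x07)      # SETTINGS_QPACK_BLOCKED_STREAMS
writer.push_uint_var(16)
frame_data = writer.data


def parse_settings_sketch(data: bytes) -> dict:
    # Hypothetical helper: read identifier/value pairs until the end.
    buf = Buffer(data=data)
    settings = {}
    while not buf.eof():
        identifier = buf.pull_uint_var()
        settings[identifier] = buf.pull_uint_var()
    return settings


assert parse_settings_sketch(frame_data) == {0x01: 4096, 0x07: 16}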
def _receive_stream_data_uni(self, stream_id: int,
                             data: bytes) -> List[HttpEvent]:
    http_events: List[HttpEvent] = []

    stream = self._stream[stream_id]
    stream.buffer += data

    buf = Buffer(data=stream.buffer)
    consumed = 0
    unblocked_streams: Set[int] = set()

    while not buf.eof():
        # fetch stream type for unidirectional streams
        if stream.stream_type is None:
            try:
                stream.stream_type = buf.pull_uint_var()
            except BufferReadError:
                break
            consumed = buf.tell()

            if stream.stream_type == StreamType.CONTROL:
                assert self._peer_control_stream_id is None
                self._peer_control_stream_id = stream_id
            elif stream.stream_type == StreamType.QPACK_DECODER:
                assert self._peer_decoder_stream_id is None
                self._peer_decoder_stream_id = stream_id
            elif stream.stream_type == StreamType.QPACK_ENCODER:
                assert self._peer_encoder_stream_id is None
                self._peer_encoder_stream_id = stream_id

        if stream_id == self._peer_control_stream_id:
            # fetch next frame
            try:
                frame_type = buf.pull_uint_var()
                frame_length = buf.pull_uint_var()
                frame_data = buf.pull_bytes(frame_length)
            except BufferReadError:
                break
            consumed = buf.tell()

            # unidirectional control stream
            if frame_type == FrameType.SETTINGS:
                settings = parse_settings(frame_data)
                encoder = self._encoder.apply_settings(
                    max_table_capacity=settings.get(
                        Setting.QPACK_MAX_TABLE_CAPACITY, 0),
                    blocked_streams=settings.get(
                        Setting.QPACK_BLOCKED_STREAMS, 0),
                )
                self._quic.send_stream_data(
                    self._local_encoder_stream_id, encoder)
        else:
            # fetch unframed data
            data = buf.pull_bytes(buf.capacity - buf.tell())
            consumed = buf.tell()
            if stream_id == self._peer_decoder_stream_id:
                self._encoder.feed_decoder(data)
            elif stream_id == self._peer_encoder_stream_id:
                unblocked_streams.update(self._decoder.feed_encoder(data))

    # remove processed data from buffer
    stream.buffer = stream.buffer[consumed:]

    # process unblocked streams
    for stream_id in unblocked_streams:
        stream = self._stream[stream_id]
        decoder, headers = self._decoder.resume_header(stream_id)
        stream.blocked = False
        cls = ResponseReceived if self._is_client else RequestReceived
        http_events.append(
            cls(
                headers=headers,
                stream_id=stream_id,
                stream_ended=stream.ended and not stream.buffer,
            ))
        http_events.extend(
            self._receive_stream_data_bidi(stream_id, b"", stream.ended))

    return http_events
def _receive_stream_data_bidi(self, stream_id: int, data: bytes,
                              stream_ended: bool) -> List[HttpEvent]:
    """
    Client-initiated bidirectional streams carry requests and responses.
    """
    http_events: List[HttpEvent] = []

    stream = self._stream[stream_id]
    stream.buffer += data
    if stream_ended:
        stream.ended = True
    if stream.blocked:
        return http_events

    # shortcut DATA frame bits
    if (stream.frame_size is not None
            and stream.frame_type == FrameType.DATA
            and len(stream.buffer) < stream.frame_size):
        http_events.append(
            DataReceived(data=stream.buffer, stream_id=stream_id,
                         stream_ended=False))
        stream.frame_size -= len(stream.buffer)
        stream.buffer = b""
        return http_events

    # some peers (e.g. f5) end the stream with no data
    if stream_ended and not stream.buffer:
        http_events.append(
            DataReceived(data=b"", stream_id=stream_id, stream_ended=True))
        return http_events

    buf = Buffer(data=stream.buffer)
    consumed = 0

    while not buf.eof():
        # fetch next frame header
        if stream.frame_size is None:
            try:
                stream.frame_type = buf.pull_uint_var()
                stream.frame_size = buf.pull_uint_var()
            except BufferReadError:
                break
            consumed = buf.tell()

        # check how much data is available
        chunk_size = min(stream.frame_size, buf.capacity - consumed)
        if (stream.frame_type == FrameType.HEADERS
                and chunk_size < stream.frame_size):
            break

        # read available data
        frame_data = buf.pull_bytes(chunk_size)
        consumed = buf.tell()

        # detect end of frame
        stream.frame_size -= chunk_size
        if not stream.frame_size:
            stream.frame_size = None

        if stream.frame_type == FrameType.DATA and (stream_ended or frame_data):
            http_events.append(
                DataReceived(
                    data=frame_data,
                    stream_id=stream_id,
                    stream_ended=stream_ended and buf.eof(),
                ))
        elif stream.frame_type == FrameType.HEADERS:
            try:
                decoder, headers = self._decoder.feed_header(
                    stream_id, frame_data)
            except StreamBlocked:
                stream.blocked = True
                break
            self._quic.send_stream_data(self._local_decoder_stream_id, decoder)
            cls = ResponseReceived if self._is_client else RequestReceived
            http_events.append(
                cls(
                    headers=headers,
                    stream_id=stream_id,
                    stream_ended=stream_ended and buf.eof(),
                ))

    # remove processed data from buffer
    stream.buffer = stream.buffer[consumed:]

    return http_events