def __init__(self):
    # Create one Event per stage of the request/response cycle.
    # Attribute creation order matches the cycle order.
    for stage in ('pre_request', 'request', 'pre_response',
                  'response', 'request_data', 'response_data'):
        setattr(self, stage, Event())
def __init__(self, io_stream):
    # Underlying stream to decode; presumably an extended IOStream --
    # only read_until/read_bytes style usage is visible from here.
    self._io_stream = io_stream
    # Fired with raw bytes read from the stream.
    self.data_event = Event()
    # Fired with decoded content bytes only.
    self.content_event = Event()
class ConnectionEvents(object):
    '''Group the events fired over the lifetime of a connection.

    One :class:`Event` exists for each stage of the request/response
    cycle; they are created, attached, and cleared in cycle order.
    '''

    # Stage names, in the order the events are created and wired up.
    _STAGES = ('pre_request', 'request', 'pre_response',
               'response', 'request_data', 'response_data')

    def __init__(self):
        for stage in self._STAGES:
            setattr(self, stage, Event())

    def attach(self, recorder_session):
        '''Register the recorder session's same-named callback on each event.'''
        for stage in self._STAGES:
            getattr(self, stage).handle(getattr(recorder_session, stage))

    def clear(self):
        '''Remove all handlers from every event.'''
        for stage in self._STAGES:
            getattr(self, stage).clear()
class ChunkedTransferStreamReader(object):
    '''Read chunked transfer encoded stream.

    Args:
        io_stream: An instance of :class:`.extended.IOStream`.

    Attributes:
        data_event (Event): An instance of :class:`.actor.Event` fired
            with every raw byte string read from the stream (chunk
            headers, payload, and trailers alike).
        content_event (Event): An instance of :class:`.actor.Event`
            fired only with decoded payload bytes.
    '''
    def __init__(self, io_stream):
        self._io_stream = io_stream
        self.data_event = Event()
        self.content_event = Event()

    @tornado.gen.coroutine
    def read_chunk(self):
        '''Read a single chunk of the chunked transfer encoding.

        Returns:
            int: The size of the content in the chunk.
        '''
        _logger.debug('Reading chunk.')

        size_line = yield self._io_stream.read_until(b'\n')
        self.data_event.fire(size_line)

        # The size field may carry ";extension" parameters; drop them
        # before parsing the hexadecimal length.
        try:
            chunk_size = int(size_line.split(b';', 1)[0].strip(), 16)
        except ValueError as error:
            raise ProtocolError(error.args[0]) from error

        _logger.debug(__('Getting chunk size={0}.', chunk_size))

        if not chunk_size:
            # A zero-length chunk terminates the body.
            raise tornado.gen.Return(chunk_size)

        def on_payload(data):
            # Payload bytes are both raw data and decoded content.
            self.data_event.fire(data)
            self.content_event.fire(data)

        yield self._io_stream.read_bytes(
            chunk_size, streaming_callback=on_payload
        )

        terminator = yield self._io_stream.read_until(b'\n')
        self.data_event.fire(terminator)

        if len(terminator) > 2:
            # Expected CRLF or LF only; anything longer is malformed
            # (could be our bug or the server's).
            raise ProtocolError('Error reading newline after chunk.')

        raise tornado.gen.Return(chunk_size)

    @tornado.gen.coroutine
    def read_trailer(self):
        '''Read the HTTP trailer fields.

        Returns:
            bytes: The trailer data.
        '''
        _logger.debug('Reading chunked trailer.')

        pieces = []

        while True:
            line = yield self._io_stream.read_until(b'\n')
            self.data_event.fire(line)
            pieces.append(line)

            # A blank line ends the trailer section.
            if not line.strip():
                break

        raise tornado.gen.Return(b''.join(pieces))
class ChunkedTransferStreamReader(object):
    '''Read chunked transfer encoded stream.

    Args:
        io_stream: An instance of :class:`.extended.IOStream`.

    Attributes:
        data_event (Event): An instance of :class:`.actor.Event` fired
            with every raw byte string consumed from the stream.
        content_event (Event): An instance of :class:`.actor.Event`
            fired only with the decoded payload bytes.
    '''
    def __init__(self, io_stream):
        self._io_stream = io_stream
        self.data_event = Event()
        self.content_event = Event()

    @tornado.gen.coroutine
    def read_chunk(self):
        '''Read a single chunk of the chunked transfer encoding.

        Returns:
            int: The size of the content in the chunk.
        '''
        _logger.debug('Reading chunk.')

        header = yield self._io_stream.read_until(b'\n')
        self.data_event.fire(header)

        # Strip any ";extension" part, then parse the hex chunk length.
        size_field = header.split(b';', 1)[0].strip()

        try:
            payload_size = int(size_field, 16)
        except ValueError as error:
            raise ProtocolError(error.args[0]) from error

        _logger.debug('Getting chunk size={0}.'.format(payload_size))

        if not payload_size:
            # The final chunk has size zero; nothing more to consume here.
            raise tornado.gen.Return(payload_size)

        def forward(data):
            # Every payload byte counts as both raw data and content.
            self.data_event.fire(data)
            self.content_event.fire(data)

        yield self._io_stream.read_bytes(payload_size,
                                         streaming_callback=forward)

        line_end = yield self._io_stream.read_until(b'\n')
        self.data_event.fire(line_end)

        # Only CRLF or LF is valid after a chunk; a longer sequence
        # means the framing went wrong (on our side or the server's).
        if len(line_end) > 2:
            raise ProtocolError('Error reading newline after chunk.')

        raise tornado.gen.Return(payload_size)

    @tornado.gen.coroutine
    def read_trailer(self):
        '''Read the HTTP trailer fields.

        Returns:
            bytes: The trailer data.
        '''
        _logger.debug('Reading chunked trailer.')

        collected = []
        line = yield self._io_stream.read_until(b'\n')

        # Keep reading until the blank line that ends the trailers.
        while line.strip():
            self.data_event.fire(line)
            collected.append(line)
            line = yield self._io_stream.read_until(b'\n')

        # Fire and record the terminating blank line as well.
        self.data_event.fire(line)
        collected.append(line)

        raise tornado.gen.Return(b''.join(collected))