async def tcp_recv(reader: asyncio.StreamReader, delimiter: bytes = b'\n', timeout=None) -> str:
    """Receive one length-prefixed message and return it as a string.

    Wire format: an ASCII integer length terminated by *delimiter*, followed
    by exactly that many payload bytes.

    :param reader: stream to read from.
    :param delimiter: byte sequence terminating the length field.
    :param timeout: per-read timeout in seconds, or None to wait forever.
    :raises TypeError: when delimiter/timeout have the wrong type (diagnosed
        via print before re-raising).
    :raises ConnectionResetError: when the peer disconnects.
    :raises asyncio.IncompleteReadError: on a short read — handle it yourself.
    """
    try:
        data_length: bytes = await asyncio.wait_for(
            reader.readuntil(delimiter), timeout=timeout)
    except TypeError:
        # Diagnose which argument was mistyped, then let the error propagate.
        msg = "tcp_recv: expects"
        if not isinstance(delimiter, bytes):
            print(msg, f"<bytes> for delimiter, got {type(delimiter)} instead.")
        if not isinstance(timeout, Number) and timeout is not None:
            # BUGFIX: this message previously said "for delimiter" although it
            # reports the timeout argument.
            print(msg, f"<numbers> for timeout, got {type(timeout)} instead.")
        raise
    except ConnectionResetError:
        # NOTE(review): a reset can surface here directly instead of as an
        # IncompleteReadError — both are re-raised for the caller to handle.
        print("tcp_recv: Disconnected from Server.")
        raise
    except asyncio.IncompleteReadError:
        print("tcp_recv: Incomplete read error.")
        raise
    # The length field includes the delimiter; strip it before int().
    data = await asyncio.wait_for(
        reader.readexactly(int(data_length.strip(delimiter))), timeout=timeout)
    return data.decode()
async def connection_loop(execute_rpc: Callable[[Any], Any],
                          reader: asyncio.StreamReader,
                          writer: asyncio.StreamWriter,
                          logger: logging.Logger,
                          cancel_token: CancelToken) -> None:
    """Serve JSON-RPC requests on one client connection until cancelled.

    Reads '}'-delimited chunks from *reader*, accumulates them until they form
    a complete JSON document, dispatches that request to *execute_rpc*, and
    writes the result (or an error payload) back to *writer*.  Every await is
    wrapped in cancel_token.cancellable_wait so cancellation interrupts
    pending I/O.
    """
    # TODO: we should look into using an io.StringIO here for more efficient
    # writing to the end of the string.
    raw_request = ''
    while True:
        request_bytes = b''
        try:
            # Every JSON object ends with '}', so read up to the next one; a
            # single read may still contain only a fragment of the request.
            request_bytes = await cancel_token.cancellable_wait(reader.readuntil(b'}'))
        except asyncio.LimitOverrunError as e:
            logger.info("Client request was too long. Erasing buffer and restarting...")
            # Drain the oversized data so the stream is usable again.
            request_bytes = await cancel_token.cancellable_wait(reader.read(e.consumed))
            await cancel_token.cancellable_wait(write_error(
                writer,
                f"reached limit: {e.consumed} bytes, starting with '{request_bytes[:20]!r}'",
            ))
            continue
        raw_request += request_bytes.decode()
        # Report (but tolerate) leading garbage before the JSON document.
        bad_prefix, raw_request = strip_non_json_prefix(raw_request)
        if bad_prefix:
            logger.info("Client started request with non json data: %r", bad_prefix)
            await cancel_token.cancellable_wait(
                write_error(writer, 'Cannot parse json: ' + bad_prefix),
            )
        try:
            request = json.loads(raw_request)
        except json.JSONDecodeError:
            # invalid json request, keep reading data until a valid json is formed
            logger.debug("Invalid JSON, waiting for rest of message: %r", raw_request)
            continue
        # reset the buffer for the next message
        raw_request = ''
        if not request:
            logger.debug("Client sent empty request")
            await cancel_token.cancellable_wait(
                write_error(writer, 'Invalid Request: empty'),
            )
            continue
        try:
            result = await execute_rpc(request)
        except Exception as e:
            # Boundary catch-all: the connection must survive a bad handler.
            logger.exception("Unrecognized exception while executing RPC")
            await cancel_token.cancellable_wait(
                write_error(writer, "unknown failure: " + str(e)),
            )
        else:
            writer.write(result.encode())
            await cancel_token.cancellable_wait(writer.drain())
async def __process_http(self, first_chunk: bytes, down_reader: StreamReader, down_writer: StreamWriter):
    """Proxy one plain-HTTP request: parse the request line and headers from
    the downstream client, choose an upstream (SOCKS redirect or direct
    connection) based on the Host header, then pump bytes between the two.

    :param first_chunk: bytes already consumed from the client before the
        request line; replayed to the upstream by __pump_traffic.
    :raises HttpProtocolError: when no Host header is found.
    """
    # The request line (e.g. b'GET / HTTP/1.1' plus the separator).
    request = await wait_for(
        down_reader.readuntil(separator=HTTP_HEADERS_SEPARATOR),
        IO_TIMEOUT)
    headers = []
    host, port = None, 80  # 80 is the default when Host carries no port
    while True:
        line = await wait_for(
            down_reader.readuntil(separator=HTTP_HEADERS_SEPARATOR),
            IO_TIMEOUT)
        headers.append(line)
        if line == HTTP_HEADERS_SEPARATOR:
            # A bare separator is the blank line ending the header block.
            log.debug('End of HTTP headers.')
            break
        log.debug('Got HTTP header line %r[...].', line[:32])
        # NOTE(review): a malformed header line with no ':' raises ValueError
        # here — confirm callers treat that as a protocol error.
        key, value = line.split(b':', maxsplit=1)
        if key.lower() != b'host':
            continue
        address = value.strip().decode('ascii')
        if ':' in address:
            # Host header of the form "name:port".
            host, port = address.split(':', maxsplit=1)
            port = int(port)
        else:
            host = address
    if host is None:
        raise HttpProtocolError(
            'Unable to find Host header in HTTP headers:')
    log.debug('Got HTTP host %r and port %d.', host, port)
    if self.__rules.redirect_to_socks(host):
        log.debug('Redirecting HTTP host %r@%d to SOCKS.', host, port)
        up_reader, up_writer = await self.__connect_socks_server(
            host, port)
    else:
        log.debug('Processing HTTP host %r@%d as is.', host, port)
        up_reader, up_writer = await wait_for(
            open_connection(host=host, port=port),
            CONNECT_TIMEOUT)
    # Replay what we already consumed (first chunk, request line, headers) and
    # then relay traffic in both directions.
    await self.__pump_traffic(up_reader, up_writer, down_reader, down_writer,
                              first_chunk, request, b''.join(headers))
async def read_lines_until(reader: StreamReader, timeout: float):
    """Collect CR-terminated lines from *reader* until EOF or a read times out.

    :param reader: stream to read from.
    :param timeout: per-line timeout in seconds.
    :returns: list of ASCII-decoded lines, each still ending with '\\r'.
    """
    lines = []
    while not reader.at_eof():
        try:
            line = await shield(wait_for(reader.readuntil(b'\r'), timeout))
        except asyncio.TimeoutError:
            # BUGFIX: previously caught the private
            # concurrent.futures._base.TimeoutError, which asyncio.wait_for
            # does NOT raise on Python 3.8-3.10, so timeouts escaped the loop.
            # asyncio.TimeoutError matches on every supported version.
            break
        lines.append(line.decode("ascii"))
    return lines
async def readuntil_timeout(reader: asyncio.StreamReader, separator: bytes, timeout: float) -> Union[bytes, int]:
    """Read one separator-terminated chunk from *reader* within *timeout*.

    :returns: the payload with the separator and any trailing CR removed, or
        READ_TIMEOUT / SOCKET_CLOSED sentinel on timeout / closed socket.
    """
    try:
        raw = await asyncio.wait_for(reader.readuntil(separator), timeout)
    except asyncio.TimeoutError:
        return READ_TIMEOUT
    except asyncio.IncompleteReadError:
        return SOCKET_CLOSED
    # Drop the separator itself, then a CR if the peer sent CRLF endings.
    payload = raw.partition(separator)[0]
    return payload.partition(b"\r")[0]
async def server(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Handle one client connection: read the HTTP header block and pass it
    to rewrite_handler, swallowing expected disconnect/timeout errors, then
    close the writer unconditionally.
    """
    ip, port = writer.get_extra_info("peername")[0:2]
    try:
        # NOTE(review): if `conf` is a ConfigParser, conf.get(...) returns a
        # str, and asyncio.wait_for needs a numeric timeout — confirm, or use
        # conf.getfloat("server", "request_timeout").
        header = await asyncio.wait_for(reader.readuntil(b"\r\n\r\n"),
                                        conf.get("server", "request_timeout"))
        await rewrite_handler(reader, writer, (ip, header))
    except (ConnectionError, asyncio.TimeoutError, asyncio.IncompleteReadError):
        # Expected client-side failures: nothing to do but close below.
        pass
    except OSError as e:
        # Unexpected socket error: log it and fall through to close.
        # (Removed a dead `pass` statement that followed this print.)
        print(e)
    writer.close()
async def server(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Serve one client connection: repeatedly read an HTTP header block and
    hand it to self.http1_handler until the handler declines or the
    connection fails/times out, then close the writer and log the loss.
    """
    client_ip, client_port = writer.get_extra_info("peername")[:2]
    keep_going = True
    while keep_going:
        try:
            raw_header = await asyncio.wait_for(
                reader.readuntil(b"\r\n\r\n"), self.timeout)
        except (ConnectionError, asyncio.TimeoutError, asyncio.IncompleteReadError):
            # Normal end of conversation: peer gone, slow, or stream ended.
            keep_going = False
        except OSError as e:
            print(e)
            keep_going = False
        else:
            # A falsy handler result means "stop serving this client".
            keep_going = bool(
                await self.http1_handler(reader, writer, (client_ip, raw_header)))
    writer.close()
    self.log.debug(f"[{client_ip}:{client_port}]: connect lost")
async def _seperate_solutions(stream: StreamReader, timeout: Optional[timedelta]):
    """Yield SEPARATOR-delimited chunks (separator included) from *stream*.

    When *timeout* is given, all reads must finish before now + timeout + 1s;
    a late read lets asyncio.TimeoutError propagate.  Chunks that overrun the
    stream's internal limit are accumulated across reads before being yielded.
    """
    cutoff = None
    if timeout is not None:
        # One extra second of grace over the caller-supplied budget.
        cutoff = datetime.now() + timeout + timedelta(seconds=1)
    buffer: bytes = b""
    while not stream.at_eof():
        try:
            if cutoff is None:
                buffer += await stream.readuntil(SEPARATOR)
            else:
                remaining = cutoff - datetime.now()
                buffer += await asyncio.wait_for(
                    stream.readuntil(SEPARATOR), remaining.total_seconds())
        except asyncio.LimitOverrunError as overrun:
            # Separator not found within the limit: take what was scanned and
            # keep accumulating — no yield until a separator arrives.
            buffer += await stream.readexactly(overrun.consumed)
        else:
            yield buffer
            buffer = b""
async def handle_request(reader: StreamReader, writer: StreamWriter, read_timeout=0,
                         document_root='', sleep_time=0, **kwargs):
    """Serve a single HTTP request: parse the header block, load the file in
    an executor, and write the response (headers only for HEAD requests).

    :param read_timeout: seconds to wait for the request header block.
    :param document_root: base directory for served files.
    :param sleep_time: delay applied after the connection is closed.
    """
    file_data: bytes = b""
    method = ''
    # BUGFIX: initialize head so the logging below cannot raise
    # UnboundLocalError when the inner try fails before assignment.
    head: bytes = b""
    try:
        try:
            head = await asyncio.wait_for(reader.readuntil(b'\r\n\r\n'),
                                          timeout=read_timeout)
        except asyncio.TimeoutError:
            # BUGFIX: catch asyncio.TimeoutError — before Python 3.11 it is
            # not the builtin TimeoutError, so the old clause never matched.
            raise BadRequest('Wait too long to get http request.')
        except IncompleteReadError:
            logging.info('Empty request.')
            return None
        method, path = validate_head(head)
        loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
        # File I/O runs in the default executor so the loop is not blocked.
        file_size, file_data, content_type = await loop.run_in_executor(
            None, partial(get_file, document_root, path))
        response = format_response(OK, content_type, file_size)
    except HttpException as e:
        response = format_response(e)
    except Exception:
        # Boundary catch-all: always answer the client with a 500.
        logging.exception('Some error occurred.')
        response = format_response(InternalServerError)
    addr = writer.get_extra_info('peername')
    logging.info(f"Received {head.decode()} from {addr!r}")
    logging.info(f"Send: {response}")
    writer.write(response.encode())
    if method == HEAD:
        # HEAD responses carry no body.
        file_data = None
    if file_data:
        writer.write(file_data)
    await writer.drain()
    writer.close()
    logging.info("The connection was closed")
    # NOTE(review): this looks like a blocking time.sleep inside a coroutine,
    # which stalls the whole event loop — confirm and consider
    # `await asyncio.sleep(sleep_time)`.
    sleep(sleep_time)
async def _get_raw_header_from_stream(cls, reader: StreamReader, timeout: int = 5) -> bytes:
    """Return the raw HTTP header block (up to and including the blank line)
    read from *reader*, waiting at most *timeout* seconds.
    """
    return await asyncio.wait_for(reader.readuntil(b'\r\n\r\n'), timeout)