async def make_request(self, redirect=False):
    '''
    Acts as the central hub for preparing requests to be sent, and
    returning them upon completion. Generally just pokes through self's
    attribs and makes decisions about what to do.

    Returns:
        sock: The socket to be returned to the calling session's pool.
        Response: The response object, after any redirects. If there were
            redirects, the redirect responses will be stored in the final
            response object's `.history`.
    '''
    hconnection = h11.Connection(our_role=h11.CLIENT)
    (self.scheme,
     self.host,
     self.path,
     self.uri_parameters,
     self.query,
     _) = urlparse(self.uri)

    if not redirect:
        self.initial_scheme = self.scheme
        self.initial_netloc = self.host

    # leave the host as-is on the default ports 80 / 443,
    # otherwise use the base host with ':port' appended.
    host = (self.host if (self.port == '80' or self.port == '443')
            else self.host.split(':')[0] + ':' + self.port)

    # default header construction
    asks_headers = c_i_dict([('Host', host),
                             ('Connection', 'keep-alive'),
                             ('Accept-Encoding', 'gzip, deflate'),
                             ('Accept', '*/*'),
                             ('Content-Length', '0'),
                             ('User-Agent', 'python-asks/2.2.0')])

    # check for a CookieTracker object, and if it's there inject
    # the relevant cookies in to the (next) request.
    if self.persist_cookies is not None:
        self.cookies.update(
            self.persist_cookies.get_additional_cookies(
                self.host, self.path))

    # formulate path / query and intended extra queries for use in the uri
    self._build_path()

    # handle building the request body, if any
    body = ''
    if any((self.data, self.files, self.json is not None)):
        content_type, content_len, body = await self._formulate_body()
        asks_headers['Content-Type'] = content_type
        asks_headers['Content-Length'] = content_len

    # add custom headers, if any
    # note that custom headers take precedence
    if self.headers is not None:
        asks_headers.update(self.headers)

    # add auth
    if self.auth is not None:
        asks_headers.update(await self._auth_handler_pre())
        asks_headers.update(await self._auth_handler_post_get_auth())

    # add cookies
    if self.cookies:
        cookie_str = ''
        for k, v in self.cookies.items():
            cookie_str += '{}={}; '.format(k, v)
        asks_headers['Cookie'] = cookie_str[:-1]

    # construct the h11 body object, if there is a body
    if body:
        if not isinstance(body, bytes):
            body = bytes(body, self.encoding)
        asks_headers['Content-Length'] = str(len(body))
        req_body = h11.Data(data=body)
    else:
        req_body = None

    # construct the h11 request object
    req = h11.Request(method=self.method,
                      target=self.path,
                      headers=asks_headers.items())

    # call i/o handling func
    response_obj = await self._request_io(req, req_body, hconnection)

    # Check to see if the final socket object is suitable to be returned
    # to the calling session's connection pool.
    # We don't want to return sockets that are of a different scheme or
    # a different top-level domain, as they are less likely to be useful.
    if redirect:
        if not (self.scheme == self.initial_scheme and
                self.host == self.initial_netloc):
            self.sock._active = False

    if self.streaming:
        return None, response_obj

    return self.sock, response_obj
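# A minimal sketch (an assumption, not asks' actual _request_io) of the i/o
# step that consumes the h11 objects built above: each event is serialized
# with hconnection.send() and written to the socket, then received bytes are
# fed back through hconnection.receive_data() / next_event() until the
# h11.Response arrives. The `sock` send/recv interface here is illustrative.
async def _send_request_events(sock, hconnection, req, req_body):
    await sock.send(hconnection.send(req))
    if req_body is not None:
        await sock.send(hconnection.send(req_body))
    await sock.send(hconnection.send(h11.EndOfMessage()))
    while True:
        event = hconnection.next_event()
        if event is h11.NEED_DATA:
            hconnection.receive_data(await sock.recv(65536))
            continue
        return event  # the first complete event is the h11.Response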
async def send(self,
               request: requests.PreparedRequest,
               *args: typing.Any,
               **kwargs: typing.Any) -> requests.Response:
    urlparts = urlparse(request.url)

    hostname = urlparts.hostname
    port = urlparts.port
    if port is None:
        port = {"http": 80, "https": 443}[urlparts.scheme]
    target = urlparts.path
    if urlparts.query:
        target += "?" + urlparts.query
    headers = [("host", urlparts.netloc)] + list(request.headers.items())

    reader, writer = await asyncio.open_connection(hostname, port)

    conn = h11.Connection(our_role=h11.CLIENT)

    message = h11.Request(method=request.method, target=target, headers=headers)
    data = conn.send(message)
    writer.write(data)

    if request.body:
        message = h11.Data(data=request.body.encode("utf-8"))
        data = conn.send(message)
        writer.write(data)

    message = h11.EndOfMessage()
    data = conn.send(message)
    writer.write(data)

    status_code = 0
    headers = []
    reason = b""
    buffer = io.BytesIO()

    while True:
        event = conn.next_event()
        event_type = type(event)

        if event_type is h11.NEED_DATA:
            data = await reader.read(2048)
            conn.receive_data(data)
        elif event_type is h11.Response:
            status_code = event.status_code
            headers = [(key.decode(), value.decode()) for key, value in event.headers]
            reason = event.reason
        elif event_type is h11.Data:
            buffer.write(event.data)
        elif event_type is h11.EndOfMessage:
            buffer.seek(0)
            break

    writer.close()
    if hasattr(writer, 'wait_closed'):
        await writer.wait_closed()

    resp = urllib3.HTTPResponse(
        body=buffer,
        headers=headers,
        status=status_code,
        reason=reason,
        preload_content=False,
    )

    return self.build_response(request, resp)
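# Minimal usage sketch, under the assumption that the send() coroutine above
# is a method of a hypothetical requests-style adapter class, here called
# H11Adapter, that subclasses requests.adapters.HTTPAdapter (which provides
# build_response()). The class and function names are illustrative, and only
# plain http:// URLs work here since open_connection() is given no SSL context.
async def fetch(url: str) -> str:
    adapter = H11Adapter()
    prepared = requests.Request(method="GET", url=url).prepare()
    response = await adapter.send(prepared)
    return response.text

# e.g. body = asyncio.run(fetch("http://example.org/"))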
def __init__(self, *, framework: ASGIFramework = echo_framework) -> None:
    self.client_stream, server_stream = trio.testing.memory_stream_pair()
    server_stream.socket = MockSocket()
    self.client = h11.Connection(h11.CLIENT)
    self.server = H11Server(framework, Config(), server_stream)
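# A minimal sketch of how such a test client could push a request at the
# server side: serialize an h11.Request on the client connection and write the
# bytes to the in-memory stream. The send_request() name and headers are
# assumptions; how the H11Server end is pumped depends on its own API, which
# is not shown here.
async def send_request(self, method: str = "GET", path: str = "/") -> None:
    request = h11.Request(method=method, target=path,
                          headers=[("host", "example.com")])
    await self.client_stream.send_all(self.client.send(request))
    await self.client_stream.send_all(self.client.send(h11.EndOfMessage()))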
async def handle_client(self, stream):
    # Uses max_size - 1 given h11 enforces the check only if its current
    # internal buffer doesn't contain an entire message.
    # Note that given we fetch by batches of MAX_INITIAL_HTTP_REQUEST_SIZE,
    # we can end up with a final message as big as
    # 2 * MAX_INITIAL_HTTP_REQUEST_SIZE - 1 if the client starts by sending a
    # MAX_INITIAL_HTTP_REQUEST_SIZE - 1 TCP frame, then another
    # MAX_INITIAL_HTTP_REQUEST_SIZE one.
    conn = h11.Connection(
        h11.SERVER, max_incomplete_event_size=MAX_INITIAL_HTTP_REQUEST_SIZE - 1)
    try:
        # Fetch the initial request
        while True:
            try:
                data = await stream.receive_some(MAX_INITIAL_HTTP_REQUEST_SIZE)
            except trio.BrokenResourceError:
                # The socket got broken in an unexpected way (the peer has most
                # likely left without telling us, or has reset the connection)
                return
            conn.receive_data(data)
            event = conn.next_event()
            if event is h11.NEED_DATA:
                continue
            if isinstance(event, h11.Request):
                break
            if isinstance(event, h11.ConnectionClosed):
                # Peer has left
                return
            else:
                logger.error("Unexpected event", client_event=event)
                return

        # See https://h11.readthedocs.io/en/v0.10.0/api.html#flow-control
        if conn.they_are_waiting_for_100_continue:
            await stream.send_all(
                conn.send(h11.InformationalResponse(status_code=100, headers=[])))

        def _get_header(key: bytes) -> Optional[bytes]:
            # h11 guarantees header keys are always lowercase
            return next((v for k, v in event.headers if k == key), None)

        # Do HTTPS redirection if the incoming request doesn't follow the
        # forward proto rules
        if self.config.forward_proto_enforce_https:
            header_key, header_expected_value = self.config.forward_proto_enforce_https
            header_value = _get_header(header_key)
            # Only redirect if the forward proto header is present but doesn't
            # match the expected protocol value.
            if header_value is not None and header_value != header_expected_value:
                location_url = (
                    b"https://"
                    + self.config.backend_addr.netloc.encode("ascii")
                    + event.target)
                await self._send_http_reply(
                    stream=stream,
                    conn=conn,
                    status_code=301,
                    headers={b"location": location_url},
                )
                return await stream.aclose()

        # Test for websocket upgrade considering:
        # - the Upgrade header was introduced in the HTTP 1.1 RFC
        # - the Connection & Upgrade fields are case-insensitive according to the RFC
        # - only the `/ws` target is valid for upgrade, which allows us to
        #   reserve other targets for future use
        # - we fall back to HTTP in case of an invalid upgrade query, for simplicity
        if (event.http_version == b"1.1"
                and event.target == TRANSPORT_TARGET.encode()
                and (_get_header(b"connection") or b"").lower() == b"upgrade"
                and (_get_header(b"upgrade") or b"").lower() == b"websocket"):
            await self._handle_client_websocket(stream, event)
        else:
            await self._handle_client_http(stream, conn, event)

    except h11.RemoteProtocolError as exc:
        # Peer is drunk, tell them why and leave...
        await self._send_http_reply(
            stream, conn, status_code=exc.error_status_hint)

    finally:
        # Note the stream might already be closed (e.g. through `Transport.aclose`)
        # but that's fine given this operation is idempotent
        await stream.aclose()
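# A minimal sketch, under assumptions, of the _send_http_reply() helper used
# above: serialize an h11.Response (plus optional body) followed by
# EndOfMessage, tolerating a peer that already hung up or a connection that
# h11 no longer allows us to write to. The real implementation may add more
# headers and error handling.
async def _send_http_reply(self, stream, conn, status_code, headers=None, data=b""):
    out_headers = [(b"content-length", str(len(data)).encode("ascii"))]
    if headers:
        out_headers += list(headers.items())
    try:
        await stream.send_all(
            conn.send(h11.Response(status_code=status_code, headers=out_headers)))
        if data:
            await stream.send_all(conn.send(h11.Data(data=data)))
        await stream.send_all(conn.send(h11.EndOfMessage()))
    except (trio.BrokenResourceError, h11.LocalProtocolError):
        # Peer already gone, or the h11 state machine forbids a reply here
        pass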
import h11
import curio
from curio import socket

import noio_ws as ws
from noio_ws import utils
from noio_ws.errors import NnwsProtocolError

httpcon = h11.Connection(our_role=h11.SERVER)  # our h11 http connection
wscon = ws.Connection('SERVER')  # our noio_ws websocket connection


async def client_handler(connection, addr):
    print(f'Connection from {addr}')
    ws_shaker = utils.Handshake('SERVER')
    request = await http_next_event(connection)
    request = ws_shaker.verify_request(request)
    await http_send(connection, ws_shaker.server_handshake())
    while True:
        event = await ws_next_event(connection)
        print('got ws message', vars(event))
        if isinstance(event, ws.Message):
            if event.type != 'close':
                print(f'{event.message} from {addr}')
                await ws_send(connection, event.message, event.type)
                await ws_send(connection, '', 'close')
            else:
                print(event.type)
                print('WE EXITED CLEANLY...ISH')
                raise SystemExit
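# Minimal sketches of the http_next_event() / http_send() helpers referenced
# above, assuming `connection` is a curio socket and that the handshake helper
# hands us h11 events; they simply pump the module-level h11 connection. The
# buffer size is arbitrary, and the ws_next_event() / ws_send() counterparts
# (which pump wscon instead) are not shown here.
async def http_next_event(connection):
    while True:
        event = httpcon.next_event()
        if event is h11.NEED_DATA:
            httpcon.receive_data(await connection.recv(4096))
            continue
        return event


async def http_send(connection, *events):
    for event in events:
        data = httpcon.send(event)
        if data is not None:
            await connection.sendall(data)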
def __init__(self, reader, writer, klf_client):
    logging.info("Client connected to the REST server")
    self.reader = reader
    self.writer = writer
    self.klf_client = klf_client
    self.connection = h11.Connection(h11.SERVER)
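# A minimal sketch of how a handler on this class might drive the h11 server
# connection: read bytes from the asyncio reader, feed them to h11, and answer
# the parsed request with a small fixed reply. The handle_one_request() name
# and the JSON body are assumptions made for illustration only.
async def handle_one_request(self):
    # Pump the connection until we have a full request head (or the peer left)
    while True:
        event = self.connection.next_event()
        if event is h11.NEED_DATA:
            self.connection.receive_data(await self.reader.read(4096))
            continue
        if isinstance(event, h11.ConnectionClosed):
            return
        if isinstance(event, h11.Request):
            break
    # Reply with a fixed JSON body, then close the connection
    body = b'{"status": "ok"}'
    headers = [("content-type", "application/json"),
               ("content-length", str(len(body))),
               ("connection", "close")]
    for out_event in (h11.Response(status_code=200, headers=headers),
                      h11.Data(data=body),
                      h11.EndOfMessage()):
        self.writer.write(self.connection.send(out_event))
    await self.writer.drain()
    self.writer.close()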