async def handle(self, request, response):
    """Run the registered request handler, then tear down per-request state.

    Closes the transport unless the parser says the connection should be
    kept alive.
    """
    target = httptools.parse_url(self._current_url)
    await self.on_request_handler(target, request, response, self._loop)
    keep_alive = self._current_parser.should_keep_alive()
    if not keep_alive:
        self._transport.close()
    self._current_parser = None
    self._current_request = None
def handle(self, request, response):
    """Schedule the async request handler and tear the connection down
    once it completes.

    Bug fixed: the original called ``asyncio.wait(f)`` from this
    synchronous method without awaiting it — that merely created (and
    leaked) a coroutine, so the keep-alive check and state reset ran
    *before* the handler had executed. A done-callback defers the
    teardown until the handler task actually finishes.
    """
    task = asyncio.ensure_future(
        self._handle(httptools.parse_url(self._current_url),
                     request, response, self._loop))

    def _finish(_task):
        # NOTE(review): runs after the handler; assumes no new request
        # has replaced self._current_parser in the meantime — confirm
        # against the protocol's pipelining behavior.
        if not self._current_parser.should_keep_alive():
            self._transport.close()
        self._current_parser = None
        self._current_request = None

    task.add_done_callback(_finish)
def handle(self, request, response):
    """Reply with ``b'X' * N`` where N is taken from the URL path.

    An empty path segment means the default payload of 1024 bytes.
    Responses are memoized in _RESP_CACHE by size.
    """
    path = httptools.parse_url(self._current_url).path.decode('ascii')
    size_text = path[1:]
    payload_size = int(size_text) if size_text else 1024
    try:
        resp = _RESP_CACHE[payload_size]
    except KeyError:
        resp = _RESP_CACHE[payload_size] = b'X' * payload_size
    response.write(resp)
    # Drop the connection unless the client asked to keep it alive.
    if not self._current_parser.should_keep_alive():
        self._transport.close()
    self._current_parser = None
    self._current_request = None
def handle(self, request, response: HttpResponseForInlineProcessor):
    """Write an ``'X' * N`` payload; N comes from the request path
    (default 1024), with per-size caching in _RESP_CACHE."""
    size_text = httptools.parse_url(self._current_url).path.decode("ascii")[1:]
    payload_size = int(size_text) if size_text else 1024
    try:
        resp = _RESP_CACHE[payload_size]
    except KeyError:
        resp = _RESP_CACHE[payload_size] = b"X" * payload_size
    response.write(resp)
    # Reset per-request state for the next message on this connection.
    self._current_request = None
    self._current_url = None
    self._current_headers = []
    if not self._current_parser.should_keep_alive():
        self.mark__socket_as_ready_to_be_closed(True)
def __init__(self, url_bytes, headers, version, method, transport):
    """Build a request object from the raw parsed HTTP message parts."""
    # TODO: Content-Encoding detection
    self._parsed_url = parse_url(url_bytes)
    self.app = None

    # Raw message components as supplied by the protocol layer.
    self.headers = headers
    self.version = version
    self.method = method
    self.transport = transport

    # Init but do not inhale — parse results are populated lazily.
    self.body = []
    self.parsed_json = None
    self.parsed_form = None
    self.parsed_files = None
    self.parsed_args = None
    self.uri_template = None
    self.stream = None
    self._cookies = None
def handle(self, request, response: HttpResponse):
    """Serve a cached ``'X' * N`` payload; N parsed from the URL path
    (default 1024)."""
    size_text = httptools.parse_url(self._current_url).path.decode('ascii')[1:]
    payload_size = int(size_text) if size_text else 1024
    resp = _RESP_CACHE.get(payload_size)
    if resp is None:
        resp = b'X' * payload_size
        _RESP_CACHE[payload_size] = resp
    response.write(resp)
    # Clear per-request state before the next message arrives.
    self._current_request = None
    self._current_url = None
    self._current_headers = []
    if not self._current_parser.should_keep_alive():
        self.server.mark_client_connection_as_ready_to_be_closed(
            self.transport_id, False)
def __init__(self, url_bytes, headers, version, method, transport):
    """Initialize the request from raw parser output."""
    # TODO: Content-Encoding detection
    parsed = parse_url(url_bytes)
    self.url = parsed.path.decode('utf-8')
    self.query_string = parsed.query.decode('utf-8') if parsed.query else None

    self.headers = headers
    self.version = version
    self.method = method
    self.transport = transport

    # Init but do not inhale — filled in lazily as the body is consumed.
    self.body = []
    self.parsed_json = None
    self.parsed_form = None
    self.parsed_files = None
    self.parsed_args = None
    self._cookies = None
def __init__(self, url_bytes, headers, version, method, transport):
    """Store raw message parts and prepare lazy parse slots."""
    self.raw_url = url_bytes
    # TODO: Content-Encoding detection
    self._parsed_url = parse_url(url_bytes)
    self.app = None

    self.headers = headers
    self.version = version
    self.method = method
    self.transport = transport

    # Init but do not inhale.
    self.body = []
    self.parsed_json = None
    self.parsed_form = None
    self.parsed_files = None
    self.parsed_args = None
    self.uri_template = None
    self.stream = None
    self._cookies = None
def on_headers_complete(self):
    """httptools callback: finalize request metadata once all headers
    have been parsed.

    Bug fixed: ``parse_url()`` yields ``query=None`` when the request
    target has no query string, and ``parse_qs(None)`` raises — fall
    back to ``b''`` so query-less URLs parse cleanly.
    """
    parsed_path = parse_url(self.raw_path)
    self.version = self._parser.get_http_version()
    self.keep_alive = self._parser.should_keep_alive()
    self.upgrade = self._parser.should_upgrade()
    self.raw_method = self._parser.get_method()
    self.method = self.raw_method.decode('ascii')
    self.path = parsed_path.path.decode('ascii')
    for name, values in parse_qs(parsed_path.query or b'').items():
        decoded_name = name.decode('ascii')
        for value in values:
            # raw_query keeps the undecoded value; query stores text.
            self.raw_query.add(decoded_name, value)
            self.query.add(decoded_name, value.decode('ascii'))
    self.headers._post_process(self)
    self._headers_complete = True
    if self.headers.content_length:
        self._body_length = self.headers.content_length
def handle(self, request, response: HttpResponse):
    """Write a cached payload of N 'X' bytes, N taken from the URL path
    (default 1024), then reset per-request state."""
    path_tail = httptools.parse_url(self._current_url).path.decode('ascii')[1:]
    payload_size = int(path_tail) if path_tail else 1024
    cached = _RESP_CACHE.get(payload_size)
    if cached is None:
        cached = b'X' * payload_size
        _RESP_CACHE[payload_size] = cached
    response.write(cached)
    self._current_request = None
    self._current_url = None
    self._current_headers = []
    # Non-keep-alive connections are flagged for closing by the server.
    if not self._current_parser.should_keep_alive():
        self.server.mark_client_connection_as_ready_to_be_closed(
            self.transport_id, False)
async def request(method, url, headers=None, data=""):
    """Issue a minimal HTTP/1.1 request and return the parsed Response.

    :param method: HTTP method, e.g. "GET".
    :param url: Full URL as a str.
    :param headers: Optional mapping of extra request headers.
    :param data: Request body (also used for Content-Length).

    Bugs fixed:
    * httptools exposes ``parsed_url.port`` as an int (or None); the old
      ``port.decode()`` raised AttributeError whenever the URL carried an
      explicit port.
    * ``ssl.Purpose.CLIENT_AUTH`` builds a *server-side* context; a client
      must verify the server, which is ``create_default_context()``'s
      default (Purpose.SERVER_AUTH).
    * The ``headers`` parameter was shadowed by the defaults dict and then
      merged into itself, silently discarding caller-supplied headers.
    """
    sslcontext = None
    parsed_url = httptools.parse_url(url.encode())
    ip = parsed_url.host.decode()
    port = parsed_url.port  # int or None
    if not port:
        if parsed_url.schema == b"https":
            sslcontext = ssl.create_default_context()
            port = 443
        else:
            port = 80
    path = (parsed_url.path or b"/").decode()
    reader, writer = await asyncio.wait_for(
        asyncio.open_connection(ip, port, ssl=sslcontext), timeout=30)
    request_headers = {
        "User-Agent": "aioclient",
        "Host": ip,
        "Content-Length": len(data),
    }
    if headers:
        request_headers.update(headers)
    header_raw = "".join(f"{k}:{v}\r\n" for k, v in request_headers.items())
    http_raw = f"{method} {path} HTTP/1.1\r\n{header_raw}\r\n{data}".encode()
    response = Response()
    parser = httptools.HttpResponseParser(response)
    writer.write(http_raw)
    while True:
        chunk = await reader.read(100)
        parser.feed_data(chunk)
        if len(chunk) < 100:
            # Short read: EOF or end of message for this simple client.
            break
    response.status_code = parser.get_status_code()
    writer.close()
    return response
def on_url(self, url):
    """httptools callback: record the request target and build the
    initial ASGI scope dict."""
    method = self.parser.get_method()
    parsed = httptools.parse_url(url)
    path = parsed.path.decode("ascii")
    # Percent-decode only when an escape is actually present.
    if "%" in path:
        path = urllib.parse.unquote(path)
    self.url = url
    self.expect_100_continue = False
    self.headers = []
    self.scope = {
        "type": "http",
        "http_version": "1.1",
        "server": self.server,
        "client": self.client,
        "scheme": self.scheme,
        "method": method.decode("ascii"),
        "root_path": self.root_path,
        "path": path,
        "query_string": parsed.query or b"",
        "headers": self.headers,
    }
def __init__(self, url_bytes: bytes, headers: dict, version: str,
             method: str, transport) -> None:
    """Capture raw message parts and prepare lazy JSON/JSON-RPC slots."""
    self._parsed_url = parse_url(url_bytes)
    self.app = None

    self.headers = headers
    self.version = version
    self.method = method
    self.transport = transport

    # Init but do not inhale — parsed lazily on first access.
    self.body = []
    self._parsed_json = _empty
    self._parsed_jsonrpc = _empty
    self.uri_template = None
    self.stream = None

    # JSON-RPC batch bookkeeping.
    self.is_batch_jrpc = False
    self.is_single_jrpc = False

    # Timing trail, starting at object creation.
    self.timings = [(perf_counter(), 'http_create')]
    self._log = _empty
def __init__( self, url_bytes: bytes, headers: Header, version: str, method: str, transport: TransportProtocol, app: Sanic, ): self.raw_url = url_bytes # TODO: Content-Encoding detection self._parsed_url = parse_url(url_bytes) self._id: Optional[Union[uuid.UUID, str, int]] = None self.app = app self.headers = headers self.version = version self.method = method self.transport = transport # Init but do not inhale self.body = b"" self.conn_info: Optional[ConnInfo] = None self.ctx = SimpleNamespace() self.name: Optional[str] = None self.parsed_forwarded: Optional[Options] = None self.parsed_json = None self.parsed_form = None self.parsed_files = None self.parsed_args: DefaultDict[Tuple[bool, bool, str, str], RequestParameters] = defaultdict( RequestParameters) self.parsed_not_grouped_args: DefaultDict[Tuple[ bool, bool, str, str], List[Tuple[str, str]]] = defaultdict(list) self.uri_template: Optional[str] = None self.request_middleware_started = False self._cookies: Optional[Dict[str, str]] = None self._match_info: Dict[str, Any] = {} self.stream: Optional[Http] = None self.endpoint: Optional[str] = None
def __init__(self, url_bytes, headers, version, method, transport, app):
    """Set up a request from raw parser output; parse results are lazy."""
    self.raw_url = url_bytes
    # TODO: Content-Encoding detection
    self._parsed_url = parse_url(url_bytes)
    self.app = app

    self.headers = headers
    self.version = version
    self.method = method
    self.transport = transport

    # Init but do not inhale.
    self.body_init()
    self.parsed_json = None
    self.parsed_form = None
    self.parsed_files = None
    self.parsed_args = defaultdict(RequestParameters)
    self.parsed_not_grouped_args = defaultdict(list)
    self.uri_template = None
    self.stream = None
    self.endpoint = None
    self._cookies = None
def on_url(self, url: bytes) -> None:
    """httptools callback: capture the target and build the ASGI scope."""
    method = self.parser.get_method()
    parsed = httptools.parse_url(url)
    raw_path = parsed.path
    path = raw_path.decode('ascii')
    # Only spend the unquote call when an escape sequence is present.
    if '%' in path:
        path = urllib.parse.unquote(path)
    self.url = url
    self.headers = []
    self.scope = {
        'type': 'http',
        'http_version': '1.1',
        'server': self.server,
        'client': self.client,
        'scheme': self.scheme,
        'method': method.decode('ascii'),
        'path': path,
        'raw_path': raw_path,
        'query_string': parsed.query or b'',
        'headers': self.headers
    }
def on_url(self, url):
    """Split the raw request target into path and optional query string."""
    parsed = parse_url(url)
    if parsed.query:
        self.request.query_string = parsed.query.decode('ASCII')
    self.request.path = parsed.path.decode('ASCII')
def get_parsed_url(url):
    """Return httptools' parsed view of *url*, accepting str or bytes.

    str inputs are UTF-8 encoded first because parse_url() requires a
    bytes-like argument.
    """
    if not isinstance(url, bytes):
        # str.encode() already returns bytes; the original wrapped it in
        # a redundant bytes(...) call that only made an extra copy.
        url = url.encode('utf8')
    return parse_url(url)
def test_parser_url_8(self):
    # parse_url() requires a bytes-like argument; None must be rejected.
    self.assertRaises(TypeError, httptools.parse_url, None)
def test_parser_url_7(self):
    # Parsed URL objects are read-only: attribute writes must fail.
    parsed = httptools.parse_url(b'/')
    with self.assertRaisesRegex(AttributeError, 'not writable'):
        parsed.port = 0
def parse(self, url: bytes):
    """Break *url* into its seven httptools components.

    :return: (schema, host, port, path, query, fragment, userinfo)
    """
    p = httptools.parse_url(url)
    return (
        p.schema,
        p.host,
        p.port,
        p.path,
        p.query,
        p.fragment,
        p.userinfo,
    )
def on_url(self, url: bytes):
    """Record decoded path, raw query string, and parsed query params."""
    parsed = parse_url(url)
    self.path = parsed.path.decode()
    raw_query = parsed.query or b''
    self.query_string = raw_query.decode()
    self.query = MultiDict(parse.parse_qs(self.query_string))
def on_url(self, url: bytes):
    """Store the raw URL plus its decoded path and query string."""
    self.request.url = url
    parsed = parse_url(url)
    self.request.path = unquote(parsed.path.decode())
    query_bytes = parsed.query or b''
    self.request.query_string = query_bytes.decode()
def on_url(self, url):
    # httptools callback: keep the parsed request target for later use.
    parsed = parse_url(url)
    self.url_requested = parsed
def on_url(self, url: bytes) -> None:
    # httptools callback: remember the parsed request target.
    parsed = parse_url(url)
    self.url = parsed
def on_url(self, url):
    """Start a fresh per-request context, recording path and method."""
    self.ctx = Context()
    self.ctx.write = self.transport.write
    parsed = httptools.parse_url(url)
    self.ctx.req.path = parsed.path.decode()
    self.ctx.req.method = self.parser.get_method().decode()
async def request(self, url: bytes, method: bytes,
                  headers: typing.Dict[bytes, typing.Union[bytes, typing.Iterable[bytes]]]=None,
                  body: bytes=b'', allow_redirects: bool=True,
                  max_redirects: int=10, buffer_length: int=65536,
                  ssl: _ssl.SSLContext=None, timeout=10) -> HttpResponse:
    """
    Submits a request to the URL given in the function. The request
    inherits all values that are given to it by the ClientSession
    including HTTP version, headers, cookies (if applicable for that
    domain) as well as all values passed in to the function. Uses the
    system default for SSL/TLS on HTTPS unless given a different
    SSLContext. Follows redirects by default, but the behaviour can be
    overridden.

    :param url: URL to send the request to.
    :param method: HTTP method to use.
    :param headers: Headers to apply to the request.
    :param body: Body of the request.
    :param allow_redirects: If True, allow the request to automatically
        respond to redirects.
    :param max_redirects: Maximum number of redirects allowed before
        canceling request.
    :param buffer_length: Maximum number of bytes to read per cycle of
        reading and parsing.
    :param ssl: SSLContext object if the system default SSL/TLS context
        is not acceptable.
    :param timeout: Number of seconds to wait before timing out.
    :return: HttpResponse object.
    """
    parsed_url = httptools.parse_url(url)
    host = parsed_url.host
    # If the connection is HTTPS default port is 443, otherwise 80.
    if parsed_url.schema.lower() == b'https':
        port = parsed_url.port if parsed_url.port else 443
        if ssl is None:
            ssl = _ssl.create_default_context()
    else:
        port = parsed_url.port if parsed_url.port else 80
    # Create the request.
    request = HttpRequest()
    request.method = method
    request.version = self._version
    request.body = body
    request.headers[b'Host'] = host
    request.on_url(url)
    # Apply headers: session defaults first, then per-call overrides.
    for key, val in self.headers.items():
        request.headers[key] = val
    if headers is not None:
        for key, val in headers.items():
            request.headers[key] = val
    # Create an HttpCookies object from the CookieJar.
    request.cookies = self.cookie_jar.get_cookies_for_url(request.url)
    response = HttpResponse()
    response_error = False
    self._parser.set_target(response)
    await self.open((host, port), ssl=ssl)
    # The lock serializes access to the shared reader/writer pair so
    # concurrent requests on this session cannot interleave bytes.
    async with self._lock:
        self._writer.write(request.to_bytes())
        while True:
            # NOTE(review): the `loop` argument to wait_for() was removed
            # in Python 3.10 — this call only works on older interpreters.
            data = await asyncio.wait_for(self._reader.read(buffer_length),
                                          timeout=timeout, loop=self._loop)
            if data:
                self._parser.feed_data(data)
            # NOTE(review): if the peer closes before the message is
            # complete, read() keeps returning b'' and this loop spins
            # until the timeout fires — confirm intended behavior.
            if response.is_complete():
                break
    # Socket is unlocked at this point.
    if response.headers.get(b'Connection', [b''])[0] == b'close':
        await self.close()
    # If there's cookies to be added to the CookieJar, do so here.
    if response.cookies:
        self.cookie_jar.update_cookies(request.url,
                                       response.cookies.values())
    # If there are redirects and we're allowed to follow, then follow them.
    if allow_redirects and response.status_code in _HTTP_REDIRECTS:
        if max_redirects <= 0 or b'Location' not in response.headers:
            response_error = True
        else:
            # NOTE(review): the recursive call drops ssl, timeout and
            # buffer_length, falling back to their defaults — verify
            # that is intentional for redirected requests.
            response = await self.request(
                (response.headers.get(b'Location') or
                 response.headers.get(b'URI'))[0],
                method, headers=headers, body=body, allow_redirects=True,
                max_redirects=max_redirects-1
            )
    # If we're erroring or the response isn't complete, return a 500.
    if response_error or not response.is_complete():
        response.version = self._version
        response.body = b''
        response.status_code = 500
        response.status = b'Internal Server Error'
        return response
    else:
        return response
def parse_URL(self):
    """Parse self.url with httptools and cache the result on the instance."""
    # NOTE(review): `parse_URL` / `HttpURL_class` don't follow snake_case,
    # but callers may depend on these names, so they are kept as-is.
    self.HttpURL_class = parse_url(self.url)
def __init__(self, url_bytes, headers, version, method):
    """Build a request from the raw parsed HTTP message parts."""
    url_parsed = parse_url(url_bytes)
    self.url = url_parsed.path.decode('utf-8')
    self.headers = headers
    self.version = version
    self.method = method
    self.query_string = None
    if url_parsed.query:
        self.query_string = url_parsed.query.decode('utf-8')
    # Lazily populated parse results.
    self.body = None
    self.parsed_json = None
    self.parsed_form = None
    self.parsed_files = None
    self.parsed_args = None
    self._cookies = None

@property
def json(self):
    """Parse the body as JSON on first access and cache the result.

    :raises InvalidUsage: when the body is not valid JSON.
    """
    if self.parsed_json is None:
        try:
            self.parsed_json = json_loads(self.body)
        except Exception:
            # fix: was `IvalidUsage`, a NameError at raise time.
            raise InvalidUsage("Failed when parsing body as json")
    return self.parsed_json

@property
def token(self):
    """Return the credential part of the Authorization header, or None."""
    # fix: the original looked up a copy-pasted error-message string
    # instead of the "Authorization" header name.
    auth_header = self.headers.get("Authorization")
    # fix: was `is not none` (lowercase), a NameError.
    if auth_header is not None:
        return auth_header.split()[1]
    return auth_header

@property
def form(self):
    """Parse the body as form data on first access and cache the result.

    Supports application/x-www-form-urlencoded and multipart/form-data;
    parse failures are logged and yield empty RequestParameters.
    """
    if self.parsed_form is None:
        self.parsed_form = RequestParameters()
        self.parsed_files = RequestParameters()
        content_type = self.headers.get(
            'Content-Type', DEFAULT_HTTP_CONTENT_TYPE)
        content_type, parameters = parse_header(content_type)
        try:
            if content_type == 'application/x-www-form-urlencoded':
                # fix: was `self.body.deode(...)` (typo).
                self.parsed_form = RequestParameters(
                    parse_qs(self.body.decode('utf-8')))
            elif content_type == 'multipart/form-data':
                boundary = parameters['boundary'].encode('utf-8')
                # fix: was `bounder`, a NameError.
                self.parsed_form, self.parsed_files = (
                    parse_multipart_form(self.body, boundary))
        except Exception:
            log.exception("Failed when parsing form")
    return self.parsed_form