def __init__(self, arg1092, arg1935, *, params=None, headers=None,
             skip_auto_headers=frozenset(), data=None, cookies=None,
             auth=None, version=http.HttpVersion11, compress=None,
             chunked=None, expect100=False, loop=None, response_class=None,
             proxy=None, proxy_auth=None, proxy_from_env=False, timer=None):
    """Prepare a client HTTP request from a method (arg1092) and URL (arg1935).

    Fixes: the original body referenced three undefined names that raised
    NameError at runtime — ``var1693`` (the ``loop`` parameter), ``url`` in
    the first assert (the ``arg1935`` parameter), and ``var450`` passed to
    the auth updater (the ``auth`` parameter).
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    assert isinstance(arg1935, URL), arg1935
    assert isinstance(proxy, (URL, type(None))), proxy
    if params:
        # Combine explicit params with any query already present in the URL.
        var1877 = MultiDict(arg1935.query)
        var330 = arg1935.with_query(params)
        var1877.extend(var330.query)
        arg1935 = arg1935.with_query(var1877)
    self.attribute1914 = arg1935.with_fragment(None)  # fragment never sent
    self.attribute683 = arg1935
    self.attribute669 = arg1092.upper()
    self.attribute126 = chunked
    self.attribute1089 = compress
    self.attribute1757 = loop
    self.attribute1138 = None
    self.attribute333 = (response_class or ClientResponse)
    self.attribute2226 = (timer if (timer is not None) else TimerNoop())
    if loop.get_debug():
        # Keep creation traceback for debug-mode diagnostics.
        self.attribute2397 = traceback.extract_stack(sys._getframe(1))
    self.function2543(version)
    self.function2715(arg1935)
    self.function229(headers)
    self.function1475(skip_auto_headers)
    self.function355(cookies)
    self.function964(data)
    self.function2129(auth)
    self.function150(proxy, proxy_auth, proxy_from_env)
    self.function71(data, skip_auto_headers)
    self.function1156()
    self.function2358(expect100)
def __init__(self, *args, **kwargs) -> None:
    """Merge ``params`` into the URL query, stash the requested auth mode,
    and apply the per-host auth hook when API keys are registered.

    Fix: ``api_name`` was unbound (UnboundLocalError) when
    ``Hosts.items[url.host].name`` was neither a str nor callable; it now
    falls back to ``None`` so the membership test simply fails.
    """
    method: str = args[0]
    url: URL = args[1]
    if kwargs["params"]:
        # Combine explicit params with any query already present in url.
        q = MultiDict(url.query)
        url2 = url.with_query(kwargs["params"])
        q.extend(url2.query)
        url = url.with_query(q)
        args = (
            method,
            url,
        )
        kwargs["params"] = None
    # Remember the caller's auth choice; the Auth sentinel means "default".
    self.__dict__["_auth"] = kwargs["auth"]
    if kwargs["auth"] is Auth:
        kwargs["auth"] = None
    if url.host in Hosts.items:
        if isinstance(Hosts.items[url.host].name, str):
            api_name = Hosts.items[url.host].name
        elif callable(Hosts.items[url.host].name):
            # Dynamic API name resolved from the request headers.
            api_name = Hosts.items[url.host].name(kwargs["headers"])
        else:
            api_name = None  # fix: was previously left unbound
        if api_name in kwargs["session"].__dict__["_apis"]:
            args = Hosts.items[url.host].func(args, kwargs)
    super().__init__(*args, **kwargs)
def merge_params(url: 'Union[URL, str]', params: 'Dict' = None) -> 'URL':
    """Return *url* with *params* merged into its existing query string."""
    merged_url = URL(url)
    if not params:
        return merged_url
    combined = MultiDict(merged_url.query)
    combined.extend(merged_url.with_query(params).query)
    return merged_url.with_query(combined)
def _make_aiohttp_session_from_remote(self):
    """
    Same as DownloaderFactory._make_aiohttp_session_from_remote, excluding
    TLS configuration.

    Returns:
        :class:`aiohttp.ClientSession`
    """
    headers = MultiDict(
        {"User-Agent": NoAuthDownloaderFactory.user_agent()})
    if self._remote.headers is not None:
        for header_dict in self._remote.headers:
            # Fold a user-supplied User-Agent into ours instead of
            # replacing it outright.
            extra_agent = header_dict.pop("User-Agent", None)
            if extra_agent:
                headers["User-Agent"] = (
                    f"{headers['User-Agent']}, {extra_agent}")
            headers.extend(header_dict)

    # force_close drops the TCP connection after every request.
    connector = aiohttp.TCPConnector(force_close=True)
    timeout = aiohttp.ClientTimeout(
        total=self._remote.total_timeout,
        sock_connect=self._remote.sock_connect_timeout,
        sock_read=self._remote.sock_read_timeout,
        connect=self._remote.connect_timeout,
    )
    return aiohttp.ClientSession(
        connector=connector, timeout=timeout, headers=headers)
def binance(args: tuple[str, URL], kwargs: dict[str, Any]) -> tuple[str, URL]:
    """Sign a Binance request in place with HMAC-SHA256.

    GET over https appends timestamp and signature to the query string;
    other methods append them to the form body.  All requests get the
    API-key header.  (WebSocket GETs pass through unsigned.)
    """
    method: str = args[0]
    url: URL = args[1]
    data: dict[str, Any] = kwargs["data"] or {}
    headers: CIMultiDict = kwargs["headers"]
    session: aiohttp.ClientSession = kwargs["session"]
    # Credentials registered for this host's API name.
    api_name = Hosts.items[url.host].name
    key: str = session.__dict__["_apis"][api_name][0]
    secret: bytes = session.__dict__["_apis"][api_name][1]

    expires = str(int(time.time() * 1000))  # millisecond timestamp
    if method == METH_GET:
        if url.scheme == "https":
            query = MultiDict(url.query)
            query.extend({"timestamp": expires})
            # Sign the raw key=value pairs in insertion order.
            payload = "&".join(f"{k}={v}" for k, v in query.items())
            digest = hmac.new(
                secret, payload.encode(), hashlib.sha256
            ).hexdigest()
            query.extend({"signature": digest})
            url = url.with_query(query)
            args = (
                method,
                url,
            )
    else:
        data.update({"timestamp": expires})
        # NOTE(review): relies on aiohttp payload internals (_value/_size).
        body = FormData(data)()
        digest = hmac.new(secret, body._value, hashlib.sha256).hexdigest()
        body._value += f"&signature={digest}".encode()
        body._size = len(body._value)
        kwargs.update({"data": body})
    headers.update({"X-MBX-APIKEY": key})
    return args
def __init__(self, method, url, *, params=None, headers=None,
             skip_auto_headers=frozenset(), data=None, cookies=None,
             auth=None, version=http.HttpVersion11, compress=None,
             chunked=None, expect100=False, loop=None, response_class=None,
             proxy=None, proxy_auth=None, proxy_from_env=False, timer=None,
             session=None):
    """Prepare a client HTTP request: merge *params* into the URL query,
    record request state, and run the ``update_*`` helpers that build
    headers, cookies, auth, proxy and body."""
    if loop is None:
        loop = asyncio.get_event_loop()
    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    self._session = session
    if params:
        # Combine explicit params with any query already present in url.
        merged = MultiDict(url.query)
        merged.extend(url.with_query(params).query)
        url = url.with_query(merged)
    self.url = url.with_fragment(None)  # fragment never goes on the wire
    self.original_url = url
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    self.response_class = response_class or ClientResponse
    self._timer = timer if timer is not None else TimerNoop()
    if loop.get_debug():
        # Keep creation traceback for debug-mode diagnostics.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_from_env)
    self.update_body_from_data(data)
    self.update_transfer_encoding()
    self.update_expect_continue(expect100)
def __init__(self, method, url, *, params=None, headers=None,
             skip_auto_headers=frozenset(), data=None, cookies=None,
             auth=None, version=http.HttpVersion11, compress=None,
             chunked=None, expect100=False, loop=None, response_class=None,
             proxy=None, proxy_auth=None, timer=None, session=None,
             auto_decompress=True, verify_ssl=None, fingerprint=None,
             ssl_context=None, proxy_headers=None):
    """Prepare a client HTTP request.

    Merges ``params`` into the URL query string, records request state on
    the instance, then runs the ``update_*`` helpers that build headers,
    cookies, auth, proxy, fingerprint and body.
    """
    # verify_ssl=False and an explicit ssl_context are mutually exclusive.
    if verify_ssl is False and ssl_context is not None:
        raise ValueError(
            "Either disable ssl certificate validation by "
            "verify_ssl=False or specify ssl_context, not both.")
    if loop is None:
        loop = asyncio.get_event_loop()
    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    self._session = session
    if params:
        # Combine explicit params with any query already present in url.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.url = url.with_fragment(None)  # fragment never goes on the wire
    self.original_url = url
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    self.response_class = response_class or ClientResponse
    self._timer = timer if timer is not None else TimerNoop()
    self._auto_decompress = auto_decompress
    self._verify_ssl = verify_ssl
    self._ssl_context = ssl_context
    if loop.get_debug():
        # Keep creation traceback for debug-mode diagnostics.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)
    self.update_fingerprint(fingerprint)
    self.update_body_from_data(data)
    # Body-less GET-like requests skip transfer-encoding setup.
    if data or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
def _make_aiohttp_session_from_remote(self):
    """
    Build a :class:`aiohttp.ClientSession` from the remote's settings and
    timing settings.

    This method is what provides the force_close of the TCP connection with
    each request.

    Returns:
        :class:`aiohttp.ClientSession`
    """
    tcp_conn_opts = {"force_close": True}

    # Assemble an SSL context only when the remote carries TLS settings.
    sslcontext = None
    if self._remote.ca_cert:
        sslcontext = ssl.create_default_context(
            cadata=self._remote.ca_cert)
    if self._remote.client_key and self._remote.client_cert:
        if not sslcontext:
            sslcontext = ssl.create_default_context()
        # load_cert_chain wants file paths, so spill the PEM data to temp
        # files for the duration of the call.
        # NOTE(review): NamedTemporaryFile re-open by name may fail on
        # Windows — confirm target platforms.
        with NamedTemporaryFile() as key_file:
            key_file.write(bytes(self._remote.client_key, "utf-8"))
            key_file.flush()
            with NamedTemporaryFile() as cert_file:
                cert_file.write(bytes(self._remote.client_cert, "utf-8"))
                cert_file.flush()
                sslcontext.load_cert_chain(cert_file.name, key_file.name)
    if not self._remote.tls_validation:
        # Validation disabled: turn off hostname check and verification.
        if not sslcontext:
            sslcontext = ssl.create_default_context()
        sslcontext.check_hostname = False
        sslcontext.verify_mode = ssl.CERT_NONE
    if sslcontext:
        tcp_conn_opts["ssl_context"] = sslcontext

    headers = MultiDict({"User-Agent": user_agent()})
    if self._remote.headers is not None:
        for header_dict in self._remote.headers:
            # Fold any user-supplied User-Agent into ours instead of
            # replacing it outright.
            user_agent_header = header_dict.pop("User-Agent", None)
            if user_agent_header:
                headers[
                    "User-Agent"] = f"{headers['User-Agent']}, {user_agent_header}"
            headers.extend(header_dict)

    conn = aiohttp.TCPConnector(**tcp_conn_opts)
    total = self._remote.total_timeout
    sock_connect = self._remote.sock_connect_timeout
    sock_read = self._remote.sock_read_timeout
    connect = self._remote.connect_timeout
    timeout = aiohttp.ClientTimeout(total=total,
                                    sock_connect=sock_connect,
                                    sock_read=sock_read,
                                    connect=connect)
    return aiohttp.ClientSession(connector=conn,
                                 timeout=timeout,
                                 headers=headers)
def normalize_url_params(url: StrOrURL, params: RequestParams = None) -> URL:
    """Normalize any combination of request parameter formats that aiohttp accepts"""
    normalized = URL(url) if isinstance(url, str) else url

    # Merge `params` with whatever query string the URL already carries.
    if params:
        combined = MultiDict(normalized.query)
        combined.extend(normalized.with_query(params).query)
        normalized = normalized.with_query(combined)

    # Apply additional normalization and convert back to URL object
    return URL(url_normalize(str(normalized)))
def function1732(self):
    """Return POST parameters.

    Fix: the file-upload read loop tested the undefined name ``chunk``
    (NameError on any multipart file field); it now tests ``var4662``,
    the chunk actually read from the part.
    """
    if self.attribute615 is not None:
        # Already parsed; return the cached proxy.
        return self.attribute615
    if self.attribute1016 not in self.var2369:
        self.attribute615 = MultiDictProxy(MultiDict())
        return self.attribute615
    var1401 = self.var1401
    if var1401 not in ('', 'application/x-www-form-urlencoded',
                       'multipart/form-data'):
        # Unsupported content type: body is not form data.
        self.attribute615 = MultiDictProxy(MultiDict())
        return self.attribute615
    var3997 = MultiDict()
    if var1401 == 'multipart/form-data':
        function942 = yield from self.function942()
        var3149 = yield from function942.next()
        while var3149 is not None:
            var2068 = 0
            var3021 = self.attribute1376
            var1401 = var3149.function249.get(hdrs.CONTENT_TYPE)
            if var3149.filename:
                # Spool uploaded file contents to a temporary file.
                var2128 = tempfile.TemporaryFile()
                var4662 = yield from var3149.read_chunk(size=(2 ** 16))
                while var4662:  # fix: was `while chunk` (undefined name)
                    var4662 = var3149.decode(var4662)
                    var2128.write(var4662)
                    var2068 += len(var4662)
                    if (var3021 > 0) and (var2068 > var3021):
                        raise ValueError(
                            'Maximum request body size exceeded')
                    var4662 = yield from var3149.read_chunk(size=(2 ** 16))
                var2128.seek(0)
                var2836 = var1064(var3149.name, var3149.filename, var2128,
                                  var1401, var3149.function249)
                var3997.add(var3149.name, var2836)
            else:
                # Plain value: decode text-ish parts with their charset.
                var2833 = yield from var3149.function976(decode=True)
                if (var1401 is None) or var1401.startswith('text/'):
                    var4034 = var3149.get_charset(default='utf-8')
                    var2833 = var2833.decode(var4034)
                var3997.add(var3149.name, var2833)
                var2068 += len(var2833)
                if (var3021 > 0) and (var2068 > var3021):
                    raise ValueError('Maximum request body size exceeded')
            var3149 = yield from function942.next()
    else:
        # urlencoded (or empty content type): parse the whole body.
        var1349 = yield from self.function976()
        if var1349:
            var4034 = (self.charset or 'utf-8')
            var3997.extend(parse_qsl(var1349.rstrip().decode(var4034),
                                     keep_blank_values=True,
                                     encoding=var4034))
    self.attribute615 = MultiDictProxy(var3997)
    return self.attribute615
def __init__(self, method, url, *, params=None, headers=None,
             skip_auto_headers=frozenset(), data=None, cookies=None,
             auth=None, version=http.HttpVersion11, compress=None,
             chunked=None, expect100=False, loop=None, response_class=None,
             proxy=None, proxy_auth=None, proxy_from_env=False, timer=None,
             session=None, auto_decompress=True):
    """Prepare a client HTTP request: merge *params* into the URL query,
    record request state, and run the ``update_*`` helpers that build
    headers, cookies, auth, proxy and body."""
    if loop is None:
        loop = asyncio.get_event_loop()
    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    self._session = session
    if params:
        # Combine explicit params with any query already present in url.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.url = url.with_fragment(None)  # fragment never goes on the wire
    self.original_url = url
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    self.response_class = response_class or ClientResponse
    self._timer = timer if timer is not None else TimerNoop()
    self._auto_decompress = auto_decompress
    if loop.get_debug():
        # Keep creation traceback for debug-mode diagnostics.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_from_env)
    self.update_body_from_data(data)
    self.update_transfer_encoding()
    self.update_expect_continue(expect100)
def __init__(self, *args, **kwargs) -> None:
    """Merge ``params`` into the URL query, then apply the per-host auth
    hook when API keys are registered for this host."""
    method: str = args[0]
    url: URL = args[1]
    if kwargs['params']:
        # Combine explicit params with any query already present in url.
        merged = MultiDict(url.query)
        merged.extend(url.with_query(kwargs['params']).query)
        url = url.with_query(merged)
        args = (
            method,
            url,
        )
        kwargs['params'] = None
    if url.host in Hosts.items:
        host_item = Hosts.items[url.host]
        if host_item.name in kwargs['session'].__dict__['_apis']:
            args = host_item.func(args, kwargs)
    super().__init__(*args, **kwargs)
def mexc_v3(args: tuple[str, URL], kwargs: dict[str, Any]) -> tuple[str, URL]:
    """Sign a MEXC v3 request in place (HMAC-SHA256 over query + body).

    Fixes:
    - ``body._size`` is now *assigned* the body length (was ``+=``, which
      added the new length to the previous size and produced a wrong
      payload size after the signature was appended).
    - the timestamp suffix is spelled ``&timestamp=`` (the source carried
      a mojibake'd multiplication sign from an HTML ``&times`` entity).
    """
    method: str = args[0]
    url: URL = args[1]
    data: dict[str, Any] = kwargs["data"] or {}
    headers: CIMultiDict = kwargs["headers"]
    session: aiohttp.ClientSession = kwargs["session"]
    # Credentials registered for this host's API name.
    key: str = session.__dict__["_apis"][Hosts.items[url.host].name][0]
    secret: bytes = session.__dict__["_apis"][Hosts.items[url.host].name][1]

    timestamp = str(int(time.time() * 1000))
    query = MultiDict(url.query)
    # NOTE(review): relies on aiohttp payload internals (_value/_size).
    body = FormData(data)()
    if query:
        # Query-style request: carry the timestamp in the query string.
        query.extend({"timestamp": timestamp})
        url = url.with_query(query)
        query = MultiDict(url.query)
    else:
        # Body-style request: append the timestamp to the form body.
        body._value += f"&timestamp={timestamp}".encode()
    query_string = url.raw_query_string.encode()
    signature = hmac.new(
        secret, query_string + body._value, hashlib.sha256
    ).hexdigest()
    if query:
        query.extend({"signature": signature})
    else:
        body._value += f"&signature={signature}".encode()
        body._size = len(body._value)  # fix: was `+=` (double-counted)
    url = url.with_query(query)
    args = (method, url)
    kwargs.update({"data": body._value})
    headers.update({"X-MEXC-APIKEY": key, "Content-Type": "application/json"})
    return args
async def io(req: Request):
    """Debug endpoint: parse urlencoded POST data merged with the URL
    query, rewrite ``auth[param]`` keys to ``auth_param`` and print the
    result.  Always answers 200 OK.
    """
    if req.content_type == 'application/x-www-form-urlencoded':
        charset = req.charset or 'utf-8'
        # Start from the URL query, then fold in the body fields.
        out = MultiDict(req.query)
        bytes_body = await req.read()
        if bytes_body:
            out.extend(
                parse_qsl(bytes_body.rstrip().decode(charset),
                          keep_blank_values=True,
                          encoding=charset))
        # Make immutable dict with auth[param] keys like auth_param
        res = MultiDictProxy(
            MultiDict((
                k.startswith('auth[') and k.endswith(']')
                and 'auth_' + k[5:-1] or k,
                v,
            ) for k, v in out.items()))
        print('\n'.join('{}: {}'.format(k, v) for k, v in res.items()))
    return Response(status=200, text='OK')
def get_form_data_from_form(form):
    """Collect submittable name/value pairs from an lxml ``<form>`` element.

    Fixes:
    - ``el.sttrib`` typo (AttributeError on any <textarea>).
    - the XPath union now prefixes every alternative with ``.//`` so
      <select>/<textarea>/<button> descendants are matched, not only
      direct children of the form element.
    """
    form_data = MultiDict()
    selector = ('.//input[@name]|.//select[@name]'
                '|.//textarea[@name]|.//button[@name]')
    for el in form.xpath(selector):
        data = {}
        if el.tag == 'input':
            input_type = el.attrib.get('type')
            if input_type in ('radio', 'checkbox'):
                # Only checked radio/checkbox inputs are submitted.
                if el.attrib.get('checked', None):
                    data[el.attrib['name']] = el.attrib.get('value', '')
            else:
                data[el.attrib['name']] = el.attrib.get('value', '')
        elif el.tag == 'select':
            options = el.xpath('./option[@selected]')
            if options:
                data[el.attrib['name']] = options[0].attrib.get('value', '')
        elif el.tag == 'textarea':
            data[el.attrib['name']] = el.text or ''
        elif el.tag == 'button':
            # Only submit buttons contribute a value.
            if el.attrib.get('type', None) == 'submit':
                data[el.attrib['name']] = el.attrib.get('value', '')
        form_data.extend(data)
    return form_data
def binance(args: Tuple[str, URL], kwargs: Dict[str, Any]) -> Tuple[str, URL]:
    """Sign a Binance request in place with HMAC-SHA256.

    GET over https appends timestamp and signature to the query string;
    other methods append them to the form body.  All requests get the
    API-key header.  (WebSocket GETs pass through unsigned.)
    """
    method: str = args[0]
    url: URL = args[1]
    data: Dict[str, Any] = kwargs['data'] or {}
    headers: CIMultiDict = kwargs['headers']
    session: aiohttp.ClientSession = kwargs['session']
    # Credentials registered for this host's API name.
    key: str = session.__dict__['_apis'][Hosts.items[url.host].name][0]
    secret: bytes = session.__dict__['_apis'][Hosts.items[
        url.host].name][1]
    expires = str(int(time.time() * 1000))  # millisecond timestamp
    if method == METH_GET:
        if url.scheme == 'https':
            query = MultiDict(url.query)
            query.extend({'timestamp': expires})
            # Sign the raw key=value pairs in insertion order.
            query_string = '&'.join(f'{k}={v}' for k, v in query.items())
            signature = hmac.new(secret, query_string.encode(),
                                 hashlib.sha256).hexdigest()
            query.extend({'signature': signature})
            url = url.with_query(query)
            args = (
                method,
                url,
            )
    else:
        data.update({'timestamp': expires})
        # NOTE(review): relies on aiohttp payload internals (_value/_size).
        body = FormData(data)()
        signature = hmac.new(secret, body._value,
                             hashlib.sha256).hexdigest()
        body._value += f'&signature={signature}'.encode()
        body._size = len(body._value)
        kwargs.update({'data': body})
    headers.update({'X-MBX-APIKEY': key})
    return args
def bybit(args: Tuple[str, URL], kwargs: Dict[str, Any]) -> Tuple[str, URL]:
    """Sign a Bybit request in place with HMAC-SHA256.

    GET over https signs the sorted query string; GET over a non-https
    scheme builds the realtime WebSocket auth parameters; other methods
    sign the sorted form body.
    """
    method: str = args[0]
    url: URL = args[1]
    data: Dict[str, Any] = kwargs['data'] or {}
    session: aiohttp.ClientSession = kwargs['session']
    # Credentials registered for this host's API name.
    key: str = session.__dict__['_apis'][Hosts.items[url.host].name][0]
    secret: bytes = session.__dict__['_apis'][Hosts.items[
        url.host].name][1]
    # Timestamp skewed 1s into the future — presumably to absorb clock
    # drift against the server; confirm against Bybit docs.
    expires = str(int((time.time() + 1.0) * 1000))
    if method == METH_GET:
        query = MultiDict(url.query)
        if url.scheme == 'https':
            query.extend({'api_key': key, 'timestamp': expires})
            # Bybit requires the signed payload sorted by key.
            query_string = '&'.join(f'{k}={v}'
                                    for k, v in sorted(query.items()))
            sign = hmac.new(secret, query_string.encode(),
                            hashlib.sha256).hexdigest()
            query.extend({'sign': sign})
        else:
            # WebSocket auth: sign "GET/realtime<expires>".
            path = f'{method}/realtime{expires}'
            signature = hmac.new(secret, path.encode(),
                                 hashlib.sha256).hexdigest()
            query.extend({
                'api_key': key,
                'expires': expires,
                'signature': signature
            })
        url = url.with_query(query)
        args = (
            method,
            url,
        )
    else:
        data.update({'api_key': key, 'timestamp': expires})
        # NOTE(review): relies on aiohttp payload internals (_value/_size).
        body = FormData(sorted(data.items()))()
        sign = hmac.new(secret, body._value, hashlib.sha256).hexdigest()
        body._value += f'&sign={sign}'.encode()
        body._size = len(body._value)
        kwargs.update({'data': body})
    return args
def bybit(args: tuple[str, URL], kwargs: dict[str, Any]) -> tuple[str, URL]:
    """Sign a Bybit request in place with HMAC-SHA256.

    https GET/DELETE sign the sorted query string; other https methods
    sign the sorted form body; wss URLs get realtime-channel auth
    parameters appended to the query.
    """
    method: str = args[0]
    url: URL = args[1]
    data: dict[str, Any] = kwargs["data"] or {}
    session: aiohttp.ClientSession = kwargs["session"]
    # Credentials registered for this host's API name.
    key: str = session.__dict__["_apis"][Hosts.items[url.host].name][0]
    secret: bytes = session.__dict__["_apis"][Hosts.items[url.host].name][1]
    if url.scheme == "https":
        # REST timestamp skewed 5s into the past; paired with the
        # 10000ms receive window below.
        expires = str(int((time.time() - 5.0) * 1000))
        # Spot endpoints spell the receive-window parameter in camelCase.
        recv_window = (
            "recv_window" if not url.path.startswith("/spot") else "recvWindow"
        )
        auth_params = {"api_key": key, "timestamp": expires, recv_window: 10000}
        if method in (METH_GET, METH_DELETE):
            query = MultiDict(url.query)
            query.extend(auth_params)
            # Bybit requires the signed payload sorted by key.
            query_string = "&".join(f"{k}={v}" for k, v in sorted(query.items()))
            sign = hmac.new(
                secret, query_string.encode(), hashlib.sha256
            ).hexdigest()
            query.extend({"sign": sign})
            url = url.with_query(query)
            args = (method, url)
        else:
            data.update(auth_params)
            # NOTE(review): relies on aiohttp payload internals
            # (_value/_size).
            body = FormData(sorted(data.items()))()
            sign = hmac.new(secret, body._value, hashlib.sha256).hexdigest()
            body._value += f"&sign={sign}".encode()
            body._size = len(body._value)
            kwargs.update({"data": body})
    elif url.scheme == "wss":
        query = MultiDict(url.query)
        # WebSocket auth: sign "GET/realtime<expires>" with a future expiry.
        expires = str(int((time.time() + 5.0) * 1000))
        path = f"{method}/realtime{expires}"
        signature = hmac.new(secret, path.encode(), hashlib.sha256).hexdigest()
        query.extend({"api_key": key, "expires": expires, "signature": signature})
        url = url.with_query(query)
        args = (method, url)
    return args
def __init__(self, method: str, url: URL, *,
             params: Optional[Mapping[str, str]] = None,
             headers: Optional[LooseHeaders] = None,
             skip_auto_headers: Iterable[str] = frozenset(),
             data: Any = None,
             cookies: Optional[LooseCookies] = None,
             auth: Optional[BasicAuth] = None,
             version: http.HttpVersion = http.HttpVersion11,
             compress: Optional[str] = None,
             chunked: Optional[bool] = None,
             expect100: bool = False,
             loop: Optional[asyncio.AbstractEventLoop] = None,
             response_class: Optional[Type['ClientResponse']] = None,
             proxy: Optional[URL] = None,
             proxy_auth: Optional[BasicAuth] = None,
             timer: Optional[BaseTimerContext] = None,
             session: Optional['ClientSession'] = None,
             ssl: Union[SSLContext, bool, Fingerprint, None] = None,
             proxy_headers: Optional[LooseHeaders] = None,
             traces: Optional[List['Trace']] = None):
    """Prepare a client HTTP request: merge *params* into the URL query,
    record request state, and run the ``update_*`` helpers that build
    headers, cookies, auth, proxy and body."""
    if loop is None:
        loop = asyncio.get_event_loop()
    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast('ClientSession', session)
    if params:
        # Combine explicit params with any query already present in url.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.original_url = url
    self.url = url.with_fragment(None)  # fragment never goes on the wire
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class = real_response_class  # type: Type[ClientResponse]
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl
    if loop.get_debug():
        # Keep creation traceback for debug-mode diagnostics.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)
    self.update_body_from_data(data)
    # Body-less GET-like requests skip transfer-encoding setup.
    if data or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces
async def post(self) -> MultiDictProxy:
    """Return POST parameters."""
    if self._post is not None:
        # Already parsed; return the cached proxy.
        return self._post
    if self._method not in self.POST_METHODS:
        self._post = MultiDictProxy(MultiDict())
        return self._post
    content_type = self.content_type
    if (content_type not in ('',
                             'application/x-www-form-urlencoded',
                             'multipart/form-data')):
        # Unsupported content type: body is not form data.
        self._post = MultiDictProxy(MultiDict())
        return self._post
    out = MultiDict()  # type: MultiDict
    if content_type == 'multipart/form-data':
        multipart = await self.multipart()
        max_size = self._client_max_size
        field = await multipart.next()
        while field is not None:
            size = 0
            content_type = field.headers.get(hdrs.CONTENT_TYPE)
            if field.filename:
                # store file in temp file
                tmp = tempfile.TemporaryFile()
                chunk = await field.read_chunk(size=2**16)
                while chunk:
                    chunk = field.decode(chunk)
                    tmp.write(chunk)
                    size += len(chunk)
                    # Enforce the client_max_size limit while streaming.
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=max_size,
                            actual_size=size
                        )
                    chunk = await field.read_chunk(size=2**16)
                tmp.seek(0)
                ff = FileField(field.name, field.filename,
                               cast(io.BufferedReader, tmp),
                               content_type, field.headers)
                out.add(field.name, ff)
            else:
                # Plain value: decode text-ish parts with their charset.
                value = await field.read(decode=True)
                if content_type is None or \
                        content_type.startswith('text/'):
                    charset = field.get_charset(default='utf-8')
                    value = value.decode(charset)
                out.add(field.name, value)
                size += len(value)
                if 0 < max_size < size:
                    raise HTTPRequestEntityTooLarge(
                        max_size=max_size,
                        actual_size=size
                    )
            field = await multipart.next()
    else:
        # urlencoded (or empty content type): parse the whole body.
        data = await self.read()
        if data:
            charset = self.charset or 'utf-8'
            out.extend(
                parse_qsl(
                    data.rstrip().decode(charset),
                    keep_blank_values=True,
                    encoding=charset))
    self._post = MultiDictProxy(out)
    return self._post
async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
    """Return POST parameters."""
    if self._post is not None:
        # Already parsed; return the cached proxy.
        return self._post
    if self._method not in self.POST_METHODS:
        self._post = MultiDictProxy(MultiDict())
        return self._post
    content_type = self.content_type
    if content_type not in (
        "",
        "application/x-www-form-urlencoded",
        "multipart/form-data",
    ):
        # Unsupported content type: body is not form data.
        self._post = MultiDictProxy(MultiDict())
        return self._post
    out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]
    if content_type == "multipart/form-data":
        multipart = await self.multipart()
        max_size = self._client_max_size
        field = await multipart.next()
        while field is not None:
            size = 0
            field_ct = field.headers.get(hdrs.CONTENT_TYPE)
            if isinstance(field, BodyPartReader):
                assert field.name is not None
                # Note that according to RFC 7578, the Content-Type header
                # is optional, even for files, so we can't assume it's
                # present.
                # https://tools.ietf.org/html/rfc7578#section-4.4
                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = await field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        # Enforce client_max_size while streaming; close
                        # the temp file before raising.
                        if 0 < max_size < size:
                            tmp.close()
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size,
                                actual_size=size)
                        chunk = await field.read_chunk(size=2**16)
                    tmp.seek(0)
                    if field_ct is None:
                        field_ct = "application/octet-stream"
                    ff = FileField(
                        field.name,
                        field.filename,
                        cast(io.BufferedReader, tmp),
                        field_ct,
                        field.headers,
                    )
                    out.add(field.name, ff)
                else:
                    # deal with ordinary data
                    value = await field.read(decode=True)
                    if field_ct is None or field_ct.startswith("text/"):
                        charset = field.get_charset(default="utf-8")
                        out.add(field.name, value.decode(charset))
                    else:
                        out.add(field.name, value)
                    size += len(value)
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                        actual_size=size)
            else:
                # Nested multiparts are not supported by this parser.
                raise ValueError(
                    "To decode nested multipart you need "
                    "to use custom reader",
                )
            field = await multipart.next()
    else:
        # urlencoded (or empty content type): parse the whole body.
        data = await self.read()
        if data:
            charset = self.charset or "utf-8"
            bytes_query = data.rstrip()
            try:
                query = bytes_query.decode(charset)
            except LookupError:
                # Unknown charset -> 415 Unsupported Media Type.
                raise HTTPUnsupportedMediaType()
            out.extend(
                parse_qsl(qs=query, keep_blank_values=True, encoding=charset))
    self._post = MultiDictProxy(out)
    return self._post
def __init__(
    self,
    method: str,
    url: URL,
    *,
    params: Optional[Mapping[str, str]] = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Iterable[str] = frozenset(),
    data: Any = None,
    cookies: Optional[LooseCookies] = None,
    auth: Optional[BasicAuth] = None,
    version: http.HttpVersion = http.HttpVersion11,
    compress: Optional[str] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    loop: asyncio.AbstractEventLoop,
    response_class: Optional[Type["ClientResponse"]] = None,
    proxy: Optional[URL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timer: Optional[BaseTimerContext] = None,
    session: Optional["ClientSession"] = None,
    ssl: Union[SSLContext, bool, Fingerprint, None] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    traces: Optional[List["Trace"]] = None,
):
    """Prepare a client HTTP request: validate the method, merge *params*
    into the URL query, record request state, and run the ``update_*``
    helpers that build headers, cookies, auth, proxy and body."""
    # Reject methods containing non-token characters (guards against
    # header/request smuggling via the method string).
    match = _CONTAINS_CONTROL_CHAR_RE.search(method)
    if match:
        raise ValueError(
            f"Method cannot contain non-token characters {method!r} "
            f"(found at least {match.group()!r})")
    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast("ClientSession", session)
    if params:
        # Combine explicit params with any query already present in url.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.original_url = url
    self.url = url.with_fragment(None)  # fragment never goes on the wire
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class: Type[ClientResponse] = real_response_class
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl
    if loop.get_debug():
        # Keep creation traceback for debug-mode diagnostics.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)
    self.update_body_from_data(data)
    # Body-less GET-like requests skip transfer-encoding setup.
    if data is not None or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces
def post(self):
    """Return POST parameters."""
    if self._post is not None:
        # Already parsed; return the cached proxy.
        return self._post
    if self._method not in self.POST_METHODS:
        self._post = MultiDictProxy(MultiDict())
        return self._post
    content_type = self.content_type
    if (content_type not in ('',
                             'application/x-www-form-urlencoded',
                             'multipart/form-data')):
        # Unsupported content type: body is not form data.
        self._post = MultiDictProxy(MultiDict())
        return self._post
    out = MultiDict()
    if content_type == 'multipart/form-data':
        multipart = yield from self.multipart()
        field = yield from multipart.next()
        while field is not None:
            size = 0
            max_size = self._client_max_size
            content_type = field.headers.get(hdrs.CONTENT_TYPE)
            if field.filename:
                # store file in temp file
                tmp = tempfile.TemporaryFile()
                chunk = yield from field.read_chunk(size=2**16)
                while chunk:
                    chunk = field.decode(chunk)
                    tmp.write(chunk)
                    size += len(chunk)
                    # Enforce the client_max_size limit while streaming.
                    if max_size > 0 and size > max_size:
                        raise ValueError(
                            'Maximum request body size exceeded')
                    chunk = yield from field.read_chunk(size=2**16)
                tmp.seek(0)
                ff = FileField(field.name, field.filename,
                               tmp, content_type, field.headers)
                out.add(field.name, ff)
            else:
                # Plain value: decode text-ish parts with their charset.
                value = yield from field.read(decode=True)
                if content_type is None or \
                        content_type.startswith('text/'):
                    charset = field.get_charset(default='utf-8')
                    value = value.decode(charset)
                out.add(field.name, value)
                size += len(value)
                if max_size > 0 and size > max_size:
                    raise ValueError(
                        'Maximum request body size exceeded')
            field = yield from multipart.next()
    else:
        # urlencoded (or empty content type): parse the whole body.
        data = yield from self.read()
        if data:
            charset = self.charset or 'utf-8'
            out.extend(
                parse_qsl(
                    data.rstrip().decode(charset),
                    keep_blank_values=True,
                    encoding=charset))
    self._post = MultiDictProxy(out)
    return self._post
def request(self, method, path, params=(), auth=None, **kwargs):
    """Issue a request against *path* joined onto the base URL, merging
    the instance's default params and wrapping dict auth in TokenAuth."""
    if isinstance(auth, dict):
        kwargs['auth'] = TokenAuth(auth)
    else:
        kwargs['auth'] = auth
    merged = MultiDict(params)
    merged.extend(self.params)
    url = urljoin(self.url, path).rstrip('/') + self.trailing
    return super().request(method, url, params=merged, **kwargs)
async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]':
    """Return POST parameters as a read-only multidict, parsing the body once.

    The result is cached on ``self._post``.  Methods outside
    ``POST_METHODS`` and unsupported content types produce an empty
    multidict.  Uploaded files are spooled to temporary files and exposed
    as ``FileField`` entries.

    :raises HTTPRequestEntityTooLarge: if the body exceeds
        ``self._client_max_size`` (when that limit is positive).
    :raises HTTPUnsupportedMediaType: if the urlencoded body cannot be
        decoded with the declared charset.
    :raises ValueError: on nested multipart payloads, which this reader
        does not decode.
    """
    # Fast path: body already parsed and cached.
    if self._post is not None:
        return self._post
    if self._method not in self.POST_METHODS:
        self._post = MultiDictProxy(MultiDict())
        return self._post

    content_type = self.content_type
    # Only form-style payloads are parsed; anything else caches an empty result.
    if (content_type not in ('',
                             'application/x-www-form-urlencoded',
                             'multipart/form-data')):
        self._post = MultiDictProxy(MultiDict())
        return self._post

    out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

    if content_type == 'multipart/form-data':
        multipart = await self.multipart()
        max_size = self._client_max_size

        field = await multipart.next()
        while field is not None:
            size = 0
            field_ct = field.headers.get(hdrs.CONTENT_TYPE)

            # Only flat parts are supported; nested multipart raises below.
            if isinstance(field, BodyPartReader):
                if field.filename and field_ct:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = await field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        # A non-positive max_size disables the limit.
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size,
                                actual_size=size)
                        chunk = await field.read_chunk(size=2**16)
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename,
                                   cast(io.BufferedReader, tmp),
                                   field_ct,
                                   field.headers)
                    out.add(field.name, ff)
                else:
                    # deal with ordinary data
                    value = await field.read(decode=True)
                    # Text-like (or untyped) fields become str; others stay bytes.
                    if field_ct is None or \
                            field_ct.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        out.add(field.name, value.decode(charset))
                    else:
                        out.add(field.name, value)
                    size += len(value)
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                        actual_size=size)
            else:
                raise ValueError(
                    'To decode nested multipart you need '
                    'to use custom reader',
                )

            field = await multipart.next()
    else:
        # application/x-www-form-urlencoded (or empty content type).
        data = await self.read()
        if data:
            charset = self.charset or 'utf-8'
            bytes_query = data.rstrip()
            try:
                query = bytes_query.decode(charset)
            except LookupError:
                # Unknown/unsupported charset name in the request headers.
                raise HTTPUnsupportedMediaType()
            out.extend(
                parse_qsl(qs=query,
                          keep_blank_values=True,
                          encoding=charset))

    self._post = MultiDictProxy(out)
    return self._post
def post(self):
    """Return POST parameters as a read-only multidict, parsing the body once.

    The result is cached on ``self._post``.  Non-POST-capable methods and
    unsupported content types produce an empty multidict.  Uploaded files
    are spooled to temporary files and exposed as ``FileField`` entries.

    :raises ValueError: if the accumulated body size exceeds
        ``self._client_max_size`` (when that limit is positive).
    """
    # Fast path: body already parsed and cached.
    if self._post is not None:
        return self._post
    if self._method not in self.POST_METHODS:
        self._post = MultiDictProxy(MultiDict())
        return self._post

    content_type = self.content_type
    # Only form-style payloads are parsed; anything else caches an empty result.
    if (content_type not in ('',
                             'application/x-www-form-urlencoded',
                             'multipart/form-data')):
        self._post = MultiDictProxy(MultiDict())
        return self._post

    out = MultiDict()

    if content_type == 'multipart/form-data':
        multipart = yield from self.multipart()

        field = yield from multipart.next()
        while field is not None:
            size = 0
            max_size = self._client_max_size
            # NOTE: rebinds ``content_type`` to the *field's* content type
            # for the rest of this iteration.
            content_type = field.headers.get(hdrs.CONTENT_TYPE)

            if field.filename:
                # store file in temp file
                tmp = tempfile.TemporaryFile()
                chunk = yield from field.read_chunk(size=2**16)
                while chunk:
                    chunk = field.decode(chunk)
                    tmp.write(chunk)
                    size += len(chunk)
                    # A non-positive max_size disables the limit.
                    if max_size > 0 and size > max_size:
                        raise ValueError(
                            'Maximum request body size exceeded')
                    chunk = yield from field.read_chunk(size=2**16)
                tmp.seek(0)

                ff = FileField(field.name, field.filename,
                               tmp, content_type, field.headers)
                out.add(field.name, ff)
            else:
                value = yield from field.read(decode=True)
                # Text-like (or untyped) fields are decoded to str using the
                # field's declared charset, defaulting to UTF-8.
                if content_type is None or \
                        content_type.startswith('text/'):
                    charset = field.get_charset(default='utf-8')
                    value = value.decode(charset)
                out.add(field.name, value)
                size += len(value)
                if max_size > 0 and size > max_size:
                    raise ValueError('Maximum request body size exceeded')

            field = yield from multipart.next()
    else:
        # application/x-www-form-urlencoded (or empty content type).
        data = yield from self.read()
        if data:
            charset = self.charset or 'utf-8'
            out.extend(
                parse_qsl(data.rstrip().decode(charset),
                          # FIX: keep blank values (e.g. "a=&b=1") so empty
                          # form fields are not silently dropped — matches
                          # the other post() implementations in this file.
                          keep_blank_values=True,
                          encoding=charset))

    self._post = MultiDictProxy(out)
    return self._post
def edit_quiz(id):
    """Admin view for a quiz: render its editor on GET, apply edits on POST.

    POST handles, in order: adding a question (with four default options),
    updating/adding options per question, updating/deleting options,
    deleting questions, enforcing a single correct option on radio
    questions, and saving uploaded images for questions/options.
    Non-admin sessions are redirected to the index.

    NOTE(review): ``id`` shadows the builtin, but renaming it would change
    the view's keyword interface (``url_for('edit_quiz', id=...)``).
    """
    db = db_session()
    if session['admin']:
        if request.method == 'GET':
            questions = db.query(Question).filter_by(quiz_id=id).all()
            quiz = db.query(Quiz).filter_by(id=id).one()
            return render_template('admin/quiz.html',
                                   quiz=quiz,
                                   questions=questions,
                                   id=id)
        elif request.method == 'POST':
            # Re-group the flat form into nested dicts keyed by object id:
            # field names look like "<kind>_<id>_<attr>" (e.g. "question_3_prompt").
            form_data = MultiDict()
            form_data.extend({'question': {}})
            form_data.extend({'option': {}})
            for each in request.form.items():
                if each[0] == 'add_option':
                    # assumes the question entry already exists in form_data
                    # (created by an attribute field below) — TODO confirm
                    form_data['question'][each[1]]['add_option'] = True
                elif each[0] == 'add_question':
                    form_data['add_question'] = True
                elif each[0] == 'delete_question':
                    form_data['question'][each[1]]['delete'] = True
                elif each[0] == 'delete_option':
                    form_data['option'][each[1]]['delete'] = True
                else:
                    # Generic attribute field: kind, object id, attribute name.
                    line_data = each[0].split("_")
                    if not form_data[line_data[0]].get(line_data[1]):
                        form_data[line_data[0]].update(
                            {line_data[1]: {
                                line_data[2]: each[1]
                            }})
                    else:
                        form_data[line_data[0]][line_data[1]].update(
                            {line_data[2]: each[1]})

            # New question gets placeholder text and four default options.
            if 'add_question' in form_data:
                new_question = Question(quiz_id=id,
                                        prompt='New Question',
                                        description='New Description')
                db.add(new_question)
                for i in range(4):
                    db.add(
                        Option(text='Option ' + str(i + 1),
                               question=new_question))

            # Update each submitted question (and add a blank option on request).
            for question_id in form_data['question']:
                # NOTE(review): this truthiness check is on the Query object,
                # not its result, so it looks always-true — verify intent.
                if db.query(Question).filter_by(id=question_id):
                    if 'add_option' in form_data['question'][question_id]:
                        db.add(
                            Option(question_id=question_id, text='New Option'))
                    db.merge(
                        Question(id=question_id,
                                 prompt=form_data['question'][question_id]
                                 ['prompt'],
                                 description=form_data['question'][question_id]
                                 ['description'],
                                 option_type=form_data['question'][question_id]
                                 ['optiontype']))

            # Delete or update each submitted option; the 'correct' flag may
            # come from the owning question's radio selection.
            for option_id in form_data['option']:
                option = db.query(Option).get(option_id)
                if option:
                    if 'delete' in form_data['option'][option_id]:
                        db.delete(option)
                    else:
                        try:
                            if 'correct' in form_data['question'][str(
                                    option.question_id)]:
                                option.correct = form_data['question'][str(
                                    option.question_id)]['correct'] == str(
                                        option_id)
                            else:
                                option.correct = 'correct' in form_data[
                                    'option'][option_id]
                        except KeyError:
                            # Owning question absent from the form — leave flag as-is.
                            pass
                        db.merge(
                            Option(
                                id=option_id,
                                text=form_data['option'][option_id]['text']))

            # Apply question deletions; otherwise sanity-check radio questions.
            for question_id in form_data['question']:
                if 'delete' in form_data['question'][question_id]:
                    # NOTE(review): duplicated 'delete' check below is redundant.
                    if 'delete' in form_data['question'][question_id]:
                        db.query(Option).filter_by(
                            question_id=question_id).delete()
                        db.query(Question).filter_by(id=question_id).delete()
                else:
                    question = db.query(Question).get(question_id)
                    if question.option_type == 'radio':
                        # check that only one option is correct on a radio button question
                        found = False
                        for option in question.option:
                            if option.correct:
                                if not found:
                                    found = True
                                else:
                                    option.correct = False

            # Save uploaded images; file field names look like "<kind>_<id>".
            for file in request.files:
                if request.files[file] and allowed_file(
                        request.files[file].filename):
                    print("New file %s: %s" % (file, request.files[file]))
                    object_data = file.split("_")
                    filename = secure_filename(request.files[file].filename)
                    request.files[file].save(
                        os.path.join(app.config['UPLOAD_FOLDER'], filename))
                    path = url_for('uploaded_file', filename=filename)
                    if object_data[0] == 'question':
                        db.merge(Question(id=object_data[1], image=path))
                    elif object_data[0] == 'option':
                        db.merge(Option(id=object_data[1], image=path))

            db.commit()
            return redirect(url_for('edit_quiz', id=id))
    else:
        return redirect(url_for('index'))
def __init__(self, method: str, url: URL, *,
             params: Optional[Mapping[str, str]]=None,
             headers: Optional[LooseHeaders]=None,
             skip_auto_headers: Iterable[str]=frozenset(),
             data: Any=None,
             cookies: Optional[LooseCookies]=None,
             auth: Optional[BasicAuth]=None,
             version: http.HttpVersion=http.HttpVersion11,
             compress: Optional[str]=None,
             chunked: Optional[bool]=None,
             expect100: bool=False,
             loop: Optional[asyncio.AbstractEventLoop]=None,
             response_class: Optional[Type['ClientResponse']]=None,
             proxy: Optional[URL]=None,
             proxy_auth: Optional[BasicAuth]=None,
             timer: Optional[BaseTimerContext]=None,
             session: Optional['ClientSession']=None,
             ssl: Union[SSLContext, bool, Fingerprint, None]=None,
             proxy_headers: Optional[LooseHeaders]=None,
             traces: Optional[List['Trace']]=None):
    """Build a client HTTP request.

    Normalises the URL (merging *params* into its query and dropping the
    fragment), stores the request configuration, then delegates to the
    ``update_*`` helpers to prepare version, host, headers, cookies,
    encoding, auth, proxy, and body.
    """
    if loop is None:
        loop = asyncio.get_event_loop()

    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast('ClientSession', session)

    # Merge explicit params into any query string already present on the URL.
    if params:
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)

    self.original_url = url
    # The wire-level URL never carries a fragment.
    self.url = url.with_fragment(None)
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class = real_response_class  # type: Type[ClientResponse]
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl

    # Capture the creation site for debug-mode resource-leak reports.
    if loop.get_debug():
        self._source_traceback = traceback.extract_stack(sys._getframe(1))

    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)

    self.update_body_from_data(data)
    # FIX: test ``data is not None`` (not truthiness) so an explicitly
    # supplied empty body such as b'' still gets its transfer encoding
    # set up — consistent with the sibling __init__ in this file.
    if data is not None or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces
async def post(self) -> MultiDictProxy:
    """Return POST parameters as a read-only multidict, parsing the body once.

    The result is cached on ``self._post``.  Methods outside
    ``POST_METHODS`` and unsupported content types produce an empty
    multidict.  Uploaded files are spooled to temporary files and exposed
    as ``FileField`` entries.

    :raises HTTPRequestEntityTooLarge: if the body exceeds
        ``self._client_max_size`` (when that limit is positive).
    """
    # Fast path: body already parsed and cached.
    if self._post is not None:
        return self._post
    if self._method not in self.POST_METHODS:
        self._post = MultiDictProxy(MultiDict())
        return self._post

    content_type = self.content_type
    # Only form-style payloads are parsed; anything else caches an empty result.
    if (content_type not in ('',
                             'application/x-www-form-urlencoded',
                             'multipart/form-data')):
        self._post = MultiDictProxy(MultiDict())
        return self._post

    out = MultiDict()  # type: MultiDict

    if content_type == 'multipart/form-data':
        multipart = await self.multipart()
        max_size = self._client_max_size

        field = await multipart.next()
        while field is not None:
            size = 0
            # NOTE: rebinds ``content_type`` to the *field's* content type
            # for the rest of this iteration.
            content_type = field.headers.get(hdrs.CONTENT_TYPE)

            if field.filename:
                # store file in temp file
                tmp = tempfile.TemporaryFile()
                chunk = await field.read_chunk(size=2**16)
                while chunk:
                    chunk = field.decode(chunk)
                    tmp.write(chunk)
                    size += len(chunk)
                    # A non-positive max_size disables the limit.
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                        actual_size=size)
                    chunk = await field.read_chunk(size=2**16)
                tmp.seek(0)

                ff = FileField(field.name, field.filename,
                               cast(io.BufferedReader, tmp),
                               content_type, field.headers)
                out.add(field.name, ff)
            else:
                value = await field.read(decode=True)
                # Text-like (or untyped) fields are decoded to str using the
                # field's declared charset, defaulting to UTF-8.
                if content_type is None or \
                        content_type.startswith('text/'):
                    charset = field.get_charset(default='utf-8')
                    value = value.decode(charset)
                out.add(field.name, value)
                size += len(value)
                if 0 < max_size < size:
                    raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                    actual_size=size)

            field = await multipart.next()
    else:
        # application/x-www-form-urlencoded (or empty content type).
        data = await self.read()
        if data:
            charset = self.charset or 'utf-8'
            out.extend(
                parse_qsl(data.rstrip().decode(charset),
                          keep_blank_values=True,
                          encoding=charset))

    self._post = MultiDictProxy(out)
    return self._post