async def email_confirmation(request: web.Request):
    """Handled access from a link sent to user by email

    Retrieves confirmation key and redirects back to some location front-end

    * registration, change-email:
        - sets user as active
        - redirects to login
    * reset-password:
        - redirects to login
        - attaches page and token info onto the url
        - info appended as fragment, e.g. https://osparc.io#reset-password?code=131234
        - front-end should interpret that info as:
            - show the reset-password page
            - use the token to submit a POST /v0/auth/confirmation/{code} and finalize reset action
    """
    params, _, _ = await extract_and_validate(request)

    db: AsyncpgStorage = get_plugin_storage(request.app)
    cfg: LoginOptions = get_plugin_options(request.app)

    code = params["code"]
    confirmation = await validate_confirmation_code(code, db, cfg)

    # FIX: bind the redirect target up-front so an invalid/expired code still
    # redirects to the login page instead of hitting an unbound name below.
    redirect_url = URL(cfg.LOGIN_REDIRECT)

    if confirmation:
        action = confirmation["action"]

        if action == REGISTRATION:
            # Activate the account, then consume the one-shot confirmation.
            user = await db.get_user({"id": confirmation["user_id"]})
            await db.update_user(user, {"status": ACTIVE})
            await db.delete_confirmation(confirmation)
            log.debug("User %s registered", user)
            redirect_url = redirect_url.with_fragment("?registered=true")

        elif action == CHANGE_EMAIL:
            # The new address travels in the confirmation payload.
            user = await db.get_user({"id": confirmation["user_id"]})
            await db.update_user(user, {"email": confirmation["data"]})
            await db.delete_confirmation(confirmation)
            log.debug("User %s changed email", user)

        elif action == RESET_PASSWORD:
            # NOTE: By using fragments (instead of queries or path parameters), the browser does NOT reloads page
            redirect_url = redirect_url.with_fragment("reset-password?code=%s" % code)
            log.debug("Reset password requested %s", confirmation)

    raise web.HTTPFound(location=redirect_url)
def __init__(
    self,
    method: str,
    url: URL,
    *,
    writer: "asyncio.Task[None]",
    continue100: Optional["asyncio.Future[bool]"],
    timer: BaseTimerContext,
    request_info: RequestInfo,
    traces: List["Trace"],
    loop: asyncio.AbstractEventLoop,
    session: "ClientSession",
) -> None:
    """Initialize the client response.

    Records the request context (method, URL, writer task, timer, traces,
    owning session) and resets all per-response state; the response starts
    out closed and empty until the connection feeds it.
    """
    assert isinstance(url, URL)

    super().__init__()

    self.method = method
    self.cookies: SimpleCookie[str] = SimpleCookie()

    # Keep the caller's URL verbatim; the working URL below has the
    # fragment stripped since fragments are never sent on the wire.
    self._real_url = url
    self._url = url.with_fragment(None)
    self._body: Optional[bytes] = None
    self._writer: Optional[asyncio.Task[None]] = writer
    self._continue = continue100  # None by default
    self._closed = True
    self._history: Tuple[ClientResponse, ...] = ()
    self._request_info = request_info
    # Fall back to a no-op timer so timing code can always use self._timer.
    self._timer = timer if timer is not None else TimerNoop()
    self._cache: Dict[str, Any] = {}
    self._traces = traces
    self._loop = loop
    # store a reference to session #1985
    self._session: Optional[ClientSession] = session
    if loop.get_debug():
        # In debug mode, remember where this response was created
        # (frame 1 == our caller) for later diagnostics.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
def __init__(self, method: str, url: URL, *,
             writer: 'asyncio.Task[None]',
             continue100: Optional['asyncio.Future[bool]'],
             timer: BaseTimerContext,
             request_info: RequestInfo,
             traces: List['Trace'],
             loop: asyncio.AbstractEventLoop,
             session: 'ClientSession') -> None:
    """Initialize the client response.

    Records the request context (method, URL, writer task, timer, traces,
    owning session) and resets all per-response state; the response starts
    out closed and empty until the connection feeds it.
    """
    assert isinstance(url, URL)

    self.method = method
    self.cookies = SimpleCookie()

    # Keep the caller's URL verbatim; the working URL below has the
    # fragment stripped since fragments are never sent on the wire.
    self._real_url = url
    self._url = url.with_fragment(None)
    self._body = None  # type: Any
    self._writer = writer  # type: Optional[asyncio.Task[None]]
    self._continue = continue100  # None by default
    self._closed = True
    self._history = ()  # type: Tuple[ClientResponse, ...]
    self._request_info = request_info
    # Fall back to a no-op timer so timing code can always use self._timer.
    self._timer = timer if timer is not None else TimerNoop()
    self._cache = {}  # type: Dict[str, Any]
    self._traces = traces
    self._loop = loop
    # store a reference to session #1985
    self._session = session  # type: Optional[ClientSession]
    if loop.get_debug():
        # In debug mode, remember where this response was created
        # (frame 1 == our caller) for later diagnostics.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
async def onfinish (self):
    """Emit DOM snapshots for every document currently loaded in the tab.

    First yields ``self.script`` (so it is recorded too) and evaluates it in
    the page, then walks the full frame tree and yields one
    ``DomSnapshotEvent`` per unique http(s) document, serialized with
    ``script``/``noscript`` tags and event-handler attributes stripped.
    """
    tab = self.loader.tab
    yield self.script
    await tab.Runtime.evaluate (expression=str (self.script), returnByValue=True)

    viewport = await getFormattedViewportMetrics (tab)
    # depth=-1 + pierce=True: fetch the entire tree, descending into
    # iframes and shadow roots.
    dom = await tab.DOM.getDocument (depth=-1, pierce=True)
    self.logger.debug ('dom snapshot document',
            uuid='0c720784-8bd1-4fdc-a811-84394d753539', dom=dom)
    haveUrls = set ()
    for doc in ChromeTreeWalker (dom['root']).split ():
        url = URL (doc['documentURL'])
        if url in haveUrls:
            # ignore duplicate URLs. they are usually caused by
            # javascript-injected iframes (advertising) with no(?) src
            self.logger.warning ('dom snapshot duplicate',
                    uuid='d44de989-98d4-456e-82e7-9d4c49acab5e')
        elif url.scheme in ('http', 'https'):
            self.logger.debug ('dom snapshot',
                    uuid='ece7ff05-ccd9-44b5-b6a8-be25a24b96f4',
                    base=doc["baseURL"])
            haveUrls.add (url)
            walker = ChromeTreeWalker (doc)
            # remove script, to make the page static and noscript, because at the
            # time we took the snapshot scripts were enabled
            disallowedTags = ['script', 'noscript']
            disallowedAttributes = html.eventAttributes
            stream = StripAttributeFilter (StripTagFilter (walker, disallowedTags), disallowedAttributes)
            serializer = HTMLSerializer ()
            # Fragment is client-side state only, so drop it from the
            # recorded document URL.
            yield DomSnapshotEvent (url.with_fragment(None), serializer.render (stream, 'utf-8'), viewport)
def get_invitation_url(confirmation, origin: "Optional[URL]" = None) -> URL:
    """Compose the front-end invitation URL for a confirmation token.

    :param confirmation: confirmation record with at least ``code`` and ``action``
    :param origin: base URL of the web front-end; when omitted (or when the
        confirmation is not an invitation) a relative URL is produced
    :return: URL of the form ``{origin}/#/registration/?invitation={code}``
    """
    # FIX: annotation was ``origin: URL = None`` — the default None made the
    # type wrong; a string Optional annotation keeps runtime behavior intact.
    code = confirmation["code"]
    is_invitation = confirmation["action"] == ConfirmationAction.INVITATION.name
    if origin is None or not is_invitation:
        origin = URL()

    # https://some-web-url.io/#/registration/?invitation={code}
    return origin.with_fragment(f"/registration/?invitation={code}")
def test_response_real_url(loop, session) -> None:
    """The response's .url drops the fragment while .real_url keeps it."""
    fragment_url = URL('http://def-cl-resp.org/#urlfragment')
    resp = ClientResponse(
        'get',
        fragment_url,
        request_info=mock.Mock(),
        writer=mock.Mock(),
        continue100=None,
        timer=TimerNoop(),
        traces=[],
        loop=loop,
        session=session,
    )
    stripped = fragment_url.with_fragment(None)
    assert resp.url == stripped
    assert resp.real_url == fragment_url
async def email_confirmation(request: web.Request):
    """ Handled access from a link sent to user by email

    Retrieves confirmation key and redirects back to some location front-end

    * registration, change-email:
        - sets user as active
        - redirects to login
    * reset-password:
        - redirects to login
        - attaches page and token info onto the url
        - info appended as fragment, e.g. https://osparc.io#page=reset-password;code=131234
        - front-end should interpret that info as:
            - show the reset-password page
            - use the token to submit a POST /v0/auth/confirmation/{code} and finalize reset action
    """
    params, _, _ = await extract_and_validate(request)

    db = get_storage(request.app)
    code = params['code']

    # May be falsy for an unknown/expired code — see NOTE(review) below.
    confirmation = await validate_confirmation_code(code, db)

    if confirmation:
        action = confirmation['action']
        redirect_url = URL(request.app[APP_LOGIN_CONFIG]['LOGIN_REDIRECT'])

        if action == REGISTRATION:
            # Activate the account, then consume the one-shot confirmation.
            user = await db.get_user({'id': confirmation['user_id']})
            await db.update_user(user, {'status': ACTIVE})
            await db.delete_confirmation(confirmation)
            log.debug("User %s registered", user)
            #TODO: flash_response([cfg.MSG_ACTIVATED, cfg.MSG_LOGGED_IN])

        elif action == CHANGE_EMAIL:
            # The new e-mail address travels in the confirmation payload.
            user = await db.get_user({'id': confirmation['user_id']})
            await db.update_user(user, {'email': confirmation['data']})
            await db.delete_confirmation(confirmation)
            log.debug("User %s changed email", user)
            #TODO: flash_response(cfg.MSG_EMAIL_CHANGED)

        elif action == RESET_PASSWORD:
            # NOTE: By using fragments (instead of queries or path parameters), the browser does NOT reloads page
            redirect_url = redirect_url.with_fragment(
                "page=reset-password;code=%s" % code)
            log.debug("Reset password requested %s", confirmation)

    # TODO: inject flash messages to be shown by main website
    # NOTE(review): if ``confirmation`` is falsy, ``redirect_url`` is unbound
    # here and this raise becomes a NameError — confirm intended behavior.
    raise web.HTTPFound(location=redirect_url)
def test_with_fragment_None():
    """Passing None removes the fragment entirely."""
    original = URL("http://example.com/path#frag")
    stripped = original.with_fragment(None)
    assert str(stripped) == "http://example.com/path"
def test_with_fragment_safe():
    """Fragment-safe characters (: ? @ /) are kept unescaped."""
    base = URL("http://example.com")
    tagged = base.with_fragment("a:b?c@d/e")
    assert str(tagged) == "http://example.com/#a:b?c@d/e"
def test_with_fragment_non_ascii():
    """Non-ASCII fragments: percent-encoded raw form, decoded .fragment."""
    base = URL("http://example.com")
    tagged = base.with_fragment("фрагм")
    assert tagged.raw_fragment == "%D1%84%D1%80%D0%B0%D0%B3%D0%BC"
    assert tagged.fragment == "фрагм"
def __init__(
    self,
    method: str,
    url: URL,
    *,
    params: Optional[Mapping[str, str]] = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Iterable[str] = frozenset(),
    data: Any = None,
    cookies: Optional[LooseCookies] = None,
    auth: Optional[BasicAuth] = None,
    version: http.HttpVersion = http.HttpVersion11,
    compress: Optional[str] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    loop: asyncio.AbstractEventLoop,
    response_class: Optional[Type["ClientResponse"]] = None,
    proxy: Optional[URL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timer: Optional[BaseTimerContext] = None,
    session: Optional["ClientSession"] = None,
    ssl: Union[SSLContext, bool, Fingerprint, None] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    traces: Optional[List["Trace"]] = None,
):
    """Build a client request.

    Validates the method, merges ``params`` into the URL query, and runs the
    ``update_*`` preparation pipeline (version, host, headers, cookies,
    encoding, auth, proxy, body) before the request is sent.
    """
    # Reject methods with control characters up front — they could otherwise
    # be used for request-line/header injection.
    match = _CONTAINS_CONTROL_CHAR_RE.search(method)
    if match:
        raise ValueError(
            f"Method cannot contain non-token characters {method!r} "
            f"(found at least {match.group()!r})")

    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast("ClientSession", session)
    if params:
        # Merge explicit ``params`` with any query already on the URL.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.original_url = url
    # Fragments are client-side only and must never reach the wire.
    self.url = url.with_fragment(None)
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class: Type[ClientResponse] = real_response_class
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl

    if loop.get_debug():
        self._source_traceback = traceback.extract_stack(sys._getframe(1))

    # Preparation pipeline; order matters (e.g. auto headers depend on the
    # host, transfer encoding depends on the body).
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)

    self.update_body_from_data(data)
    if data is not None or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces
def test_with_fragment():
    """with_fragment attaches the given fragment to the URL."""
    base = URL("http://example.com")
    tagged = base.with_fragment("frag")
    assert str(tagged) == "http://example.com/#frag"
def test_with_fragment_bad_type():
    """A non-string fragment is rejected with TypeError."""
    base = URL("http://example.com")
    with pytest.raises(TypeError):
        base.with_fragment(123)
def __init__(self, method: str, url: URL, *,
             params: Optional[Mapping[str, str]] = None,
             headers: Optional[LooseHeaders] = None,
             skip_auto_headers: Iterable[str] = frozenset(),
             data: Any = None,
             cookies: Optional[LooseCookies] = None,
             auth: Optional[BasicAuth] = None,
             version: http.HttpVersion = http.HttpVersion11,
             compress: Optional[str] = None,
             chunked: Optional[bool] = None,
             expect100: bool = False,
             loop: Optional[asyncio.AbstractEventLoop] = None,
             response_class: Optional[Type['ClientResponse']] = None,
             proxy: Optional[URL] = None,
             proxy_auth: Optional[BasicAuth] = None,
             timer: Optional[BaseTimerContext] = None,
             session: Optional['ClientSession'] = None,
             ssl: Union[SSLContext, bool, Fingerprint, None] = None,
             proxy_headers: Optional[LooseHeaders] = None,
             traces: Optional[List['Trace']] = None):
    """Build a client request.

    Merges ``params`` into the URL query and runs the ``update_*``
    preparation pipeline (version, host, headers, cookies, encoding, auth,
    proxy, body) before the request is sent.
    """

    if loop is None:
        loop = asyncio.get_event_loop()

    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast('ClientSession', session)
    if params:
        # Merge explicit ``params`` with any query already on the URL.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.original_url = url
    # Fragments are client-side only and must never reach the wire.
    self.url = url.with_fragment(None)
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class = real_response_class  # type: Type[ClientResponse]
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl

    if loop.get_debug():
        self._source_traceback = traceback.extract_stack(sys._getframe(1))

    # Preparation pipeline; order matters (e.g. auto headers depend on the
    # host, transfer encoding depends on the body).
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)

    self.update_body_from_data(data)
    if data or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces
async def _request(self, method, url, *,
                   params=None,
                   data=None,
                   json=None,
                   headers=None,
                   skip_auto_headers=None,
                   auth=None,
                   allow_redirects=True,
                   max_redirects=10,
                   compress=None,
                   chunked=None,
                   expect100=False,
                   read_until_eof=True,
                   proxy=None,
                   proxy_auth=None,
                   timeout=sentinel,
                   verify_ssl=None,
                   fingerprint=None,
                   ssl_context=None,
                   ssl=None,
                   proxy_headers=None,
                   trace_request_ctx=None):
    """Perform an HTTP request, following redirects and firing trace hooks.

    Validates arguments, prepares headers/auth/proxy, then loops: build a
    request, connect (with timeout), send, and either follow a redirect or
    return the response.  Any failure cancels the cumulative timeout timer
    and notifies the trace configs before re-raising.
    """

    # NOTE: timeout clamps existing connect and read timeouts. We cannot
    # set the default to None because we need to detect if the user wants
    # to use the existing timeouts by setting timeout to None.

    if self.closed:
        raise RuntimeError('Session is closed')

    ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

    if data is not None and json is not None:
        raise ValueError(
            'data and json parameters can not be used at the same time')
    elif json is not None:
        data = payload.JsonPayload(json, dumps=self._json_serialize)

    if not isinstance(chunked, bool) and chunked is not None:
        warnings.warn(
            'Chunk size is deprecated #1615', DeprecationWarning)

    redirects = 0
    history = []
    version = self._version

    # Merge with default headers and transform to CIMultiDict
    headers = self._prepare_headers(headers)
    proxy_headers = self._prepare_headers(proxy_headers)

    try:
        url = URL(url)
    except ValueError:
        raise InvalidURL(url)

    skip_headers = set(self._skip_auto_headers)
    if skip_auto_headers is not None:
        for i in skip_auto_headers:
            skip_headers.add(istr(i))

    if proxy is not None:
        try:
            proxy = URL(proxy)
        except ValueError:
            raise InvalidURL(proxy)

    # timeout is cumulative for all request operations
    # (request, redirects, responses, data consuming)
    tm = TimeoutHandle(
        self._loop,
        timeout if timeout is not sentinel else self._read_timeout)
    handle = tm.start()

    traces = [
        Trace(
            self,
            trace_config,
            trace_config.trace_config_ctx(
                trace_request_ctx=trace_request_ctx)
        )
        for trace_config in self._trace_configs
    ]

    for trace in traces:
        await trace.send_request_start(
            method,
            url,
            headers
        )

    timer = tm.timer()
    try:
        with timer:
            # Redirect-following loop: each iteration issues one request.
            while True:
                url, auth_from_url = strip_auth_from_url(url)
                if auth and auth_from_url:
                    raise ValueError("Cannot combine AUTH argument with "
                                     "credentials encoded in URL")

                if auth is None:
                    auth = auth_from_url
                if auth is None:
                    auth = self._default_auth
                # It would be confusing if we support explicit
                # Authorization header with auth argument
                if (headers is not None and
                        auth is not None and
                        hdrs.AUTHORIZATION in headers):
                    raise ValueError("Cannot combine AUTHORIZATION header "
                                     "with AUTH argument or credentials "
                                     "encoded in URL")

                # Fragments are never sent on the wire.
                url = url.with_fragment(None)

                cookies = self._cookie_jar.filter_cookies(url)

                if proxy is not None:
                    proxy = URL(proxy)
                elif self._trust_env:
                    # Pick a proxy from the environment matching the scheme.
                    for scheme, proxy_info in proxies_from_env().items():
                        if scheme == url.scheme:
                            proxy = proxy_info.proxy
                            proxy_auth = proxy_info.proxy_auth
                            break

                req = self._request_class(
                    method, url, params=params, headers=headers,
                    skip_auto_headers=skip_headers, data=data,
                    cookies=cookies, auth=auth, version=version,
                    compress=compress, chunked=chunked,
                    expect100=expect100, loop=self._loop,
                    response_class=self._response_class,
                    proxy=proxy, proxy_auth=proxy_auth, timer=timer,
                    session=self,
                    auto_decompress=self._auto_decompress,
                    ssl=ssl, proxy_headers=proxy_headers)

                # connection timeout
                try:
                    with CeilTimeout(self._conn_timeout, loop=self._loop):
                        conn = await self._connector.connect(
                            req,
                            traces=traces
                        )
                except asyncio.TimeoutError as exc:
                    raise ServerTimeoutError(
                        'Connection timeout '
                        'to host {0}'.format(url)) from exc

                tcp_nodelay(conn.transport, True)
                tcp_cork(conn.transport, False)
                try:
                    resp = req.send(conn)
                    try:
                        await resp.start(conn, read_until_eof)
                    except Exception:
                        resp.close()
                        conn.close()
                        raise
                except ClientError:
                    raise
                except OSError as exc:
                    raise ClientOSError(*exc.args) from exc

                self._cookie_jar.update_cookies(resp.cookies, resp.url)

                # redirects
                if resp.status in (
                        301, 302, 303, 307, 308) and allow_redirects:

                    for trace in traces:
                        await trace.send_request_redirect(
                            method,
                            url,
                            headers,
                            resp
                        )

                    redirects += 1
                    history.append(resp)
                    if max_redirects and redirects >= max_redirects:
                        resp.close()
                        break
                    else:
                        resp.release()

                    # For 301 and 302, mimic IE, now changed in RFC
                    # https://github.com/kennethreitz/requests/pull/269
                    if (resp.status == 303 and
                            resp.method != hdrs.METH_HEAD) \
                            or (resp.status in (301, 302) and
                                resp.method == hdrs.METH_POST):
                        method = hdrs.METH_GET
                        data = None
                        if headers.get(hdrs.CONTENT_LENGTH):
                            headers.pop(hdrs.CONTENT_LENGTH)

                    r_url = (resp.headers.get(hdrs.LOCATION) or
                             resp.headers.get(hdrs.URI))
                    if r_url is None:
                        # see github.com/aio-libs/aiohttp/issues/2022
                        break

                    try:
                        r_url = URL(
                            r_url, encoded=not self.requote_redirect_url)
                    except ValueError:
                        raise InvalidURL(r_url)

                    scheme = r_url.scheme
                    if scheme not in ('http', 'https', ''):
                        resp.close()
                        raise ValueError(
                            'Can redirect only to http or https')
                    elif not scheme:
                        # Relative redirect: resolve against the current URL.
                        r_url = url.join(r_url)

                    if url.origin() != r_url.origin():
                        # Cross-origin redirect: drop credentials.
                        auth = None
                        headers.pop(hdrs.AUTHORIZATION, None)

                    url = r_url
                    params = None
                    resp.release()
                    continue

                break

        # check response status
        if self._raise_for_status:
            resp.raise_for_status()

        # register connection
        if handle is not None:
            if resp.connection is not None:
                resp.connection.add_callback(handle.cancel)
            else:
                handle.cancel()

        resp._history = tuple(history)

        for trace in traces:
            await trace.send_request_end(
                method,
                url,
                headers,
                resp
            )
        return resp

    except Exception as e:
        # cleanup timer
        tm.close()
        if handle:
            handle.cancel()
            handle = None

        for trace in traces:
            await trace.send_request_exception(
                method,
                url,
                headers,
                e
            )
        raise
def test_with_fragment_percent_encoded():
    """An already percent-encoded fragment gets its % signs re-escaped."""
    base = URL("http://example.com")
    tagged = base.with_fragment("%cf%80")
    assert str(tagged) == "http://example.com/#%25cf%2580"
    assert tagged.raw_fragment == "%25cf%2580"
    assert tagged.fragment == "%cf%80"
def test_with_fragment():
    """ASCII fragment is stored identically in raw and decoded forms."""
    base = URL("http://example.com")
    tagged = base.with_fragment("frag")
    assert str(tagged) == "http://example.com/#frag"
    assert tagged.raw_fragment == "frag"
    assert tagged.fragment == "frag"
def test_with_fragment_None_matching():
    """Removing an absent fragment returns the very same URL object."""
    original = URL("http://example.com/path")
    unchanged = original.with_fragment(None)
    assert original is unchanged
def test_with_fragment():
    '''with_fragment attaches the given fragment to the URL.'''
    base = URL('http://example.com')
    tagged = base.with_fragment('frag')
    assert str(tagged) == 'http://example.com/#frag'
def test_with_fragment_matching():
    """Setting the fragment to its current value returns the same object."""
    original = URL("http://example.com/path#frag")
    result = original.with_fragment("frag")
    assert original is result
def test_with_fragment_safe():
    '''Fragment-safe characters (: ? @ /) are kept unescaped.'''
    base = URL('http://example.com')
    tagged = base.with_fragment('a:b?c@d/e')
    assert str(tagged) == 'http://example.com/#a:b?c@d/e'
def _request(self, method, url, *,
             params=None,
             data=None,
             json=None,
             headers=None,
             skip_auto_headers=None,
             auth=None,
             allow_redirects=True,
             max_redirects=10,
             encoding=None,
             compress=None,
             chunked=None,
             expect100=False,
             read_until_eof=True,
             proxy=None,
             proxy_auth=None,
             timeout=sentinel,
             verify_ssl=None,
             fingerprint=None,
             ssl_context=None,
             proxy_headers=None):
    """Perform an HTTP request (generator-based coroutine), following
    redirects.

    Validates arguments, prepares headers/auth/proxy, then loops: build a
    request, connect (with timeout), send, and either follow a redirect or
    return the response.  Any failure closes the cumulative timeout timer
    before re-raising.
    """
    # NOTE: timeout clamps existing connect and read timeouts. We cannot
    # set the default to None because we need to detect if the user wants
    # to use the existing timeouts by setting timeout to None.

    if encoding is not None:
        warnings.warn(
            "encoding parameter is not supported, "
            "please use FormData(charset='utf-8') instead",
            DeprecationWarning)

    if self.closed:
        raise RuntimeError('Session is closed')

    if data is not None and json is not None:
        raise ValueError(
            'data and json parameters can not be used at the same time')
    elif json is not None:
        data = payload.JsonPayload(json, dumps=self._json_serialize)

    if not isinstance(chunked, bool) and chunked is not None:
        warnings.warn(
            'Chunk size is deprecated #1615', DeprecationWarning)

    redirects = 0
    history = []
    version = self._version

    # Merge with default headers and transform to CIMultiDict
    headers = self._prepare_headers(headers)
    proxy_headers = self._prepare_headers(proxy_headers)

    if auth is None:
        auth = self._default_auth
    # It would be confusing if we support explicit Authorization header
    # with `auth` argument
    if (headers is not None and
            auth is not None and
            hdrs.AUTHORIZATION in headers):
        raise ValueError("Can't combine `Authorization` header with "
                         "`auth` argument")

    skip_headers = set(self._skip_auto_headers)
    if skip_auto_headers is not None:
        for i in skip_auto_headers:
            skip_headers.add(istr(i))

    if proxy is not None:
        try:
            proxy = URL(proxy)
        except ValueError:
            # NOTE(review): reports ``url`` although ``proxy`` failed to
            # parse — looks like it should be ``InvalidURL(proxy)``; confirm.
            raise InvalidURL(url)

    try:
        url = URL(url)
    except ValueError:
        raise InvalidURL(url)

    # timeout is cumulative for all request operations
    # (request, redirects, responses, data consuming)
    tm = TimeoutHandle(
        self._loop,
        timeout if timeout is not sentinel else self._read_timeout)
    handle = tm.start()

    url = URL(url)
    timer = tm.timer()
    try:
        with timer:
            # Redirect-following loop: each iteration issues one request.
            while True:
                # Fragments are never sent on the wire.
                url = url.with_fragment(None)
                cookies = self._cookie_jar.filter_cookies(url)

                if proxy is not None:
                    proxy = URL(proxy)
                elif self._trust_env:
                    # Pick a proxy from the environment matching the scheme.
                    for scheme, proxy_info in proxies_from_env().items():
                        if scheme == url.scheme:
                            proxy, proxy_auth = proxy_info
                            break

                req = self._request_class(
                    method, url, params=params, headers=headers,
                    skip_auto_headers=skip_headers, data=data,
                    cookies=cookies, auth=auth, version=version,
                    compress=compress, chunked=chunked,
                    expect100=expect100, loop=self._loop,
                    response_class=self._response_class,
                    proxy=proxy, proxy_auth=proxy_auth, timer=timer,
                    session=self, auto_decompress=self._auto_decompress,
                    verify_ssl=verify_ssl, fingerprint=fingerprint,
                    ssl_context=ssl_context, proxy_headers=proxy_headers)

                # connection timeout
                try:
                    with CeilTimeout(self._conn_timeout, loop=self._loop):
                        conn = yield from self._connector.connect(req)
                except asyncio.TimeoutError as exc:
                    raise ServerTimeoutError(
                        'Connection timeout '
                        'to host {0}'.format(url)) from exc

                conn.writer.set_tcp_nodelay(True)
                try:
                    resp = req.send(conn)
                    try:
                        yield from resp.start(conn, read_until_eof)
                    except:
                        resp.close()
                        conn.close()
                        raise
                except ClientError:
                    raise
                except OSError as exc:
                    raise ClientOSError(*exc.args) from exc

                self._cookie_jar.update_cookies(resp.cookies, resp.url)

                # redirects
                if resp.status in (
                        301, 302, 303, 307, 308) and allow_redirects:
                    redirects += 1
                    history.append(resp)
                    if max_redirects and redirects >= max_redirects:
                        resp.close()
                        break
                    else:
                        resp.release()

                    # For 301 and 302, mimic IE, now changed in RFC
                    # https://github.com/kennethreitz/requests/pull/269
                    if (resp.status == 303 and
                            resp.method != hdrs.METH_HEAD) \
                            or (resp.status in (301, 302) and
                                resp.method == hdrs.METH_POST):
                        method = hdrs.METH_GET
                        data = None
                        if headers.get(hdrs.CONTENT_LENGTH):
                            headers.pop(hdrs.CONTENT_LENGTH)

                    r_url = (resp.headers.get(hdrs.LOCATION) or
                             resp.headers.get(hdrs.URI))
                    if r_url is None:
                        # see github.com/aio-libs/aiohttp/issues/2022
                        break

                    try:
                        r_url = URL(
                            r_url, encoded=not self.requote_redirect_url)
                    except ValueError:
                        raise InvalidURL(r_url)

                    scheme = r_url.scheme
                    if scheme not in ('http', 'https', ''):
                        resp.close()
                        raise ValueError(
                            'Can redirect only to http or https')
                    elif not scheme:
                        # Relative redirect: resolve against the current URL.
                        r_url = url.join(r_url)

                    url = r_url
                    params = None
                    resp.release()
                    continue

                break

        # check response status
        if self._raise_for_status:
            resp.raise_for_status()

        # register connection
        if handle is not None:
            if resp.connection is not None:
                resp.connection.add_callback(handle.cancel)
            else:
                handle.cancel()

        resp._history = tuple(history)
        return resp

    except:
        # cleanup timer
        tm.close()
        if handle:
            handle.cancel()
            handle = None
        raise
def test_with_fragment_non_ascii():
    '''Non-ASCII fragment: percent-encoded raw form, decoded .fragment.'''
    base = URL('http://example.com')
    tagged = base.with_fragment('фрагм')
    assert tagged.raw_fragment == '%D1%84%D1%80%D0%B0%D0%B3%D0%BC'
    assert tagged.fragment == 'фрагм'
def __init__(self, method: str, url: URL, *,
             params: Optional[Mapping[str, str]]=None,
             headers: Optional[LooseHeaders]=None,
             skip_auto_headers: Iterable[str]=frozenset(),
             data: Any=None,
             cookies: Optional[LooseCookies]=None,
             auth: Optional[BasicAuth]=None,
             version: http.HttpVersion=http.HttpVersion11,
             compress: Optional[str]=None,
             chunked: Optional[bool]=None,
             expect100: bool=False,
             loop: Optional[asyncio.AbstractEventLoop]=None,
             response_class: Optional[Type['ClientResponse']]=None,
             proxy: Optional[URL]=None,
             proxy_auth: Optional[BasicAuth]=None,
             timer: Optional[BaseTimerContext]=None,
             session: Optional['ClientSession']=None,
             ssl: Union[SSLContext, bool, Fingerprint, None]=None,
             proxy_headers: Optional[LooseHeaders]=None,
             traces: Optional[List['Trace']]=None):
    """Build a client request.

    Merges ``params`` into the URL query and runs the ``update_*``
    preparation pipeline (version, host, headers, cookies, encoding, auth,
    proxy, body) before the request is sent.
    """

    if loop is None:
        loop = asyncio.get_event_loop()

    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast('ClientSession', session)
    if params:
        # Merge explicit ``params`` with any query already on the URL.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.original_url = url
    # Fragments are client-side only and must never reach the wire.
    self.url = url.with_fragment(None)
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class = real_response_class  # type: Type[ClientResponse]
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl

    if loop.get_debug():
        self._source_traceback = traceback.extract_stack(sys._getframe(1))

    # Preparation pipeline; order matters (e.g. auto headers depend on the
    # host, transfer encoding depends on the body).
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)

    self.update_body_from_data(data)
    if data or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces