def test_with_query_only_single_arg_is_supported():
    """with_query accepts at most one positional argument."""
    base = URL("http://example.com")
    # Keyword form works and matches a literally-spelled URL.
    assert base.with_query(b=3) == URL("http://example.com/?b=3")
    # Two positional arguments must be rejected.
    with pytest.raises(ValueError):
        base.with_query("a=1", "a=b")
def url_for(self, *, filename, append_version=None):
    """Return the URL serving *filename* under this static route.

    When append_version is true (or the resource default says so),
    a content-hash query parameter (``self.VERSION_KEY``) is appended
    for cache busting, provided the target resolves to a real file.
    """
    if append_version is None:
        # Fall back to the per-resource default.
        append_version = self._append_version
    if isinstance(filename, Path):
        filename = str(filename)
    # Normalise to exactly one leading slash.
    while filename.startswith('/'):
        filename = filename[1:]
    filename = '/' + filename
    url = self._prefix + URL(filename).raw_path
    url = URL(url)
    if append_version is True:
        try:
            if filename.startswith('/'):
                filename = filename[1:]
            filepath = self._directory.joinpath(filename).resolve()
            if not self._follow_symlinks:
                # relative_to() raises ValueError if the resolved path
                # escaped the served directory (e.g. via a symlink).
                filepath.relative_to(self._directory)
        except (ValueError, FileNotFoundError):
            # ValueError for case when path points to a symlink
            # while follow_symlinks is False
            return url  # relatively safe
        if filepath.is_file():
            # TODO cache file content
            # with file watcher for cache invalidation
            with open(str(filepath), mode='rb') as f:
                file_bytes = f.read()
            h = self._get_file_hash(file_bytes)
            url = url.with_query({self.VERSION_KEY: h})
            return url
    return url
def request(self, method, url, *, auth=None, status=200, text=None,
            data=None, content=None, json=None, params=None,
            headers=None, exc=None, cookies=None):
    """Mock a request.

    Registers an AiohttpClientMockResponse for *method*/*url*.
    ``headers`` now defaults to None instead of a shared mutable ``{}``
    (classic mutable-default pitfall); an empty dict is substituted per
    call, preserving the previous behaviour.
    """
    if json is not None:
        text = _json.dumps(json)
    if text is not None:
        content = text.encode('utf-8')
    if content is None:
        content = b''
    if headers is None:
        headers = {}

    # Only coerce plain strings; compiled regex patterns pass through.
    if not isinstance(url, retype):
        url = URL(url)
    if params:
        url = url.with_query(params)

    self._mocks.append(AiohttpClientMockResponse(
        method, url, status, content, cookies, exc, headers))
def maker(method, path, query_params=None, headers=None, match_info=None,
          loop=None):
    """Build a mocked websocket-handshake request for tests.

    ``query_params`` defaults to None instead of a shared mutable ``{}``
    (mutable-default pitfall); only its truthiness was ever used, so
    behaviour is unchanged.
    """
    path = URL(path)
    if query_params:
        path = path.with_query(query_params)
    if headers is None:
        # Minimal valid websocket upgrade headers (RFC 6455).
        headers = CIMultiDict(
            {
                "HOST": "server.example.com",
                "UPGRADE": "websocket",
                "CONNECTION": "Upgrade",
                "SEC-WEBSOCKET-KEY": "dGhlIHNhbXBsZSBub25jZQ==",
                "ORIGIN": "http://example.com",
                "SEC-WEBSOCKET-PROTOCOL": "chat, superchat",
                "SEC-WEBSOCKET-VERSION": "13",
            }
        )
    writer = mock.Mock()
    writer.write_headers = make_mocked_coro(None)
    writer.write = make_mocked_coro(None)
    writer.drain = make_mocked_coro(None)
    transport = mock.Mock()
    transport._drain_helper = make_mocked_coro()
    ret = make_mocked_request(method, str(path), headers,
                              writer=writer, loop=loop)

    if match_info is None:
        match_info = UrlMappingMatchInfo({}, mock.Mock())
        match_info.add_app(app)
    ret._match_info = match_info
    return ret
def aget(self, **kw):
    """GET ``self.url`` (plus optional query kwargs) and wrap the reply."""
    session = aiohttp.ClientSession()
    target = URL(self.url)
    if kw:
        target = target.with_query(**kw)
    logger.debug("GET %s", target)
    try:
        resp = yield from session.get(target, timeout=10)
        body = yield from resp.read()
    finally:
        # Always release the session, even on transport errors.
        yield from session.close()
    resp.raise_for_status()
    decoded = body.decode('utf-8')
    if resp.content_type == 'text/x-python':
        # Server ships a Python literal; evaluate it safely.
        decoded = ast.literal_eval(decoded)
    return Payload.factory(resp.status, resp.headers, decoded)
def apost(self, headers=None, data=None, **kw):
    """POST to ``self.url`` (plus optional query kwargs) and wrap the reply.

    ``headers`` now defaults to None instead of a shared mutable ``{}``
    (mutable-default pitfall); an empty dict is substituted per call.
    """
    if headers is None:
        headers = {}
    session = aiohttp.ClientSession()
    url = URL(self.url)
    if kw:
        url = url.with_query(**kw)
    logger.debug("POST %s", url)
    try:
        response = yield from session.post(
            url, headers=headers, data=data, timeout=10,
        )
        payload = yield from response.read()
    finally:
        # Always release the session, even on transport errors.
        yield from session.close()
    response.raise_for_status()
    payload = payload.decode('utf-8')
    return Payload.factory(response.status, response.headers, payload)
async def match_request(self, method, url, *, data=None, auth=None,
                        params=None, headers=None, allow_redirects=None,
                        timeout=None, json=None):
    """Match a request against pre-registered requests.

    Returns the first registered mock matching *method*/*url*, recording
    the call; raises the mock's configured exception if it has one.
    """
    data = data or json
    url = URL(url)
    if params:
        url = url.with_query(params)

    for response in self._mocks:
        if response.match_request(method, url, params):
            self.mock_calls.append((method, url, data, headers))
            if response.exc:
                raise response.exc
            return response

    # Raise explicitly instead of `assert False` so the failure still
    # fires when Python runs with -O (assertions stripped).
    raise AssertionError(
        "No mock registered for {} {} {}".format(method.upper(), url, params)
    )
def test_with_multidict_with_spaces_and_non_ascii():
    """Spaces and Cyrillic text are plus/percent-encoded in the raw query."""
    result = URL("http://example.com").with_query({"a b": "ю б"})
    assert result.raw_query_string == "a+b=%D1%8E+%D0%B1"
def test_with_query_None():
    """Passing None drops the existing query entirely."""
    source = URL("http://example.com/path?a=b")
    assert source.with_query(None).query_string == ""
def _crawl_resource_list(self, url: URL) -> List[URL]:
    """Collect every resource URL from a paginated list endpoint."""
    # Probe with limit=0 to discover the total number of entries.
    probe = self._crawl(url.with_query({"limit": 0, "offset": 0}),
                        save=False)
    total = probe["count"]
    # Then fetch the whole collection in a single request.
    listing = self._crawl(url.with_query({"limit": total, "offset": 0}))
    return [URL(entry["url"]) for entry in listing["results"]]
def test_with_query_list_bool():
    """Boolean values inside a pair list are rejected with TypeError."""
    with pytest.raises(TypeError):
        URL("http://example.com").with_query([("a", False)])
def test_with_query_list_non_pairs():
    """A list of plain strings (not key/value pairs) raises ValueError.

    Fix: the original literal was ``["a=1", "b=2" "c=3"]`` — a missing
    comma made Python implicitly concatenate the last two strings into
    ``"b=2c=3"``.  The intended three-element list is restored; every
    element is still a non-pair, so the assertion is unchanged.
    """
    url = URL("http://example.com")
    with pytest.raises(ValueError):
        url.with_query(["a=1", "b=2", "c=3"])
def test_with_query_str():
    """A raw query string replaces the URL's query wholesale."""
    base = URL("http://example.com")
    assert str(base.with_query("a=1&b=2")) == "http://example.com/?a=1&b=2"
def __init__(
    self,
    method: str,
    url: URL,
    *,
    params: Optional[Mapping[str, str]] = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Iterable[str] = frozenset(),
    data: Any = None,
    cookies: Optional[LooseCookies] = None,
    auth: Optional[BasicAuth] = None,
    version: http.HttpVersion = http.HttpVersion11,
    compress: Optional[str] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    loop: asyncio.AbstractEventLoop,
    response_class: Optional[Type["ClientResponse"]] = None,
    proxy: Optional[URL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timer: Optional[BaseTimerContext] = None,
    session: Optional["ClientSession"] = None,
    ssl: Union[SSLContext, bool, Fingerprint, None] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    traces: Optional[List["Trace"]] = None,
):
    """Prepare a client HTTP request.

    Normalises the URL/query, then runs the ordered ``update_*`` steps
    that fill in version, host, headers, cookies, auth, proxy and body.
    The order of those calls matters (e.g. body must be set before
    transfer encoding is decided) — do not reorder.
    """
    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast("ClientSession", session)
    if params:
        # Merge explicit *params* into any query already on the URL;
        # with_query() is used to apply consistent encoding.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.original_url = url
    # The fragment is never sent on the wire.
    self.url = url.with_fragment(None)
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class = real_response_class  # type: Type[ClientResponse]
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl

    if loop.get_debug():
        # Remember where the request was created for debug reporting.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))

    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)

    self.update_body_from_data(data)
    if data or self.method not in self.GET_METHODS:
        # Only non-GET-style requests (or explicit bodies) need a
        # transfer encoding decided.
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces
def test_with_query_list_of_pairs():
    """A list of (key, value) tuples is accepted."""
    base = URL("http://example.com")
    assert str(base.with_query([("a", "1")])) == "http://example.com/?a=1"
def test_with_query_str_non_ascii_and_spaces():
    """Raw query strings with spaces/Cyrillic encode and decode as expected."""
    result = URL("http://example.com").with_query("a=1 2&b=знач")
    assert result.raw_query_string == "a=1+2&b=%D0%B7%D0%BD%D0%B0%D1%87"
    assert result.query_string == "a=1 2&b=знач"
async def request(
    self,
    method: str,
    url: URL,
    *,
    auth: str,
    params: Optional[Mapping[str, str]] = None,
    data: Any = None,
    json: Any = None,
    headers: Optional[Dict[str, str]] = None,
    timeout: Optional[aiohttp.ClientTimeout] = None,
) -> AsyncIterator[aiohttp.ClientResponse]:
    """Perform one authorized request and yield the open response.

    On HTTP status >= 400 the body is decoded and mapped to a Python
    exception (OSError for errno-carrying 400s, otherwise an entry of
    ``self._exception_map`` or IllegalArgumentError) instead of yielding.
    """
    assert url.is_absolute()
    log.debug("Fetch [%s] %s", method, url)
    if headers is not None:
        real_headers: CIMultiDict[str] = CIMultiDict(headers)
    else:
        real_headers = CIMultiDict()
    real_headers["Authorization"] = auth
    if "Content-Type" not in real_headers:
        if json is not None:
            real_headers["Content-Type"] = "application/json"
    # Attach a trace id so server-side logs can be correlated.
    trace_request_ctx = SimpleNamespace()
    trace_id = self._trace_id
    if trace_id is None:
        trace_id = gen_trace_id()
    trace_request_ctx.trace_id = trace_id
    if params:
        url = url.with_query(params)
    async with self._session.request(
        method,
        url,
        headers=real_headers,
        json=json,
        data=data,
        timeout=timeout,
        trace_request_ctx=trace_request_ctx,
    ) as resp:
        if 400 <= resp.status:
            err_text = await resp.text()
            if resp.content_type.lower() == "application/json":
                try:
                    payload = jsonmodule.loads(err_text)
                except ValueError:
                    # One example would be a HEAD request for application/json
                    payload = {}
                if "error" in payload:
                    err_text = payload["error"]
            else:
                payload = {}
            if resp.status == 400 and "errno" in payload:
                # Translate a symbolic errno name (e.g. "ENOENT") into
                # the numeric value when the local platform knows it.
                os_errno: Any = payload["errno"]
                os_errno = errno.__dict__.get(os_errno, os_errno)
                raise OSError(os_errno, err_text)
            err_cls = self._exception_map.get(resp.status,
                                              IllegalArgumentError)
            raise err_cls(err_text)
        else:
            try:
                yield resp
            except GeneratorExit:
                # There is a bug in CPython and/or aiohttp,
                # if GeneratorExit is reraised @asynccontextmanager
                # reports this as an error
                # Need to investigate and fix.
                raise asyncio.CancelledError
def test_with_query():
    """A plain dict becomes the URL's query."""
    base = URL("http://example.com")
    assert str(base.with_query({"a": "1"})) == "http://example.com/?a=1"
def test_with_query_empty_value():
    """An empty string value yields a bare 'key=' parameter."""
    result = URL("http://example.com/").with_query({"a": ""})
    assert str(result) == "http://example.com/?a="
def test_with_query_bytearray():
    """A bytearray argument is rejected with TypeError."""
    with pytest.raises(TypeError):
        URL("http://example.com").with_query(bytearray(b"123"))
async def search(q: str = None, limit: int = 50, *, exact: bool = None,
                 filter_by_taxon_concept_id: int = None,
                 filter_by_hierarchy_entry_id: int = None,
                 filter_by_string: int = None,
                 cache_ttl: int = None) -> AttrSeq:
    """Search pages.

    Parameters
    ----------
    q
        the query string
    limit
        the number of item you want to get
    exact
        will find taxon pages if the title or any synonym or common
        name exactly matches the search term
    filter_by_taxon_concept_id
        given an EOL page ID, search results will be limited to members
        of that taxonomic group
    filter_by_hierarchy_entry_id
        given a Hierarchy Entry ID, search results will be limited to
        members of that taxonomic group
    filter_by_string
        given a search term, an exact search will be made and that
        matching page will be used as the taxonomic group against which
        to filter search results
    cache_ttl
        the number of seconds you wish to have the response cached

    Returns
    -------
    list of AttrDict
        Examples:
        [{
            "id": 46559121,
            "title": "Lutra lutra",
            "link": "https://eol.org/pages/46559121",
            "content": "Lutra lutra; Lutra lutra (Linnaeus, 1758); <i>Lutra lutra</i>"
        }, ... ]
    """
    # Snapshot the call arguments to turn them into query parameters.
    kwargs = deepcopy(locals())
    url = URL(base_url.with_path('/api/search/1.0.json'))
    for k in [k for k in kwargs]:
        if kwargs[k] is None or k == 'limit':
            # limit drives local pagination only; never sent to the API.
            del kwargs[k]
        elif not isinstance(kwargs[k], str):
            # The EOL API expects lowercase string values (e.g. "true").
            kwargs[k] = str(kwargs[k]).lower()
    url = url.with_query(**kwargs)
    page = 1
    result = []
    while limit > 0:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                data = await resp.json()
                result.append(data['results'])
                if data['startIndex'] + data['itemsPerPage'] >= data[
                        'totalResults']:
                    return AttrSeq(chain.from_iterable(result))
                limit -= data['itemsPerPage']
                # NOTE(review): *page* is advanced but never added to the
                # query string, so each iteration apparently refetches the
                # same page; also, when *limit* is exhausted the loop falls
                # through and the function returns None rather than an
                # AttrSeq.  Confirm intended behaviour before relying on
                # multi-page results.
                page += 1
def test_with_query_params2():
    """Semicolons in values are percent-encoded."""
    result = URL("http://example.com/get").with_query({"key": "1;2;3"})
    assert str(result) == "http://example.com/get?key=1%3B2%3B3"
    # NOTE(review): this is the tail of an enclosing signed-path helper —
    # its `def` line lies above this chunk; only comments added here.
    # Strip parameters that must not participate in the signature.
    for param in SAFE_QUERY_PARAMS:
        params.pop(param, None)

    # Sign path + remaining params with an expiring HS256 JWT.
    encoded = jwt.encode(
        {
            "iss": refresh_token_id,
            "path": url.path,
            "params": params,
            "iat": now,
            "exp": now + expiration,
        },
        secret,
        algorithm="HS256",
    )

    params[SIGN_QUERY_PARAM] = encoded
    url = url.with_query(params)
    return f"{url.path}?{url.query_string}"


@callback
def async_user_not_allowed_do_auth(
    hass: HomeAssistant, user: User, request: Request | None = None
) -> str | None:
    """Validate that user is not allowed to do auth things.

    Returns a human-readable reason string when auth is forbidden, or
    None when it is allowed.  (Further checks may follow outside this
    chunk — the function may be truncated here.)
    """
    if not user.is_active:
        return "User is not active"

    if not user.local_only:
        return None
def test_with_query_kwargs():
    """Keyword arguments become query parameters."""
    result = URL("http://example.com").with_query(query="1", query2="1")
    assert result.query == dict(query="1", query2="1")
def test_with_query():
    """A plain dict becomes the URL's query."""
    base = URL('http://example.com')
    assert str(base.with_query({'a': '1'})) == 'http://example.com/?a=1'
def test_with_query_empty_str():
    """An empty query string wipes the existing query."""
    source = URL("http://example.com/?a=b")
    assert str(source.with_query("")) == "http://example.com/"
def test_with_query_multidict():
    """A MultiDict is serialised pair by pair, in order."""
    pairs = MultiDict([('a', 'b'), ('c', 'd')])
    result = URL('http://example.com/path').with_query(pairs)
    assert str(result) == 'http://example.com/path?a=b&c=d'
def test_with_multidict_with_spaces_and_non_ascii():
    """Spaces and Cyrillic text are plus/percent-encoded in the raw query."""
    result = URL('http://example.com').with_query({'a b': 'ю б'})
    assert result.raw_query_string == 'a+b=%D1%8E+%D0%B1'
def test_with_query_multidict_with_unsafe():
    """Unsafe characters are encoded, yet round-trip through .query."""
    result = URL('http://example.com/path').with_query({'a+b': '?=+&;'})
    assert result.raw_query_string == 'a%2Bb=?%3D%2B%26%3B'
    assert result.query_string == 'a%2Bb=?%3D%2B%26%3B'
    assert result.query == {'a+b': '?=+&;'}
def test_with_query_multidict():
    """A MultiDict is serialised pair by pair, in order."""
    pairs = MultiDict([("a", "b"), ("c", "d")])
    result = URL("http://example.com/path").with_query(pairs)
    assert str(result) == "http://example.com/path?a=b&c=d"
def test_with_query_None():
    """Passing None drops the existing query entirely."""
    source = URL('http://example.com/path?a=b')
    assert source.with_query(None).query_string == ''
def test_with_query_multidict_with_unsafe():
    """Unsafe characters are encoded, yet round-trip through .query."""
    result = URL("http://example.com/path").with_query({"a+b": "?=+&;"})
    assert result.raw_query_string == "a%2Bb=?%3D%2B%26%3B"
    assert result.query_string == "a%2Bb=?%3D%2B%26%3B"
    assert result.query == {"a+b": "?=+&;"}
def test_with_query_bad_type():
    """A bare integer argument is rejected with TypeError."""
    with pytest.raises(TypeError):
        URL('http://example.com').with_query(123)
def test_with_query_bad_type():
    """A bare integer argument is rejected with TypeError."""
    with pytest.raises(TypeError):
        URL("http://example.com").with_query(123)
def test_with_query_bytearray():
    """A bytearray argument is rejected with TypeError."""
    with pytest.raises(TypeError):
        URL('http://example.com').with_query(bytearray(b'123'))
def test_with_query_memoryview():
    """A memoryview argument is rejected with TypeError."""
    with pytest.raises(TypeError):
        URL("http://example.com").with_query(memoryview(b"123"))
def test_with_query_memoryview():
    """A memoryview argument is rejected with TypeError."""
    with pytest.raises(TypeError):
        URL('http://example.com').with_query(memoryview(b'123'))
def test_with_query_only():
    """with_query on an empty URL yields just the query part."""
    assert str(URL().with_query(key="value")) == "?key=value"
def test_with_query_params2():
    """Semicolons in values are percent-encoded."""
    result = URL('http://example.com/get').with_query({'key': '1;2;3'})
    assert str(result) == 'http://example.com/get?key=1%3B2%3B3'
def test_with_query_only():
    """with_query on an empty URL yields just the query part."""
    assert str(URL().with_query(key='value')) == '?key=value'
def test_with_query_kwargs_and_args_are_mutually_exclusive():
    """Mixing a positional mapping with kwargs raises ValueError."""
    target = URL("http://example.com")
    with pytest.raises(ValueError):
        target.with_query({"a": "2", "b": "4"}, a="1")
def test_with_query_list_of_pairs():
    """A list of (key, value) tuples is accepted."""
    base = URL('http://example.com')
    assert str(base.with_query([('a', '1')])) == 'http://example.com/?a=1'
def test_with_query_empty_dict():
    """An empty mapping clears the query completely."""
    cleared = URL("http://example.com/?a=b").with_query({})
    assert cleared.query_string == ""
    assert str(cleared) == "http://example.com/"
class TestServer:
    """Run the given aiohttp application on an unused local port.

    Usable as a plain object, a sync context manager, or (on Python
    3.5+) an async context manager; close() tears everything down.
    """

    def __init__(self, app, *, scheme="http", host='127.0.0.1'):
        self.app = app
        self._loop = app.loop
        self.port = None          # chosen lazily by start_server()
        self.server = None
        self.handler = None
        self._root = None         # base URL, set by start_server()
        self.host = host
        self.scheme = scheme
        self._closed = False

    @asyncio.coroutine
    def start_server(self, **kwargs):
        # Idempotent: a second call while the server lives is a no-op.
        if self.server:
            return
        self.port = unused_port()
        self._root = URL('{}://{}:{}'.format(self.scheme,
                                             self.host, self.port))
        self.handler = self.app.make_handler(**kwargs)
        self.server = yield from self._loop.create_server(self.handler,
                                                          self.host,
                                                          self.port)

    def make_url(self, path):
        """Join *path* (must be absolute) onto the server root URL."""
        assert path.startswith('/')
        path = path[1:]
        if path.startswith('?'):
            # add a query to root path
            return self._root.with_query(path[1:])
        else:
            return self._root / path

    @asyncio.coroutine
    def close(self):
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.
        """
        if self.server is not None and not self._closed:
            self.server.close()
            yield from self.server.wait_closed()
            yield from self.app.shutdown()
            yield from self.handler.finish_connections()
            yield from self.app.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self):
        self._loop.run_until_complete(self.start_server())
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._loop.run_until_complete(self.close())

    if PY_35:
        # Async context-manager protocol, only defined when supported.
        @asyncio.coroutine
        def __aenter__(self):
            yield from self.start_server()
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_value, traceback):
            yield from self.close()
def test_with_query_str():
    """A raw query string replaces the URL's query wholesale."""
    base = URL('http://example.com')
    assert str(base.with_query('a=1&b=2')) == 'http://example.com/?a=1&b=2'
def test_with_query_kwargs():
    """Keyword arguments become query parameters."""
    result = URL('http://example.com').with_query(query='1', query2='1')
    assert result.query == dict(query='1', query2='1')
def __init__(self, method: str, url: URL, *,
             params: Optional[Mapping[str, str]]=None,
             headers: Optional[LooseHeaders]=None,
             skip_auto_headers: Iterable[str]=frozenset(),
             data: Any=None,
             cookies: Optional[LooseCookies]=None,
             auth: Optional[BasicAuth]=None,
             version: http.HttpVersion=http.HttpVersion11,
             compress: Optional[str]=None,
             chunked: Optional[bool]=None,
             expect100: bool=False,
             loop: Optional[asyncio.AbstractEventLoop]=None,
             response_class: Optional[Type['ClientResponse']]=None,
             proxy: Optional[URL]=None,
             proxy_auth: Optional[BasicAuth]=None,
             timer: Optional[BaseTimerContext]=None,
             session: Optional['ClientSession']=None,
             ssl: Union[SSLContext, bool, Fingerprint, None]=None,
             proxy_headers: Optional[LooseHeaders]=None,
             traces: Optional[List['Trace']]=None):
    """Prepare a client HTTP request.

    Normalises the URL/query, then runs the ordered ``update_*`` steps
    that fill in version, host, headers, cookies, auth, proxy and body.
    The call order matters (e.g. body must be set before transfer
    encoding is decided) — do not reorder.
    """
    if loop is None:
        # Fall back to the running loop when none is provided.
        loop = asyncio.get_event_loop()

    assert isinstance(url, URL), url
    assert isinstance(proxy, (URL, type(None))), proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    self._session = cast('ClientSession', session)
    if params:
        # Merge explicit *params* into any query already on the URL;
        # with_query() is used to apply consistent encoding.
        q = MultiDict(url.query)
        url2 = url.with_query(params)
        q.extend(url2.query)
        url = url.with_query(q)
    self.original_url = url
    # The fragment is never sent on the wire.
    self.url = url.with_fragment(None)
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class = real_response_class  # type: Type[ClientResponse]
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl

    if loop.get_debug():
        # Remember where the request was created for debug reporting.
        self._source_traceback = traceback.extract_stack(sys._getframe(1))

    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth)
    self.update_proxy(proxy, proxy_auth, proxy_headers)

    self.update_body_from_data(data)
    if data or self.method not in self.GET_METHODS:
        # Only non-GET-style requests (or explicit bodies) need a
        # transfer encoding decided.
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    if traces is None:
        traces = []
    self._traces = traces
def test_with_query_kwargs_and_args_are_mutually_exclusive():
    """Mixing a positional mapping with kwargs raises ValueError."""
    target = URL('http://example.com')
    with pytest.raises(ValueError):
        target.with_query({'a': '2', 'b': '4'}, a='1')
def ahttp(self, _method, _path, headers=None, **kw):
    """Perform one GitHub API call and return the decoded payload.

    Fixes applied:
    - ``headers`` used a shared mutable ``{}`` default, and the caller's
      dict was mutated in place when Authorization was injected; a fresh
      copy is now taken per call.
    - ``logger.warn`` (deprecated alias) replaced by ``logger.warning``.
    - the non-JSON error message used to pass ``payload`` as a second
      Exception argument, so the ``%s`` was never formatted.
    """
    url = URL('%s%s' % (_URL, _path))
    # Query already embedded in the path wins over kwargs.
    kw = dict(kw, **url.query)
    data = None
    if kw and _method == 'GET':
        url = url.with_query(**kw)
    if kw and _method in {'PATCH', 'POST'}:
        data = bytes(_encode_json(kw), 'utf-8')

    # Copy so we never mutate the caller's mapping (or a shared default).
    headers = dict(headers or {})
    if self._authorization:
        headers['Authorization'] = self._authorization

    pre_rate_limit = self.x_ratelimit_remaining
    logger.debug(
        "%s %s (remaining=%s)",
        _method, url, self.x_ratelimit_remaining,
    )

    headers = {str(k): str(v) for k, v in headers.items()}
    session = aiohttp.ClientSession()
    session_method = getattr(session, _method.lower())
    try:
        response = yield from session_method(
            url, headers=headers, data=data, timeout=self.TIMEOUT,
        )
        self._process_resp(response.headers)
        post_rate_limit = self.x_ratelimit_remaining
        if 'json' in response.content_type:
            payload = yield from response.json()
        else:
            logger.debug("Fetching raw %s.", response.content_type)
            payload = yield from response.read()
    finally:
        if not asyncio.get_event_loop().is_closed():
            logger.debug("Closing HTTP session.")
            yield from session.close()

    if response.status >= 300:
        req = JsonObject(method=_method, url=url)
        resp = JsonObject(
            code=response.status,
            json=payload,
            _headers=dict(response.headers.items()),
        )
        if response.status == 404:
            raise ApiNotFoundError(url, req, resp)
        if resp.code == 422:
            for error in resp.json['errors']:
                logger.warning("%s", error['message'])
        raise ApiError(url, req, resp)

    if 'json' not in response.content_type:
        raise Exception(
            "GitHub API did not return JSON: %s..." % (payload,)
        )

    if isinstance(payload, list):
        payload = GHList(payload)
    else:
        payload = JsonObject(payload)
    payload.__dict__['_headers'] = dict(response.headers.items())

    if pre_rate_limit > 0 and pre_rate_limit < post_rate_limit:
        logger.info(
            "GitHub rate limit reset. %d calls remained.",
            pre_rate_limit,
        )

    return payload
def test_with_query_empty_str():
    """An empty query string wipes the existing query."""
    source = URL('http://example.com/?a=b')
    assert str(source.with_query('')) == 'http://example.com/'