class WiringFastAPITest(AsyncTestCase):
    """Integration tests for dependency-injector wiring with FastAPI routes."""

    client: AsyncClient

    def setUp(self) -> None:
        super().setUp()
        # Drive the FastAPI app in-process; no real network sockets involved.
        self.client = AsyncClient(app=web.app, base_url='http://test')

    def tearDown(self) -> None:
        self._run(self.client.aclose())
        super().tearDown()

    def test_depends_marker_injection(self):
        class ServiceMock:
            async def process(self):
                return 'Foo'

        # Temporarily swap the container-provided service for a stub.
        with web.container.service.override(ServiceMock()):
            response = self._run(self.client.get('/'))

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {'result': 'Foo'})

    def test_depends_injection(self):
        # Credentials travel via HTTP basic auth; the endpoint masks them.
        response = self._run(
            self.client.get('/auth', auth=('john_smith', 'secret'))
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.json(),
            {'username': '******', 'password': '******'},
        )
async def async_download_slice_file(self, client: httpx.AsyncClient, start_range: int, range_length: int, urls: List[str], referer: str, typeid: int):
    """Fetch one byte-range slice of a remote file, retrying across mirrors.

    Issues an HTTP Range request and returns the slice payload as soon as
    any candidate URL answers with a success status.

    :param client: shared httpx async client
    :param start_range: first byte offset of the slice
    :param range_length: slice step length (NOTE(review): the Range header
        is inclusive, so up to ``range_length + 1`` bytes may come back —
        confirm this off-by-one is intended)
    :param urls: candidate download URLs (mirrors), tried in order
    :param referer: value sent in the ``referer`` request header
    :param typeid: media-kind tag (video vs. audio), passed through unchanged
    :return: ``(start_range, content_length, payload_bytes, typeid)`` on
        success, or ``None`` implicitly after every attempt has failed
    """
    # Up to 60 full passes over the mirror list before giving up.
    for _ in range(60):
        for candidate in urls:
            fetch = lambda: client.get(
                candidate,
                headers={
                    'referer': referer,
                    'range': f'bytes={start_range}-{start_range + range_length}',
                    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36 OPR/74.0.3911.107'
                },
                timeout=60,
            )
            # AsyncFaildRetry wraps the request with 2 retries and treats
            # 200/206 as acceptable statuses.
            resp = await AsyncFaildRetry(
                retry_counts=2,
                include_status=[200, 206]
            )(fetch)()
            if resp and resp.status_code in [200, 206]:
                return (
                    start_range,
                    int(resp.headers['Content-Length']),
                    resp.content,
                    typeid,
                )
async def test_internal(client: AsyncClient, factory: ComponentFactory) -> None:
    """Concurrent /auth requests with identical delegation parameters must
    share one delegated token; changing the delegate scope must mint a new,
    again-shared token.
    """
    session = await create_session_token(factory, scopes=["exec:test", "read:all"])
    await set_session_cookie(client, session.token)

    def fire(delegate_scope: str):
        # Build 100 un-awaited request coroutines with the given scope.
        return [
            client.get(
                "/auth",
                params={
                    "scope": "exec:test",
                    "delegate_to": "a-service",
                    "delegate_scope": delegate_scope,
                },
            )
            for _ in range(100)
        ]

    # First wave: all requests should resolve to the same delegated token.
    responses = await asyncio.gather(*fire("read:all"))
    assert responses[0].status_code == 200
    token = Token.from_str(responses[0].headers["X-Auth-Request-Token"])
    for resp in responses:
        assert resp.status_code == 200
        assert Token.from_str(resp.headers["X-Auth-Request-Token"]) == token

    # Second wave with a different delegate scope: a fresh token, shared
    # across all 100 requests and distinct from the first.
    responses = await asyncio.gather(*fire("exec:test"))
    assert responses[0].status_code == 200
    new_token = Token.from_str(responses[0].headers["X-Auth-Request-Token"])
    assert new_token != token
    for resp in responses:
        assert resp.status_code == 200
        assert Token.from_str(resp.headers["X-Auth-Request-Token"]) == new_token
def all_dbs(client: AsyncClient, params: Optional[dict[str, str]] = None, **client_kwargs: dict[str, Any]) -> Coroutine[Any, Any, Response]:
    """
    List every database on the CouchDB instance.

    https://docs.couchdb.org/en/stable/api/server/common.html#all-dbs

    :param client: An HTTP client with which to perform the request.
    :param params: Optional query parameter options.
    :param client_kwargs: Arguments passed to the HTTP client.
    :return: The response of the HTTP request (an awaitable coroutine).
    """
    # Returns the un-awaited coroutine so callers decide when to await it.
    return client.get(url='/_all_dbs', params=params, **client_kwargs)
def db_all_docs(
        client: AsyncClient,
        db: str,
        params: Optional[dict[str, str]] = None,
        **client_kwargs: dict[str, Any]) -> Coroutine[Any, Any, Response]:
    """
    Bulk-retrieve the documents of the CouchDB database named `db`.

    https://docs.couchdb.org/en/stable/api/database/bulk-api.html#get--db-_all_docs

    :param client: An HTTP client with which to perform the request.
    :param db: The name of the database to retrieve documents from.
    :param params: Optional query parameter options.
    :param client_kwargs: Arguments passed to the HTTP client.
    :return: The response of the HTTP request (an awaitable coroutine).
    """
    # Returns the un-awaited coroutine so callers decide when to await it.
    return client.get(url=f'/{db}/_all_docs', params=params, **client_kwargs)
async def main():
    """Fire N concurrent requests at the ASGI app (N = argv[1]) under a
    yappi wall-clock profile and print per-request timing details."""
    started_at = time.time()
    yappi.set_clock_type("wall")
    # Profiling must be started, otherwise yappi's stats stay empty.
    yappi.start()
    async with AsyncClient(app=app) as client:
        pending = [
            client.get("http://www.example.org/")
            for _ in range(int(sys.argv[1]))
        ]
        responses = await asyncio.gather(*pending)
    for resp in responses:
        body = resp.json()
        print(f"Request ID: {body[0]}")
        print(f"Actual timing: {body[1] * 1000:>8.3f}")
        print(f"Server Timing: {resp.headers.get('server-timing')}")
        print("-----")
    finished_at = time.time()
    print(f"TOTAL:{finished_at - started_at:>8.3f}")
def get_db_doc(
        client: AsyncClient,
        db: str,
        docid: str,
        params: Optional[dict[str, str]] = None,
        **client_kwargs: dict[str, Any]) -> Coroutine[Any, Any, Response]:
    """
    Retrieve from the CouchDB database with the name `db` a document having the
    document ID `docid`.

    https://docs.couchdb.org/en/stable/api/document/common.html#get--db-docid

    :param client: An HTTP client with which to perform the request.
    :param db: The name of the CouchDB database from which to retrieve the document.
    :param docid: The document ID of the document to retrieve.
    :param params: Optional query parameter options.
    :param client_kwargs: Arguments passed to the HTTP client.
    :return: The response of the HTTP request.
    """
    # Pass `url` by keyword for consistency with the sibling CouchDB wrappers
    # (`all_dbs`, `db_all_docs`), which both use `url=`.
    return client.get(url=f'/{db}/{docid}', params=params, **client_kwargs)
async def generate_preview(
    self,
    client: httpx.AsyncClient,
    url: str,
) -> Optional[str]:
    """Fetch *url* and extract its ``<title>`` text, retrying when the
    result matches the expected "bad" placeholder title for that URL.

    :param client: httpx async client used for the fetch
    :param url: page to generate a preview title for
    :return: the whitespace-normalized title, or ``None`` when every
        attempt yielded a bad or missing title
    :raises httpx.HTTPStatusError: when the server answers with an error status
    :raises asyncio.TimeoutError: when fetching or parsing times out
    """
    bad_result = self.get_bad_result(url)
    self.logger.debug("Expected bad result: %s", bad_result)
    for _ in range(self.retries):
        self.logger.info("Generating preview for: %s", url)
        response = await asyncio.wait_for(
            client.get(url),
            timeout=self.config.get('timeout', 10),
        )
        self.logger.debug("%s fetched.", url)
        response.raise_for_status()
        html = response.text
        loop = asyncio.get_event_loop()
        # Parsing is CPU-bound, so push it to the default executor.
        soup = await asyncio.wait_for(
            loop.run_in_executor(
                None,
                BeautifulSoup,
                html,
                'html.parser',
            ),
            # 2 seconds should be a plenty for
            # sane webpages.
            timeout=2,
        )
        self.logger.debug("%s parsed.", url)
        # Fix: pages without a <title> tag used to raise AttributeError
        # here (soup.title is None); treat that as a failed attempt.
        if soup.title is None:
            self.logger.info("No <title> found, retrying…")
            continue
        title: str = " ".join(soup.title.get_text().split())
        self.logger.debug("%s title extracted: %s", url, title)
        if bad_result and re.search(bad_result, title):
            self.logger.info(
                'The title matched the expected "bad title", retrying…')
        else:
            return title
    return None