async def fetch_and_combine_file_info(self, file_id: str):
    """Fetch a file's details and attach its segment list under 'segments'."""
    async with httpx.AsyncClient() as client:
        info = await self.fetch_file_details(file_id, client)
        info['segments'] = await self.fetch_file_segments(file_id, client)
        return info
def get_new_session(self):
    """Create a fresh AsyncClient for this object.

    NOTE: TLS certificate verification is disabled and environment
    proxy/CA settings are ignored (trust_env=False) for this session.
    """
    session = httpx.AsyncClient(verify=False, trust_env=False)
    return session
async def test_get():
    """Integration test."""
    async with httpx.AsyncClient() as client:
        api = gl_httpx.GitLabAPI(client, "gidgetlab")
        payload = await api.getitem("/templates/licenses/mit")
        assert "description" in payload
async def api_lnurlscan(code: str):
    """Decode an LNURL string and return its parameters as a JSON response.

    Rejects malformed LNURLs, login LNURLs, and channel requests; for
    withdraw/pay responses it flattens the relevant fields (kind, fixed
    flag, callback with k1, metadata) into the returned dict.
    """
    try:
        url = lnurl.Lnurl(code)
    except ValueError:
        return jsonify({"error": "invalid lnurl"}), HTTPStatus.BAD_REQUEST

    domain = urlparse(url.url).netloc

    # lnurl-auth is not handled by this endpoint
    if url.is_login:
        return jsonify({
            "domain": domain,
            "kind": "auth",
            "error": "unsupported"
        }), HTTPStatus.BAD_REQUEST

    async with httpx.AsyncClient() as client:
        r = await client.get(url.url, timeout=40)
        if r.is_error:
            return jsonify({
                "domain": domain,
                "error": "failed to get parameters"
            }), HTTPStatus.SERVICE_UNAVAILABLE

    try:
        jdata = json.loads(r.text)
        data: lnurl.LnurlResponseModel = lnurl.LnurlResponse.from_dict(jdata)
    except (json.decoder.JSONDecodeError,
            lnurl.exceptions.LnurlResponseException):
        return (
            jsonify({
                "domain": domain,
                "error": f"got invalid response '{r.text[:200]}'"
            }),
            HTTPStatus.SERVICE_UNAVAILABLE,
        )

    # channel requests are not supported
    if type(data) is lnurl.LnurlChannelResponse:
        return jsonify({
            "domain": domain,
            "kind": "channel",
            "error": "unsupported"
        }), HTTPStatus.BAD_REQUEST

    params: Dict = data.dict()

    if type(data) is lnurl.LnurlWithdrawResponse:
        params.update(kind="withdraw")
        params.update(fixed=data.min_withdrawable == data.max_withdrawable)

        # callback with k1 already in it
        parsed_callback: ParseResult = urlparse(data.callback)
        qs: Dict = parse_qs(parsed_callback.query)
        qs["k1"] = data.k1
        parsed_callback = parsed_callback._replace(
            query=urlencode(qs, doseq=True))
        params.update(callback=urlunparse(parsed_callback))

    if type(data) is lnurl.LnurlPayResponse:
        params.update(kind="pay")
        params.update(fixed=data.min_sendable == data.max_sendable)
        params.update(description_hash=data.metadata.h)
        params.update(description=data.metadata.text)
        if data.metadata.images:
            # pick the image entry with the smallest encoded payload
            image = min(data.metadata.images, key=lambda image: len(image[1]))
            data_uri = "data:" + image[0] + "," + image[1]
            params.update(image=data_uri)
        params.update(commentAllowed=jdata.get("commentAllowed", 0))

    params.update(domain=domain)
    return jsonify(params)
def create_client(
    self,
    transport: typing.Optional[AsyncOpenTelemetryTransport] = None,
):
    """Return a fresh ``httpx.AsyncClient``.

    The ``transport`` argument is part of the factory interface but is
    not applied here — a default-transport client is always returned.
    """
    client = httpx.AsyncClient()
    return client
async def search_name(name):
    """POST *name* to the ceve-market search API and return the decoded JSON."""
    endpoint = 'https://www.ceve-market.org/api/searchname'
    async with httpx.AsyncClient() as client:
        response = await client.post(endpoint, data={"name": name})
        return response.json()
async def get_new_list_default() -> List:
    """
    Fetch the 12 most recently listed moments.

    Raises
    ------
    HttpxRequestException

    Return
    ------
    A list of MomentListing items.
    """
    # GraphQL request body; all "by*" filters are left empty so the
    # endpoint returns the default (newest-first) listing page.
    payload = {
        "operationName": "SearchMomentListingsDefault",
        "variables": {
            "byPlayers": [],
            "byTagNames": [],
            "byTeams": [],
            "bySets": [],
            "bySeries": [],
            "bySetVisuals": [],
            "byGameDate": {
                "start": None,
                "end": None
            },
            "byCreatedAt": {
                "start": None,
                "end": None
            },
            "byPower": {
                "min": None,
                "max": None
            },
            "byPrice": {
                "min": None,
                "max": None
            },
            "byListingType": ["BY_USERS"],
            "byPlayStyle": [],
            "bySkill": [],
            "byPrimaryPlayerPosition": [],
            "bySerialNumber": {
                "min": None,
                "max": None
            },
            "searchInput": {
                "pagination": {
                    "cursor": "",
                    "direction": "RIGHT",
                    "limit": 12
                }
            },
            "orderBy": "UPDATED_AT_DESC"
        },
        # NOTE: the .graphql file is re-read on every call
        "query": open('graphql/SearchMomentListingsDefault.graphql').read()
    }
    url = base_market_url + "SearchMomentListingsDefault="
    try:
        async with httpx.AsyncClient() as client:
            r = await client.post(url,
                                  data=json.dumps(payload),
                                  headers=headers)
            response_json = r.json()
            # drill into the nested GraphQL envelope down to the listing array
            return response_json['data']['searchMomentListings']['data'][
                'searchSummary']['data']['data']
    except Exception as e:
        # any failure (network, JSON shape, missing file) is reported as
        # a single project-level exception; the caller handles retries
        logger.warning(
            f"httpx request error({sys._getframe().f_code.co_name}): {e}")
        raise HttpxRequestException
async def test_async_proxy_close():
    """A proxied AsyncClient can be closed without ever being used."""
    proxied = httpx.AsyncClient(proxies={"all": PROXY_URL})
    await proxied.aclose()
def test_unsupported_proxy_scheme():
    """A non-HTTP(S) proxy URL must be rejected at construction time."""
    with pytest.raises(ValueError):
        httpx.AsyncClient(proxies="ftp://127.0.0.1")
async def main(url,
               *,
               from_page=0,
               to_page=1,
               max_page=-1,
               per_page=3,
               all_post=False,
               board_name=None,
               fetch_comment=False,
               is_test=False):
    """Crawl PTT board index pages, parse the posts, and bulk-upload them.

    Pages are walked backwards from the newest (or *from_page*) down to
    *to_page* / *max_page*, *per_page* index pages at a time. Each batch
    is fetched concurrently, parsed, optionally recorded to disk when
    *is_test*, and uploaded to Elasticsearch with retry on failure.
    """
    if all_post:
        base_url = config['PTT_ALLPOST']['url']
    elif board_name:
        base_url = config['PTT_BOARD']['url'] + '/' + board_name
    if not from_page:
        # fetch the initial index.html page and the surrounding page numbers
        with httpx.Client(cookies=COOKIES,
                          timeout=int(config['REQUEST']['timeout'])) as client:
            try:
                oldest, prev, next_, latest = fetch_last_page(client, url)
            except (httpx.RequestError, httpx.HTTPStatusError):
                logger.error(
                    f"連線錯誤, {int(config['REQUEST']['retry_after'])} 秒後重新連線")
                time.sleep(int(config['REQUEST']['retry_after']))
                return
            else:
                start = int(latest)
    else:
        start = int(from_page)
    end = int(to_page)
    # cap the number of pages to collect
    if max_page > 0:
        end = start - max_page + 1
        if end < 1:
            end = 1
    # generate the index-page links to collect, newest first
    cur_page = start
    while cur_page >= end:
        links = []
        for page_num in range(cur_page, max(cur_page - per_page, end - 1),
                              -1):
            links.append((page_num, base_url + f'/index{page_num}.html'))
        logger.debug(links)
        async with httpx.AsyncClient(
                cookies=COOKIES,
                timeout=int(config['REQUEST']['timeout'])) as client:
            # collect the post links from every index page in this batch
            tasks = [
                asyncio.create_task(fetch_post_list(client, page_num, link))
                for page_num, link in links
            ]
            try:
                result = await asyncio.gather(*tasks, return_exceptions=True)
            except:  # NOTE(review): bare except also traps KeyboardInterrupt
                logger.error('文章連結蒐集失敗')
                raise
            else:
                post_links = []  # [link]
                web_parse.parse_post_links(result,
                                           post_links=post_links,
                                           all_post=all_post)
                logger.debug(f'共有 {len(post_links)} 篇文章要蒐集')
                # fetch the content of every collected post link
                tasks = [
                    asyncio.create_task(fetch_post_content(client, link))
                    for link in post_links
                ]
                posts_info = []  # parsed post records for this batch
                try:
                    result = await asyncio.gather(*tasks)
                except:  # NOTE(review): bare except also traps KeyboardInterrupt
                    logger.error('文章內容蒐集失敗')
                    raise
                else:
                    web_parse.parse_posts(result,
                                          posts_info=posts_info,
                                          fetch_comment=fetch_comment)
        if is_test:
            # record the batch to a local file off the event loop
            await asyncio.to_thread(record, 'result.rec', posts_info)
        retry = True
        while retry:
            try:
                ok, retry = upload.bulk(os.getenv('ES_INDEX'),
                                        posts_info=posts_info,
                                        is_test=is_test)
            except:  # NOTE(review): bare except also traps KeyboardInterrupt
                logger.error('上傳失敗')
                raise
            if retry:
                logger.error(
                    f"{int(config['REQUEST']['retry_after'])} 秒後重新上傳")
                time.sleep(int(config['REQUEST']['retry_after']))
        cur_page -= per_page
def __init__(self, wrapped_store, mlb_urls):
    """Wrap a store and open one AsyncClient per MLB base URL."""
    self._store = wrapped_store
    self._mlb_urls = mlb_urls
    clients = []
    for base in mlb_urls:
        clients.append(httpx.AsyncClient(base_url=base))
    self.mlb_clients = clients
async def collect(self) -> Iterable:  # noqa: C901
    """Connect to a device running hyperglass-agent via HTTP.

    Builds an http(s) endpoint from the device config, JWT-encodes each
    query with the device credential, posts it, and JWT-decodes every
    200 response. Returns a tuple of decoded responses; raises RestError
    on connection/certificate problems or when nothing was returned.
    """
    log.debug("Query parameters: {}", self.query)

    client_params = {
        "headers": {
            "Content-Type": "application/json"
        },
        "timeout": params.request_timeout,
    }

    if self.device.ssl is not None and self.device.ssl.enable:
        # an enabled-but-empty certificate file is a configuration error
        with self.device.ssl.cert.open("r") as file:
            cert = file.read()
            if not cert:
                raise RestError(
                    "SSL Certificate for device {d} has not been imported",
                    level="danger",
                    d=self.device.name,
                )
        http_protocol = "https"
        # pin verification to the imported device certificate
        client_params.update({"verify": str(self.device.ssl.cert)})
        log.debug(
            (f"Using {str(self.device.ssl.cert)} to validate connection "
             f"to {self.device.name}"))
    else:
        http_protocol = "http"

    endpoint = "{protocol}://{address}:{port}/query/".format(
        protocol=http_protocol,
        address=self.device._target,
        port=self.device.port)

    log.debug("URL endpoint: {}", endpoint)

    try:
        async with httpx.AsyncClient(**client_params) as http_client:
            responses = ()
            for query in self.query:
                # sign the query payload so the agent can authenticate it
                encoded_query = await jwt_encode(
                    payload=query,
                    secret=self.device.credential.password.
                    get_secret_value(),
                    duration=params.request_timeout,
                )
                log.debug("Encoded JWT: {}", encoded_query)

                raw_response = await http_client.post(
                    endpoint, json={"encoded": encoded_query})
                log.debug("HTTP status code: {}", raw_response.status_code)

                raw = raw_response.text
                log.debug("Raw Response:\n{}", raw)

                if raw_response.status_code == 200:
                    decoded = await jwt_decode(
                        payload=raw_response.json()["encoded"],
                        secret=self.device.credential.password.
                        get_secret_value(),
                    )
                    log.debug("Decoded Response:\n{}", decoded)
                    responses += (decoded, )

                elif raw_response.status_code == 204:
                    # agent answered but produced no output for the query
                    raise ResponseEmpty(
                        params.messages.no_output,
                        device_name=self.device.name,
                    )

                else:
                    log.error(raw_response.text)

    # NOTE(review): `httpx.exceptions.HTTPError` is the pre-0.14 httpx
    # namespace; modern httpx exposes `httpx.HTTPError` — confirm the
    # pinned httpx version.
    except httpx.exceptions.HTTPError as rest_error:
        msg = parse_exception(rest_error)
        log.error("Error connecting to device {}: {}", self.device.name,
                  msg)
        raise RestError(
            params.messages.connection_error,
            device_name=self.device.name,
            error=msg,
        )
    except OSError as ose:
        log.critical(str(ose))
        raise RestError(
            params.messages.connection_error,
            device_name=self.device.name,
            error="System error",
        )
    except CertificateError as cert_error:
        log.critical(str(cert_error))
        msg = parse_exception(cert_error)
        raise RestError(
            params.messages.connection_error,
            device_name=self.device.name,
            error=f"{msg}: {cert_error}",
        )

    # NOTE(review): `raw_response` here refers to the last query's
    # response; it is unbound if `self.query` was empty — confirm the
    # caller guarantees at least one query.
    if raw_response.status_code != 200:
        log.error("Response code is {}", raw_response.status_code)
        raise RestError(
            params.messages.connection_error,
            device_name=self.device.name,
            error=params.messages.general,
        )

    if not responses:
        log.error("No response from device {}", self.device.name)
        raise RestError(
            params.messages.connection_error,
            device_name=self.device.name,
            error=params.messages.no_response,
        )

    return responses
def _get_default_client(self) -> httpx.AsyncClient:
    """Build an AsyncClient preconfigured with this instance's User-Agent."""
    logger.debug('returning default http client with user agent: %s',
                 self._user_agent)
    default_headers = {'User-Agent': self._user_agent}
    return httpx.AsyncClient(headers=default_headers)
async def search(query: str):
    """Look *query* up on the Jisho API and wrap the JSON under 'query'."""
    async with httpx.AsyncClient() as client:
        response = await client.get(jisho + query)
        return {"query": response.json()}
async def _request():
    """Run the prepared api_request through an HttpxAdapter on a throwaway client."""
    async with httpx.AsyncClient() as client:
        adapter = HttpxAdapter(client)
        return await adapter.call(api_request)
async def api_lnurlscan(code: str):
    """Decode an LNURL and return its parameters for the wallet client.

    lnurl-auth is handled locally (the auth pubkey is derived from the
    wallet); withdraw/pay LNURLs are fetched and their fields flattened
    into the response. Channel requests are rejected as unsupported.
    """
    try:
        url = lnurl.Lnurl(code)
    except ValueError:
        return jsonify({"message": "invalid lnurl"}), HTTPStatus.BAD_REQUEST

    domain = urlparse(url.url).netloc

    # params is what will be returned to the client
    params: Dict = {"domain": domain}

    if url.is_login:
        params.update(kind="auth")
        params.update(callback=url.url)  # with k1 already in it
        # derive a domain-specific lnurl-auth key from the wallet
        lnurlauth_key = g.wallet.lnurlauth_key(domain)
        params.update(
            pubkey=lnurlauth_key.verifying_key.to_string("compressed").hex())
    else:
        async with httpx.AsyncClient() as client:
            r = await client.get(url.url, timeout=40)
            if r.is_error:
                return (
                    jsonify({"domain": domain,
                             "message": "failed to get parameters"}),
                    HTTPStatus.SERVICE_UNAVAILABLE,
                )

        try:
            jdata = json.loads(r.text)
            data: lnurl.LnurlResponseModel = lnurl.LnurlResponse.from_dict(jdata)
        except (json.decoder.JSONDecodeError,
                lnurl.exceptions.LnurlResponseException):
            return (
                jsonify(
                    {
                        "domain": domain,
                        "message": f"got invalid response '{r.text[:200]}'",
                    }
                ),
                HTTPStatus.SERVICE_UNAVAILABLE,
            )

        if type(data) is lnurl.LnurlChannelResponse:
            return (
                jsonify(
                    {"domain": domain, "kind": "channel",
                     "message": "unsupported"}
                ),
                HTTPStatus.BAD_REQUEST,
            )

        params.update(**data.dict())

        if type(data) is lnurl.LnurlWithdrawResponse:
            params.update(kind="withdraw")
            params.update(fixed=data.min_withdrawable == data.max_withdrawable)

            # callback with k1 already in it
            parsed_callback: ParseResult = urlparse(data.callback)
            qs: Dict = parse_qs(parsed_callback.query)
            qs["k1"] = data.k1

            # balanceCheck/balanceNotify
            if "balanceCheck" in jdata:
                params.update(balanceCheck=jdata["balanceCheck"])

            # format callback url and send to client
            parsed_callback = parsed_callback._replace(
                query=urlencode(qs, doseq=True))
            params.update(callback=urlunparse(parsed_callback))

        if type(data) is lnurl.LnurlPayResponse:
            params.update(kind="pay")
            params.update(fixed=data.min_sendable == data.max_sendable)
            params.update(description_hash=data.metadata.h)
            params.update(description=data.metadata.text)
            if data.metadata.images:
                # pick the image entry with the smallest encoded payload
                image = min(data.metadata.images,
                            key=lambda image: len(image[1]))
                data_uri = "data:" + image[0] + "," + image[1]
                params.update(image=data_uri)

            params.update(commentAllowed=jdata.get("commentAllowed", 0))

    return jsonify(params)
async def reverse(bot: Amime, message: Message):
    """Reverse-search the replied-to media via Google Images and reply with results."""
    reply = message.reply_to_message
    lang = message._lang

    # ignore replies to the bot's own messages
    if reply.from_user.id == bot.me.id:
        return

    if not reply.media:
        await message.reply_text(lang.media_not_found_text)
        return

    media = (reply.photo or reply.sticker or reply.animation
             or reply.document or reply.video)

    # documents/videos are searched by their thumbnail, if any
    if isinstance(media, (Document, Video)):
        if bool(media.thumbs) and len(media.thumbs) > 0:
            media = media.thumbs[0]
        else:
            return

    # placeholder message while the search runs
    sent = await message.reply_photo("https://i.imgur.com/m0N2pFc.jpg",
                                     caption=lang.searching_media_text)

    path = await bot.download_media(media)

    async with httpx.AsyncClient(http2=True) as client:
        try:
            # NOTE(review): `allow_redirects` is the pre-0.20 httpx
            # keyword (now `follow_redirects`) — confirm the pinned
            # httpx version.
            response = await client.post(
                "https://www.google.com/searchbyimage/upload",
                files=dict(
                    encoded_image=(os.path.basename(path), open(path, "rb")),
                    image_content="",
                ),
                timeout=20.0,
                allow_redirects=False,
            )
        except httpx.TimeoutException:
            await sent.edit_text(lang.timed_out_text)
            return

        if response.status_code == 400:
            await sent.edit_text(lang.api_overuse_text)
            return

        # Google answers the upload with a redirect to the results page
        url = response.headers["Location"]

        opener = urllib.request.build_opener()
        source = opener.open(f"{url}&hl=en").read()

        soup = bs4.BeautifulSoup(source, "html.parser")

        results = {
            "similar_images": None,
            "override": None,
            "best_guess": None,
        }

        try:
            for bess in soup.findAll("a", {"class": "PBorbe"}):
                results["override"] = f"https://www.google.com{bess['href']}"
        except BaseException:
            pass

        # NOTE(review): attribute name "clas" looks like a typo for
        # "class" — confirm against the scraped markup.
        for similar_image in soup.findAll("input", {"clas": "gLFyf"}):
            results[
                "similar_images"] = f"https://www.google.com/search?tbm=isch&q={urllib.parse.quote_plus(similar_image['value'])}"

        for best_guess in soup.findAll("div", {"class": "r5a77d"}):
            results["best_guess"] = best_guess.get_text()

        guess = results["best_guess"]
        page_url = None
        if results["override"] is not None:
            page_url = results["override"]
        else:
            page_url = results["similar_images"]

        if guess is None and page_url is None:
            await sent.edit_text(lang.no_results_text)
            return

        single = opener.open(page_url).read().decode()

        images = []
        count = 0
        # NOTE(review): `[.png|.jpg|.jpeg]` is a character class (any of
        # those characters), not an extension alternation — it matches
        # far more than intended; verify before relying on it.
        for image in re.findall(
                r"^,\[\"(.*[.png|.jpg|.jpeg])\",[0-9]+,[0-9]+\]$", single,
                re.I | re.M):
            count += 1
            images.append(image)
            # keep at most 5 images for the media group
            if count >= 5:
                break

        if len(images) == 0:
            await sent.edit_text(lang.no_results_text)
            return

        await sent.reply_media_group(
            media=[InputMediaPhoto(image) for image in images])
        await sent.edit_text(
            lang.search_results_text.format(
                query=f"<a href='{url}'>{guess}</a>"),
            disable_web_page_preview=True,
        )

        await client.aclose()

    os.remove(path)
async def api_payments_create_invoice():
    """Create a new invoice for the current wallet.

    Reads the request payload from ``g.data``:
    - ``description_hash`` (hex) takes precedence over ``memo``.
    - a ``unit`` other than "sat" converts ``amount`` from that fiat
      unit to satoshis before invoicing.
    - an optional ``lnurl_callback`` is notified with the fresh payment
      request (best-effort; failures are reported in the response).

    Returns the created invoice details with HTTP 201, or an error JSON
    on invoice failure.
    """
    if "description_hash" in g.data:
        description_hash = unhexlify(g.data["description_hash"])
        memo = ""
    else:
        description_hash = b""
        memo = g.data["memo"]

    # BUGFIX: the original condition `g.data.get("unit") or "sat" == "sat"`
    # parses as `g.data.get("unit") or ("sat" == "sat")`, which is always
    # true, so the fiat-conversion branch below was unreachable.
    if g.data.get("unit", "sat") == "sat":
        amount = g.data["amount"]
    else:
        price_in_sats = await fiat_amount_as_satoshis(g.data["amount"],
                                                      g.data["unit"])
        amount = price_in_sats

    async with db.connect() as conn:
        try:
            payment_hash, payment_request = await create_invoice(
                wallet_id=g.wallet.id,
                amount=amount,
                memo=memo,
                description_hash=description_hash,
                extra=g.data.get("extra"),
                webhook=g.data.get("webhook"),
                conn=conn,
            )
        except InvoiceFailure as e:
            return jsonify({"message": str(e)}), 520
        except Exception as exc:
            raise exc

    invoice = bolt11.decode(payment_request)

    lnurl_response: Union[None, bool, str] = None
    if g.data.get("lnurl_callback"):
        if "lnurl_balance_check" in g.data:
            save_balance_check(g.wallet.id, g.data["lnurl_balance_check"])

        # best-effort lnurl-withdraw callback; the invoice is kept even
        # if the callback fails, and the outcome is reported to the client
        async with httpx.AsyncClient() as client:
            try:
                r = await client.get(
                    g.data["lnurl_callback"],
                    params={
                        "pr": payment_request,
                        "balanceNotify": url_for(
                            "core.lnurl_balance_notify",
                            service=urlparse(g.data["lnurl_callback"]).netloc,
                            wal=g.wallet.id,
                            _external=True,
                        ),
                    },
                    timeout=10,
                )
                if r.is_error:
                    lnurl_response = r.text
                else:
                    resp = json.loads(r.text)
                    if resp["status"] != "OK":
                        lnurl_response = resp["reason"]
                    else:
                        lnurl_response = True
            except (httpx.ConnectError, httpx.RequestError):
                lnurl_response = False

    return (
        jsonify(
            {
                "payment_hash": invoice.payment_hash,
                "payment_request": payment_request,
                # maintain backwards compatibility with API clients:
                "checking_id": invoice.payment_hash,
                "lnurl_response": lnurl_response,
            }
        ),
        HTTPStatus.CREATED,
    )
async def get_transactions(
    set_id, play_id, by_highest=False
) -> Tuple[List[Tuple[int, float, str]], float, int, str, int]:
    """
    Fetch 50 transactions and return parameters useful for judging trades.

    Parameters
    ----------
    set_id: id of the set
    play_id: id of the play
    by_highest: if True, results are ordered by sale price; otherwise by
        most recent trade time

    Raises
    ------
    HttpxRequestException

    Return
    ------
    (recent_transactions, adjust_volume, circulation_count, player, jersey_number)
    recent_transactions: list of (serial number, price, buyer name)
        tuples, typed List[Tuple[int, float, str]]
    adjust_volume: daily trade volume, 8 decimal places
    circulation_count: total supply of this play
    player: player name
    jersey_number: jersey number
    """
    payload = {
        "operationName": "SearchMarketplaceTransactions",
        "variables": {
            "input": {
                "sortBy": "PRICE_DESC" if by_highest else "UPDATED_AT_DESC",
                "searchInput": {
                    "pagination": {
                        "cursor": "",
                        "direction": "RIGHT",
                        "limit": 50
                    }
                },
                "filters": {
                    "byEditions": [{
                        "setID": set_id,
                        "playID": play_id
                    }]
                }
            }
        },
        # NOTE: the .graphql file is re-read on every call
        "query": open('graphql/SearchMarketplaceTransactions.graphql').read()
    }
    url = base_market_url + "SearchMarketplaceTransactions"
    try:
        async with httpx.AsyncClient() as client:
            r = await client.post(url,
                                  data=json.dumps(payload),
                                  headers=headers)
            response_json = r.json()
            # drill into the nested GraphQL envelope to the transaction list
            marketplace_transactions = response_json['data'][
                'searchMarketplaceTransactions']['data']['searchSummary'][
                    'data']['data']
            if len(marketplace_transactions) == 0:
                # sentinel result for "no transactions found"
                return [], 0, 0, '', -1
            recent_transactions = list(
                map(
                    lambda i: (int(i['moment']['flowSerialNumber']),
                               float(i['price']), i['buyer']['username']),
                    marketplace_transactions))
            adjust_volume = 0
            if not by_highest:
                # use the most-recent ordering to estimate the daily volume
                adjust_volume = calculate_adjust_volume(
                    list(
                        map(lambda i: i['updatedAt'],
                            marketplace_transactions)))
            first_moment = marketplace_transactions[0]['moment']
            circulation_count = int(
                first_moment['setPlay']['circulationCount'])
            player = first_moment['play']['stats']['playerName']
            jersey_number = int(first_moment['play']['stats']['jerseyNumber'])
            return recent_transactions, adjust_volume, circulation_count, player, jersey_number
    except Exception as e:
        # any failure (network, JSON shape, missing file) is reported as
        # a single project-level exception; the caller handles retries
        logger.warning(
            f"httpx request error({sys._getframe().f_code.co_name}): {e}")
        raise HttpxRequestException
class Service(ABC):
    """Abstract base for a phone-verification service client.

    Provides a shared HTTP client, logged request helpers, random
    identity generation for sign-up forms, and a phone-mask formatter.
    Subclasses implement :meth:`run`.
    """

    # browser-like headers sent with every request
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Linux; Android 5.0.2; 7045Y Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2728.43 Mobile Safari/537.36",
        "X-Requested-With": "XMLHttpRequest",
    }
    # dialing prefix -> country short code
    country_codes = {"7": "ru", "375": "by", "380": "ua"}
    phone_codes = []
    # NOTE(review): one client shared by all instances, with TLS
    # certificate verification disabled (verify=False)
    client = httpx.AsyncClient(headers=headers, verify=False)

    def __init__(self, phone: str, country_code: int):
        # split the full number into country code and local part
        self.country_code = str(country_code)
        self.phone = phone[len(self.country_code):]
        self.formatted_phone = phone
        # random 5-letter Cyrillic name for registration forms
        self.russian_name = "".join(
            random.choice(
                "АаБбВвГгДдЕеЁёЖжЗзИиЙйКкЛлМмНнОоПпРрСсТтУуФфХхЦцЧчШшЩщЪъЫыЬьЭэЮюЯя"
            ) for _ in range(5))
        # username and password share one random 12-letter string
        self.username = self.password = "".join(
            random.choice(string.ascii_letters) for _ in range(12))
        self.email = self.username + "@gmail.com"

    async def get(self, *args, **kwargs):
        """GET through the shared client with error logging."""
        return await self.request_logger(self.client.get, *args, **kwargs)

    async def post(self, *args, **kwargs):
        """POST through the shared client with error logging."""
        return await self.request_logger(self.client.post, *args, **kwargs)

    async def options(self, *args, **kwargs):
        """OPTIONS through the shared client with error logging."""
        return await self.request_logger(self.client.options, *args, **kwargs)

    async def request_logger(self, function: Callable, *args, **kwargs):
        """Run *function* and log any error-class HTTP status; return the response."""
        response = await function(*args, **kwargs)
        if response.is_error:
            logger.error(
                f"{self.__class__.__name__} returned an error HTTP code: {response.status_code}"
            )
        return response

    async def get_csrf_token(self, url: str, pattern):
        """Fetch *url* and extract group 1 of *pattern* from the page text."""
        response = await self.get(url)
        return re.search(pattern, response.text).group(1).strip()

    @staticmethod
    def format(phone: str, mask: str, mask_symbol: str = "*"):
        """Render *phone* into *mask*, substituting digits for *mask_symbol*.

        If the digit count does not match the mask, the phone is
        returned unchanged.
        """
        if len(phone) == mask.count(mask_symbol):
            formatted_phone = ""
            for symbol in mask:
                if symbol == mask_symbol:
                    # consume the next digit of the phone number
                    formatted_phone += phone[0]
                    phone = phone[(len(phone) - 1) * -1:]
                else:
                    formatted_phone += symbol
        else:
            formatted_phone = phone
        return formatted_phone

    @abstractmethod
    async def run(self):
        # subclasses perform the actual service interaction here
        pass
async def api_payments_pay_lnurl():
    """Pay an LNURL-pay request.

    Fetches an invoice from the LNURL callback in ``g.data``, validates
    its amount and description_hash against what was requested, then
    pays it from the current wallet. Returns 201 with the payment hash
    on success, or an error JSON otherwise.
    """
    domain = urlparse(g.data["callback"]).netloc

    async with httpx.AsyncClient() as client:
        try:
            r = await client.get(
                g.data["callback"],
                params={
                    "amount": g.data["amount"],
                    "comment": g.data["comment"]
                },
                timeout=40,
            )
            if r.is_error:
                return jsonify({"message": "failed to connect"}), HTTPStatus.BAD_REQUEST
        except (httpx.ConnectError, httpx.RequestError):
            return jsonify({"message": "failed to connect"}), HTTPStatus.BAD_REQUEST

    params = json.loads(r.text)
    if params.get("status") == "ERROR":
        return jsonify({
            "message": f"{domain} said: '{params.get('reason', '')}'"
        }), HTTPStatus.BAD_REQUEST

    # the service must return an invoice matching exactly what we asked for
    invoice = bolt11.decode(params["pr"])
    if invoice.amount_msat != g.data["amount"]:
        return (
            jsonify({
                "message":
                f"{domain} returned an invalid invoice. Expected {g.data['amount']} msat, got {invoice.amount_msat}."
            }),
            HTTPStatus.BAD_REQUEST,
        )
    if invoice.description_hash != g.data["description_hash"]:
        return (
            jsonify({
                "message":
                f"{domain} returned an invalid invoice. Expected description_hash == {g.data['description_hash']}, got {invoice.description_hash}."
            }),
            HTTPStatus.BAD_REQUEST,
        )

    try:
        extra = {}

        if params.get("successAction"):
            extra["success_action"] = params["successAction"]
        if g.data["comment"]:
            extra["comment"] = g.data["comment"]

        # NOTE(review): pay_invoice is not awaited here — confirm it is
        # synchronous in this codebase.
        payment_hash = pay_invoice(
            wallet_id=g.wallet.id,
            payment_request=params["pr"],
            description=g.data.get("description", ""),
            extra=extra,
        )
    except Exception as exc:
        traceback.print_exc(7)
        g.db.rollback()
        return jsonify({"message": str(exc)}), HTTPStatus.INTERNAL_SERVER_ERROR

    return (
        jsonify({
            "success_action": params.get("successAction"),
            "payment_hash": payment_hash,
            # maintain backwards compatibility with API clients:
            "checking_id": payment_hash,
        }),
        HTTPStatus.CREATED,
    )
async def get_user_from_remote(url):
    """Download user data from *url* and upsert the users into the local DB."""
    async with httpx.AsyncClient() as client:
        remote = await client.get(url)
        payload = remote.json()
        User.create_or_update_by_user_data_list(payload["users"])
async def resolve_person(self, info, id):
    """GraphQL resolver: fetch a SWAPI person record by numeric id."""
    endpoint = f'https://swapi.dev/api/people/{id}'
    async with httpx.AsyncClient() as client:
        response = await client.get(endpoint)
        return response.json()
async def update_arch_versions() -> None:
    """Refresh ``state.arch_versions`` from the Arch repositories and the AUR.

    First gathers all configured repos concurrently and records, per
    package and per source, the newest MSYS-mapped version together with
    its archlinux.org URL and build date. Then fills remaining gaps from
    the AUR package list and RPC info endpoint.
    """
    print("update versions")
    arch_versions: Dict[str, Tuple[str, str, int]] = {}
    awaitables = []
    for (url, repo, variant) in VERSION_CONFIG:
        awaitables.append(parse_repo(repo, variant, url))
    for sources in (await asyncio.gather(*awaitables)):
        for source in sources.values():
            msys_ver = arch_version_to_msys(source.version)
            # record per-package entries, keeping only the newest version
            for p in source.packages.values():
                url = "https://www.archlinux.org/packages/%s/%s/%s/" % (
                    p.repo, p.arch, p.name)
                if p.name in arch_versions:
                    old_ver = arch_versions[p.name][0]
                    if version_is_newer_than(msys_ver, old_ver):
                        arch_versions[p.name] = (msys_ver, url, p.builddate)
                else:
                    arch_versions[p.name] = (msys_ver, url, p.builddate)

            # also record an entry under the source package's own name
            url = "https://www.archlinux.org/packages/%s/%s/%s/" % (
                source.repos[0], source.arches[0], source.name)
            if source.name in arch_versions:
                old_ver = arch_versions[source.name][0]
                if version_is_newer_than(msys_ver, old_ver):
                    arch_versions[source.name] = (msys_ver, url, source.date)
            else:
                arch_versions[source.name] = (msys_ver, url, source.date)

    print("done")

    print("update versions from AUR")
    # a bit hacky, try to get the remaining versions from AUR
    possible_names = set()
    for s in state.sources.values():
        if package_name_is_vcs(s.name):
            continue
        for p in s.packages.values():
            possible_names.update(get_arch_names(p.realname))
        possible_names.update(get_arch_names(s.realname))

    async with httpx.AsyncClient() as client:
        r = await client.get("https://aur.archlinux.org/packages.gz",
                             timeout=REQUEST_TIMEOUT)
        aur_packages = set()
        # keep only names we still need and that we might actually ship
        for name in r.text.splitlines():
            if name.startswith("#"):
                continue
            if name in arch_versions:
                continue
            if name not in possible_names:
                continue
            aur_packages.add(name)

        # batch all remaining names into one RPC info query
        aur_url = ("https://aur.archlinux.org/rpc/?v=5&type=info&" +
                   "&".join(["arg[]=%s" % n for n in aur_packages]))
        r = await client.get(aur_url, timeout=REQUEST_TIMEOUT)
        for result in r.json()["results"]:
            name = result["Name"]
            if name not in aur_packages or name in arch_versions:
                continue
            last_modified = result["LastModified"]
            url = "https://aur.archlinux.org/packages/%s" % name
            arch_versions[name] = (result["Version"], url, last_modified)

    print("done")

    state.arch_versions = arch_versions
# This file is part of RaVaNBot (Telegram Bot) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import httpx http = httpx.AsyncClient(http2=True)
def _make_async_http_client(loop):
    """Create an AsyncClient and register its shutdown at interpreter exit."""
    client = httpx.AsyncClient()

    def _close_client():
        # drive the coroutine to completion on the captured loop
        loop.run_until_complete(client.aclose())

    atexit.register(_close_client)
    return client
async def get_sub_list(self, _) -> list[RawPost]:
    """Fetch the latest five FF union-news entries from the SDO API."""
    async with httpx.AsyncClient() as client:
        response = await client.get(
            "http://api.act.sdo.com/UnionNews/List?gameCode=ff&category=5309,5310,5311,5312,5313&pageIndex=0&pageSize=5"
        )
        payload = response.json()
        return payload["Data"]
async def client(app):
    """Yield an AsyncClient wired directly to the ASGI app under test."""
    async with httpx.AsyncClient(app=app,
                                 base_url='http://dicetower.app/') as test_client:
        yield test_client
async def get_client(app: typing.Callable) -> typing.AsyncIterator:
    """Run the app's lifespan events and yield a test client bound to it."""
    async with LifespanManager(app):
        async with httpx.AsyncClient(app=app,
                                     base_url="http://testserver/") as http_client:
            yield http_client
# Copyright (C) 2019 Aiogram # # This file is part of Hitsuki (Telegram Bot) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import httpx timeout = httpx.Timeout(40, pool=None) http = httpx.AsyncClient(http2=True, timeout=timeout)