async def sendWithProxy(self, proxy, **kwargs):
    """Send a POST request to ``self.url`` through the given proxy.

    :param proxy: proxy object exposing a ``formated`` URL attribute
    :param kwargs: extra keyword arguments forwarded to ClientSession
    :return: decoded JSON body of the response
    """
    connector = ProxyConnector.from_url(proxy.formated)
    async with aiohttp.ClientSession(connector=connector, **kwargs) as session:
        async with session.post(self.url, ssl=False) as response:
            return await response.json()
async def check_anonymous(proxy: str) -> bool:
    """Check how anonymous a proxy is.

    :param proxy: proxy URL to validate
    :return: True if the proxy is a high-anonymity (elite) proxy
    :raises ClientError: when the check request fails for any reason
    """
    anonymous = True
    try:
        connector = ProxyConnector.from_url(proxy)
        requests.urllib3.disable_warnings()
        ua = UserAgent()
        # BUGFIX: the session-level timeout must be a ClientTimeout object;
        # recent aiohttp versions reject a bare int here.
        async with ClientSession(connector=connector,
                                 timeout=aiohttp.ClientTimeout(total=5)) as session:
            # asynchronous HTTP request
            async with session.get(ANONYMOUS_CHECK_API,
                                   ssl=False,
                                   headers={"User-Agent": ua.random()},
                                   timeout=5) as response:
                res = json.loads(await response.text())
                anonymous = ProxyValidator.is_anonymous(res)
                if anonymous:
                    proxy_validator.info(
                        "The proxy {} is anonymous".format(proxy))
        # NOTE: the explicit session.close() of the original was redundant —
        # "async with" already closes the session on exit.
        return anonymous
    except Exception as e:
        proxy_validator.error("Checking proxy {} anonymous "
                              "has an error:{} type {}".format(
                                  proxy, str(e), type(e)))
        # chain the cause so the original failure is not lost
        raise ClientError("check anonymous") from e
async def _downloader(cls, download_url: List[List[Union[int, str]]],
                      logger,
                      proxy: Optional[str] = None) \
        -> List[List[Union[int, BytesIO]]]:
    """Download every image link and return the binary payloads.

    :param download_url: list of [index, url] pairs
    :param logger: logger used for progress messages
    :param proxy: optional "socks5://host:port" proxy string
    :return: list of [index, bytes_data] pairs
    """
    connector = None
    if proxy:
        # proxy string looks like "socks5://host:port"
        proxy_info_list = proxy.split(":")
        port = int(proxy_info_list[-1])
        ip = proxy_info_list[1].split("/")[-1]
        connector = ProxyConnector(proxy_type=ProxyType.SOCKS5,
                                   host=ip,
                                   port=port)
    down = []
    # connector=None falls back to aiohttp's default TCPConnector, so one
    # code path replaces the two duplicated loops of the original; the
    # manual counter ``n`` is replaced by direct unpacking of the pairs.
    async with aiohttp.ClientSession(connector=connector) as session:
        for item in download_url:
            if proxy:
                logger.info(f"请求url:{item}")
            img = await cls._aiohttp_down(session, item)
            down.append([item[0], img])
    return down
def __init__(
        self,
        key: str,
        *,
        proxy: str = '',
        to_language: str = 'en',
        text_format: str = 'plain',
        hints: list = None
):
    """Configure the translator.

    :param key: API key (coerced to str)
    :param proxy: optional proxy URL; when set, a ProxyConnector is built
    :param to_language: target language, must be in Translator.supported
    :param text_format: must be in Translator.valid_text_formats
    :param hints: optional list of source-language hints
    :raises ValueError: on unsupported language, format, or hint
    """
    self.key = str(key)
    if to_language not in Translator.supported:
        raise ValueError("You setted wrong language")
    self._language = to_language
    if text_format not in Translator.valid_text_formats:
        raise ValueError("You setted incorrect text format")
    self.connector = None
    self.proxy_url = proxy
    if proxy:
        self.connector = ProxyConnector.from_url(proxy)
    # BUGFIX: the original used a mutable default argument (hints=[]);
    # default to None and normalize here instead.
    hints = [] if hints is None else hints
    for hint in hints:
        if hint not in self.supported:
            raise ValueError(f"Invalid language in hint list: `{hint}`")
    self.hints = list(hints)
    self.text_format = text_format
async def Check(ip):
    """Validate a proxy record against httpbin and measure its latency.

    :param ip: dict with keys 'ip', 'port', 'type'
    :return: the updated dict (type/time/delay/anonymous filled in) on
             success, False otherwise
    """
    if not ip['ip']:
        return False
    proto = ip['type'].lower()
    if proto in ("http", "https"):
        typelist = [proto]
    elif proto in ("unknown", ""):
        typelist = ["https", "http"]
    else:
        # BUGFIX: the original left ``typelist`` unbound for any other
        # proxy type and crashed with UnboundLocalError; treat that as a
        # failed check instead.
        return False
    for _type in typelist:
        proxy = _type + "://" + ip['ip'] + ":" + str(ip['port'])
        connector = ProxyConnector.from_url(proxy)
        try:
            start = time.time()
            async with aiohttp.ClientSession(connector=connector) as session:
                async with session.get("%s://httpbin.org/get" % _type,
                                       timeout=5) as resp:
                    r = await resp.json()
            # a single distinct origin IP means the proxy hides our address
            iplist = list({part.strip(" ") for part in r['origin'].split(",")})
            ip["anonymous"] = len(iplist) == 1
            assert ip['ip'] in r['origin']
            end = time.time()
            delay = int(round((end - start), 3) * 1000)
            ip['type'] = _type
            ip['time'] = int(end)
            ip['delay'] = delay
            return ip
        except Exception:
            # narrowed from a bare except; any failure just tries the
            # next protocol in typelist
            pass
    return False
async def create_proxy_connection(self, data: str):
    """Build an HTTPS ProxyConnector from a "host;port;login;password" string."""
    host, port, login, password = data.split(';')
    connector = ProxyConnector(
        proxy_type=ProxyType.HTTPS,
        host=host,
        port=int(port),
        username=login,
        password=password,
    )
    return connector
async def test_socks4_connector(url, rdns):
    """A GET through the SOCKS4 proxy should succeed with HTTP 200."""
    connector = ProxyConnector.from_url(SOCKS4_URL, rdns=rdns)
    session = aiohttp.ClientSession(connector=connector)
    async with session:
        async with session.get(url) as resp:
            assert resp.status == 200
async def test_ip_command(message: types.Message):
    """Reply to an admin with the bot's external IP as seen through the
    local Tor SOCKS5 proxy.

    :param message: incoming message; non-admin senders are ignored
    """
    if message.from_user.id not in admins:
        return
    new_connection()
    connector = ProxyConnector.from_url('socks5://127.0.0.1:9050')
    async with aiohttp.ClientSession(connector=connector) as session:
        # BUGFIX: use the response as a context manager so the underlying
        # connection is released deterministically.
        async with session.get('http://httpbin.org/ip') as response:
            result = await response.text()
    await message.answer(result)
def build_tor_connector(cli_args: argparse.Namespace) \
        -> Optional[ProxyConnector]:
    """Return a SOCKS5 connector for the local Tor daemon, or None.

    BUGFIX: the original annotation ``ProxyConnector or None`` evaluates
    to just ``ProxyConnector``; ``Optional`` expresses the intent.

    :param cli_args: parsed CLI arguments; only ``tor`` is read
    :return: a connector pointed at 127.0.0.1:9050 when --tor is set
    """
    if cli_args.tor:
        return ProxyConnector(proxy_type=ProxyType.SOCKS5,
                              host='127.0.0.1',
                              port=9050,
                              verify_ssl=False)
    return None
async def fetch(proxy_ip, proxy_port, url="http://ip-api.com/json/"):
    """Asynchronous GET request through a proxy.

    Takes the proxy IP and PORT plus the page to request. By default it
    queries ip-api.com and returns info about the proxy IP.
    """
    # NOTE: proxy_type is left at the connector's default — confirm it
    # matches the proxies being fed in.
    connector = ProxyConnector(host=proxy_ip, port=proxy_port)
    session = aiohttp.ClientSession(connector=connector)
    async with session:
        async with session.get(url) as response:
            return await response.text()
async def sendWithProxy(self, proxy, **kwargs):
    """Query ipinfo.io for the proxy's host, routed through that proxy.

    :param proxy: proxy object exposing ``formated`` and ``host``
    :return: decoded JSON response from ipinfo.io
    """
    connector = ProxyConnector.from_url(proxy.formated)
    async with aiohttp.ClientSession(connector=connector, **kwargs) as session:
        target = f"http://ipinfo.io/{proxy.host}/json"
        async with session.get(target, ssl=False) as response:
            return await response.json()
async def test_socks5_connector_with_invalid_proxy_port(unused_tcp_port):
    """Connecting to a closed SOCKS5 port must raise SocksConnectionError."""
    connector = ProxyConnector(proxy_type=ProxyType.SOCKS5,
                               host=SOCKS5_IPV4_HOST,
                               port=unused_tcp_port,
                               username=LOGIN,
                               password=PASSWORD)
    with pytest.raises(SocksConnectionError):
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(HTTP_TEST_URL) as resp:
                await resp.text()
async def test_socks5_connector_with_invalid_credentials():
    """A wrong SOCKS5 password must raise SocksError during the handshake."""
    connector = ProxyConnector(proxy_type=ProxyType.SOCKS5,
                               host=SOCKS5_IPV4_HOST,
                               port=SOCKS5_IPV4_PORT,
                               username=LOGIN,
                               password=PASSWORD + "aaa")
    with pytest.raises(SocksError):
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(HTTP_TEST_URL) as resp:
                await resp.text()
async def test_socks5_connector_with_timeout():
    """A 1s request timeout against a 3s-delay URL must raise TimeoutError."""
    connector = ProxyConnector(proxy_type=ProxyType.SOCKS5,
                               host=SOCKS5_IPV4_HOST,
                               port=SOCKS5_IPV4_PORT,
                               username=LOGIN,
                               password=PASSWORD)
    with pytest.raises(asyncio.TimeoutError):
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(HTTP_URL_DELAY_3_SEC, timeout=1) as resp:
                await resp.text()
async def sendVote(url, data):
    """POST vote payloads forever, rotating to a new proxy on any failure.

    :param url: poll endpoint
    :param data: form payload to submit
    """
    proxy = getNewProxy()
    while True:
        # BUGFIX: the original created the session (and its connector) once,
        # so rotating ``proxy`` on failure never changed the actual route —
        # the session stayed bound to the first proxy. Rebuild per proxy.
        connector = ProxyConnector.from_url(proxy)
        async with aiohttp.ClientSession(connector=connector) as session:
            try:
                async with session.post(url, data=data, ssl=False,
                                        timeout=10) as result:
                    results = await result.json()
                    print(f'Choice 1: {results["results"][""]}\n'
                          f'Choice 2: {results["results"][""]}')
            except Exception:
                proxy = getNewProxy(proxy)
async def _request(self, url):
    """GET ``url``, optionally through the configured proxy.

    :param url: target URL
    :return: the aiohttp response object, body already read
    :raises Exception: when proxy use is enabled but Config.proxy is empty
    """
    if self.use_proxy:
        if not Config.proxy:
            raise Exception('Proxy must be if you use it!')
        connector = ProxyConnector.from_url(Config.proxy)
    else:
        connector = None
    timeout = ClientTimeout(total=10)
    async with aiohttp.ClientSession(connector=connector,
                                     timeout=timeout) as session:
        async with session.get(url, headers=self.headers,
                               verify_ssl=False) as resp:
            await resp.read()
            return resp
def connector(self):
    """Return an aiohttp connector, proxied when ``self.proxy`` is set.

    Certificate verification is disabled in both cases. Exits the process
    if the proxy URL cannot be parsed.
    """
    if not self.proxy:
        # no proxy configured: plain TCP connector, TLS verification off
        return aiohttp.TCPConnector(verify_ssl=False)
    try:
        return ProxyConnector.from_url(self.proxy,
                                       verify_ssl=False,
                                       rdns=True)
    except Exception as e:
        msg = (
            "Failed to set proxy %s\neg. socks5://[user:pass@]127.0.0.1:1080"
            % (self.proxy))
        self.error_log(msg=msg, e=e)
        exit(-1)
def create(cls, proxy: Optional[str] = None):
    """Create a new aiohttp.ClientSession object.

    TODO: Test if using detach, will release the proxy in use?

    :param proxy: If set, all session requests will use this proxy.
    :return: A aiohttp.ClientSession object.
    """
    # BUGFIX: always (re)assign cls.connector — the original only set it
    # when a proxy was given, so a proxy-less call could reference a stale
    # connector (or hit AttributeError) in the debug log below.
    cls.connector = ProxyConnector.from_url(proxy) if proxy is not None else None
    cls.session = aiohttp.ClientSession(connector=cls.connector)
    log.debug(f'creating session: {cls.session}, connector: {cls.connector}, proxy: {proxy}')
    return cls.session
async def refresh(cls, refresh_token):
    """Refresh the pixiv OAuth token.

    :param refresh_token: current refresh token
    :return: (access_token, refresh_token) on success, None on failure
    """
    url = "https://oauth.secure.pixiv.net/auth/token"
    data = {
        "client_id": "MOBrBDS8blbauoSck0ZfDbtuzpyT",
        "client_secret": "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj",
        "grant_type": "refresh_token",
        "include_policy": "true",
        "refresh_token": refresh_token,
    }
    headers = {
        "User-Agent": "PixivAndroidApp/5.0.234 (Android 11; Pixel 5)"
    }
    connector = None
    if cls._proxy:
        # cls._proxy looks like "socks5://host:port"
        proxy_info_list = cls._proxy.split(":")
        port = int(proxy_info_list[-1])
        ip = proxy_info_list[1].split("/")[-1]
        connector = ProxyConnector(proxy_type=ProxyType.SOCKS5,
                                   host=ip, port=port)
    # connector=None falls back to aiohttp's default TCPConnector, so one
    # code path replaces the two duplicated branches of the original.
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.post(url=url, data=data,
                                headers=headers) as response:
            res = await response.json()
    try:
        access_token = res["access_token"]
        refresh_token = res["refresh_token"]
    except KeyError as error:
        # narrowed from BaseException: only a missing key is expected here
        cls._logger.error(f"error:\n{error}")
        cls._logger.info(f"返回消息\n{res}")
    else:
        return access_token, refresh_token
async def _run_tasks(req_dict_list, proxy_str, max_concurrent, process_output,
                     verify_tls, redirects, usrdata):
    """Fan out all requests through one shared ClientSession.

    Concurrency is bounded by a semaphore passed into each fetch; when a
    proxy is configured, DNS resolution is delegated to it (rdns=True).
    """
    sem = asyncio.Semaphore(max_concurrent)
    if proxy_str != '':
        # handle remote dns resolution through the HTTP proxy
        proxy_port = int(proxy_str.split(':')[2])
        proxy_host = proxy_str.split('://')[1].split(':')[0]
        conn = ProxyConnector(proxy_type=ProxyType.HTTP,
                              host=proxy_host,
                              port=proxy_port,
                              rdns=True)
    else:
        # ttl_dns_cache=None means DNS entries are cached forever
        conn = TCPConnector(ttl_dns_cache=None)
    async with ClientSession(connector=conn) as client:
        tasks = [
            _bound_fetch(sem, req_dict, client, process_output,
                         verify_tls, redirects, usrdata)
            for req_dict in req_dict_list
        ]
        await asyncio.gather(*tasks)
async def start(self):
    """
    Start and print info status
    """
    # Endless loop: take a proxy from the shared PROXY pool, send one
    # view-increment request through it, then return the proxy to the pool.
    while True:
        proxy = PROXY.pop()
        # proxy string is "host:port"
        connector = ProxyConnector(
            proxy_type=CONFIG["proxy_type"],
            host=proxy.split(':')[0],
            port=int(proxy.split(':')[1]),
            rdns=True,
            ssl=False
        )
        video_payload = await self.request_increment_view_count(connector)
        if video_payload:
            print(
                f"Views counts: {video_payload.video_info['info'][10]}. Time from start: "
                f"{int(time.time() - self.start_time)}")
        # NOTE(review): random.randrange(1) always returns 0, so this sleep
        # never waits — a larger stop value was probably intended; confirm.
        await asyncio.sleep(random.randrange(1))
        PROXY.append(proxy)
async def sendWithProxy(self, proxy, requestKwargs, **kwargs):
    """Send a request of type ``self.TYPE`` through the proxy.

    :param proxy: proxy object exposing a ``formated`` URL attribute
    :param requestKwargs: keyword arguments forwarded to the request call
    :return: response body text
    :raises UnsupportedType: when self.TYPE is not POST/GET/HEAD
    """
    connector = ProxyConnector.from_url(proxy.formated)
    async with aiohttp.ClientSession(connector=connector, **kwargs) as session:
        # dispatch table instead of an if/elif chain
        methods = {
            "POST": session.post,
            "GET": session.get,
            "HEAD": session.head,
        }
        do_request = methods.get(self.TYPE)
        if do_request is None:
            raise UnsupportedType(f"Unknown request type: {self.TYPE}")
        async with do_request(self.URL, ssl=False, **requestKwargs) as response:
            return await response.text()
async def _create_connector(self) -> None:
    """Build the aiohttp session wired to the configured proxy URL."""
    connector = ProxyConnector.from_url(self.proxy.url)
    self._session = aiohttp.ClientSession(connector=connector)
async def check_proxy(self, proxy: str, dst: str, web_key: str) -> ValidateResult:
    """Check whether a proxy can reach a target site.

    :param proxy: proxy to validate
    :param dst: target site URL
    :param web_key: target site key
    :return: validation result (delay, availability)
    """
    result = ValidateResult(proxy=proxy, delay=-1, web_key=web_key, dst=dst,
                            useful=1)
    time_start = time.time()
    try:
        # BUGFIX: the original called the classmethod on a throwaway
        # instance — ProxyConnector(verify_ssl=False).from_url(proxy) —
        # which silently dropped the verify_ssl setting.
        connector = ProxyConnector.from_url(proxy, verify_ssl=False)
        requests.urllib3.disable_warnings()
        # asynchronous HTTP request
        async with ClientSession(connector=connector,
                                 timeout=self._timeout) as session:
            params = {
                "url": dst,
                "verify_ssl": False,
                "timeout": self._timeout,
                "headers": {
                    "User-Agent": self._ua.random()
                }
            }
            if "https" in proxy.split(":"):
                params["verify_ssl"] = False
            async with session.get(**params) as response:
                proxy_validator.info(
                    "wait proxy {} for {} response".format(proxy, dst))
                await response.text()
        delay = time.time() - time_start
        proxy_validator.info(
            "check proxy {} for {} success cost {} s".format(
                proxy, dst, delay))
        result.delay = delay
        result.available = 1
        # a response slower than the threshold counts as unavailable
        if delay > PROXY_REQUEST_DELAY:
            result.available = 0
        return result
    except Exception as e:
        # narrowed from a tuple containing BaseException (which caught
        # everything, making the other entries dead)
        err_msg = e
        if isinstance(e, (asyncio.TimeoutError, ClientHttpProxyError)):
            err_msg = "Http request timeout"
        # BUGFIX: the original "not isinstance(A) or not isinstance(B)" was
        # always true; the intent is to keep SSL-only failures from marking
        # the proxy unavailable.
        if not isinstance(e, (ClientSSLError, ssl.SSLError)):
            result.available = 0
        # retry while under the threshold
        if self._retry <= VAILDATORS_RETRY:
            self._retry = self._retry + 1
            result = await self.check_proxy(proxy, dst, web_key)
            return result
        time_end = time.time()
        proxy_validator.error("check proxy {} {} times fail for {} "
                              "and cost {} s".format(
                                  proxy, self._retry, dst,
                                  time_end - time_start))
        proxy_validator.error("check proxy {} for {} "
                              "error:{} type {}".format(
                                  proxy, dst, err_msg, type(e)))
        self._retry = 0
        result.delay = time_end - time_start
        return result
async def test_socks5_connector_ipv6():
    """A GET through the IPv6 SOCKS5 proxy should succeed with HTTP 200."""
    connector = ProxyConnector.from_url(SOCKS5_IPV6_URL,
                                        family=socket.AF_INET6)
    session = aiohttp.ClientSession(connector=connector)
    async with session:
        async with session.get(HTTP_TEST_URL) as resp:
            assert resp.status == 200
def proxy(self, url: str):
    """Point this client at a new proxy URL and rebuild the connector."""
    self.connector = ProxyConnector.from_url(url)
    self.proxy_url = url
async def send_request(url: str,
                       method: Optional[str] = "get",
                       text_type: Optional[str] = "json",
                       encoding: Optional[str] = "utf-8",
                       max_cor: Optional[int] = 200,
                       **kwargs) -> str:
    """Send an HTTP request asynchronously.

    `method`: request method
    `url`: URL to request
    `text_type`: format of the returned body — text, binary, or json
    `encoding`: text encoding
    `max_cor`: maximum concurrency

    NOTE(review): the semaphore below is created fresh on every call, so it
    cannot limit concurrency across calls — confirm intent.
    NOTE(review): despite the ``-> str`` annotation, the json path returns a
    dict and the binary path returns bytes.
    """
    proxy = kwargs.get("proxy")
    try:
        # use ProxyConnector so aiohttp can go through an https proxy
        connector = ProxyConnector.from_url(
            proxy)
    except (ValueError, TypeError):
        # bad or missing proxy: fall back to a direct connection
        connector = None
    sem = asyncio.Semaphore(max_cor)  # cap concurrency at max_cor (default 200)
    headers = json.loads(
        get_scrapy_settings("DEFAULT_REQUEST_HEADERS").replace(r"'", '"'))
    if method == "get":
        # GET request
        try:
            async with sem:
                async with ClientSession(connector=connector) as session:
                    async with session.get(url, headers=headers, timeout=5,
                                           verify_ssl=False,
                                           allow_redirects=False) \
                            as response:
                        if text_type == "json":
                            return await response.json()  # json
                        elif text_type == "text":
                            return await response.text(encoding=encoding)  # text
                        else:
                            return await response.read()  # binary
        except (ac.ServerConnectionError, asyncio.TimeoutError,
                ac.ClientProxyConnectionError, ac.ClientOSError,
                ac.ContentTypeError):
            # network/proxy failure: return a type-matched "false" sentinel
            if text_type == "json":
                return {"origin": "false"}
            elif text_type == "text":
                return "false"
            else:
                return b"false"
    else:
        # POST request — NOTE(review): this path does not use the semaphore
        data = kwargs.get("data")
        try:
            async with ClientSession(connector=connector) as session:
                async with session.post(url, headers=headers, data=data,
                                        timeout=5, allow_redirects=False) \
                        as response:
                    if text_type == "json":
                        return await response.json()  # json
                    elif text_type == "text":
                        return await response.text(encoding=encoding)  # text
                    else:
                        return await response.read()  # binary
        except (ac.ServerConnectionError, asyncio.TimeoutError,
                ac.ClientProxyConnectionError):
            if text_type == "json":
                return {"origin": "false"}
            elif text_type == "text":
                return "false"
            else:
                return b"false"