async def test_proxied_http_calls():
    """End-to-end test: start the SOCKS5 server and issue an HTTP request
    through it, once for each value of run_main's second flag.

    Fixes two defects of the original: the whole body was duplicated
    verbatim for the two flag values, and ``task.cancel()`` was skipped
    whenever the assertion failed, leaking the server task.
    """
    loop = asyncio.get_running_loop()
    for flag in (False, True):
        port = random.randint(40000, 60000)
        config = ServerConfig(
            username="******",
            password="******",
            host="127.0.0.1",
            port=port,
        )
        task = loop.create_task(run_main(config, flag))
        # Give the server a moment to start listening before connecting.
        await asyncio.sleep(0.5)
        connector = ProxyConnector.from_url(
            f"socks5://foo:[email protected]:{port}")
        try:
            async with aiohttp.ClientSession(connector=connector) as session:
                resp = await session.get("http://asdf.com")
                assert resp.ok
        finally:
            # Always tear the server down, even on assertion failure.
            task.cancel()
def http_client(
        self,
        client_type: ClientType = None,
        client_id: int = 0,
        client_options: Mapping[str, Any] = None,
        container=None,
) -> aiohttp.ClientSession:
    """Return a cached HTTP session for (client_type, client_id), creating
    and caching a new LoggingClientSession on first use.

    PROXIED/NONPROXIED clients get a persisted cookie jar and a 60s total
    timeout; PROXIED clients additionally route through the container's
    Tor SOCKS proxy. ``client_options`` is deep-merged over the defaults.
    """
    # Cache hit: reuse the existing session for this type/id pair.
    if client_type in self._http_clients and client_id in self._http_clients[
            client_type]:
        return self._http_clients[client_type][client_id]
    options = self._http_client_options.copy()
    if client_type in (self.ClientType.PROXIED, self.ClientType.NONPROXIED):
        # Real (non-internal) clients persist cookies and get a hard cap.
        options["cookie_jar"] = self.http_clients_load_cookies(
            client_type, client_id)
        options["timeout"] = aiohttp.ClientTimeout(total=60)
    if client_type in (self.ClientType.PROXIED, ):
        # Imported lazily to avoid a circular import at module load time.
        from scraper.control import Container
        assert isinstance(container, Container)
        options["connector"] = ProxyConnector.from_url(
            container.tor.proxy_url)
    # Caller-supplied options override/extend the defaults (deep merge).
    options = utils.update_recursive(options, client_options or {})
    # NOTE(review): assumes self._http_clients[client_type] already exists
    # (e.g. a defaultdict) — a plain dict would raise KeyError here; confirm.
    self._http_clients[client_type][
        client_id] = session = LoggingClientSession(**options)
    log.debug("({cls}) created new %s HTTP client session %s, options=%s",
              client_type.value, session, options, extra=self._fmtargs)
    return session
async def test_socks5_proxy_with_proxy_connect_timeout():
    """A near-zero sock_connect budget must surface as ProxyTimeoutError."""
    tight_timeout = aiohttp.ClientTimeout(total=32, sock_connect=0.001)
    proxy_connector = ProxyConnector.from_url(SOCKS5_IPV4_URL)
    with pytest.raises(ProxyTimeoutError):
        async with aiohttp.ClientSession(connector=proxy_connector) as session:
            async with session.get(HTTP_TEST_URL,
                                   timeout=tight_timeout) as resp:
                await resp.text()
async def test_socks4_proxy(url, rdns):
    """A request through the SOCKS4 proxy should yield an HTTP 200."""
    proxy = ProxyConnector.from_url(SOCKS4_URL, rdns=rdns)
    result = await fetch(connector=proxy, url=url)
    assert result.status == 200
async def _socks_connect(self, node: Node):
    """Probe a node's first SOCKS5 server by repeatedly fetching a random
    URL through it; return ``(score, successful_response_count)``.
    """
    logger = logging.getLogger()
    score = self._init_score()
    # Count of fetches that completed without raising.
    response_times = 0
    url = random.choice(self.url_list)
    # Only the node's first configured server is probed.
    host = node.settings['servers'][0]['address']
    port = node.settings['servers'][0]['port']
    for i in range(self.times):
        await asyncio.sleep(self.sleep_seconds)
        try:
            begin_time = time.time()
            connector = ProxyConnector.from_url(f'socks5://{host}:{port}')
            async with aiohttp.ClientSession(
                    connector=connector) as session:
                async with session.get(url=url, timeout=5) as resp:
                    body = await resp.read()
                    length = len(body or '')
                    response_times += 1
            finish_time = time.time()
            # Round-trip latency in milliseconds (includes body read).
            current_ping = int((finish_time - begin_time) * 1000)
            logger.info(
                f'{node} times: {i + 1} score:{current_ping}{self.unit} response: {length} bytes'
            )
            # Fold this sample into the running score.
            score = self._score(current_ping, length, score)
        except Exception as e:
            logger.error(f'{node} available test failed: {e}')
            # Abort early once the remaining attempts can no longer reach
            # the required number of successful responses.
            if self.times - i - 1 + response_times < self.response_times:
                break
    return score, response_times
async def get(self, check_size: bool = False, size="sample") -> bytes:
    """Download the configured image and return its raw bytes.

    ``size`` selects the URL variant: "sample" (default), "file" or "jpeg".

    Fixes of the original: a default ``ProxyConnector()`` was constructed
    unconditionally and leaked (never closed) whenever a proxy URL replaced
    it, and ``img_bytes`` could be unbound inside the
    ``PIL.UnidentifiedImageError`` handler.
    """
    img_bytes: bytes = b''
    try:
        headers = {}
        # Build exactly one connector: proxied when configured, else default.
        proxy = ymConfig.getConfig('setting').get('proxy')
        if proxy:
            connector: ProxyConnector = ProxyConnector.from_url(proxy)
        else:
            connector = ProxyConnector()
        if size == "sample":
            self.url = self.sample_url
        elif size == "file":
            self.url = self.file_url
        elif size == "jpeg":
            self.url = self.jpeg_url
        try:
            async with aiohttp.request(
                    'GET', self.url, headers=headers, connector=connector,
                    timeout=aiohttp.ClientTimeout(600)) as resp:
                img_bytes = await resp.read()
        finally:
            # Always release the connector, even when the request fails.
            await connector.close()
        if check_size:
            pass
            '''img: PIL.Image.Image = PIL.Image.open(BytesIO(initial_bytes=img_bytes))
            if img.size != (self.width, self.height):
                raise ValueError(f'Image Size Error: expected {(self.width, self.height)} but got {img.size}')'''
    except (asyncio.TimeoutError, ValueError) as e:
        raise e
    except PIL.UnidentifiedImageError:
        # Dead path while the size check above is commented out; kept for
        # compatibility should the check be re-enabled.
        raise ValueError(f'Image load fail {str(img_bytes[:20])}...')
    return img_bytes
async def check_proxy(self, proxy, proxyType, i):
    """Test proxy #i by POSTing to Discord's register endpoint through it,
    printing the resulting status or the failure reason.

    NOTE(review): relies on module-level ``timeout``, ``payload`` and
    ``get_full_class_name`` from the enclosing scope — confirm they exist.
    """
    print(i)
    connector = ProxyConnector.from_url("{}://{}".format(proxyType, proxy))
    async with aiohttp.ClientSession(connector=connector,
                                     timeout=timeout) as session:
        responseJSON = ""
        try:
            async with session.post('https://discord.com/api/v8/register',
                                    json=payload) as response:
                while True:
                    status_code = response.status
                    try:
                        responseJSONpart = await response.read()
                    except asyncio.exceptions.IncompleteReadError as e:
                        # Keep whatever partial body arrived and retry.
                        responseJSON = responseJSON + e.partial.decode(
                            'utf-8')
                        continue
                    else:
                        responseJSON = responseJSON + responseJSONpart.decode(
                            'utf-8')
                        print(response.status)
                        break
                    # NOTE(review): unreachable — every path above either
                    # continues or breaks.
                    print(response.status)
                    break
        except Exception as e:
            result = get_full_class_name(e)
            if result == "proxy_socks._errors.ProxyConnectionError":
                print("Failed proxy: {}".format(proxy))
            elif result == "asyncio.exceptions.TimeoutError":
                print("timed out. #{}".format(i))
def _session(self) -> ClientSession:
    """Build a fresh proxied ClientSession that raises on HTTP errors."""
    proxy_connector = ProxyConnector.from_url(self.http_proxy)
    limits = ClientTimeout(total=60, connect=30, sock_read=10)
    return ClientSession(
        connector=proxy_connector,
        raise_for_status=True,
        timeout=limits,
    )
def __init__(self, limit=30, timeout=10, env=False, internal=False,
             proxy=None, bypass=False):
    """
    When 'env' is True and 'proxy' is None, possible proxies will be obtained automatically (wrong proxy may be obtained).
    When 'proxy' is not None, it will force the proxy to be used and 'env' will have no effect.
    proxy <str> is used for a single proxy with a url: 'socks5://user:[email protected]:1080'
    If you want to use proxy chaining, read https://github.com/romis2012/aiohttp-socks.
    """
    kwargs = {'limit_per_host': limit}
    if bypass:
        # SNI-bypass mode: disable certificate verification entirely and
        # resolve hostnames through the custom resolver.
        import ssl
        from .bypass_sni import ByPassResolver
        ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        ssl_ctx.check_hostname = False
        ssl_ctx.verify_mode = ssl.CERT_NONE
        kwargs.update({'ssl': ssl_ctx, 'resolver': ByPassResolver()})
    if proxy:
        try:
            from aiohttp_socks import ProxyConnector
            self.conn = ProxyConnector.from_url(proxy, **kwargs)
            # _flag False: proxying handled by the connector itself.
            _flag = False
        except ModuleNotFoundError as e:
            # aiohttp-socks missing: SOCKS proxies cannot work without it,
            # but HTTP(S) proxies can fall back to aiohttp's per-request
            # `proxy=` support (applied below when _flag is True).
            if proxy.startswith('socks'):
                raise e
            else:
                self.conn = aiohttp.TCPConnector(**kwargs)
                _flag = True
    else:
        self.conn = aiohttp.TCPConnector(**kwargs)
    self.internal = internal
    self.client = aiohttp.ClientSession(
        connector=self.conn,
        timeout=aiohttp.ClientTimeout(total=timeout),
        trust_env=env,
    )
    if proxy and _flag:
        # HTTP-proxy fallback: bake `proxy=` into the session's verbs so
        # callers don't need to pass it on every request.
        from functools import partial
        self.client.head = partial(self.client.head, proxy=proxy)
        self.client.get = partial(self.client.get, proxy=proxy)
        self.client.post = partial(self.client.post, proxy=proxy)
def __init__(self, limit=30, timeout=10, env=False, internal=False,
             proxy=None):
    """
    When 'env' is True and 'proxy' is None, possible proxies will be obtained automatically (wrong proxy may be obtained).
    When 'proxy' is not None, it will force the proxy to be used and 'env' will have no effect.
    proxy <str> is used for a single proxy with a url: 'socks5://user:[email protected]:1080'
    If you want to use proxy chaining, read https://github.com/romis2012/aiohttp-socks.
    """
    # Pick the connector first, then hand it to the shared session.
    if proxy:
        chosen = ProxyConnector.from_url(proxy, limit_per_host=limit)
    else:
        chosen = aiohttp.TCPConnector(limit_per_host=limit)
    self.conn = chosen
    self.internal = internal
    self.client = aiohttp.ClientSession(
        connector=self.conn,
        trust_env=env,
        timeout=aiohttp.ClientTimeout(total=timeout),
    )
async def get(self, offest=0, limit=40) -> 'list':
    """Collect posts from every ripped listing page, enrich each with its
    per-post tag metadata, and return a list of AnipicData.

    Fixes of the original: a default ``ProxyConnector()`` was constructed
    unconditionally and leaked when a proxy URL replaced it, and the
    ``result`` annotation used an invalid list-literal form.
    """
    await self.__build()
    # Build exactly one connector: proxied when configured, else default.
    proxy = ymConfig.getConfig('setting').get('proxy')
    if proxy:
        connector: ProxyConnector = ProxyConnector.from_url(proxy)
    else:
        connector = ProxyConnector()
    data: list = []
    # First pass: gather the post slices from each ripped listing page.
    for k, t in self.rips.items():
        async with aiohttp.request('GET', k,
                                   connector=connector) as response:
            raw = await response.read()
            logger.debug(k + f"\n[{t[0]}, {t[1]}]")
            data = data + json.loads(raw)['posts'][t[0]:t[1]]
    result: list = []
    # Second pass: fetch tag metadata per post and wrap in AnipicData.
    for one in data:
        anipic: AnipicData = AnipicData()
        one['file_url'] = f"https://images.anime-pictures.net/{one['md5'][0:3]}/{one['md5'] + one['ext']}"
        anipic.__dict__.update(one)
        async with aiohttp.request(
                'GET',
                f'https://anime-pictures.net/pictures/view_post/{anipic.id}?type=json',
                connector=connector,
                timeout=aiohttp.ClientTimeout(20)) as oneInfo:
            meta = json.loads(await oneInfo.read())
            anipic.tags = meta['tags']
        result.append(anipic)
    # Release the connector (aiohttp.request does not own it).
    await connector.close()
    return result
def __init__(self, webhook: str) -> None:
    """Initialise run counters, one proxied session per SOCKS5 proxy read
    from disk, and the Discord webhook used for progress reports."""
    colorama.init()
    # Run statistics.
    self.total_requests = 0
    self.failed_requests = 0
    self.successful_requests = 0
    self.major_errors = 0
    self.claimed_codes = 0
    # Load the proxy list that ships next to this module.
    proxy_path = path.join(path.dirname(path.realpath(__file__)),
                           "socks5_proxies.txt")
    with open(proxy_path, "r") as f_:
        proxies = f_.read().strip().split()
    sessions = []
    for proxy in proxies:
        sessions.append(
            aiohttp.ClientSession(
                connector=ProxyConnector.from_url("socks5://" + proxy),
                timeout=ClientTimeout(total=120)))
    self.sessions: List[aiohttp.ClientSession] = sessions
    self.sessions_len = len(self.sessions) - 1
    self.discord_session = aiohttp.ClientSession()
    self.discord = Webhook.from_url(
        webhook, adapter=AsyncWebhookAdapter(self.discord_session))
    print(Fore.GREEN + "Proxies loaded!")
def useProxy(self):
    """Attach a proxy connector from settings; raise when none configured."""
    configured = ymConfig.getConfig('setting').get('proxy')
    if not configured:
        raise SearcherProxyNotFoundException
    self.connector = ProxyConnector.from_url(configured)
    return self
async def test_socks4_proxy(url, rdns):
    """Requests routed through the SOCKS4 proxy must return HTTP 200."""
    proxy = ProxyConnector.from_url(SOCKS4_URL, rdns=rdns)
    async with aiohttp.ClientSession(connector=proxy) as http:
        async with http.get(url) as reply:
            assert reply.status == 200
async def create_client_session(self):
    """Build a ClientSession — proxied when a proxy is configured — with
    the user's cookies pre-loaded and no read timeout."""
    proxy_url = self.get_proxy()
    if proxy_url:
        connector = ProxyConnector.from_url(proxy_url)
    else:
        connector = None
    jar = await self.get_cookies()
    return ClientSession(connector=connector,
                         cookies=jar,
                         read_timeout=None)
def create_client_session(self):
    """Build a ClientSession — proxied when a proxy is configured — seeded
    with the formatted auth cookies and no read timeout."""
    proxy_url = self.get_proxy()
    if proxy_url:
        connector = ProxyConnector.from_url(proxy_url)
    else:
        connector = None
    jar = self.auth.auth_details.cookie.format()
    return ClientSession(connector=connector,
                         cookies=jar,
                         read_timeout=None)
def create_client_session(self):
    """Build a ClientSession — proxied when a proxy is configured — seeded
    with the auth cookies when present, and no read timeout."""
    proxy_url = self.get_proxy()
    if proxy_url:
        connector = ProxyConnector.from_url(proxy_url)
    else:
        connector = None
    jar = self.auth.cookies if hasattr(self.auth, "cookies") else {}
    return ClientSession(connector=connector,
                         cookies=jar,
                         read_timeout=None)
async def fetch_page(url):
    """Fetch *url* through the local Tor SOCKS proxy; True on success."""
    tor_connector = ProxyConnector.from_url('socks5://localhost:9050')
    async with aiohttp.ClientSession(connector=tor_connector) as session:
        async with session.get(url) as response:
            # Drain the body so the response is fully fetched.
            await response.text()
            return response.ok
async def fetch(self, url: str, args: CmdArgs) -> bytes:
    """Download *url* (optionally through args.proxy, TLS verification
    disabled either way) and return the raw response body."""
    if args.proxy == '':
        conn = TCPConnector(ssl=False)
    else:
        conn = ProxyConnector.from_url(args.proxy, ssl=False)
    async with ClientSession(connector=conn) as http:
        async with http.get(url, headers=args.headers) as reply:
            return await reply.content.read()
def __init__(self, *args, **kwargs):
    """Create a proxied aiohttp session with TLS verification disabled.

    kwargs: proxy (proxy URL, required), cookie_jar, logger.

    Fix: the original assigned ``connector.verify_ssl = False`` after
    construction, which only sets a plain attribute and does not affect
    the SSL behaviour; verification is now disabled via the constructor's
    ``ssl=False`` (as done elsewhere in this codebase).
    """
    proxy = kwargs.get('proxy')
    cookie_jar = kwargs.get('cookie_jar')
    self.logger = kwargs.get('logger', Mock())
    connector = ProxyConnector.from_url(proxy, ssl=False)
    self.session = aiohttp.ClientSession(connector=connector,
                                         trust_env=True,
                                         cookie_jar=cookie_jar)
async def download(book_id: int, file_type: str, type_: int = 0,
                   retry: int = 3):
    """Download book *book_id* as *file_type*, alternating between the
    clearnet and onion mirrors on failure; returns raw bytes, or falls
    back to manual_convert() once all retries are exhausted.
    """
    # type 0: flibusta.is
    # type 1: flibustahezeous3.onion
    while retry > 0:
        url: str = ""
        connector = None
        if type_ == 0:
            basic_url = "http://flibusta.is"
        elif type_ == 1:
            basic_url = "http://flibustahezeous3.onion"
        else:
            raise Exception()
        if type_ == 1:
            # The onion mirror is only reachable through the Tor proxy.
            connector = ProxyConnector.from_url(Config.TOR_PROXIES)
        if file_type in ("fb2", "epub", "mobi"):
            url = basic_url + f"/b/{book_id}/{file_type}"
        else:
            url = basic_url + f"/b/{book_id}/download"
        try:
            async with aiohttp.ClientSession(timeout=ClientTimeout(
                    total=10 * 60, sock_connect=2 * 60),
                    connector=connector) as session:
                async with session.get(
                        url, allow_redirects=True, max_redirects=50
                ) as resp:  # type: aiohttp.ClientResponse
                    # An HTML body or non-200 status means an error page,
                    # not the requested book.
                    if resp.headers.get(
                            "Content-Type"
                    ) and "text/html" in resp.headers.get(
                            "Content-Type") or resp.status != 200:
                        raise NotBookException("NotBookException")
                    if resp.headers.get("Content-Type") == "application/zip":
                        # Unzip off the event loop in the process pool.
                        return await asyncio.get_event_loop().run_in_executor(
                            process_pool_executor, unzip, await resp.read(),
                            file_type)
                    return await resp.content.read()
        except (aiohttp.ServerDisconnectedError, aiohttp.ClientOSError,
                aiohttp.ClientPayloadError, aiohttp.ClientConnectorError,
                zipfile.BadZipFile, FileNotFoundError, ProxyTimeoutError,
                NotBookException) as e:
            print(e)
            # Switch to the other mirror and burn one retry.
            type_ += 1
            if type_ >= 2:
                type_ = 0
            retry -= 1
    return await manual_convert(book_id, file_type)
async def fetch(self, url: str) -> 'tuple[str, str]':
    """Fetch *url* (optionally through self.args.proxy, TLS verification
    disabled) and return ``(final_url, decoded_text)``.

    Fix: the original annotated the return type as ``str`` although a
    2-tuple is returned; the annotation is corrected (behaviour unchanged).
    """
    if self.args.proxy != '':
        connector = ProxyConnector.from_url(self.args.proxy, ssl=False)
    else:
        connector = TCPConnector(ssl=False)
    async with ClientSession(
            connector=connector) as client:  # type: ClientSession
        async with client.get(
                url,
                headers=self.args.headers) as resp:  # type: ClientResponse
            # str(resp.url) reflects the final URL after redirects.
            return str(resp.url), self.load_raw2text(await resp.read())
async def check_socks5_connection():
    """Return True iff the hard-coded SOCKS5 proxy accepts connections.

    Fix: the proxy URL was malformed — ``socks5:/…`` with a single slash
    is not a valid URL scheme separator; corrected to ``socks5://…``.
    """
    try:
        async with aiohttp.ClientSession(connector=ProxyConnector.from_url(
                'socks5://138.124.187.29:1080')) as session:
            async with session.get(
                    'http://icanhazip.com/',
                    headers={'User-Agent': str(UserAgent().random)}) as response:
                pass
    except ProxyConnectionError:
        return False
    return True
async def get_session(timeout: Optional[float] = None):
    """Return a RetryClient — proxied when PROXY is set — with the given
    total timeout (default 12 seconds)."""
    total = timeout or 12
    connector = ProxyConnector.from_url(PROXY) if PROXY else None
    return RetryClient(retry_options=RETRY_OPTION,
                       connector=connector,
                       timeout=aiohttp.ClientTimeout(total=total),
                       headers={'User-Agent': env.USER_AGENT})
async def check_url(self, url):
    """Best-effort probe: True iff *url* answers HTTP 200 via this proxy."""
    try:
        proxy_connector = ProxyConnector.from_url(self.get_str())
        async with aiohttp.ClientSession(
                connector=proxy_connector) as session:
            reply = await session.get(url, timeout=1)
            return reply.status == 200
    except Exception:
        # Deliberately swallow everything: any failure means "unusable".
        pass
    return False
async def run(links) -> list:
    """Fan out JSON requests for *links* over one (possibly proxied)
    session and return the gathered responses.

    NOTE(review): relies on ``self``, ``tasks`` and ``randint`` from the
    enclosing scope — ``tasks`` must be an empty list before this runs;
    confirm against the surrounding method.
    """
    proxies = self.proxies
    # Pick one proxy at random, or none when the list is empty.
    proxy = self.proxies[randint(0, len(proxies) - 1)] if proxies else ""
    connector = ProxyConnector.from_url(proxy) if proxy else None
    async with ClientSession(
            connector=connector, cookies=self.auth.cookies, read_timeout=None
    ) as session:
        for link in links:
            task = asyncio.ensure_future(self.json_request(link, session))
            tasks.append(task)
        responses = list(await asyncio.gather(*tasks))
        return responses
def __init__(self, *args, **kwargs):
    """Create a proxied ClientSession with TLS verification disabled.

    kwargs: proxy (proxy URL, required), cookie_jar, logger.

    Fix: the original assigned ``connector.verify_ssl = False`` after
    construction, which only sets a plain attribute and does not affect
    the SSL behaviour; verification is now disabled via the constructor's
    ``ssl=False``.
    """
    proxy = kwargs.get('proxy')
    cookie_jar = kwargs.get('cookie_jar')
    self.logger = kwargs.get('logger', Mock())
    # moved here to speed up the launch of Maigret
    from aiohttp_socks import ProxyConnector
    connector = ProxyConnector.from_url(proxy, ssl=False)
    self.session = ClientSession(connector=connector,
                                 trust_env=True,
                                 cookie_jar=cookie_jar)
async def _request(
        cls,
        method: HTTPRequestMethod,
        url: str,
        *,
        headers: Optional[Dict[str, str]] = None,
        body: Optional[Dict[str, str]] = None,
        body_encoding: BodyFormatter = BodyFormatter.URL_ENCODE,
        cookies: Dict[str, str] = None,
        verify: bool = True,
        allow_redirects: bool = False,
        timeout: Optional[float] = None,
        proxies: Optional[str] = None,
) -> Response:
    """Perform an HTTP request and normalise the result into a Response.

    ``proxies``, when given, is a proxy URL handed to aiohttp-socks.
    Raises RequestTimeoutError when the total timeout elapses.
    """
    timeout = timeout or DEFAULT_REQUEST_TIMEOUT
    if proxies is not None:
        # Lazy import so aiohttp-socks is only required when proxying.
        from aiohttp_socks import ProxyConnector
        # NOTE(review): `proxies` is rebound here from a URL string to a
        # connector object and passed as `connector=` below.
        proxies = ProxyConnector.from_url(proxies)
    async with aiohttp.ClientSession(connector=proxies) as session:
        try:
            async with session.request(
                    method.value,
                    url,
                    headers=headers,
                    cookies=cls._to_cookie_obj(cookies),
                    verify_ssl=verify,
                    allow_redirects=allow_redirects,
                    timeout=aiohttp.ClientTimeout(total=timeout),
                    # Body goes under 'json' or 'data' per the formatter.
                    **{body_encoding.value: body},
            ) as response:
                cookies = cls._to_cookie_dict(response.cookies)
                headers = dict(response.headers)
                raw = await response.read()
                try:
                    text = await response.text()
                except UnicodeDecodeError:
                    # Binary payloads have no text form.
                    text = ""
                return Response(
                    response.status,
                    url,
                    response.reason,
                    headers,
                    raw,
                    text,
                    cookies,
                )
        except _TimeoutError as e:
            raise RequestTimeoutError(
                f"요청시간이 경과하였습니다. -> {timeout}초") from e
async def url_proceed(self, msg):
    """Follow the reward URL attached to *msg*'s first inline button and
    submit the extracted code/token pair to dogeclick.com.
    """
    # Guard: the message must carry at least one inline button.
    if len(msg.reply_markup.rows) <= 0:
        print("Failed find msg:", msg)
        return
    elif len(msg.reply_markup.rows[0].buttons) <= 0:
        print("Failed find msg:", msg)
        return
    url = msg.reply_markup.rows[0].buttons[0].url
    print("Найдена ссылка:", url)
    connector = ProxyConnector.from_url(self.proxy.get_str())
    async with aiohttp.ClientSession(connector=connector) as session:
        text = ''
        try:
            resp = await session.get(url)
            if resp.status == 200:
                text = await resp.text()
        except Exception:
            # Best-effort fetch; empty `text` is handled below.
            pass
        await asyncio.sleep(2)
        await self.is_wait_msg(await self.get_last_msg())
        if 'captcha' in text:
            # Captchas cannot be solved automatically; skip this task.
            await self.skip_task(msg)
            return
        match = re.search(REG_CODE_TOKEN, text)
        if not match:
            return
        xdata = {
            "code": match.group('code'),
            "token": match.group('token')
        }
        print("Send xdata:", xdata)
        try:
            rsp = await session.post('https://dogeclick.com/reward',
                                     data={
                                         "code": match.group('code'),
                                         "token": match.group('token')
                                     })
            print(await rsp.text())
        except Exception as e:
            print("SEND EXCEPTION:", e, type(e))
def create_tor_connector(settings):
    """Build a SOCKS5 ProxyConnector from a settings mapping.

    ``settings`` requires 'host' and 'port'; 'username' and 'password'
    must be supplied together when authentication is used.

    Raises:
        Exception: if only one of username/password is present.

    Fix: the unauthenticated URL previously contained a stray '@'
    ('socks5://@host:port'), which encodes an empty userinfo section.
    """
    if 'username' in settings or 'password' in settings:
        if 'username' not in settings or 'password' not in settings:
            raise Exception('Username and password required.')
        url = 'socks5://{username}:{password}@{host}:{port}'.format(
            username=settings['username'],
            password=settings['password'],
            host=settings['host'],
            port=settings['port'])
    else:
        url = 'socks5://{host}:{port}'.format(host=settings['host'],
                                              port=settings['port'])
    return ProxyConnector.from_url(url)