async def test_proxied_http_calls():
    """End-to-end check that the SOCKS5 server proxies plain HTTP requests.

    Starts the server twice — once per value of ``run_main``'s second flag —
    on a random high port, then performs an authenticated request through it.
    The original inlined the whole sequence twice; it is factored into a
    helper, and the server task is now cancelled even if the request fails.
    """
    loop = asyncio.get_running_loop()

    async def _run_once(flag: bool) -> None:
        # Random high port to avoid collisions between the two runs.
        port = random.randint(40000, 60000)
        config = ServerConfig(
            username="******",
            password="******",
            host="127.0.0.1",
            port=port,
        )
        task = loop.create_task(run_main(config, flag))
        # Give the server a moment to start listening.
        await asyncio.sleep(0.5)
        try:
            connector = ProxyConnector.from_url(
                f"socks5://foo:[email protected]:{port}")
            async with aiohttp.ClientSession(connector=connector) as session:
                resp = await session.get("http://asdf.com")
                assert resp.ok
        finally:
            # Previously only cancelled on success, leaking the server task
            # when an assertion or request error occurred.
            task.cancel()

    await _run_once(False)
    await _run_once(True)
async def get(self, check_size: bool = False, size="sample") -> bytes:
    """Download the image variant selected by *size*.

    Args:
        check_size: kept for interface compatibility; the size validation
            body is currently disabled (see the retained commented-out code).
        size: one of "sample", "file" or "jpeg", selecting which URL to fetch.

    Returns:
        The raw response body.

    Raises:
        asyncio.TimeoutError / ValueError: propagated from the request or
            raised when PIL cannot identify the downloaded image.
    """
    connector = None
    try:
        headers = {}
        # Fix: only build a proxy connector when a proxy is configured.
        # The old code instantiated a bare ProxyConnector() (default
        # localhost SOCKS endpoint) even with no proxy set, although its own
        # ``if connector:`` guard shows None was the intended default.
        proxy = ymConfig.getConfig('setting').get('proxy')
        if proxy:
            connector = ProxyConnector.from_url(proxy)
        if size == "sample":
            self.url = self.sample_url
        elif size == "file":
            self.url = self.file_url
        elif size == "jpeg":
            self.url = self.jpeg_url
        async with aiohttp.request(
                'GET',
                self.url,
                headers=headers,
                connector=connector,
                timeout=aiohttp.ClientTimeout(600)) as resp:
            img_bytes: bytes = await resp.read()
        if check_size:
            pass
            '''img: PIL.Image.Image = PIL.Image.open(BytesIO(initial_bytes=img_bytes))
            if img.size != (self.width, self.height):
                raise ValueError(f'Image Size Error: expected {(self.width, self.height)} but got {img.size}')'''
    except (asyncio.TimeoutError, ValueError) as e:
        raise e
    except PIL.UnidentifiedImageError:
        raise ValueError(f'Image load fail {str(img_bytes[:20])}...')
    finally:
        # Fix: close the connector on every path; the old code only closed
        # it on success, leaking it whenever the request raised.
        if connector:
            await connector.close()
    return img_bytes
def get_connector(config):
    """Build an aiohttp connector (and auth/proxy globals) from *config*.

    Side effects: sets the module-level ``proxyauth`` (BasicAuth or None)
    and, for HTTP proxies, the module-level ``httpproxy`` URL string.

    Returns:
        A ProxyConnector for tor/socks proxies, otherwise None.
        Exits the process on inconsistent proxy arguments.
    """
    logme.debug(__name__ + ':get_connector')
    _connector = None
    global proxyauth
    if config.Proxy_Username:
        if config.Proxy_Password:
            proxyauth = aiohttp.BasicAuth(config.Proxy_Username,
                                          config.Proxy_Password)
        else:
            print("Error: Proxy username requires a password.")
            # Fix: previously this printed the error but fell through,
            # leaving ``proxyauth`` unset (or stale) and silently continuing
            # without authentication. Exit like every other error branch.
            sys.exit(1)
    else:
        proxyauth = None
    if config.Proxy_host:
        if config.Proxy_host.lower() == "tor":
            # Standard local Tor SOCKS endpoint; rdns resolves hostnames
            # through the proxy so .onion lookups work.
            _connector = ProxyConnector(host='127.0.0.1', port=9050, rdns=True)
        elif config.Proxy_port and config.Proxy_type:
            if config.Proxy_type.lower() == "socks5":
                _type = ProxyType.SOCKS5
            elif config.Proxy_type.lower() == "socks4":
                _type = ProxyType.SOCKS4
            elif config.Proxy_type.lower() == "http":
                # HTTP proxying is handled per-request via this global URL,
                # not via a connector, so None is returned on purpose.
                global httpproxy
                httpproxy = "http://" + config.Proxy_host + ":" + str(
                    config.Proxy_port)
                return _connector
            else:
                logme.critical("get_connector:proxy-type-error")
                print(
                    "Error: Proxy types allowed are: http, socks5 and socks4. No https."
                )
                sys.exit(1)
            _connector = ProxyConnector(proxy_type=_type,
                                        host=config.Proxy_host,
                                        port=config.Proxy_port,
                                        rdns=True)
        else:
            logme.critical(__name__ + ':get_connector:proxy-port-type-error')
            print(
                "Error: Please specify --proxy-host, --proxy-port, and --proxy-type"
            )
            sys.exit(1)
    else:
        if config.Proxy_port or config.Proxy_type:
            logme.critical(__name__ + ':get_connector:proxy-host-arg-error')
            print(
                "Error: Please specify --proxy-host, --proxy-port, and --proxy-type"
            )
            sys.exit(1)
    return _connector
def __init__(self, webhook: str) -> None:
    """Load SOCKS5 proxies from disk, open one session per proxy, and
    attach a Discord webhook for reporting."""
    colorama.init()
    # Run statistics, all starting from zero.
    self.total_requests = 0
    self.failed_requests = 0
    self.successful_requests = 0
    self.major_errors = 0
    self.claimed_codes = 0
    # Proxy list lives next to this module, one proxy per whitespace token.
    proxy_file = path.join(path.dirname(path.realpath(__file__)),
                           "socks5_proxies.txt")
    with open(proxy_file, "r") as handle:
        proxy_list = handle.read().strip().split()
    sessions = []
    for entry in proxy_list:
        sessions.append(
            aiohttp.ClientSession(
                connector=ProxyConnector.from_url("socks5://" + entry),
                timeout=ClientTimeout(total=120)))
    self.sessions: List[aiohttp.ClientSession] = sessions
    # Highest valid index into self.sessions.
    self.sessions_len = len(self.sessions) - 1
    self.discord_session = aiohttp.ClientSession()
    self.discord = Webhook.from_url(
        webhook, adapter=AsyncWebhookAdapter(self.discord_session))
    print(Fore.GREEN + "Proxies loaded!")
async def _socks_connect(self, node: Node):
    """Probe *node*'s first SOCKS5 server ``self.times`` times and score it.

    Each round fetches a random URL from ``self.url_list`` through the
    node's proxy, measures the round trip, and folds latency and body size
    into the running score via ``self._score``.

    Returns:
        (score, response_times) — the final score and the count of
        successful requests.
    """
    logger = logging.getLogger()
    score = self._init_score()
    response_times = 0
    url = random.choice(self.url_list)
    # assumes the first entry of node.settings['servers'] is the endpoint
    # to test — TODO confirm against the Node schema
    host = node.settings['servers'][0]['address']
    port = node.settings['servers'][0]['port']
    for i in range(self.times):
        await asyncio.sleep(self.sleep_seconds)
        try:
            begin_time = time.time()
            connector = ProxyConnector.from_url(f'socks5://{host}:{port}')
            async with aiohttp.ClientSession(
                    connector=connector) as session:
                async with session.get(url=url, timeout=5) as resp:
                    body = await resp.read()
                    length = len(body or '')
                    response_times += 1
            finish_time = time.time()
            # Round-trip latency in milliseconds.
            current_ping = int((finish_time - begin_time) * 1000)
            logger.info(
                f'{node} times: {i + 1} score:{current_ping}{self.unit} response: {length} bytes'
            )
            score = self._score(current_ping, length, score)
        except Exception as e:
            logger.error(f'{node} available test failed: {e}')
            # Give up early once the remaining rounds can no longer reach
            # the required number of successful responses.
            if self.times - i - 1 + response_times < self.response_times:
                break
    return score, response_times
def __init__(self,
             limit=30,
             timeout=10,
             env=False,
             internal=False,
             proxy=None,
             bypass=False):
    """
    When 'env' is True and 'proxy' is None, possible proxies will be obtained
    automatically (wrong proxy may be obtained).

    When 'proxy' is not None, it will force the proxy to be used and 'env'
    will have no effect.

    proxy <str> is used for a single proxy with a url:
    'socks5://user:[email protected]:1080'

    If you want to use proxy chaining, read
    https://github.com/romis2012/aiohttp-socks.
    """
    kwargs = {'limit_per_host': limit}
    if bypass:
        # SNI-bypass mode: custom resolver plus a TLS context that skips
        # certificate and hostname verification.
        import ssl
        from .bypass_sni import ByPassResolver
        ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        ssl_ctx.check_hostname = False
        ssl_ctx.verify_mode = ssl.CERT_NONE
        kwargs.update({'ssl': ssl_ctx, 'resolver': ByPassResolver()})
    if proxy:
        try:
            from aiohttp_socks import ProxyConnector
            self.conn = ProxyConnector.from_url(proxy, **kwargs)
            # _flag False: the connector itself tunnels the proxy.
            _flag = False
        except ModuleNotFoundError as e:
            if proxy.startswith('socks'):
                # SOCKS requires aiohttp_socks; no native fallback exists.
                raise e
            else:
                # HTTP proxies can be handled by aiohttp natively,
                # per-request (see the partial() patching below).
                self.conn = aiohttp.TCPConnector(**kwargs)
                _flag = True
    else:
        self.conn = aiohttp.TCPConnector(**kwargs)
    self.internal = internal
    self.client = aiohttp.ClientSession(
        connector=self.conn,
        timeout=aiohttp.ClientTimeout(total=timeout),
        trust_env=env,
    )
    if proxy and _flag:
        # aiohttp-native HTTP proxying: inject proxy= into every request
        # by rebinding the session's verb methods.
        from functools import partial
        self.client.head = partial(self.client.head, proxy=proxy)
        self.client.get = partial(self.client.get, proxy=proxy)
        self.client.post = partial(self.client.post, proxy=proxy)
async def test_socks4_proxy(url, rdns):
    """Fetching through the SOCKS4 proxy should return HTTP 200 for both
    rdns settings."""
    connector = ProxyConnector.from_url(SOCKS4_URL, rdns=rdns)
    response = await fetch(connector=connector, url=url)
    assert response.status == 200
def make_aiohttp_session(proxy: Optional[dict], headers=None, timeout=None):
    """Create a ClientSession, optionally tunnelled through a SOCKS proxy.

    *proxy* is a dict with 'mode', 'host', 'port' and optional
    'user'/'password'; None means a direct TLS connection.
    """
    headers = {'User-Agent': 'Electrum'} if headers is None else headers
    if timeout is None:
        # The default timeout is high intentionally.
        # DNS on some systems can be really slow, see e.g. #5337
        timeout = aiohttp.ClientTimeout(total=45)
    elif isinstance(timeout, (int, float)):
        timeout = aiohttp.ClientTimeout(total=timeout)
    ssl_context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
                                             cafile=ca_path)
    if not proxy:
        connector = aiohttp.TCPConnector(ssl=ssl_context)
    else:
        socks_type = (ProxyType.SOCKS5
                      if proxy['mode'] == 'socks5' else ProxyType.SOCKS4)
        connector = ProxyConnector(
            proxy_type=socks_type,
            host=proxy['host'],
            port=int(proxy['port']),
            username=proxy.get('user', None),
            password=proxy.get('password', None),
            rdns=True,
            ssl=ssl_context,
        )
    return aiohttp.ClientSession(headers=headers,
                                 timeout=timeout,
                                 connector=connector)
def http_client(
    self,
    client_type: ClientType = None,
    client_id: int = 0,
    client_options: Mapping[str, Any] = None,
    container=None,
) -> aiohttp.ClientSession:
    """Return the HTTP client session for (client_type, client_id),
    creating and caching it on first use.

    PROXIED and NONPROXIED clients get persisted cookies and a 60s timeout;
    PROXIED clients are additionally routed through the Tor proxy of
    *container* (which must be a scraper Container for that type).
    *client_options* is merged recursively over the instance defaults.
    """
    # Cache hit: reuse the existing session for this type/id pair.
    if client_type in self._http_clients and client_id in self._http_clients[
            client_type]:
        return self._http_clients[client_type][client_id]
    options = self._http_client_options.copy()
    if client_type in (self.ClientType.PROXIED, self.ClientType.NONPROXIED):
        options["cookie_jar"] = self.http_clients_load_cookies(
            client_type, client_id)
        options["timeout"] = aiohttp.ClientTimeout(total=60)
    if client_type in (self.ClientType.PROXIED, ):
        # Imported lazily to avoid a hard dependency for non-proxied use.
        from scraper.control import Container
        assert isinstance(container, Container)
        options["connector"] = ProxyConnector.from_url(
            container.tor.proxy_url)
    # Caller-supplied options override the defaults, merged key-by-key.
    options = utils.update_recursive(options, client_options or {})
    self._http_clients[client_type][
        client_id] = session = LoggingClientSession(**options)
    log.debug("({cls}) created new %s HTTP client session %s, options=%s",
              client_type.value,
              session,
              options,
              extra=self._fmtargs)
    return session
async def test_socks5_proxy_with_proxy_connect_timeout():
    """A tiny sock_connect budget must surface as ProxyTimeoutError."""
    connector = ProxyConnector.from_url(SOCKS5_IPV4_URL)
    client_timeout = aiohttp.ClientTimeout(total=32, sock_connect=0.001)
    with pytest.raises(ProxyTimeoutError):
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(HTTP_TEST_URL,
                                   timeout=client_timeout) as resp:
                await resp.text()
def __init__(self, limit=30, timeout=10, env=False, internal=False,
             proxy=None):
    """
    When 'env' is True and 'proxy' is None, possible proxies will be obtained
    automatically (wrong proxy may be obtained).

    When 'proxy' is not None, it will force the proxy to be used and 'env'
    will have no effect.

    proxy <str> is used for a single proxy with a url:
    'socks5://user:[email protected]:1080'

    If you want to use proxy chaining, read
    https://github.com/romis2012/aiohttp-socks.
    """
    if proxy:
        connection = ProxyConnector.from_url(proxy, limit_per_host=limit)
    else:
        connection = aiohttp.TCPConnector(limit_per_host=limit)
    self.conn = connection
    self.internal = internal
    self.client = aiohttp.ClientSession(
        trust_env=env,
        timeout=aiohttp.ClientTimeout(total=timeout),
        connector=self.conn,
    )
def useProxy(self):
    """Switch this instance to the configured proxy and return self for
    chaining; raise SearcherProxyNotFoundException when none is set."""
    proxy = ymConfig.getConfig('setting').get('proxy')
    if not proxy:
        raise SearcherProxyNotFoundException
    self.connector = ProxyConnector.from_url(proxy)
    return self
async def check_proxy(self, proxy, proxyType, i):
    """Attempt a Discord registration request through proxy #*i* and print
    whether the proxy works.

    The response body is accumulated across partial reads so an
    IncompleteReadError does not lose data. Fix: the original had a second,
    unreachable ``print(response.status); break`` after the read loop's
    ``else`` branch already broke — that dead code is removed.
    """
    print(i)
    connector = ProxyConnector.from_url("{}://{}".format(proxyType, proxy))
    async with aiohttp.ClientSession(connector=connector,
                                     timeout=timeout) as session:
        responseJSON = ""
        try:
            async with session.post('https://discord.com/api/v8/register',
                                    json=payload) as response:
                while True:
                    status_code = response.status
                    try:
                        responseJSONpart = await response.read()
                    except asyncio.exceptions.IncompleteReadError as e:
                        # Keep whatever arrived, then retry the read.
                        responseJSON = responseJSON + e.partial.decode(
                            'utf-8')
                        continue
                    responseJSON = responseJSON + responseJSONpart.decode(
                        'utf-8')
                    print(response.status)
                    break
        except Exception as e:
            # Classify the failure by exception class name.
            result = get_full_class_name(e)
            if result == "proxy_socks._errors.ProxyConnectionError":
                print("Failed proxy: {}".format(proxy))
            elif result == "asyncio.exceptions.TimeoutError":
                print("timed out. #{}".format(i))
def _session(self) -> ClientSession:
    """Build a fresh session routed through this instance's HTTP proxy,
    raising for non-2xx statuses automatically."""
    connector = ProxyConnector.from_url(self.http_proxy)
    limits = ClientTimeout(total=60, connect=30, sock_read=10)
    return ClientSession(connector=connector,
                         raise_for_status=True,
                         timeout=limits)
async def test_socks4_proxy(url, rdns):
    """GET through the SOCKS4 proxy should return HTTP 200 for both rdns
    settings."""
    connector = ProxyConnector.from_url(SOCKS4_URL, rdns=rdns)
    async with aiohttp.ClientSession(connector=connector) as client:
        async with client.get(url) as response:
            assert response.status == 200
async def replace_session(self):
    """Close the current session and build a fresh RetryClient, proxied
    when TELEGRAPH_PROXY_DICT is configured."""
    await self.session.close()
    if env.TELEGRAPH_PROXY_DICT:
        proxy_connector = ProxyConnector(**env.TELEGRAPH_PROXY_DICT,
                                         loop=self.loop)
    else:
        proxy_connector = None
    self.session = RetryClient(connector=proxy_connector,
                               timeout=ClientTimeout(total=10),
                               loop=self.loop,
                               json_serialize=self._json_serialize)
async def create_client_session(self):
    """Build a ClientSession using the configured proxy (if any) and the
    cookies fetched asynchronously from this instance."""
    proxy = self.get_proxy()
    if proxy:
        connector = ProxyConnector.from_url(proxy)
    else:
        connector = None
    cookies = await self.get_cookies()
    return ClientSession(connector=connector,
                         cookies=cookies,
                         read_timeout=None)
def create_client_session(self):
    """Build a ClientSession using the configured proxy (if any) and the
    auth object's cookies when it has them."""
    proxy = self.get_proxy()
    if proxy:
        connector = ProxyConnector.from_url(proxy)
    else:
        connector = None
    # getattr with a default is equivalent to the hasattr check.
    cookies = getattr(self.auth, "cookies", {})
    return ClientSession(connector=connector,
                         cookies=cookies,
                         read_timeout=None)
def create_client_session(self):
    """Build a ClientSession using the configured proxy (if any) and the
    formatted cookie from the auth details."""
    proxy = self.get_proxy()
    if proxy:
        connector = ProxyConnector.from_url(proxy)
    else:
        connector = None
    cookies = self.auth.auth_details.cookie.format()
    return ClientSession(connector=connector,
                         cookies=cookies,
                         read_timeout=None)
async def fetch_page(url):
    """Fetch *url* through the local Tor SOCKS proxy and report whether the
    response status was OK."""
    # Route the request through the SOCKS proxy listening on localhost:9050.
    proxy_connector = ProxyConnector.from_url('socks5://localhost:9050')
    async with aiohttp.ClientSession(connector=proxy_connector) as session:
        async with session.get(url) as response:
            # Drain the body so the response is fully fetched before use.
            await response.text()
            return response.ok
async def test_socks5_proxy_with_invalid_proxy_port(unused_tcp_port):
    """Pointing the connector at a closed port must raise
    ProxyConnectionError."""
    connector = ProxyConnector(
        username=LOGIN,
        password=PASSWORD,
        proxy_type=ProxyType.SOCKS5,
        host=PROXY_HOST_IPV4,
        port=unused_tcp_port,
    )
    with pytest.raises(ProxyConnectionError):
        await fetch(connector=connector, url=TEST_URL_IPV4)
async def test_socks5_proxy_with_invalid_credentials():
    """A wrong password must be rejected by the proxy with ProxyError."""
    connector = ProxyConnector(
        username=LOGIN,
        password=PASSWORD + 'aaa',  # deliberately corrupted credential
        proxy_type=ProxyType.SOCKS5,
        host=PROXY_HOST_IPV4,
        port=SOCKS5_PROXY_PORT,
    )
    with pytest.raises(ProxyError):
        await fetch(connector=connector, url=TEST_URL_IPV4)
async def get(self) -> 'list':
    """Fetch every configured endpoint, slice each JSON payload, and return
    the formatted rows.

    Uses the configured proxy when present. Fix: the connector is now
    closed in a ``finally`` block — the original only closed it after all
    requests succeeded, leaking it whenever a request or JSON parse raised.
    Resets the builder state (hasParm/hasAction/rips) on success.
    """
    self.__build()
    connector: ProxyConnector = ProxyConnector()
    proxy = ymConfig.getConfig('setting').get('proxy')
    if proxy:
        connector = ProxyConnector.from_url(proxy)
    data: list = []
    try:
        # self.rips maps url -> (start, stop) slice bounds for its payload.
        for k, t in self.rips.items():
            async with aiohttp.request('GET', k,
                                       connector=connector) as response:
                raw = await response.read()
                logger.debug(k + f"\n[{t[0]}, {t[1]}]")
                data = data + json.loads(raw)[t[0]:t[1]]
    finally:
        await connector.close()
    result: list = self._formatData(data)
    self.hasParm = 0
    self.hasAction = ''
    self.rips.clear()
    return result
def __init__(self, *args, **kwargs):
    """Wrap an aiohttp session behind the given SOCKS proxy.

    Recognised kwargs: 'proxy' (proxy URL string), 'cookie_jar', and
    'logger' (defaults to a Mock).
    """
    self.logger = kwargs.get('logger', Mock())
    proxy_url = kwargs.get('proxy')
    jar = kwargs.get('cookie_jar')
    proxy_connector = ProxyConnector.from_url(proxy_url)
    # NOTE(review): assigning verify_ssl post-construction is what the
    # original relied on; newer aiohttp exposes this via the constructor —
    # confirm against the pinned aiohttp version.
    proxy_connector.verify_ssl = False
    self.session = aiohttp.ClientSession(connector=proxy_connector,
                                         trust_env=True,
                                         cookie_jar=jar)
def get_connector(config):
    """Translate the proxy settings on *config* into an aiohttp connector.

    Returns a ProxyConnector for tor/socks proxies, None for HTTP proxies
    (which set the module-level ``httpproxy`` URL instead) and for no proxy.
    Exits the process on inconsistent proxy arguments.
    """
    logme.debug(__name__ + ":get_connector")
    _connector = None
    if not config.Proxy_host:
        # No host: port/type alone are an error; otherwise no proxy at all.
        if config.Proxy_port or config.Proxy_type:
            logme.critical(__name__ + ":get_connector:proxy-host-arg-error")
            print(
                "Error: Please specify --proxy-host, --proxy-port, and --proxy-type"
            )
            sys.exit(1)
        return _connector
    if config.Proxy_host.lower() == "tor":
        # Standard local Tor SOCKS endpoint; rdns resolves names via Tor.
        return ProxyConnector(host="127.0.0.1", port=9050, rdns=True)
    if not (config.Proxy_port and config.Proxy_type):
        logme.critical(__name__ + ":get_connector:proxy-port-type-error")
        print(
            "Error: Please specify --proxy-host, --proxy-port, and --proxy-type"
        )
        sys.exit(1)
    proxy_kind = config.Proxy_type.lower()
    if proxy_kind == "http":
        # HTTP proxying is handled per-request via this global URL,
        # so no connector is returned.
        global httpproxy
        httpproxy = "http://" + config.Proxy_host + ":" + str(
            config.Proxy_port)
        return _connector
    socks_types = {"socks5": ProxyType.SOCKS5, "socks4": ProxyType.SOCKS4}
    if proxy_kind not in socks_types:
        logme.critical("get_connector:proxy-type-error")
        print(
            "Error: Proxy types allowed are: http, socks5 and socks4. No https."
        )
        sys.exit(1)
    return ProxyConnector(
        proxy_type=socks_types[proxy_kind],
        host=config.Proxy_host,
        port=config.Proxy_port,
        rdns=True,
    )
async def test_socks5_proxy_with_timeout():
    """A deliberately slow upstream must raise asyncio.TimeoutError when the
    fetch is capped at one second."""
    connector = ProxyConnector(
        username=LOGIN,
        password=PASSWORD,
        proxy_type=ProxyType.SOCKS5,
        host=PROXY_HOST_IPV4,
        port=SOCKS5_PROXY_PORT,
    )
    with pytest.raises(asyncio.TimeoutError):
        await fetch(connector=connector, url=TEST_URL_IPV4_DELAY, timeout=1)
async def fetch(self, url: str, args: CmdArgs) -> bytes:
    """Download *url* (through args.proxy when one is set) and return the
    raw response body. TLS verification is disabled on both paths."""
    if args.proxy == '':
        connector = TCPConnector(ssl=False)
    else:
        connector = ProxyConnector.from_url(args.proxy, ssl=False)
    async with ClientSession(connector=connector) as client:
        async with client.get(url, headers=args.headers) as resp:
            return await resp.content.read()
async def download(book_id: int, file_type: str, type_: int = 0,
                   retry: int = 3):
    """Download book *book_id* as *file_type*, rotating between mirrors.

    type_ selects the mirror:
      0: flibusta.is (clearnet, direct connection)
      1: flibustahezeous3.onion (reached through the Tor SOCKS proxy)
    On a transport/content failure the other mirror is tried and one retry
    is spent; after all retries, falls back to ``manual_convert``.
    """
    # type 0: flibusta.is
    # type 1: flibustahezeous3.onion
    while retry > 0:
        url: str = ""
        connector = None
        if type_ == 0:
            basic_url = "http://flibusta.is"
        elif type_ == 1:
            basic_url = "http://flibustahezeous3.onion"
        else:
            raise Exception()
        if type_ == 1:
            # The onion mirror is only reachable through the Tor proxy.
            connector = ProxyConnector.from_url(Config.TOR_PROXIES)
        if file_type in ("fb2", "epub", "mobi"):
            url = basic_url + f"/b/{book_id}/{file_type}"
        else:
            url = basic_url + f"/b/{book_id}/download"
        try:
            async with aiohttp.ClientSession(timeout=ClientTimeout(
                    total=10 * 60, sock_connect=2 * 60),
                                             connector=connector) as session:
                async with session.get(
                        url, allow_redirects=True, max_redirects=50
                ) as resp:  # type: aiohttp.ClientResponse
                    # An HTML body or a non-200 status means an error page
                    # came back instead of the book file.
                    if resp.headers.get(
                            "Content-Type"
                    ) and "text/html" in resp.headers.get(
                            "Content-Type") or resp.status != 200:
                        raise NotBookException("NotBookException")
                    if resp.headers.get("Content-Type") == "application/zip":
                        # Zipped payloads are unpacked off the event loop
                        # in the shared process pool.
                        return await asyncio.get_event_loop().run_in_executor(
                            process_pool_executor, unzip, await resp.read(),
                            file_type)
                    return await resp.content.read()
        except (aiohttp.ServerDisconnectedError, aiohttp.ClientOSError,
                aiohttp.ClientPayloadError, aiohttp.ClientConnectorError,
                zipfile.BadZipFile, FileNotFoundError, ProxyTimeoutError,
                NotBookException) as e:
            print(e)
            # Switch to the other mirror and spend one retry.
            type_ += 1
            if type_ >= 2:
                type_ = 0
            retry -= 1
    return await manual_convert(book_id, file_type)
async def fetch(self, url: str) -> tuple:
    """GET *url* (through self.args.proxy when one is set).

    Returns:
        (final_url, text): the post-redirect URL as a string, and the body
        decoded via ``self.load_raw2text``.

    Fix: the return annotation previously claimed ``-> str`` although the
    function has always returned a 2-tuple.
    """
    if self.args.proxy == '':
        connector = TCPConnector(ssl=False)
    else:
        connector = ProxyConnector.from_url(self.args.proxy, ssl=False)
    async with ClientSession(connector=connector) as client:
        async with client.get(url, headers=self.args.headers) as resp:
            body = await resp.read()
            return str(resp.url), self.load_raw2text(body)
async def test_socks5_proxy_with_invalid_proxy_port(unused_tcp_port):
    """A SOCKS5 connector pointed at a closed port must raise
    ProxyConnectionError when the session issues a request."""
    proxy_connector = ProxyConnector(
        username=LOGIN,
        password=PASSWORD,
        proxy_type=ProxyType.SOCKS5,
        host=SOCKS5_IPV4_HOST,
        port=unused_tcp_port,
    )
    with pytest.raises(ProxyConnectionError):
        async with aiohttp.ClientSession(
                connector=proxy_connector) as session:
            async with session.get(HTTP_TEST_URL) as response:
                await response.text()