async def fetch(url, proxy=None):
    # Fetch `url` through the given HTTP proxy and decode the JSON body.
    conn = aiohttp.ProxyConnector(proxy=proxy)
    headers = {'user-agent': get_user_agent()}
    with aiohttp.ClientSession(connector=conn) as session:
        with aiohttp.Timeout(TIMEOUT):
            async with session.get(url, headers=headers) as resp:
                return await resp.json()
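A minimal driver for the fetch() coroutine above, sketched under the assumption that get_user_agent() and TIMEOUT are defined in the same module; the proxy address is only a placeholder.

import asyncio

# Hypothetical entry point for fetch() above; the proxy URL is a placeholder,
# and fetch(), get_user_agent() and TIMEOUT are assumed to live in this module.
def main():
    loop = asyncio.get_event_loop()
    data = loop.run_until_complete(
        fetch('http://python.org', proxy='http://127.0.0.1:8080'))
    print(data)

if __name__ == '__main__':
    main()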
def test_https_connect_http_proxy_error(self, ClientRequestMock):
    loop_mock = unittest.mock.Mock()
    proxy_req = ClientRequest('GET', 'http://proxy.example.com',
                              loop=loop_mock)
    ClientRequestMock.return_value = proxy_req

    proxy_resp = ClientResponse('get', 'http://proxy.example.com')
    proxy_req.send = send_mock = unittest.mock.Mock()
    send_mock.return_value = proxy_resp
    proxy_resp.start = start_mock = unittest.mock.Mock()
    self._fake_coroutine(
        start_mock,
        unittest.mock.Mock(status=400, reason='bad request'))

    connector = aiohttp.ProxyConnector('http://proxy.example.com',
                                       loop=loop_mock)

    tr, proto = unittest.mock.Mock(), unittest.mock.Mock()
    tr.get_extra_info.return_value = None
    self._fake_coroutine(loop_mock.create_connection, (tr, proto))

    req = ClientRequest('GET', 'https://www.python.org')
    with self.assertRaisesRegex(
            aiohttp.HttpProxyError, "400, message='bad request'"):
        self.loop.run_until_complete(connector._create_connection(req))
def test_https_connect_runtime_error(self, ClientRequestMock):
    loop_mock = unittest.mock.Mock()
    proxy_req = ClientRequest('GET', 'http://proxy.example.com',
                              loop=loop_mock)
    ClientRequestMock.return_value = proxy_req

    proxy_resp = ClientResponse('get', 'http://proxy.example.com')
    proxy_resp._loop = loop_mock
    proxy_req.send = send_mock = unittest.mock.Mock()
    send_mock.return_value = proxy_resp
    proxy_resp.start = start_mock = unittest.mock.Mock()
    self._fake_coroutine(start_mock, unittest.mock.Mock(status=200))

    connector = aiohttp.ProxyConnector(
        'http://proxy.example.com', loop=loop_mock)

    tr, proto = unittest.mock.Mock(), unittest.mock.Mock()
    tr.get_extra_info.return_value = None
    self._fake_coroutine(loop_mock.create_connection, (tr, proto))

    req = ClientRequest('GET', 'https://www.python.org', loop=self.loop)
    with self.assertRaisesRegex(
            RuntimeError, "Transport does not expose socket instance"):
        self.loop.run_until_complete(connector._create_connection(req))

    proxy_req.close()
    proxy_resp.close()
    req.close()
def __init__(self, config=None):
    if config is None:
        config = ConfigDefaults()
    self.config = config

    self.commands = Commands(self)
    self.players = {}
    self.now_playing = {}
    self.last_status = None
    self.exit_signal = None

    if self.config.proxy:
        self.connector = aiohttp.ProxyConnector(proxy=self.config.proxy)
    else:
        self.connector = aiohttp.TCPConnector()

    self.aiolocks = defaultdict(asyncio.Lock)
    self._setup_logging()

    options = {"connector": self.connector}
    super().__init__(loop=None, **options)
    self.http.user_agent += " VitasBot/{0}".format(str(BOTVERSION))
async def get_pages(urls, proxy_url):
    tasks = [
        fetch_page(url, aiohttp.ProxyConnector(proxy_url))
        for url in urls
    ]
    for task in asyncio.as_completed(tasks):
        url, content = await task
        print('url: %s; content: %.100s' % (url, content))
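A sketch of calling get_pages() from synchronous code, assuming fetch_page() from the surrounding module accepts a URL plus a connector and returns an (url, content) pair; the URL list and proxy address below are placeholders.

import asyncio

# Placeholder inputs; fetch_page() is assumed to be defined next to get_pages().
urls = ['http://python.org', 'http://example.com']
proxy_url = 'http://127.0.0.1:8080'

loop = asyncio.get_event_loop()
loop.run_until_complete(get_pages(urls, proxy_url))
loop.close()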
def test_connect(self, ClientRequestMock):
    req = ClientRequest('GET', 'http://www.python.org')
    self.assertEqual(req.path, '/')

    loop_mock = unittest.mock.Mock()
    connector = aiohttp.ProxyConnector('http://proxy.example.com',
                                       loop=loop_mock)
    self.assertIs(loop_mock, connector._loop)

    resolve_host = unittest.mock.Mock()
    self._fake_coroutine(resolve_host, [unittest.mock.MagicMock()])
    connector._resolve_host = resolve_host

    tr, proto = unittest.mock.Mock(), unittest.mock.Mock()
    self._fake_coroutine(loop_mock.create_connection, (tr, proto))
    conn = self.loop.run_until_complete(connector.connect(req))
    self.assertEqual(req.path, 'http://www.python.org/')
    self.assertIs(conn._transport, tr)
    self.assertIs(conn._protocol, proto)

    # resolve_host.assert_called_once_with('proxy.example.com', 80)
    self.assertEqual(tr.mock_calls, [])

    ClientRequestMock.assert_called_with(
        'GET', 'http://proxy.example.com',
        auth=None, headers={'Host': 'www.python.org'}, loop=loop_mock)
def make_prox_list(self):
    prox_list = []
    ip_list = self._get_ips()
    for ip in ip_list:
        connector = aiohttp.ProxyConnector(proxy=ip)
        prox_list.append(connector)
    return prox_list
def test_auth_from_url(self, ClientRequestMock):
    proxy_req = ClientRequest('GET',
                              'http://*****:*****@proxy.example.com')
    ClientRequestMock.return_value = proxy_req
    self.assertIn('AUTHORIZATION', proxy_req.headers)
    self.assertNotIn('PROXY-AUTHORIZATION', proxy_req.headers)

    loop_mock = unittest.mock.Mock()
    connector = aiohttp.ProxyConnector(
        'http://*****:*****@proxy.example.com', loop=loop_mock)
    connector._resolve_host = resolve_mock = unittest.mock.Mock()
    self._fake_coroutine(resolve_mock, [unittest.mock.MagicMock()])

    tr, proto = unittest.mock.Mock(), unittest.mock.Mock()
    self._fake_coroutine(loop_mock.create_connection, (tr, proto))

    req = ClientRequest('GET', 'http://www.python.org')
    self.assertNotIn('AUTHORIZATION', req.headers)
    self.assertNotIn('PROXY-AUTHORIZATION', req.headers)
    self.loop.run_until_complete(connector.connect(req))

    self.assertEqual(req.path, 'http://www.python.org/')
    self.assertNotIn('AUTHORIZATION', req.headers)
    self.assertIn('PROXY-AUTHORIZATION', req.headers)
    self.assertNotIn('AUTHORIZATION', proxy_req.headers)
    self.assertNotIn('PROXY-AUTHORIZATION', proxy_req.headers)

    ClientRequestMock.assert_called_with(
        'GET', 'http://*****:*****@proxy.example.com',
        auth=None, loop=unittest.mock.ANY, headers=unittest.mock.ANY)
def test_https_connect(self, ClientRequestMock):
    loop_mock = unittest.mock.Mock()
    proxy_req = ClientRequest('GET', 'http://proxy.example.com',
                              loop=loop_mock)
    ClientRequestMock.return_value = proxy_req

    proxy_resp = ClientResponse('get', 'http://proxy.example.com')
    proxy_req.send = send_mock = unittest.mock.Mock()
    send_mock.return_value = proxy_resp
    proxy_resp.start = start_mock = unittest.mock.Mock()
    self._fake_coroutine(start_mock, unittest.mock.Mock(status=200))

    connector = aiohttp.ProxyConnector('http://proxy.example.com',
                                       loop=loop_mock)

    tr, proto = unittest.mock.Mock(), unittest.mock.Mock()
    self._fake_coroutine(loop_mock.create_connection, (tr, proto))

    req = ClientRequest('GET', 'https://www.python.org')
    self.loop.run_until_complete(connector._create_connection(req))

    self.assertEqual(proxy_req.method, 'CONNECT')
    self.assertEqual(proxy_req.path, 'www.python.org:443')
    tr.pause_reading.assert_called_once_with()
    tr.get_extra_info.assert_called_once_with('socket', default=None)
def test_proxy_auth_property(self):
    connector = aiohttp.ProxyConnector(
        'http://proxy.example.com',
        proxy_auth=aiohttp.helpers.BasicAuth('user', 'pass'),
        loop=self.loop)
    self.assertEqual(('user', 'pass', 'latin1'), connector.proxy_auth)
    connector.close()
def test_connect(self, ClientRequestMock):
    req = ClientRequest('GET', URL('http://www.python.org'),
                        loop=self.loop)
    self.assertEqual(req.url.path, '/')

    connector = aiohttp.ProxyConnector(URL('http://proxy.example.com'),
                                       loop=self.loop)
    self.assertIs(self.loop, connector._loop)

    connector._resolve_host = make_mocked_coro([mock.MagicMock()])

    tr, proto = mock.Mock(), mock.Mock()
    self.loop.create_connection = make_mocked_coro((tr, proto))
    conn = self.loop.run_until_complete(connector.connect(req))
    self.assertEqual(req.url, URL('http://www.python.org'))
    self.assertIs(conn._transport, tr)
    self.assertIs(conn._protocol, proto)

    # resolve_host.assert_called_once_with('proxy.example.com', 80)
    tr.get_extra_info.assert_called_once_with('sslcontext')

    ClientRequestMock.assert_called_with(
        'GET', URL('http://proxy.example.com'),
        auth=None, headers={'Host': 'www.python.org'}, loop=self.loop)
    conn.close()
def __init__(self, login_id, password):
    """Initialize the authentication object with `login_id` and `password`.

    `login_id` is the user name used to log in to the DMM website,
    usually an e-mail address; `password` is the login password.
    Only DMM account login is supported; Facebook and Google+ logins
    are not supported.

    :param login_id: str
    :param password: str
    :return: none
    """
    # Login credentials
    self.login_id = login_id
    self.password = password

    # Initialize the aiohttp session; if a proxy server is configured,
    # route the session through it.
    if config.proxy:
        self.connector = aiohttp.ProxyConnector(proxy=config.proxy,
                                                force_close=False)
    else:
        self.connector = None
    self.session = aiohttp.ClientSession(connector=self.connector)
    self.headers = {'User-Agent': self.user_agent}

    # Variables used during the login process
    self.dmm_token = None
    self.token = None
    self.idKey = None
    self.pwdKey = None
    self.owner = None
    self.osapi_url = None
    self.world_id = None
    self.world_ip = None
    self.api_token = None
    self.api_starttime = None
    self.flash = None
def fetch(self, code, semaphore, proxy):
    headers = {"User-Agent": "medoc1001119", "Host": "uakey.com.ua"}
    url = 'http://uakey.com.ua/files/cert_list.php?edrpo=%s' % code
    counter = 0
    with (yield from semaphore):
        while True:
            counter += 1
            if counter >= self.retry:
                break
            with aiohttp.Timeout(self.timeout):
                try:
                    if self.proxies:
                        p = proxy.get_proxy
                        conn = aiohttp.ProxyConnector(proxy=p)
                    else:
                        conn = None
                    with aiohttp.ClientSession(connector=conn) as session:
                        response = yield from session.get(url,
                                                          headers=headers)
                        body = yield from response.read()
                    break
                except Exception as err:
                    body = 'err'.encode('utf-8')
                    continue
    return (code, body.decode('utf-8', errors='ignore'))

# proxy = []
# codes = ['35294300']
# a = UKeys(codes)
# res = a.start()
# print(res)
def create_session(self, loop):
    conn = None
    if self.proxy and self.proxy_user:
        conn = aiohttp.ProxyConnector(
            loop=loop,
            limit=self.parallel,
            proxy=self.proxy,
            proxy_auth=aiohttp.BasicAuth(self.proxy_user,
                                         self.proxy_password)
        )
    elif self.proxy:
        conn = aiohttp.ProxyConnector(loop=loop, limit=self.parallel,
                                      proxy=self.proxy)
    else:
        conn = aiohttp.TCPConnector(loop=loop, limit=self.parallel)
    session = aiohttp.ClientSession(connector=conn)
    return session
def test_proxy_auth(self):
    with self.assertRaises(AssertionError) as ctx:
        aiohttp.ProxyConnector('http://proxy.example.com',
                               proxy_auth=('user', 'pass'),
                               loop=unittest.mock.Mock())
    self.assertEqual(
        ctx.exception.args[0],
        ("proxy_auth must be None or BasicAuth() tuple",
         ('user', 'pass')))
def proxy_connector(proxy, proxy_auth=None, **kwargs):
    if isinstance(proxy, HttpProxyAddr):
        return aiohttp.ProxyConnector(proxy.url, proxy_auth=proxy_auth,
                                      **kwargs)
    elif isinstance(proxy, SocksAddr):
        return SocksConnector(proxy, proxy_auth, **kwargs)
    else:
        raise ValueError('Unsupported `proxy` format')
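A hedged usage sketch for the proxy_connector() factory above: the connector type is picked from the address type. The HttpProxyAddr(url) and Socks5Addr(host, port) constructors shown here are assumptions about the surrounding package, not a verified API.

# Sketch only: the address-class constructors below are assumptions.
http_proxy = HttpProxyAddr('http://127.0.0.1:8080')
conn = proxy_connector(http_proxy)           # returns aiohttp.ProxyConnector

socks_proxy = Socks5Addr('127.0.0.1', 1080)  # assumed SocksAddr subclass
conn = proxy_connector(socks_proxy)          # returns SocksConnector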
def __init__(self, username, password, state=None, delay=5, proxy=None,
             loop=None, lock=None):
    if proxy is None:
        self._conn = None
    else:
        self._conn = aiohttp.ProxyConnector(proxy=proxy)
    self.proto = Protocol(username, password, state)
    self.delay = delay
    self.loop = loop or asyncio.get_event_loop()
    self.lock = lock or asyncio.Lock(loop=self.loop)
    self.last_request_time = 0
def test_ctor(self):
    connector = aiohttp.ProxyConnector(
        URL('http://localhost:8118'),
        proxy_auth=aiohttp.helpers.BasicAuth('user', 'pass'),
        loop=self.loop,
    )
    self.assertEqual('http://localhost:8118', str(connector.proxy))
    self.assertEqual(aiohttp.helpers.BasicAuth('user', 'pass'),
                     connector.proxy_auth)
    self.assertTrue(connector.force_close)
def main_loop():
    coros = []
    conn = (aiohttp.ProxyConnector(proxy="http://127.0.0.1:8888",
                                   limit=CONCURRENT_CONN)
            if FIDDLER else
            aiohttp.TCPConnector(limit=CONCURRENT_CONN))
    for year in range(START_INDEX, END_INDEX):
        coros.append(asyncio.Task(fetch_page(conn, year)))
    yield from asyncio.gather(*coros)
    conn.close()
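A possible driver for main_loop(), assuming FIDDLER, CONCURRENT_CONN, START_INDEX, END_INDEX and fetch_page() are defined at module level as the snippet implies. Because main_loop() is written in the legacy generator style, this also assumes an asyncio version that still accepts generator-based coroutines (or an added @asyncio.coroutine decorator).

import asyncio

# Run main_loop() to completion; see the assumptions in the note above.
if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main_loop())
    loop.close()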
def test_proxy_connection_error(self):
    connector = aiohttp.ProxyConnector('http://proxy.example.com',
                                       loop=self.loop)
    connector._resolve_host = resolve_mock = unittest.mock.Mock()
    self._fake_coroutine(resolve_mock, OSError('dont take it serious'))

    req = ClientRequest('GET', 'http://www.python.org')
    expected_headers = dict(req.headers)
    with self.assertRaises(aiohttp.ProxyConnectionError):
        self.loop.run_until_complete(connector.connect(req))
    self.assertEqual(req.path, '/')
    self.assertEqual(dict(req.headers), expected_headers)
def get1():
    url = "http://python.org"
    connector = aiohttp.ProxyConnector(proxy=PROXY)
    D("http get from %s ..", url)
    req = yield from aiohttp.request("get", url, connector=connector)
    with open(OUT, "bw") as fo:
        while True:
            chunk = yield from req.content.read(8016)
            if not chunk:
                break
            fo.write(chunk)
    I("get %s => %s", url, OUT)
def _get_session(self):
    if self.proxy_key:
        proxy = redis_pop(self.redis, self.proxy_key)
        if proxy:
            addr = (proxy['protocol'] + '://' +
                    proxy['ip'] + ':' + proxy['port'])
            conn = aiohttp.ProxyConnector(proxy=addr)
            session = aiohttp.ClientSession(connector=conn)
            return session
        else:
            return None
    else:
        return aiohttp.ClientSession()
def __init__(self, cookies):
    """Create new client.

    cookies is a dictionary of authentication cookies.
    """
    # Event fired when the client connects for the first time with
    # arguments ().
    self.on_connect = event.Event('Client.on_connect')
    # Event fired when the client reconnects after being disconnected with
    # arguments ().
    self.on_reconnect = event.Event('Client.on_reconnect')
    # Event fired when the client is disconnected with arguments ().
    self.on_disconnect = event.Event('Client.on_disconnect')
    # Event fired when a StateUpdate arrives with arguments (state_update).
    self.on_state_update = event.Event('Client.on_state_update')

    self._cookies = cookies

    proxy = os.environ.get('HTTP_PROXY')
    if proxy:
        self._connector = aiohttp.ProxyConnector(proxy)
    else:
        self._connector = aiohttp.TCPConnector()

    self._channel = channel.Channel(self._cookies, self._connector)
    # Future for Channel.listen
    self._listen_future = None

    self._request_header = hangouts_pb2.RequestHeader(
        # Ignore most of the RequestHeader fields since they aren't
        # required. Sending a recognized client_id is important because it
        # changes the behaviour of some APIs (eg. get_conversation will
        # filter out EVENT_TYPE_GROUP_LINK_SHARING_MODIFICATION without
        # it).
        client_version=hangouts_pb2.ClientVersion(
            client_id=hangouts_pb2.CLIENT_ID_WEB_HANGOUTS,
            major_version='hangups-{}'.format(version.__version__),
        ),
        language_code='en',
    )

    # String identifying this client (populated later):
    self._client_id = None

    # String email address for this account (populated later):
    self._email = None

    # Active client management parameters:
    # Time in seconds that the client was last set as active:
    self._last_active_secs = 0.0
    # ActiveClientState enum int value or None:
    self._active_client_state = None
async def get_session(self, *args, **kwargs):
    # TODO socks support
    # TODO auth support
    skip_auto = kwargs.pop('skip_auto_headers', [])
    headers = kwargs.pop('headers', {})
    if self.user_agent is not None and 'User-Agent' not in headers:
        skip_auto.append('User-Agent')
        headers['User-Agent'] = (self.user_agent()
                                 if callable(self.user_agent)
                                 else self.user_agent)
    connector = aiohttp.ProxyConnector(proxy=self._proxy,
                                       force_close=False, limit=1)
    session = aiohttp.ClientSession(connector=connector,
                                    skip_auto_headers=skip_auto,
                                    headers=headers)
    session.proxy = self._proxy
    session.get = functools.partial(self.get, session=session)
    return session
def __init__(self):
    """Constructor: initialize the proxy server from the environment
    configuration.

    :return: none
    """
    if config.proxy:
        self.connector = aiohttp.ProxyConnector(proxy=config.proxy,
                                                force_close=False)
    else:
        self.connector = None

    # Variables that hold the naval-district images and the api_start2
    # content
    self.api_start2 = None
    self.worlds = {}
async def fetch_page_by_aiohttp(url, proxy_url, timeout, loop):
    resp = None
    conn = aiohttp.ProxyConnector(proxy_url)
    try:
        with aiohttp.ClientSession(connector=conn, loop=loop) as session,\
                aiohttp.Timeout(timeout):
            async with session.get(url) as response:
                logger.info('url: %s; status: %d' % (url, response.status))
                resp = await response.read()
    except (aiohttp.errors.ClientOSError,
            aiohttp.errors.ClientResponseError,
            aiohttp.errors.ServerDisconnectedError,
            asyncio.TimeoutError) as e:
        logger.error('url: %s; error: %r' % (url, e))
    finally:
        return (url, resp)
def fetch(self, code, semaphore, proxy):
    headers = {"User-Agent": "medoc1001118", "Host": "lic.bestzvit.com.ua"}
    url = 'http://lic.bestzvit.com.ua/key_medoc_test.php?edrpo=%s' % code
    counter = 0
    with (yield from semaphore):
        while True:
            counter += 1
            if counter >= self.retry:
                break
            with aiohttp.Timeout(self.timeout):
                try:
                    if self.proxies:
                        p = proxy.get_proxy
                        conn = aiohttp.ProxyConnector(proxy=p)
                    else:
                        conn = None
                    with aiohttp.ClientSession(connector=conn) as session:
                        response = yield from session.get(url,
                                                          headers=headers)
                        body = yield from response.read()
                    break
                except Exception as err:
                    body = 'err'.encode('cp1251')
                    continue
    bd = body.decode('cp1251', errors='ignore')
    if bd == '':
        # print('Null')
        return (code, '')
    elif '<head>' in bd:
        # print('Shit')
        return (code, 'err')
    return (code, body.decode('cp1251', errors='ignore'))

# p = ['http://115.31.183.94:80']
# l = []
# with open('./lic.txt', 'r') as f:
#     while True:
#         code = f.readline().strip()
#         if not code:
#             break
#         l.append(code)
# print(l)
# a = Medlic(l)
# data = a.start()
#
# for d in data:
#     print(d)
def test_request_port(self, ClientRequestMock):
    proxy_req = ClientRequest('GET', 'http://proxy.example.com')
    ClientRequestMock.return_value = proxy_req

    loop_mock = unittest.mock.Mock()
    connector = aiohttp.ProxyConnector('http://proxy.example.com',
                                       loop=loop_mock)

    tr, proto = unittest.mock.Mock(), unittest.mock.Mock()
    tr.get_extra_info.return_value = None
    self._fake_coroutine(loop_mock.create_connection, (tr, proto))

    req = ClientRequest('GET', 'http://localhost:1234/path')
    self.loop.run_until_complete(connector._create_connection(req))
    self.assertEqual(req.path, 'http://localhost:1234/path')
def __init__(self, cookies):
    """Create new client.

    cookies is a dictionary of authentication cookies.
    """
    # Event fired when the client connects for the first time with
    # arguments ().
    self.on_connect = event.Event('Client.on_connect')
    # Event fired when the client reconnects after being disconnected with
    # arguments ().
    self.on_reconnect = event.Event('Client.on_reconnect')
    # Event fired when the client is disconnected with arguments ().
    self.on_disconnect = event.Event('Client.on_disconnect')
    # Event fired when a StateUpdate arrives with arguments (state_update).
    self.on_state_update = event.Event('Client.on_state_update')

    self._cookies = cookies

    proxy = os.environ.get('HTTP_PROXY')
    if proxy:
        self._connector = aiohttp.ProxyConnector(proxy)
    else:
        self._connector = aiohttp.TCPConnector()

    self._channel = channel.Channel(self._cookies, self._connector)
    # Future for Channel.listen
    self._listen_future = None

    self._request_header = hangouts_pb2.RequestHeader(
        # Ignore most of the RequestHeader fields since they aren't
        # required.
        client_version=hangouts_pb2.ClientVersion(
            major_version='hangups-{}'.format(__version__),
        ),
        language_code='en',
    )

    # String identifying this client (populated later):
    self._client_id = None

    # String email address for this account (populated later):
    self._email = None

    # Active client management parameters:
    # Time in seconds that the client was last set as active:
    self._last_active_secs = 0.0
    # ActiveClientState enum int value or None:
    self._active_client_state = None
def fetch(self, code, semaphore, proxy):
    headers = {
        "User-Agent": "Mozilla/5.1 (compatible; Googlebot/2.1; +http://www.googIe.com/bot.html)",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Origin": "https://youcontrol.com.ua",
        "Referer": "https://www.google.com.ua/url?sa=t&rct=j&q=&esrc=s&source=web&cd=11&sqi=2&ved=0CFgQFjAK&url=https://youcontrol.com.ua//catalog/company_details/{0}/&ei=NastA2_WMI-O7QautoOXIg&usg=AFQjakahZhWGZeIoJGtAOoSgSG25l4GREeri".format(str(code)),
        "Accept-language": "ua-UA,ua;q=0.8,ru-RU;q=0.6,ru;q=0.4",
        "Upgrade-Insecure-Requests": "1",
    }
    url = 'https://youcontrol.com.ua/catalog/company_details/{0}/'.format(
        str(code))
    counter = 0
    with (yield from semaphore):
        while True:
            counter += 1
            if counter >= self.retry:
                break
            with aiohttp.Timeout(self.timeout):
                try:
                    if self.proxies:
                        p = proxy.get_proxy
                        conn = aiohttp.ProxyConnector(proxy=p)
                    else:
                        conn = None
                    with aiohttp.ClientSession(connector=conn) as session:
                        response = yield from session.get(url,
                                                          headers=headers)
                        body = yield from response.read()
                        if 'recaptcha' in body.decode():
                            raise Exception
                        elif '404.css' in body.decode():
                            raise Exception
                    break
                except Exception as err:
                    body = 'err'.encode('utf-8')
                    continue
    return (code, body.decode('utf-8', errors='ignore'))