Example #1
def set_proxy(url, auth=None):
    global _proxy
    global _proxy_auth
    if not url:
        _proxy = None
    else:
        type = url.split(':')[0]
        _proxy = (type, url, auth) if auth else (
            type,
            url,
        )

        # HTTP proxy credentials as an aiohttp.BasicAuth instance (None when no auth was supplied)
        _proxy_auth = aiohttp.BasicAuth(*auth) if auth else None
        if type != "http":
            if not auth:
                socks_conn = SocksConnector.from_url(url, rdns=True, limit=10)
            else:
                if len(auth) < 2:
                    auth += ('', )
                socks_conn = SocksConnector.from_url(url,
                                                     rdns=True,
                                                     username=auth[0],
                                                     password=auth[1],
                                                     limit=10)

            _pools['default'] = aiohttp.ClientSession(connector=socks_conn,
                                                      loop=_loop)

            _proxy_auth = {
                # SOCKS is implemented by aiohttp_socks, which handles auth internally
                "socks4": None,
                "socks5": None,
            }.get(type, None)
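Note: in newer aiohttp_socks releases, SocksConnector has been superseded by ProxyConnector (Example #18 already uses it). Below is a minimal, hedged sketch of the same pattern with the newer name; the proxy URL and credentials are placeholders, not values from any example above:

import asyncio
import aiohttp
from aiohttp_socks import ProxyConnector  # supersedes SocksConnector in newer aiohttp_socks

async def fetch_via_socks5(url):
    # placeholder proxy URL; rdns=True resolves hostnames through the proxy
    connector = ProxyConnector.from_url('socks5://user:password@127.0.0.1:1080', rdns=True)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return await resp.text()

# example usage: asyncio.run(fetch_via_socks5('https://check.torproject.org/'))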
Example #2
    async def prove(self):
        # socks proxy
        proxy = 'socks5://{}:{}'.format(self.target_host,
                                        str(self.target_port))
        connector = SocksConnector.from_url(proxy)
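        # ceye_dns_api() returns an out-of-band callback URL; 'HTTP Record Insert Success' in the response means the target relayed our request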
        url = self.ceye_dns_api(t='url')
        async with ClientSession(connector=connector) as session:
            async with session.options(url=url) as res1:
                if res1:
                    text1 = await res1.text()
                    if 'HTTP Record Insert Success' in text1:
                        self.flag = 1
                        self.res.append({"info": proxy, "key": "proxy unauth"})
                        return

        # http proxy
        proxy = 'http://{}:{}'.format(self.target_host, str(self.target_port))
        async with ClientSession() as session:
            async with session.get(url=url, proxy=proxy) as res2:
                if res2:
                    text2 = await res2.text()
                    if 'HTTP Record Insert Success' in text2:
                        self.flag = 1
                        self.res.append({"info": proxy, "key": "proxy unauth"})
                        return
Example #3
async def get_idex_depth(symbol, cnt):
    global p_count
    proxy = proxys[cnt]
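    # fold the counter back down before picking an API key (the key list appears to be shorter than the proxy list)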
    if cnt > 39:
        cnt -= 20
    if cnt > 19:
        cnt -= 20
    header = {
        'IDEX-API-KEY': gv.idex_apis[cnt],
    }
    url = f"https://api.idex.io/v1/orderbook?market={symbol}&level=2&limit=20"
    socks_url = f"socks5://{proxy[2]}:{proxy[3]}@{proxy[0]}:{proxy[1]}"
    connector = SocksConnector.from_url(socks_url)
    try:
        async with aiohttp.ClientSession(connector=connector, headers=header) as session:
            async with session.get(url) as response:
                html = await response.text()
                jhtml = json.loads(html)
                p_count += 1
                if 'sequence' in html:
                    return jhtml
                elif 'code' in html:
                    if jhtml['code'] != 'MARKET_NOT_FOUND':
                        print('IDEX', symbol, jhtml['code'])
                    return None
    except Exception as exc:
        print(exc, proxy)
        return None
Example #4
    def __init__(self, bot):
        self.bot = bot

        connector = None
        socks5_url = self.bot.config.get('socks5_proxy_url')
        if socks5_url:
            from aiohttp_socks import SocksConnector
            connector = SocksConnector.from_url(socks5_url, rdns=True)

        self.http = aiohttp.ClientSession(
            loop=self.bot.loop,
            read_timeout=self.bot.config.get('http_read_timeout', 60),
            connector=connector
            if self.bot.config.get('use_socks5_for_all_connections') else None,
            headers={
                'User-Agent':
                self.bot.config['user_agent'] + ' ' + self.bot.http.user_agent
            })

        self.emote_client = EmoteClient(self.bot)

        with open('data/ec-emotes-final.json') as f:
            self.ec_emotes = json.load(f)

        # keep track of paginators so we can end them when the cog is unloaded
        self.paginators = weakref.WeakSet()
Example #5
    async def prove(self):
        # socks proxy
        proxy = 'socks5://{}:{}'.format(self.target_host, str(self.target_port))
        connector = SocksConnector.from_url(proxy)
        url = 'http://example.com/'

        async with ClientSession(connector=connector) as session:
            async with session.get(url=url) as res1:
                if res1:
                    text1 = await res1.text()
                    if 'More information...' in text1:
                        self.flag = 1
                        self.res.append({"info": proxy, "key": "proxy unauth"})
                        return

        # http proxy
        proxy = 'http://{}:{}'.format(self.target_host, str(self.target_port))
        async with ClientSession() as session:
            async with session.get(url=url, proxy=proxy) as res2:
                if res2:
                    text2 = await res2.text()
                    if 'More information...' in text2:
                        self.flag = 1
                        self.res.append({"info": proxy, "key": "proxy unauth"})
                        return
Example #6
async def load_content(
        url: str,
        proxy_address: str = None,
        timeout: int = None):

    timeout_ = DEFAULT_TIMEOUT if not timeout else timeout

    if proxy_address is None:
        async with aiohttp.ClientSession() as session:
            return {
                'response': await fetch(session, url=url),
                'proxy': proxy_address
            }

    try:
        async with aiohttp.ClientSession(
            connector=SocksConnector.from_url(f'socks5://{proxy_address}'),
            timeout=aiohttp.ClientTimeout(total=timeout_)
        ) as session:
            return {
                'response': await fetch(session, url=url),
                'proxy': proxy_address
            }
    except Exception:
        print(f'Bad proxy: {proxy_address}')
        return {
            'response': None,
            'proxy': proxy_address
        }
Example #7
	def __init__(self, bot):
		self.bot = bot

		connector = None
		socks5_url = self.bot.config.get('socks5_proxy_url')
		if socks5_url:
			from aiohttp_socks import SocksConnector
			connector = SocksConnector.from_url(socks5_url, rdns=True)

		self.http = aiohttp.ClientSession(
			loop=self.bot.loop,
			read_timeout=self.bot.config.get('http_read_timeout', 60),
			connector=connector if self.bot.config.get('use_socks5_for_all_connections') else None,
			headers={
				'User-Agent':
					self.bot.config['user_agent'] + ' '
					+ self.bot.http.user_agent
			})

		self.aioec = aioec.Client(
			loop=self.bot.loop,
			connector=connector,
			base_url=self.bot.config.get('ec_api_base_url'))
		# keep track of paginators so we can end them when the cog is unloaded
		self.paginators = weakref.WeakSet()
Example #8
    async def open_session(self, proxy: str = None) -> aiohttp.ClientSession:
        if proxy is None:
            self.session = aiohttp.ClientSession()
            return self.session
        connector = SocksConnector.from_url(proxy)
        self.session = aiohttp.ClientSession(connector=connector)
        return self.session
Example #9
async def async_session():
    with Controller.from_port(port=9051) as controller:
        controller.authenticate(password='')
        controller.signal(Signal.NEWNYM)
    connector = SocksConnector.from_url('socks5://127.0.0.1:9050')
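    # the session is entered manually via __aenter__, so the caller is responsible for closing it later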
    session = await aiohttp.ClientSession(connector=connector).__aenter__()
    return session, connector
Example #10
async def test_socks4_connector(url, rdns):
    connector = SocksConnector.from_url(
        SOCKS4_URL,
        rdns=rdns,
    )
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            assert resp.status == 200
Example #11
    async def proxy_session(self, proxy_url: str) -> aiohttp.ClientSession:
        """
        proxy_url = "socks5://user:password@127.0.0.1:1080" or "socks5://127.0.0.1:1080"
        :param proxy_url:
        :return:
        """
        connector = SocksConnector.from_url(proxy_url)
        self.session = aiohttp.ClientSession(connector=connector)
        return self.session
Example #12
def async_session():
    with Controller.from_port(port=9051) as controller:
        controller.authenticate(
            password='******')
        controller.signal(Signal.NEWNYM)

    connector = SocksConnector.from_url('socks5://127.0.0.1:9050')
    return asyncio.get_event_loop().run_until_complete(
        aiohttp.ClientSession(connector=connector).__aenter__()), connector
Example #13
async def get(url, proxy=PROXY):
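    # note: verify_ssl= is deprecated in newer aiohttp releases in favour of ssl=False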
    if proxy:
        connector = SocksConnector.from_url(proxy, verify_ssl=False)
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(url, headers={'User-agent': HEADER}) as response:
                return await response.text()
    else:
        async with aiohttp.ClientSession(connector=TCPConnector(verify_ssl=False)) as session:
            async with session.get(url, headers={'User-agent': HEADER}) as response:
                return await response.text()
Example #14
    async def _connect(self):
        try:
            socks_port = await self.app.tor.socks_port()
            conn = SocksConnector.from_url(f"socks5://127.0.0.1:{socks_port}",
                                           rdns=True)
            async with aiohttp.ClientSession(connector=conn) as session:
                self.session = session
                await self.ping()
        except Exception as e:
            logger.exception(e)
            raise e
Example #15
def _create_onetime_pool():
    if _proxy and _proxy[0] != "http":
        url = _proxy[1]
        if len(_proxy) > 2:  # auth
            auth = _proxy[2]
            socks_conn = SocksConnector.from_url(url,
                                                 rdns=True,
                                                 username=auth[0],
                                                 password=auth[1],
                                                 limit=1,
                                                 force_close=True)
        else:
            socks_conn = SocksConnector.from_url(url,
                                                 rdns=True,
                                                 limit=1,
                                                 force_close=True)
        return aiohttp.ClientSession(connector=socks_conn, loop=_loop)
    else:
        return aiohttp.ClientSession(connector=aiohttp.TCPConnector(
            limit=1, force_close=True),
                                     loop=_loop)
Example #16
    async def open_session(self,
                           proxy: typing.Optional[str] = None
                           ) -> aiohttp.ClientSession:
        """

        @type proxy: object
        """
        if proxy is None:
            self.session = aiohttp.ClientSession()
            return self.session
        connector = SocksConnector.from_url(proxy)
        self.session = aiohttp.ClientSession(connector=connector)
        return self.session
Example #17
    async def download(self, request: Request) -> Union[Response, Exception]:
        session = self._default_session
        new_session = False
        proxy = None

        if request.proxy:
            proxy_url = URL(request.proxy)
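            # aiohttp's per-request proxy= argument only supports HTTP proxies, so SOCKS proxies get a dedicated connector and session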
            if proxy_url.scheme in ('socks4', 'socks5'):
                connector = SocksConnector.from_url(request.proxy)
                session = ClientSession(cookie_jar=CookieJar(unsafe=True), connector=connector)
                new_session = True
            elif proxy_url.scheme == 'https' and URL(request.url).scheme == 'https':
                return await self.download_by_requests(request)
            else:
                proxy = request.proxy

        try:
            if request.cookies:
                session.cookie_jar.update_cookies(request.cookies)

            resp = await session.request(method=request.method,
                                         url=request.url,
                                         params=request.params,
                                         data=request.data,
                                         proxy=proxy,
                                         headers=request.headers,
                                         timeout=request.timeout)

            status = resp.status
            text = await resp.text(encoding=request.encoding)
            cookies = resp.cookies

            response = Response(text=text, status=status, cookies=cookies)
            if request.cookies:
                response.cookies.update(request.cookies)

            return response

        except Exception as e:
            self.logger.error(traceback.format_exc(limit=10))
            return e

        finally:
            if new_session:
                await session.close()
Example #18
    async def get_data(self, urls, workers, timeout, tor, proxy):
        tasks = []
        sem = asyncio.Semaphore(workers)
        if tor:
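            # route traffic through the local Tor SOCKS proxy (Tor's default SOCKS port is 9050)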
            connector = SocksConnector.from_url('socks5://localhost:9050')
        elif proxy:
            connector = ProxyConnector.from_url(proxy)
        else:
            connector = None
        async with ClientSession(connector=connector) as session:

            for base_url in urls:
                task = asyncio.ensure_future(
                    self.bound_fetch(sem, session, base_url, timeout))
                tasks.append(task)

            responses = asyncio.gather(*tasks)
            await responses
Example #19
    def __init__(self, retry_interval=5, max_qps=None, **kwargs):
        if kwargs.get('connector') is None:
            # due to aiohttp limitations, https proxy checking is not supported yet, so http/https proxies are dropped and socks5 proxies are used exclusively
            try:
                if conf['proxy']['proxy'].lower() == 'true':
                    proxy = conf['proxy']['proxy_url']
                    connector = SocksConnector.from_url(proxy)
                else:
                    # context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_1)
                    connector = TCPConnector(ssl=False)
                kwargs.setdefault('connector', connector)
            except KeyError as e:
                logger.error("Load tentacle config error: %s, please check the config in tentacle.conf." % e)

        self._max_fail_retries = int(conf['basic']['max_retries']) or 0
        self._retry_interval = retry_interval
        self._limit = LimitRate(1, 1. / max_qps) if max_qps else None
        super().__init__(**kwargs)
Example #20
async def get_bilaxy_depth(symbol, cnt):
    url = f"https://newapi.bilaxy.com/v1/orderbook?pair={symbol}"
    socks_url = f"socks5://{proxys[cnt][2]}:{proxys[cnt][3]}@{proxys[cnt][0]}:{proxys[cnt][1]}"
    connector = SocksConnector.from_url(socks_url)
    try:
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(url) as response:
                html = await response.text()
                jhtml = json.loads(html)
                if 'timestamp' in html:
                    return jhtml
                else:
                    print('bilaxy', symbol, jhtml)
                    if 'Not found pair' in html:
                        _query(f"""UPDATE bilaxy_markets SET "is_active" = 'f' WHERE "market" LIKE '%{symbol}%'""")
                    return None
    except Exception as exc:
        print(exc, proxys[cnt])
        return None
Example #21
async def get_hitbtc_depth(symbol, cnt):
    url = f"https://api.hitbtc.com/api/2/public/orderbook/{symbol.replace('/', '')}"
    socks_url = f"socks5://{proxys[cnt][2]}:{proxys[cnt][3]}@{proxys[cnt][0]}:{proxys[cnt][1]}"
    connector = SocksConnector.from_url(socks_url)
    try:
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(url) as response:
                html = await response.text()
                jhtml = json.loads(html)
                if 'ask' in html:
                    return jhtml
                elif 'error' in html:
                    print('hitbtc', jhtml['error'])
                    return None
                else:
                    return None
    except Exception as exc:
        print(exc, proxys[cnt])
        return None
Example #22
async def get_hotbit_depth(symbol, cnt):
    url = f"https://api.hotbit.io/api/v1/order.depth?interval=1e-8&&limit=20&market={symbol}"
    socks_url = f"socks5://{proxys[cnt][2]}:{proxys[cnt][3]}@{proxys[cnt][0]}:{proxys[cnt][1]}"
    connector = SocksConnector.from_url(socks_url)
    try:
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(url) as response:
                html = await response.text()
                jhtml = json.loads(html)
                if jhtml['error'] is None:
                    return jhtml['result']
                elif jhtml['error']:
                    print('hotbit', symbol, jhtml['error'])
                    if 'market not exist' in html:
                        _query(f"""UPDATE hotbit_markets SET "is_active" = 'f' WHERE "market" LIKE '%{symbol}%'""")
                    return None
                else:
                    return None
    except Exception as exc:
        print(exc, proxys[cnt])
        return None
Example #23
    def build_session(self, analyzer_system, host_pool):
        """Build a new session."""
        timing_trace_config = _get_timing_trace_config(host_pool)

        session_init_kwargs = {
            'timeout': ClientTimeout(total=analyzer_system['timeout']),
            'trace_configs': [timing_trace_config],
        }

        if analyzer_system['socks_proxy']:
            session_init_kwargs['connector'] = SocksConnector.from_url(
                analyzer_system['socks_proxy'],
                rdns=True,
            )

        session = ClientSession(
            **session_init_kwargs,
        )

        self.sessions.append(session)

        return session
Example #24
async def process(link, user, passw, proxy):
    global GOOD, FINISHED

    cproxy = SocksConnector.from_url(f'socks{settings["socks"]}://' + proxy)

    try:
        user = macros(user, link, '')
        passw = macros(passw, link, user)

        async with ClientSession(connector=cproxy, timeout=timeout) as s:
            data = await first(s, link)

            if not module.valid(data[1], data[0]):
                await save('rebrut', f'{link} - {user}:{passw}')
                return

            _post = module.parse(data[0], user, passw)

            if _post is None:
                await save('rebrut', f'{link} - {user}:{passw}')
                return

            data = await second(s, link, _post)
            assert module.required in data[0]

            await save('good', f'{link} - {user}:{passw}')
            GOOD += 1
    except (SocksConnectionError, SocksError):
        await save('rebrut', f'{link} - {user}:{passw}')
    except asyncio.TimeoutError:
        await save('timeout', f'{link} - {user}:{passw}')
    except AssertionError:
        pass
    except Exception as e:
        await save('report', e)
    finally:
        FINISHED += 1
        print(f'Good: {GOOD}; Done: {FINISHED}', end='\r')
        return
Example #25
    async def get_title(self, url: str):
        url = url.strip()
        connector = None
        # TODO allow exclusive whitelist mode
        if self.settings["use_tor"] and not self.iswhitelisted(url):
            try:
                from aiohttp_socks import SocksConnector
                connector = SocksConnector.from_url(self.settings["tor_addr"])
            except ImportError:
                raise
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(url) as response:
                if response.status == 200:
                    text = await response.text()
                    soup = bs4(text, "html.parser")
                    if soup is not None:
                        try:
                            title = soup.title.string
                        except AttributeError:
                            pass
                        else:
                            return title.strip()
Example #26
async def main():
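    # fetch the page once without a proxy to obtain a reference sample for comparison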
    async with aiohttp.ClientSession() as session:
        sample = await fetch(session, url=PYTHON_URL)

    timeout = aiohttp.ClientTimeout(total=3)
    pool = load_from_file()
    for proxy in pool:
        address = f"{proxy['ip']}:{proxy['port']}"

        connector = SocksConnector.from_url(f'socks5://{address}')
        try:
            async with aiohttp.ClientSession(connector=connector,
                                             timeout=timeout) as session:
                text = await fetch(session, url=PYTHON_URL)
        except Exception:
            print(f'Bad proxy: {address}')
            continue

        if text != sample:
            print(f'Bad proxy: {address}')
            continue

        print(f'Valid proxy: {address}')
Example #27
async def run(batch, requests, proxy):
    # Create client session that will ensure we don't open a new connection per each request.
    # todo It is synced on the whole second part of the wall clock time to make testing in Wireshark easier.
    # todo This results in at most 1.5 seconds delay and can be removed later on
    wait_always = 1000  # msec, to ensure all async tasks (also with wait_time = 0) are able to make this deadline
    wait_final_byte = 5000  # this is how long we wait until the final byte is sent
    ns = get_time_ns()
    start_time = round(ns / 1e9) * 1e3 + wait_always
    start_time_str = str(datetime.datetime.fromtimestamp(start_time / 1000))
    print(f"Start sending time: {start_time_str}", end="")

    # prepare requests
    prepared_requests = {}
    req_ids = batch.get_reqs()
    for req_id in req_ids:
        if batch.sync_last_byte:
            last_byte_time = start_time + wait_final_byte
            print("\tlast byte time: " +
                  str(datetime.datetime.fromtimestamp(last_byte_time / 1000)))
        else:
            last_byte_time = None
            print()
        prepared_requests[req_id] = __prepare_request(requests[req_id],
                                                      batch.allow_redirects,
                                                      last_byte_time)

    tasks = []

    if proxy is not None:
        connector = SocksConnector.from_url(proxy, verify_ssl=False)
    else:
        connector = aiohttp.TCPConnector(verify_ssl=False)

    async with ClientSession(connector=connector) as session:
        send_order = prepare_sending_order(batch.items)
        for key in send_order:
            wait_time = key[1]
            wait_until = start_time + wait_time
            values = batch.items[key]
            a_prepared_request = copy.deepcopy(prepared_requests[key[0]])
            # add wait_time to final_byte_time
            if 'final_byte_time' in a_prepared_request:
                a_prepared_request['final_byte_time'] += wait_time
            # resolve url to ip
            # todo a_request['url'] = await resolve_all_to_ip(loop, [f"{a_request['url'].split('//')[0]}//{a_request['url'].split('//')[1].split('/')[0]}"])
            # send request
            # print(f"Sending ({values[1]}x): {utils.get_req_string(requests[key[0]], True, ['timestamp'])}")
            tasks.append(
                asyncio.ensure_future(
                    __a_sup_request(key[0], a_prepared_request, wait_time,
                                    wait_until, values[1],
                                    batch.get_send_timeout(), session)))
        # results = await asyncio.gather(*tasks)
        results = [
            await f for f in tqdm(asyncio.as_completed(tasks),
                                  total=len(tasks),
                                  desc="Receiving ",
                                  ncols=progress_bar_width)
        ]

    # decode all responses
    responses_decoded = {
        'start_time': start_time_str,
        'end_time': str(datetime.datetime.fromtimestamp(round(get_time_ns() / 1e9))),
        'contents': defaultdict(list)
    }

    errors = ""
    for i, result in enumerate(
            tqdm(results, desc="Processing", ncols=progress_bar_width)):
        if isinstance(result[1], Exception):
            errors += f"Error in sending request {i} :\n{utils.tabbed_pprint_string(result, 1)}\n"
            continue
        for j, response in enumerate(result[1]):
            response_decoded = __decode_response(response)
            response_decoded['wait_time'] = result[0][1]
            response_decoded['send_index'] = j
            responses_decoded['contents'][result[0][0]].append(
                copy.deepcopy(response_decoded))
    time.sleep(0.1)
    print(errors)
    # sort lists to send_time
    for request_id in responses_decoded['contents'].keys():
        responses_decoded['contents'][request_id] = sorted(
            responses_decoded['contents'][request_id],
            key=lambda x: x['send_time'])
    return responses_decoded
Example #28
async def prepare_sessions():
    for _ in range(K):
        connector = SocksConnector.from_url('socks5://127.0.0.1:9050')
        session = await aiohttp.ClientSession(connector=connector).__aenter__()
        sessions.append(session)
Example #29
    async def __get_connector(self):
        if self.useTor:
            return SocksConnector.from_url(self.connector_url, rdns=True)
        else:
            return aiohttp.TCPConnector(limit=self.pool_limit)
Example #30
    def tor_connector(self):
        proxy = f"{self.proxy.get('http')}"
        connector = SocksConnector.from_url(proxy)
        self.connector = connector
        return connector