async def run(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext):
    """Brute-force paths on `target` using words from `wdlist`.

    For every word, a bare '/{word}' URL is requested; if `filext` is a
    non-empty comma-separated extension list, '/{word}.{ext}' URLs are
    requested as well (matching the original behaviour of prepending ','
    to `filext`, whose first empty element produced the bare URL).
    Results of all fetches are stored in the module-level `responses`.
    """
    global responses
    tasks = []

    # Build every URL template up front instead of duplicating the whole
    # request loop for the with-extensions case (the two branches of the
    # original differed only in the template used).
    url_templates = [target + '/{}']
    if len(filext) != 0:
        for ext in filext.split(','):
            ext = ext.strip()
            if ext:
                url_templates.append(target + '/{}.' + ext)

    resolver = aiohttp.AsyncResolver(nameservers=[dserv])
    conn = aiohttp.TCPConnector(limit=threads, resolver=resolver,
                                family=socket.AF_INET, verify_ssl=sslv)
    timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)

    # One session serves all templates; the original opened a fresh
    # resolver/connector/session per extension for no benefit.
    async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
        for url in url_templates:
            with open(wdlist, 'r') as wordlist:
                for word in wordlist:
                    word = word.strip()
                    task = asyncio.create_task(
                        fetch(url.format(word), session, redir, sslv))
                    tasks.append(task)
                    # Yield control so scheduled fetches start running
                    # while we keep creating tasks.
                    await asyncio.sleep(0)
        # Gather exactly once, after all tasks exist (the original
        # re-gathered the growing task list on every extension pass).
        responses = await asyncio.gather(*tasks)
async def login(self, *args, **kwargs) -> None: """Re-create the connector and set up sessions before logging into Discord.""" # Use asyncio for DNS resolution instead of threads so threads aren't spammed. self._resolver = aiohttp.AsyncResolver() # Use AF_INET as its socket family to prevent HTTPS related problems both locally # and in production. self._connector = aiohttp.TCPConnector( resolver=self._resolver, family=socket.AF_INET, ) # Client.login() will call HTTPClient.static_login() which will create a session using # this connector attribute. self.http.connector = self._connector self.http_session = aiohttp.ClientSession(connector=self._connector) self.api_client = api.APIClient(connector=self._connector) if self.redis_session.closed: # If the RedisSession was somehow closed, we try to reconnect it # here. Normally, this shouldn't happen. await self.redis_session.connect() # Build the FilterList cache await self.cache_filter_list_data() await self.stats.create_socket() await super().login(*args, **kwargs)
async def fetch(method, uri, params_prefix=None, loop=None, credentials=None, **params):
    """Fetch the given uri and return the contents of the response."""
    params = _prepare_params(params, params_prefix)

    # Build the request URL and the connection machinery.
    url = f"https://{CHALLONGE_API_URL}/{uri}.json"
    timeout = aiohttp.ClientTimeout(total=TIMEOUT)
    if use_async_resolver:
        resolver = aiohttp.AsyncResolver()
    else:
        resolver = aiohttp.DefaultResolver()
    connector = aiohttp.TCPConnector(resolver=resolver)

    # Fall back to the module-level credentials when none were supplied.
    if credentials is None:
        credentials = {
            "login": _credentials["user"],
            "password": _credentials["api_key"],
        }

    async with aiohttp.ClientSession(loop=loop, timeout=timeout,
                                     connector=connector) as session:
        auth = aiohttp.BasicAuth(**credentials)
        async with session.request(method, url, params=params, auth=auth) as response:
            if response.status >= 400:
                raise ChallongeException(
                    f"{response.status} {response.reason}")
            return await response.json()
async def _query_homepage(task_name, task_id, redis_task_url, hostlist, loop):
    """
    use coroutine to access homepage of host, and save all result into redis.
    return a list of error hosts.
    """
    pool = await aioredis.create_pool(redis_task_url, loop=loop)
    try:
        timeout = aiohttp.ClientTimeout(total=120, connect=10, sock_connect=10, sock_read=100)
        resolver = aiohttp.AsyncResolver(loop=loop)
        # limit=0 disables the connector's connection cap.
        conn = aiohttp.TCPConnector(resolver=resolver, limit=0, loop=loop)
        async with aiohttp.ClientSession(connector=conn, timeout=timeout, headers=_ua_headers,
                                         raise_for_status=True, loop=loop) as session:
            fslist = []   # one future per host
            fhlist = {}   # maps id(future) -> host so results can be traced back
            for h in hostlist:
                # Prefix 'www.' unless the host already carries it.
                url = ('http://www.%s' % h) if not h.lower().startswith('www.') else ('http://%s' % h)
                f = asyncio.ensure_future(_req_homepage(h, url, session, pool, task_name, task_id), loop=loop)
                fslist.append(f)
                fhlist[id(f)] = h
            # waiting all connection finish
            done, pending = await asyncio.wait(fslist, loop=loop)
            retrylist = []
            for f in done:
                h = fhlist[id(f)]
                try:
                    err = f.result()
                    # A truthy return value from _req_homepage marks an error host.
                    if err:
                        retrylist.append(h)
                except Exception:
                    # An exception raised by the fetch also marks the host for retry.
                    retrylist.append(h)
            return retrylist
    finally:
        # Always release the redis pool, even if session setup failed.
        pool.close()
        await pool.wait_closed()
async def _async_init(self):
    """Lazily create the DNS resolver on first use, from inside the event loop."""
    if self.resolver is None:
        # We are inside a coroutine, so the *running* loop is the correct one
        # to bind the resolver to; asyncio.get_event_loop() is deprecated in
        # this context.
        loop = asyncio.get_running_loop()
        if self.dns_service is None:
            self.resolver = aiohttp.AsyncResolver(loop=loop)
        else:
            self.resolver = self.dns_service.aiohttp_resolver(loop=loop)
def instance(cls) -> 'WorkClient':
    """Return the lazily created WorkClient singleton.

    The first call builds the shared HTTP session, the work-peer URL list,
    and — when credentials are present in the environment — a DPoW or BPoW
    client with its fallback service URL.
    """
    if cls._instance is None:
        cls._instance = cls.__new__(cls)
        cls.work_urls = config.Config.instance().work_peers
        if config.Config.instance().node_work_generate:
            # The node itself may also serve work_generate requests.
            cls.work_urls.append(config.Config.instance().node_url)
        # family=0 leaves the address family unspecified (IPv4 or IPv6).
        cls.connector = aiohttp.TCPConnector(
            family=0, resolver=aiohttp.AsyncResolver())
        cls.session = aiohttp.ClientSession(connector=cls.connector,
                                            json_serialize=json.dumps)
        cls.active_difficulty = nanopy.work_difficulty
        cls.dpow_client = None
        cls.dpow_futures = {}
        cls.dpow_id = 1
        # Construct DPoW Client
        cls.dpow_user = os.getenv('DPOW_USER', None)
        cls.dpow_key = os.getenv('DPOW_KEY', None)
        if cls.dpow_user is not None and cls.dpow_key is not None:
            cls.dpow_client = DpowClient(cls.dpow_user, cls.dpow_key,
                                         work_futures=cls.dpow_futures,
                                         bpow=False)
            cls.dpow_fallback_url = 'https://dpow.nanocenter.org/service/'
        else:
            # No DPoW credentials: fall back to BoomPoW credentials instead.
            cls.dpow_user = os.getenv('BPOW_USER', None)
            cls.dpow_key = os.getenv('BPOW_KEY', None)
            if cls.dpow_user is not None and cls.dpow_key is not None:
                cls.dpow_client = DpowClient(cls.dpow_user, cls.dpow_key,
                                             work_futures=cls.dpow_futures,
                                             bpow=True)
                cls.dpow_fallback_url = 'https://bpow.banano.cc/service/'
    return cls._instance
def _recreate(self) -> None:
    """Re-create the connector, aiohttp session, and the APIClient."""
    # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
    # Doesn't seem to have any state with regards to being closed, so no need to worry?
    self._resolver = aiohttp.AsyncResolver()

    # Use the public `closed` property rather than the private `_closed`
    # attribute, which is not part of aiohttp's stable API.
    # Its __del__ does send a warning but it doesn't always show up for some reason.
    if self._connector and not self._connector.closed:
        log.warning(
            "The previous connector was not closed; it will remain open and be overwritten"
        )

    # Use AF_INET as its socket family to prevent HTTPS related problems both locally
    # and in production.
    self._connector = aiohttp.TCPConnector(
        resolver=self._resolver,
        family=socket.AF_INET,
    )

    # Client.login() will call HTTPClient.static_login() which will create a session using
    # this connector attribute.
    self.http.connector = self._connector

    # Its __del__ does send a warning but it doesn't always show up for some reason.
    if self.http_session and not self.http_session.closed:
        log.warning(
            "The previous session was not closed; it will remain open and be overwritten"
        )

    self.http_session = aiohttp.ClientSession(connector=self._connector)
    self.api_client.recreate(force=True, connector=self._connector)
def instance(cls) -> 'RPCClient':
    """Return the RPCClient singleton, creating it on first access."""
    if cls._instance is None:
        cls._instance = cls.__new__(cls)
        cls.wallet_id = Config.instance().wallet
        cls.node_url = Config.instance().node_url
        # family=0 leaves the socket family unspecified.
        cls.connector = aiohttp.TCPConnector(
            family=0,
            resolver=aiohttp.AsyncResolver(),
        )
        cls.session = aiohttp.ClientSession(
            connector=cls.connector,
            json_serialize=json.dumps,
        )
    return cls._instance
async def communicate_wallet_async(wallet_command):
    """POST `wallet_command` as JSON to the local node over IPv6 loopback
    and return the decoded JSON reply."""
    connector = aiohttp.TCPConnector(
        family=socket.AF_INET6,
        resolver=aiohttp.AsyncResolver(),
    )
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.post("http://[::1]:7076",
                                json=wallet_command,
                                timeout=300) as resp:
            return await resp.json()
def __init__(self, *args, **kwargs):
    """Create the client with an async-DNS, IPv4-only TCP connector."""
    connector = aiohttp.TCPConnector(
        resolver=aiohttp.AsyncResolver(),
        family=socket.AF_INET,
    )
    self.connector = connector
    super().__init__(*args, connector=connector, **kwargs)

    # Created later, from an async context.
    self.http_session: Optional[aiohttp.ClientSession] = None
async def communicate_wallet_async(wallet_command):
    """Send `wallet_command` to the configured node RPC endpoint and return
    the decoded JSON response."""
    connector = aiohttp.TCPConnector(family=socket.AF_INET6,
                                     resolver=aiohttp.AsyncResolver())
    endpoint = "http://{0}:{1}".format(settings.node_ip, settings.node_port)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.post(endpoint, json=wallet_command,
                                timeout=300) as resp:
            return await resp.json()
async def _async_init(self):
    """Lazily build the resolver and HTTP session from inside the event loop."""
    if self.resolver is None:
        # We are inside a coroutine, so the *running* loop is the correct one
        # to bind the resolver to; asyncio.get_event_loop() is deprecated in
        # this context.
        loop = asyncio.get_running_loop()
        if self.dns_service is None:
            self.resolver = aiohttp.AsyncResolver(loop=loop)
        else:
            self.resolver = self.dns_service.aiohttp_resolver(loop=loop)
    if self.session is None:
        self.session = aiohttp_client_session(resolver=self.resolver,
                                              timeout=self.request_timeout)
def aiohttp_client_session(*, timeout=None, **kwargs):
    """use aiodns and support number timeout"""
    if isinstance(timeout, (int, float)):
        timeout = aiohttp.ClientTimeout(total=timeout)
    # Fix: No route to host. https://github.com/saghul/aiodns/issues/22
    connector = aiohttp.TCPConnector(
        resolver=aiohttp.AsyncResolver(),
        family=socket.AF_INET,
    )
    return aiohttp.ClientSession(connector=connector, timeout=timeout, **kwargs)
def __init__(self, *args, **kwargs):
    """Initialise the client and attach an async-DNS, IPv4-only session
    bound to its event loop."""
    super().__init__(*args, **kwargs)
    resolver = aiohttp.AsyncResolver(self.loop)
    connector = aiohttp.TCPConnector(
        resolver=resolver,
        family=socket.AF_INET,
        loop=self.loop,
    )
    self._resolver = resolver
    self._connector = connector
    self.session = aiohttp.ClientSession(connector=connector, loop=self.loop)
async def wayback(dserv, tout):
    """Query the Wayback Machine (via `wm_fetch`) for every URL in the
    global `found` list, resolving names through the DNS server `dserv`."""
    global found
    print('\n\n' + Y + '[!]' + C + ' Requesting Wayback Machine...' + W + '\n')
    tasks = []
    resolver = aiohttp.AsyncResolver(nameservers=[dserv])
    # BUG FIX: the resolver was created but never attached to the connector,
    # so the user-supplied DNS server (dserv) was silently ignored.
    conn = aiohttp.TCPConnector(limit=10, resolver=resolver)
    timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
    async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
        for f_url in found:
            tasks.append(asyncio.create_task(wm_fetch(f_url, session)))
        await asyncio.gather(*tasks)
def __init__(self, *args, **kwargs):
    """Create the bot with a shared async-DNS, IPv4-only connector."""
    # Async DNS resolution avoids spawning a thread per lookup, and forcing
    # AF_INET prevents HTTPS-related problems both locally and in production.
    connector = aiohttp.TCPConnector(
        resolver=aiohttp.AsyncResolver(),
        family=socket.AF_INET,
    )
    self.connector = connector
    super().__init__(*args, connector=connector, **kwargs)

    # Created later from an async context; the API client reuses the connector.
    self.http_session: Optional[aiohttp.ClientSession] = None
    self.api_client = api.APIClient(loop=self.loop, connector=connector)
async def prepare(self):
    """Create the HTTP session, load every extension under modules/, and
    open the room and todo caches."""
    resolver = aiohttp.AsyncResolver(nameservers=['1.1.1.1', '1.0.0.1'])
    connector = aiohttp.TCPConnector(limit=0, resolver=resolver)
    self.session = aiohttp.ClientSession(connector=connector)

    # Each modules/*.py file becomes a dotted extension path.
    for path in pathlib.Path('modules').glob('*.py'):
        self.load_extension(f'{path.parent}.{path.stem}')

    await self.room_cache.open()
    await self.todo_cache.open()
async def sync(token: str, reader: StreamReader, writer: StreamWriter, lock: Lock, *,
               server: bool = False) -> None:
    """Run one side of the sync protocol over the given stream pair.

    When `server` is True, publish() is run against `writer`; otherwise
    subscribe() is run against `reader`.  The Spotify client lives only for
    the duration of the session context.
    """
    connector = aiohttp.TCPConnector(resolver=aiohttp.AsyncResolver())
    async with aiohttp.ClientSession(connector=connector) as session:
        spot = Spotify(session, token)
        context = Context(lock, server=server)
        # Only one coroutine is actually scheduled here — publish() or
        # subscribe() depending on the role — and wait() returns as soon as
        # it completes.
        await asyncio.wait([
            publish(writer, spot, context) if server else subscribe(
                reader, spot, context)
        ], return_when=asyncio.FIRST_COMPLETED)
def aiohttp_client_session(*, timeout=None, resolver=None, proxy_url=None, **kwargs):
    """use aiodns and support number timeout"""
    if timeout is None:
        timeout = 30
    if isinstance(timeout, (int, float)):
        timeout = aiohttp.ClientTimeout(total=timeout)
    if resolver is None:
        # Fix: No route to host. https://github.com/saghul/aiodns/issues/22
        resolver = aiohttp.AsyncResolver()
    conn_params = dict(resolver=resolver, family=socket.AF_INET)
    # Route through a proxy-aware connector only when a proxy URL was given.
    connector = (ProxyConnector.from_url(proxy_url, **conn_params)
                 if proxy_url
                 else aiohttp.TCPConnector(**conn_params))
    return aiohttp.ClientSession(connector=connector, timeout=timeout, **kwargs)
async def start(self, *args, **kwargs) -> None:
    """
    Initialize from an async context.

    To ensure that the event loop is ready, we delay setting async
    attributes until after this method is called.
    """
    log.info("Initializing Ryan attributes from an async context")
    self.start_time = arrow.utcnow()
    resolver = aiohttp.AsyncResolver()
    connector = aiohttp.TCPConnector(resolver=resolver, family=socket.AF_INET)
    self.http_session = aiohttp.ClientSession(connector=connector)
    log.info("Ryan ready, connecting to Discord")
    await super().start(*args, **kwargs)
async def wayback(target, dserv, tout):
    """Check whether `target`'s registered domain is archived on the Wayback
    Machine and, if so, fetch snapshot data (via `wm_fetch`) for every URL
    in the global `found` list using the DNS server `dserv`."""
    global found
    is_avail = False
    ext = tldextract.extract(target)
    domain = ext.registered_domain
    # Fall back to the bare domain when no registered domain was extracted.
    if len(domain) < 2:
        domain = ext.domain
    print('\n' + Y + '[!]' + C + ' Checking Availability on Wayback Machine' + W, end='')
    wm_avail = 'http://archive.org/wayback/available'
    avail_data = {'url': domain}
    try:
        check_rqst = requests.get(wm_avail, params=avail_data, timeout=10)
        check_sc = check_rqst.status_code
        if check_sc == 200:
            check_data = check_rqst.text
            json_chk_data = json.loads(check_data)
            avail_data = json_chk_data['archived_snapshots']
            # A non-empty 'archived_snapshots' object means the domain is archived.
            if len(avail_data) != 0:
                is_avail = True
                print(G + '['.rjust(5, '.') + ' Available ]')
            else:
                print(R + '['.rjust(5, '.') + ' N/A ]')
        else:
            print('\n' + R + '[-] Status : ' + C + str(check_sc) + W)
    except Exception as e:
        print('\n' + R + '[-] Exception : ' + C + str(e) + W)

    if is_avail:
        print('\n' + Y + '[!]' + C + ' Requesting Wayback Machine...' + W + '\n')
        tasks = []
        resolver = aiohttp.AsyncResolver(nameservers=[dserv])
        # BUG FIX: the resolver was created but never attached to the
        # connector, so the user-supplied DNS server was silently ignored.
        conn = aiohttp.TCPConnector(limit=10, resolver=resolver)
        timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
        async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
            for f_url in found:
                tasks.append(asyncio.create_task(wm_fetch(f_url, session)))
            await asyncio.gather(*tasks)
async def report_results_async(
        self,
        twistcli_scan_result: Dict[str, Any],
        bc_platform_integration: BcPlatformIntegration,
        bc_api_key: str,
        file_path: Path,
        **kwargs: Any,
) -> int:
    """Build a vulnerability report from a twistcli scan result and POST it
    to the platform's report endpoint.

    Returns 0 when the report was accepted, 1 on any HTTP failure.
    """
    logging.info(f"Start to send report for package file {file_path}")
    payload = self._create_report(
        twistcli_scan_result=twistcli_scan_result,
        bc_platform_integration=bc_platform_integration,
        file_path=file_path,
        **kwargs,
    )
    # Standard platform headers plus the API key for authorization.
    headers = merge_dicts(
        get_default_post_headers(
            bc_platform_integration.bc_source,
            bc_platform_integration.bc_source_version),
        {"Authorization": bc_api_key},
    )
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(
            resolver=aiohttp.AsyncResolver())) as session:
        async with session.post(
                url=f"{self.vulnerabilities_base_url}/report",
                headers=headers, json=payload) as response:
            content = await response.text()
    if response.ok:
        logging.info(
            f"Successfully send report for package file {file_path}")
        return 0
    else:
        # Log full context so a failed upload can be diagnosed.
        logging.error(
            f"Failed to send report for package file {file_path}")
        logging.error(
            f"Status code: {response.status}, Reason: {response.reason}, Content: {content}"
        )
        return 1
async def run(target, threads, tout, wdlist, redir, sslv, dserv, output, data):
    """Brute-force paths on `target` using words from `wdlist` and hand the
    gathered responses to dir_output()."""
    pending = []
    template = target + '/{}'
    resolver = aiohttp.AsyncResolver(nameservers=[dserv])
    connector = aiohttp.TCPConnector(limit=threads, resolver=resolver,
                                     family=socket.AF_INET, verify_ssl=sslv)
    timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout,
                                    sock_read=tout)
    async with aiohttp.ClientSession(connector=connector,
                                     timeout=timeout) as session:
        with open(wdlist) as wordlist:
            for line in wordlist:
                word = line.strip()
                pending.append(asyncio.create_task(
                    fetch(template.format(word), session, redir, sslv)))
        responses = await asyncio.gather(*pending)
    dir_output(responses, output, data)
def _recreate(self):
    """
    Re-create the connector, aiohttp session and the APIClient.
    """
    # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
    self._resolver = aiohttp.AsyncResolver()

    # Use the public `closed` property rather than the private `_closed`
    # attribute, which is not part of aiohttp's stable API.
    if self._connector and not self._connector.closed:
        print("The previous connector was not closed; it will remain open and be overwritten")
    if self.http_session and not self.http_session.closed:
        print('The previous http session was not closed, it will remain open and be overwritten')

    self._connector = aiohttp.TCPConnector(
        resolver=self._resolver,
        family=socket.AF_INET,
    )
    # NOTE(review): self.http appears to create its session from this
    # connector attribute — confirm against the HTTP client implementation.
    self.http.connector = self._connector
    self.http_session = aiohttp.ClientSession(
        timeout=aiohttp.ClientTimeout(total=30),
        connector=self._connector,
        raise_for_status=True,
    )
    self.hypixel_api_client.recreate(force=True, connector=self._connector)
def __init__(self, config: dict):
    """Initialise the bot from a parsed configuration dict.

    Sets up the command prefix, HTTP session, database pool and scheduler,
    then loads the error handler and every cog under cogs/.
    """
    super().__init__(
        command_prefix=commands.when_mentioned_or(*config['prefix']),
        description=config['description'],
        case_insensitive=True,
        fetch_offline_members=False,
    )
    # Filled in later (e.g. once application info is fetched).
    self.app_info = None
    self.creator = None
    self.cogs = CaseInsensitiveDict()
    self.categories = collections.defaultdict(list)
    self.owners = config['owners']
    self.command_counter = collections.Counter()
    # Async DNS + AF_INET connector for the bot's shared HTTP session.
    self.session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(
        resolver=aiohttp.AsyncResolver(), family=socket.AF_INET),
        loop=self.loop)
    self.start_time = datetime.utcnow()
    # Blocks until the PostgreSQL pool is ready.
    self.pool = self.loop.run_until_complete(
        db.create_pool(config['pg_credentials']))
    self.process = psutil.Process(os.getpid())
    self.token = config['token']
    self.webhook_url = config['webhook_url']
    self.db_scheduler = DatabaseScheduler(self.pool, timefunc=datetime.utcnow)
    self.db_scheduler.add_callback(self._dispatch_from_scheduler)
    # add our error handler
    self.load_extension('core.error_handler')
    # load any extensions from the cogs directory
    for name in os.listdir('cogs'):
        if name.startswith('__'):
            continue
        self.load_extension(f'cogs.{name}')