async def build_list(cls):
    headers = {"X-API-KEY": Keys.site_api}

    # Set up the session
    if DEBUG_MODE:
        http_session = ClientSession(connector=TCPConnector(
            resolver=AsyncResolver(),
            family=socket.AF_INET,
            verify_ssl=False,
        ))
    else:
        http_session = ClientSession(connector=TCPConnector(
            resolver=AsyncResolver()))

    # Get all the snakes
    if cls.snakes is None:
        response = await http_session.get(URLs.site_names_api,
                                          params={"get_all": "true"},
                                          headers=headers)
        cls.snakes = await response.json()

    # Get the special cases
    if cls.special_cases is None:
        response = await http_session.get(URLs.site_special_api,
                                          headers=headers)
        special_cases = await response.json()
        cls.special_cases = {
            snake['name'].lower(): snake
            for snake in special_cases
        }

    # Close the session (ClientSession.close() is a coroutine and must be awaited,
    # otherwise the session leaks)
    await http_session.close()
def __init__(self, **kwargs):
    super().__init__(**kwargs)

    self.http_session = ClientSession(connector=TCPConnector(
        resolver=AsyncResolver(), family=socket.AF_INET))

    self._guild_available = asyncio.Event()

    self.loop.create_task(self.send_log("SeasonalBot", "Connected!"))
def create_session(cls,
                   connector: TCPConnector = None,
                   timeout: int = 5,
                   headers: dict = None) -> ClientSession:
    """
    Create and return an aiohttp ClientSession with default or custom parameters.

    :param connector: Connector for standard TCP sockets
    :param timeout: Maximum time to wait for an HTTP response
    :param headers: Spoofed request headers to trick some websites
    :return session: Session ready to go
    """
    # Honour a caller-supplied connector; otherwise build the default one.
    # (The original always rebuilt the connector and always had a truthy
    # resolver, making its cls.create_resolver() fallback dead code.)
    if connector is None:
        connector = TCPConnector(
            limit=100,
            limit_per_host=0,
            resolver=AsyncResolver(nameservers=DNS_NAMESERVERS),
            use_dns_cache=False,
            force_close=True,
            family=socket.AF_INET,
            ssl=False)
    session = ClientSession(cookie_jar=DummyCookieJar(),
                            connector=connector,
                            timeout=ClientTimeout(total=timeout),
                            headers=headers or HEADERS,
                            connector_owner=True)
    return session
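# Usage sketch for the factory above (hedged: `HttpFactory` is a hypothetical
# owner class for the classmethod, and the DNS_NAMESERVERS/HEADERS values below
# are assumed stand-ins for the module-level constants the original relies on).
import asyncio
import socket

from aiohttp import (AsyncResolver, ClientSession, ClientTimeout,
                     DummyCookieJar, TCPConnector)

DNS_NAMESERVERS = ['1.1.1.1', '8.8.8.8']    # assumed value
HEADERS = {'User-Agent': 'example-client'}  # assumed value


class HttpFactory:
    """Hypothetical owner class for the create_session classmethod above."""

    @classmethod
    def create_session(cls, connector: TCPConnector = None, timeout: int = 5,
                       headers: dict = None) -> ClientSession:
        connector = connector or TCPConnector(
            resolver=AsyncResolver(nameservers=DNS_NAMESERVERS),
            family=socket.AF_INET, ssl=False)
        return ClientSession(cookie_jar=DummyCookieJar(),
                             connector=connector,
                             timeout=ClientTimeout(total=timeout),
                             headers=headers or HEADERS,
                             connector_owner=True)


async def main():
    session = HttpFactory.create_session(timeout=10)
    try:
        async with session.get("https://example.com") as resp:
            print(resp.status)
    finally:
        # connector_owner=True: closing the session also closes the connector
        await session.close()


asyncio.run(main())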
def instance(cls) -> 'RPCClient':
    if cls._instance is None:
        cls._instance = cls.__new__(cls)
        cls.node_url = options.node_url
        # family=0 (AF_UNSPEC) lets the resolver return both IPv4 and IPv6 addresses
        cls.connector = TCPConnector(family=0, resolver=AsyncResolver())
        cls.session = ClientSession(connector=cls.connector)
    return cls._instance
def __init__(self, **kwargs):
    super().__init__(**kwargs)
    self.http_session = ClientSession(
        connector=TCPConnector(
            resolver=AsyncResolver(),
            family=socket.AF_INET,
        )
    )
def __init__(self, redis_session: RedisSession, **kwargs):
    super().__init__(**kwargs)

    self.http_session = ClientSession(connector=TCPConnector(
        resolver=AsyncResolver(), family=socket.AF_INET))
    self._guild_available = asyncio.Event()
    self.redis_session = redis_session

    self.loop.create_task(self.check_channels())
    self.loop.create_task(self.send_log(self.name, "Connected!"))
def _set_config(self):
    self._timeout = self._config.get('timeout', 15)
    self._headers = self._config.get(
        'headers', {
            'User-Agent':
                'Mozilla/5.0 (compatible; CoolCarbine/0.1-dev; +http://www.puse.cat/bot.html)'
        })
    self._resolver = AsyncResolver(nameservers=self._config.get(
        'nameservers', ['1.1.1.1', '8.8.8.8']))
def __init__(self, **kwargs):
    try:
        super().__init__(**kwargs)
        self.http_session = ClientSession(connector=TCPConnector(
            resolver=AsyncResolver(), family=socket.AF_INET))

        # Main database for current season
        self.database = TinyDB(config.database)
        self.query = Query()
        log.info('Main database loaded')

        self.info_text = ''
        self.info_text += linesep + linesep + config.description
        self.info_text += linesep + linesep + 'Admins:'
        for admin in config.admins:
            self.info_text += linesep + ' ' + admin
        self.info_text += linesep + linesep + config.additional_info_text
        self.info_text += linesep + linesep + 'Type !help to see a list of available commands.' + linesep + linesep

        amnt_failed = 0

        # TimedTask must be started first, if at all
        if 'timed_task' in config.cogs and config.cogs[0] != 'timed_task':
            log.fatal('TimedTask is specified to be loaded; it has to be loaded first!')
            sys.exit()

        # Go through cogs and load them as extensions
        # NOTE: Each cog adds its own bit to _self.info_text_
        for cog_name in config.cogs:
            try:
                cog_name = 'Cogs.' + cog_name
                self.load_extension(cog_name)
            except Exception as e:
                amnt_failed += 1
                log.error(f'Failed to load extension {cog_name}: {repr(e)} {format_exc()}')
            else:
                log.info('Loading extension ' + cog_name)

        # If any cog fails to load, bot behaviour is undefined because many cogs
        # depend on each other - better not run at all and let the bot admin
        # figure out what's going on.
        if amnt_failed > 0:
            log.fatal('Summary:\n Num failed extension loads: %d', amnt_failed)
            sys.exit()

        self.info_text += ('Some additional information:' + linesep
                           + ' Please be aware that there may be bugs in the system. '
                             'There are fail-safe mechanisms, but they may not always prevent a loss of '
                           + config.currency_name + 's in case of an error.')
    except Exception as e:
        # If any exception occurs at this point, better not run at all and let
        # the bot admin figure out what's going on.
        log.exception(e)
        sys.exit()
async def on_ready(self):
    self.http_session = ClientSession(connector=TCPConnector(
        resolver=AsyncResolver(), family=socket.AF_INET))

    for cog in cogs:
        try:
            self.load_extension(cog)
        except Exception as e:
            logger.error(f'Failed to load extension: {cog}\n{e}')

    logger.info(f'Client logged in at {datetime.now()}')
    logger.info(f'Logged in as : {self.user.name}')
    logger.info(f'ID : {self.user.id}')
async def _get_aiohttp_session(self):
    # The aiohttp session needs to be initialised in an async function:
    # https://github.com/tiangolo/fastapi/issues/301
    if self.client_session is None:
        resolver = AsyncResolver()
        conn = aiohttp.TCPConnector(family=socket.AF_INET,
                                    resolver=resolver,
                                    ttl_dns_cache=10,
                                    force_close=True,
                                    enable_cleanup_closed=True)
        self.client_session = aiohttp.ClientSession(connector=conn)
    return self.client_session
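# Usage sketch of the lazy-initialisation pattern above (hedged: `Fetcher` is a
# hypothetical class; the original only shows the _get_aiohttp_session method).
# Creating the session inside a coroutine guarantees it binds to the running loop.
import asyncio
import socket

import aiohttp
from aiohttp import AsyncResolver


class Fetcher:
    def __init__(self):
        self.client_session = None  # created lazily, inside the event loop

    async def _get_aiohttp_session(self):
        if self.client_session is None:
            conn = aiohttp.TCPConnector(family=socket.AF_INET,
                                        resolver=AsyncResolver(),
                                        ttl_dns_cache=10,
                                        force_close=True,
                                        enable_cleanup_closed=True)
            self.client_session = aiohttp.ClientSession(connector=conn)
        return self.client_session

    async def fetch(self, url: str) -> str:
        session = await self._get_aiohttp_session()
        async with session.get(url) as resp:
            return await resp.text()


async def main():
    fetcher = Fetcher()
    try:
        print((await fetcher.fetch("https://example.com"))[:60])
    finally:
        if fetcher.client_session is not None:
            await fetcher.client_session.close()


asyncio.run(main())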
async def startup(self) -> None:
    """Initialize the crawler manager's redis connection and the http session
    used to make requests to shepherd."""
    self.loop = get_event_loop()
    self.redis = await init_redis(
        env('REDIS_URL', default=DEFAULT_REDIS_URL), self.loop)
    self.session = ClientSession(
        connector=TCPConnector(resolver=AsyncResolver(loop=self.loop),
                               loop=self.loop),
        json_serialize=partial(json.dumps, ensure_ascii=False),
        loop=self.loop,
    )
def make_http_session(
        loop: Optional[AbstractEventLoop] = None) -> ClientSession:
    """Creates and returns a new aiohttp.ClientSession that uses AsyncResolver.

    :param loop: Optional asyncio event loop to use. Defaults to asyncio.get_event_loop()
    :return: An instance of aiohttp.ClientSession
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    return ClientSession(
        connector=TCPConnector(resolver=AsyncResolver(loop=loop), loop=loop),
        json_serialize=ujson.dumps,
        loop=loop,
    )
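# Usage sketch for make_http_session (hedged: assumes the surrounding module
# imports asyncio, ujson, and the aiohttp names; the explicit loop= arguments
# match older aiohttp releases and were later deprecated, so on current aiohttp
# the factory would drop them).
import asyncio

loop = asyncio.get_event_loop()


async def main():
    session = make_http_session(loop)
    try:
        async with session.get("https://example.com") as resp:
            print(resp.status)
    finally:
        await session.close()


loop.run_until_complete(main())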
def create_aio_http_client_session(
        loop: Optional[AbstractEventLoop] = None) -> ClientSession:
    """Creates and returns a new aiohttp.ClientSession.

    The returned aiohttp.ClientSession is configured to use ujson for JSON
    serialization and aiohttp.AsyncResolver as its DNS resolver.

    :param loop: Optional reference to the automation's event loop
    :return: The newly created aiohttp.ClientSession
    """
    eloop = Helper.ensure_loop(loop)
    return ClientSession(
        connector=TCPConnector(resolver=AsyncResolver(loop=eloop), loop=eloop),
        json_serialize=partial(dumps, ensure_ascii=False),
        loop=eloop,
    )
async def task(self, spider, semaphore):
    resolver = AsyncResolver(nameservers=["8.8.8.8", "8.8.4.4"])
    conn = aiohttp.TCPConnector(
        limit=int(rcfg("Rconcurrency")),
        keepalive_timeout=3,
        use_dns_cache=True,
        resolver=resolver
    )
    # ClientSession must be entered with "async with", not a plain "with"
    async with aiohttp.ClientSession(connector=conn) as session:
        while spider.is_running():
            if len(self.pre_parse_urls) == 0:
                await asyncio.sleep(0.5)
                continue
            url = self.pre_parse_urls.pop()
            self.parsing_urls.append(url)
            asyncio.ensure_future(
                self.execute_url(url, spider, session, semaphore))
from bot.api import APIClient, APILoggingHandler
from bot.constants import Bot as BotConfig, DEBUG_MODE

log = logging.getLogger('bot')

bot = Bot(
    command_prefix=when_mentioned_or(BotConfig.prefix),
    activity=Game(name="Commands: !help"),
    case_insensitive=True,
    max_messages=10_000
)

# Global aiohttp session for all cogs
# - Uses asyncio for DNS resolution instead of threads, so we don't spam threads
# - Uses AF_INET as its socket family to prevent https-related problems both
#   locally and in prod.
bot.http_session = ClientSession(connector=TCPConnector(
    resolver=AsyncResolver(),
    family=socket.AF_INET,
))

bot.api_client = APIClient(loop=asyncio.get_event_loop())
log.addHandler(APILoggingHandler(bot.api_client))

# Internal/debug
bot.load_extension("bot.cogs.error_handler")
bot.load_extension("bot.cogs.filtering")
bot.load_extension("bot.cogs.logging")
bot.load_extension("bot.cogs.modlog")
bot.load_extension("bot.cogs.security")

# Commands, etc
bot.load_extension("bot.cogs.antispam")
bot.load_extension("bot.cogs.bot")
async def do(self, target: Target):
    """Coroutine: connects to the Target, sends and receives data,
    and builds the result as a dict."""

    def return_ip_from_deep(sess, response) -> str:
        try:
            ip_port = response.connection.transport.get_extra_info('peername')
            if is_ip(ip_port[0]):
                return ip_port[0]
        except BaseException:
            pass
        try:
            _tmp_conn_key = sess.connector._conns.items()
            for k, v in _tmp_conn_key:
                _h = v[0][0]
                ip_port = _h.transport.get_extra_info('peername')
                if is_ip(ip_port[0]):
                    return ip_port[0]
        except BaseException:
            pass
        return ''

    def update_line(json_record, target):
        json_record['ip'] = target.ip
        # json_record['ip_v4_int'] = int(ip_address(target.ip))
        # json_record['datetime'] = datetime.datetime.utcnow()
        # json_record['port'] = int(target.port)
        return json_record

    async with self.semaphore:
        result = None
        timeout = ClientTimeout(total=target.total_timeout)
        # region tmp disable
        trace_config = TraceConfig()
        trace_config.on_request_start.append(on_request_start)
        trace_config.on_request_end.append(on_request_end)
        # endregion
        resolver = AsyncResolver(nameservers=['8.8.8.8', '8.8.4.4'])
        # resolver = None
        # https://github.com/aio-libs/aiohttp/issues/2228 - closed
        if target.ssl_check:
            conn = TCPConnector(
                ssl=False,
                family=2,  # need to set the address family explicitly (IPv4 only)
                limit_per_host=0,
                resolver=resolver)
            session = ClientSession(timeout=timeout,
                                    connector=conn,
                                    response_class=WrappedResponseClass,
                                    trace_configs=[trace_config])
            simple_zero_sleep = 0.250
        else:
            simple_zero_sleep = 0.001
            session = ClientSession(
                connector=TCPConnector(
                    limit_per_host=0,
                    family=2,  # need to set the address family explicitly (IPv4 only)
                    resolver=resolver),
                timeout=timeout,
                trace_configs=[trace_config])
        selected_proxy_connection = None
        try:
            selected_proxy_connection = next(self.app_config.proxy_connections)
        except:
            pass
        try:
            async with session.request(
                    target.method,
                    target.url,
                    timeout=timeout,
                    headers=target.headers,
                    cookies=target.cookies,
                    allow_redirects=target.allow_redirects,
                    data=target.payload,
                    proxy=selected_proxy_connection,
                    trace_request_ctx=self.trace_request_ctx) as response:
                _default_record = create_template_struct(target)
                if target.ssl_check:
                    cert = convert_bytes_to_cert(response.peer_cert)
                    if not self.app_config.without_certraw:
                        _default_record['data']['http']['result']['response'][
                            'request']['tls_log']['handshake_log'][
                                'server_certificates']['certificate'][
                                    'raw'] = b64encode(
                                        response.peer_cert).decode('utf-8')
                    if cert:
                        _default_record['data']['http']['result']['response'][
                            'request']['tls_log']['handshake_log'][
                                'server_certificates']['certificate'][
                                    'parsed'] = cert
                _default_record['data']['http']['status'] = "success"
                _default_record['data']['http']['result']['response'][
                    'status_code'] = response.status
                # region
                _header = {}
                for key in response.headers:
                    _header[key.lower().replace('-', '_')] = \
                        response.headers.getall(key)
                _default_record['data']['http']['result']['response'][
                    'headers'] = _header
                # endregion
                if target.method in ['GET', 'POST', 'PUT', 'DELETE', 'UPDATE']:
                    buffer = b""
                    try:
                        read_c = asyncio.wait_for(
                            read_http_content(response, n=target.max_size),
                            timeout=target.total_timeout)
                        buffer = await read_c
                    except Exception:
                        pass
                    else:
                        if filter_bytes(buffer, target):
                            _default_record['data']['http']['result'][
                                'response']['content_length'] = len(buffer)
                            _default_record['data']['http']['result'][
                                'response']['body'] = ''
                            try:
                                _default_record['data']['http']['result'][
                                    'response']['body'] = buffer.decode()
                            except Exception:
                                pass
                            if not self.app_config.without_base64:
                                try:
                                    _base64_data = b64encode(
                                        buffer).decode('utf-8')
                                    _default_record['data']['http']['result'][
                                        'response']['body_raw'] = _base64_data
                                except Exception:
                                    pass
                            if not self.app_config.without_hashs:
                                try:
                                    hashs = {
                                        'sha256': sha256,
                                        'sha1': sha1,
                                        'md5': md5
                                    }
                                    for namehash, func in hashs.items():
                                        hm = func()
                                        hm.update(buffer)
                                        _default_record['data']['http'][
                                            'result']['response'][
                                                f'body_{namehash}'] = \
                                            hm.hexdigest()
                                except Exception:
                                    pass
                            result = update_line(_default_record, target)
                        else:
                            # TODO: add a success-not-contain status to mark
                            # services that were found but did not match the
                            # filter?
                            result = create_error_template(
                                target,
                                error_str='',
                                status_string='success-not-contain')
                if result:
                    if not result['ip']:
                        result['ip'] = return_ip_from_deep(session, response)
        except Exception as exp:
            error_str = ''
            try:
                error_str = exp.strerror
            except:
                pass
            result = create_error_template(target, error_str,
                                           type(exp).__name__)
            await asyncio.sleep(simple_zero_sleep)
            try:
                await session.close()
            except:
                pass
            try:
                await conn.close()
            except:
                pass
        if result:
            if 'duration' in self.trace_request_ctx:
                request_duration = self.trace_request_ctx['duration']
                result['data']['http']['duration'] = request_duration
            success = access_dot_path(result, "data.http.status")
            if self.stats:
                if success == "success":
                    self.stats.count_good += 1
                else:
                    self.stats.count_error += 1
            if self.app_config.status_code != CONST_ANY_STATUS:
                response_status = access_dot_path(
                    result, 'data.http.result.response.status_code')
                if response_status:
                    if self.app_config.status_code != response_status:
                        error_str = (f'status code: {response_status} '
                                     f'is not equal to filter: '
                                     f'{self.app_config.status_code}')
                        result = create_error_template(
                            target,
                            error_str=error_str,
                            status_string='success-not-need-status')
                        self.stats.count_good -= 1
                        self.stats.count_error += 1
            line = None
            try:
                if self.success_only:
                    if success == "success":
                        line = ujson_dumps(result)
                else:
                    line = ujson_dumps(result)
            except Exception:
                pass
            if line:
                await self.output_queue.put(line)
        await asyncio.sleep(simple_zero_sleep)
        try:
            await session.close()
        except:
            pass
        try:
            await conn.close()
        except:
            pass
import socket
import asyncio

from aiohttp import AsyncResolver, ClientSession, TCPConnector

try:
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
except AttributeError:
    pass  # not on Windows; the policy only exists there

http_session = ClientSession(
    connector=TCPConnector(resolver=AsyncResolver(), family=socket.AF_INET))


async def close_session():
    await http_session.close()
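# Usage sketch for the module above (hedged: `shared_http` is a hypothetical
# module name for the file containing http_session and close_session).
import asyncio

from shared_http import close_session, http_session


async def main():
    async with http_session.get("https://example.com") as resp:
        print(resp.status)
    await close_session()


# Reuse the loop the session was bound to at import time rather than
# asyncio.run(), which would spin up a fresh loop.
asyncio.get_event_loop().run_until_complete(main())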
def get_session():
    return ClientSession(connector=TCPConnector(
        resolver=AsyncResolver(),
        family=socket.AF_INET,
    ))
bot = AutoShardedBot(
    command_prefix=when_mentioned_or(
        ">>> self.", ">> self.", "> self.", "self.",
        ">>> bot.", ">> bot.", "> bot.", "bot.",
        ">>> ", ">> ", "> ", ">>>", ">>", ">"
    ),  # Order matters (and so do commas)
    activity=Game(name="with snekky sneks"),
    help_attrs={"aliases": ["help()"]},
    formatter=Formatter()
)

# Make cog names case-insensitive
bot.cogs = CaseInsensitiveDict()

# Global aiohttp session for all cogs - uses asyncio for DNS resolution
# instead of threads, so we don't *spam threads*
bot.http_session = ClientSession(connector=TCPConnector(resolver=AsyncResolver()))

# Internal/debug
bot.load_extension("bot.cogs.logging")
bot.load_extension("bot.cogs.security")

# Commands, etc
bot.load_extension("bot.cogs.snakes")

bot.run(os.environ.get("BOT_TOKEN"))

# Close the aiohttp session when the bot finishes running
# (on aiohttp 3+, close() returns a coroutine that must be awaited)
bot.http_session.close()
def aiohttpSession():
    """Aiohttp session creator."""
    return ClientSession(connector=TCPConnector(resolver=AsyncResolver(),
                                                family=socket.AF_INET))
async def client_session() -> ClientSession:
    """Provides a ClientSession instance."""
    # Do not use caching; get a new instance for each test
    async with ClientSession(connector=TCPConnector(
            resolver=AsyncResolver())) as client_session:
        yield client_session
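# Usage sketch for the fixture above (hedged: assumes pytest with pytest-asyncio;
# the decorators and the test body are illustrative, not part of the original).
import pytest
from aiohttp import AsyncResolver, ClientSession, TCPConnector


@pytest.fixture
async def client_session() -> ClientSession:
    """Provides a ClientSession instance."""
    # Do not use caching; get a new instance for each test
    async with ClientSession(connector=TCPConnector(
            resolver=AsyncResolver())) as client_session:
        yield client_session


@pytest.mark.asyncio
async def test_status_ok(client_session):
    async with client_session.get("https://example.com") as resp:
        assert resp.status == 200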