Example #1
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.http_session = ClientSession(connector=TCPConnector(
            resolver=AsyncResolver(), family=socket.AF_INET))
        self._guild_available = asyncio.Event()

        self.loop.create_task(self.send_log("SeasonalBot", "Connected!"))
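Note: AsyncResolver performs DNS lookups without blocking the event loop but requires the aiodns package, and family=socket.AF_INET restricts the connector to IPv4. A minimal self-contained sketch of the same connector setup:

import socket

from aiohttp import ClientSession, TCPConnector
from aiohttp.resolver import AsyncResolver  # requires the aiodns package

async def make_session() -> ClientSession:
    # non-blocking DNS resolution, IPv4 addresses only
    connector = TCPConnector(resolver=AsyncResolver(), family=socket.AF_INET)
    return ClientSession(connector=connector)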
Example #2
async def _media_size(url, session=None, http_headers=None):
    _session = None
    if session is None:
        _session = await ClientSession(connector=TCPConnector(verify_ssl=False)
                                       ).__aenter__()
    else:
        _session = session
    content_length = 0
    try:
        async with _session.head(url,
                                 headers=http_headers,
                                 allow_redirects=True) as resp:
            if resp.status != 200:
                print('Request to url {} failed: '.format(url) +
                      responses[resp.status])
            else:
                content_length = int(resp.headers.get(hdrs.CONTENT_LENGTH,
                                                      '0'))

        # fall back to a GET request when HEAD failed or reported a tiny size
        if content_length < 100:
            async with _session.get(url, headers=http_headers) as get_resp:
                if get_resp.status != 200:
                    raise Exception('Request failed: ' + str(get_resp.status) +
                                    " " + responses[get_resp.status])
                else:
                    content_length = int(
                        get_resp.headers.get(hdrs.CONTENT_LENGTH, '0'))
    finally:
        if session is None:
            await _session.__aexit__(exc_type=None, exc_val=None, exc_tb=None)

    return content_length
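The manual __aenter__/__aexit__ calls above emulate an async with for the optionally-owned session. Since Python 3.7, contextlib.AsyncExitStack expresses the same "use the caller's session or create and clean up my own" pattern more directly; a minimal sketch (same names as the example above, with ssl=False substituted for the deprecated verify_ssl=False):

from contextlib import AsyncExitStack

from aiohttp import ClientSession, TCPConnector

async def _media_size(url, session=None, http_headers=None):
    async with AsyncExitStack() as stack:
        if session is None:
            # we own this session; the stack closes it on exit
            session = await stack.enter_async_context(
                ClientSession(connector=TCPConnector(ssl=False)))
        async with session.head(url, headers=http_headers,
                                allow_redirects=True) as resp:
            return int(resp.headers.get('Content-Length', '0'))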
Example #3
File: main.py Project: Rejfin/PriceHunter
async def fetch_sites(list_of_products):
    tasks = []
    conn = TCPConnector(limit_per_host=MAX_CONNECTION_PER_HOST,
                        limit=MAX_CONNECTIONS,
                        ssl=ssl_context)
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/87.0.4280.88 Safari/537.36',
        'Connection':
        'keep-alive',
        'Accept':
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,'
        'application/signed-exchange;v=b3;q=0.9',
        'cookie':
        "intl_splash=false"
    }
    async with ClientSession(connector=conn, headers=headers) as session:
        for product in list_of_products:
            task = loop.create_task(get_data(product, session))
            tasks.append(task)

        await gather(*tasks)
        print("\r[" + Fore.GREEN + "OK" + Fore.RESET +
              "] All sites has been checked")
    return tasks
Example #4
 async def run(self):
     self.show_start_message()
     self.progress_bar = tqdm(desc='Requests', total=self.limit)
     connector = TCPConnector(limit=None)
     tasks = []
     async with self.semaphore:
         async with ClientSession(auth=self.auth,
                                  headers=self.headers,
                                  connector=connector) as session:
             for _ in range(self.limit):
                 try:
                     # set data from file or from params
                     if self.file_obj:
                         data = self._get_data_from_file()
                     else:
                         data = self.data
                 except StopIteration:
                     # go to start of file and repeat task
                     self.file_obj.seek(0)
                     continue
                 except ValueError:
                     raise UserException('Wrong file format')
                 tasks.append(self._request(session=session, data=data))
             for i in range(0, len(tasks), 1000):
                 await asyncio.gather(*tasks[i:i + 1000])
     self.progress_bar.close()
Example #5
async def get_weather_accu(future, location):
    start = time.time()

    url = accu_weather

    info = None
    LOGGER.info(url)

    try:
        async with ClientSession(connector=TCPConnector(ssl=False)) as session:
            html = await fetch(session, url)
            LOGGER.info(
                f'accu weather content fetch in {time.time() - start} secs.')
            parse_start = time.time()
            info = await parse_accu_weather(html)
            LOGGER.info(
                f'accu weather parsing took {time.time() - parse_start} secs.')
    except ClientConnectorError as ex:
        LOGGER.error(f'Unable to connect Accu Weather API : {repr(ex)}')
        LOGGER.error(ex)
        info = WeatherInfo.WeatherInfo('0', "0", "0", "00:00", "", "", "")
        info.set_error(ex)
    except BaseException as ex:
        LOGGER.error(f'Unable to connect Accu Weather API :- {repr(ex)}')
        LOGGER.error(ex)
        info = WeatherInfo.WeatherInfo('0', "0", "0", "00:00", "", "", "")
        info.set_error(ex)

    LOGGER.info(f'Accu Weather Time Taken {time.time() - start}')
    future.set_result(info)
Example #6
async def main():
    st = time()
    loop = asyncio.get_event_loop()
    load_dotenv()

    # Load Environmental Variable
    wsdl = os.getenv("WSDL_FILE")
    username = os.getenv("UCM_USERNAME")
    password = os.getenv("UCM_PASSWORD")
    ucm_pub_url = f'https://{os.getenv("UCM_PUB_ADDRESS")}:8443/axl/'

    print(f"init: {time() - st}s")

    # Create Session, do not verify certificate, enable basic auth
    # ISSUE, zeep not using cookies, authenticating every time
    # authentication is rate limited
    connector = TCPConnector(ssl=False, limit=5)
    auth = BasicAuth(username, password)
    async with ClientSession(connector=connector, auth=auth) as session:
        transport = AsyncTransport(loop=loop, session=session, timeout=10)
        client = Client(wsdl, transport=transport)

        # create the service proxy pointing to our UCM
        service = client.create_service(
            binding_name="{http://www.cisco.com/AXLAPIService/}AXLAPIBinding",
            address=ucm_pub_url,
        )
        print(f"Zeep Setup: {time() - st}s")
        phone_list = await get_phone_list(service)
        # print(client.transport.session)
        print(f"phone List: {time() - st}s")
        await get_phone_list_detail(service, phone_list)
        print(f"Done: {time() - st}s")
Example #7
async def managed_download(isins):
    # at most one connection per host, ten in total for this connector
    connector = TCPConnector(limit_per_host=1, limit=10)

    async with ClientSession(connector=connector) as session:
        return await asyncio.gather(*(download_history(session, isin)
                                      for isin in isins))
Example #8
async def asyncQuery(inputIDs, session=None):
    """Asynchronously make a list of API calls."""
    responses = {}
    if not session:
        async with ClientSession(connector=TCPConnector(ssl=False)) as session:
            dp = Dispatcher(inputIDs, session)
            dp.dispatch()
            for task in dp.tasks.values():
                res = await asyncio.gather(*task)
                for _res in res:
                    responses.update(_res)
            return responses
    else:
        dp = Dispatcher(inputIDs, session)
        dp.dispatch()
        if dp.invalid and isinstance(dp.invalid, dict):
            for semantic_type, curies in dp.invalid.items():
                if curies and len(curies) > 0:
                    for curie in curies:
                        responses[curie] = generateFailedResponse(
                            curie, semantic_type)
        for task in dp.tasks.values():
            res = await asyncio.gather(*task)
            for _res in res:
                if _res:
                    responses.update(_res)
        return responses
Example #9
async def get_google_weather(future, location):
    start = time.time()
    info = None
    if location is not None:
        url = google_weather_url + '+' + location
    else:
        url = google_weather_url + '+' + default_location

    LOGGER.info(url)

    try:
        async with ClientSession(connector=TCPConnector(ssl=False)) as session:
            html = await fetch(session, url)
            LOGGER.info(
                f'g-weather content fetch in {time.time() - start} secs.')
            parse_start = time.time()
            info = await parse_google_weather(html)
            LOGGER.info(
                f'g-weather parsing took {time.time() - parse_start} secs.')
    except BaseException as ex:
        LOGGER.error(f'Unable to connect Weather API : {repr(ex)}')
        info = WeatherInfo.WeatherInfo('0', "0", "0", "00:00", "", "", "")
        info.set_error(ex)

    LOGGER.info(f'Weather Time Taken {time.time() - start}')
    future.set_result(info)
Example #10
    async def wall(self):
        options = dict(
            connector=TCPConnector(
                keepalive_timeout=299,
                enable_cleanup_closed=True,
            ),
            timeout=ClientTimeout(total=self.settings['cooldown']),
        )

        async with ClientSession(**options) as session:
            try:
                resp = await session.get(
                    self.settings['url'],
                    headers=self.settings['headers'],
                    allow_redirects=False,
                )
            except HTTP_EXCEPTIONS as e:
                if isinstance(e, OSError) and e.errno != ECONNRESET:
                    err_msg = f'Connection error: {str(e)}'
                    await aprint(err_msg, use_stderr=True, flush=True)
                return

            async with resp:
                if resp.status != 200:
                    return

                try:
                    html = await resp.text()
                except TimeoutError:
                    return

                async for post_info in self.extractor(html):
                    yield post_info
Example #11
async def patched_session_maker(daemon_class):
    fake_daemon = daemon_class()
    info = await fake_daemon.start()
    resolver = FakeResolver(info)
    connector = TCPConnector(resolver=resolver)
    fake_session = ClientSession(connector=connector)
    return fake_session, fake_daemon
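FakeResolver is not shown here, but aiohttp's fake-server testing recipe builds it on aiohttp.abc.AbstractResolver so that requests to real hostnames get routed to a locally running fake daemon. A sketch along the lines of that recipe, assuming info maps hostnames to local ports:

import socket

from aiohttp.abc import AbstractResolver

class FakeResolver(AbstractResolver):
    def __init__(self, fakes):
        self._fakes = fakes  # {hostname: local port of the fake server}

    async def resolve(self, host, port=0, family=socket.AF_INET):
        fake_port = self._fakes.get(host)
        if fake_port is None:
            raise OSError(f'No fake server registered for {host}')
        # answer the DNS query with the loopback address of the fake server
        return [{'hostname': host, 'host': '127.0.0.1', 'port': fake_port,
                 'family': family, 'proto': 0,
                 'flags': socket.AI_NUMERICHOST}]

    async def close(self) -> None:
        pass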
Example #12
async def get_weather_forecast(future, location):
    start = time.time()

    if location is not None:
        url = weather_url + location
    else:
        url = weather_url + DEFAULT_LOC_UUID

    info = None
    LOGGER.info(url)

    try:
        async with ClientSession(connector=TCPConnector(ssl=False)) as session:
            html = await fetch(session, url)
            LOGGER.info(
                f'weather content fetch in {time.time() - start} secs.')
            parse_start = time.time()
            info = await parse_weather_forecast(html)
            LOGGER.info(
                f'weather parsing took {time.time() - parse_start} secs.')
    except ClientConnectorError as ex:
        LOGGER.error(f'Unable to connect Weather API : {repr(ex)}')
        info = []
    except BaseException as ex:
        LOGGER.error(f'Unable to connect Weather API : {repr(ex)}')
        info = []

    LOGGER.info(f'Weather Time Taken {time.time() - start}')
    future.set_result(info)
Example #13
async def fetch(url, is_binary=False):
    """
    Fetch URL and guess response extension

    Args:
        url (str): Page URL
        is_binary (bool): True if should download binary content (e.g. images)

    Returns:
        HttpResponse: HTTP response content and extension
    """
    # Fix relative link
    if not URL_PATTERN.match(url):
        url = '{}/{}'.format(MAIN_URL, url)

    logging.debug('Downloading url: {}'.format(url))
    timeout = ClientTimeout(total=REQUEST_TIMEOUT)
    connector = TCPConnector(limit_per_host=MAX_HOST_CONNECTIONS, ssl=False)
    try:
        async with ClientSession(timeout=timeout,
                                 headers=HEADERS,
                                 connector=connector) as session:
            async with session.get(url) as response:
                if is_binary:
                    content = await response.read()
                else:
                    content = await response.text()
                return HttpResponse(content,
                                    guess_extension(response.content_type))
    except Exception as e:
        logging.error('Downloading error: {} [{}]'.format(url, e))
        raise
Example #14
File: config.py Project: Seabreg/tachyon
async def configure_hammertime(proxy=None,
                               retry_count=3,
                               cookies=None,
                               **kwargs):
    loop = custom_event_loop()
    engine = AioHttpEngine(loop=loop, verify_ssl=False, proxy=proxy)
    await engine.session.close()
    connector = TCPConnector(loop=loop,
                             verify_ssl=False,
                             use_dns_cache=True,
                             ttl_dns_cache=None)
    if cookies is not None:
        engine.session = ClientSession(loop=loop,
                                       connector=connector,
                                       cookie_jar=DummyCookieJar(loop=loop))
    else:
        engine.session = ClientSession(loop=loop, connector=connector)
    kb = KnowledgeBase()
    hammertime = HammerTime(loop=loop,
                            request_engine=engine,
                            retry_count=retry_count,
                            proxy=proxy,
                            kb=kb)
    setup_hammertime_heuristics(hammertime, **kwargs)
    hammertime.collect_successful_requests()
    hammertime.kb = kb
    return hammertime
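Note that verify_ssl=, used here and in several other examples, has been deprecated since aiohttp 3.0 in favor of the unified ssl= parameter; TCPConnector(ssl=False) disables certificate verification the same way. A minimal modern equivalent of the connector/session setup above:

from aiohttp import ClientSession, TCPConnector

async def make_insecure_session() -> ClientSession:
    # ssl=False is the current spelling of the deprecated verify_ssl=False;
    # ttl_dns_cache=None caches DNS answers for the connector's lifetime
    connector = TCPConnector(ssl=False, use_dns_cache=True, ttl_dns_cache=None)
    return ClientSession(connector=connector)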
Example #15
    async def post(self,
                   context_span: azs.SpanAbc,
                   span_params,
                   response_codec,
                   url,
                   data=None,
                   headers=None,
                   read_timeout=None,
                   conn_timeout=None,
                   ssl_ctx=None):
        """
        :type context_span: azs.SpanAbc
        :type span_params: dict
        :type response_codec: AbstractResponseCodec
        :type url: str
        :type data: bytes
        :type headers: dict
        :type read_timeout: float
        :type conn_timeout: float
        :type ssl_ctx: ssl.SSLContext
        :rtype: Awaitable[ClientResponse]
        """
        conn = TCPConnector(ssl_context=ssl_ctx)
        # TODO: check which hosts are allowed to receive the tracing headers
        headers = headers or {}
        headers.update(context_span.context.make_headers())
        with context_span.tracer.new_child(context_span.context) as span:
            async with ClientSession(loop=self.loop,
                                     headers=headers,
                                     read_timeout=read_timeout,
                                     conn_timeout=conn_timeout,
                                     connector=conn) as session:
                if 'name' in span_params:
                    span.name(span_params['name'])
                if 'endpoint_name' in span_params:
                    span.remote_endpoint(span_params['endpoint_name'])
                if 'tags' in span_params:
                    for tag_name, tag_val in span_params['tags'].items():
                        span.tag(tag_name, tag_val)

                span.kind(az.CLIENT)
                span.tag(azah.HTTP_METHOD, "POST")
                parsed = urlparse(url)
                span.tag(azc.HTTP_HOST, parsed.netloc)
                span.tag(azc.HTTP_PATH, parsed.path)
                span.tag(azc.HTTP_REQUEST_SIZE, str(len(data)))
                span.tag(azc.HTTP_URL, url)
                _annotate_bytes(span, data)
                try:
                    async with session.post(url, data=data) as resp:
                        response_body = await resp.read()
                        _annotate_bytes(span, response_body)
                        span.tag(azc.HTTP_STATUS_CODE, resp.status)
                        span.tag(azc.HTTP_RESPONSE_SIZE,
                                 str(len(response_body)))
                        dec = await response_codec.decode(span, resp)
                        return dec
                except client_exceptions.ClientError as e:
                    span.tag("error.message", str(e))
                    raise
Example #16
async def launch():
    """
    Collects data on all games by CUSA codes
    stored in links.json file
    """
    tasks = []

    f = open("links.json", "r")
    content = f.read()
    js = dict(json.loads(content))
    links = list(js.values())[:200]

    async with ClientSession(headers=HEADERS,
                             connector=TCPConnector(ssl=False)) as session:
        for link in links:
            task = asyncio.ensure_future(
                Helpers.get_async_soup(session=session,
                                       url=f"{EXTERNAL['product']}{link}"))
            tasks.append(task)

        soups = await asyncio.gather(*tasks)

        f = open("games.json", "a")
        f.write('{')

        for index, soup in enumerate(soups):
            string = PS4Game(url=soup[1], soup=soup[0]).as_json()
            f.write(f'"{str(uuid.uuid4())}": {string},')
        f.write('}')
Example #17
 def instance(cls) -> 'RPCClient':
     if cls._instance is None:
         cls._instance = cls.__new__(cls)
         cls.node_url = options.node_url
         cls.connector = TCPConnector(family=0, resolver=AsyncResolver())
         cls.session = ClientSession(connector=cls.connector)
     return cls._instance
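One caveat with this singleton: aiohttp expects ClientSession to be created while an event loop is running, so calling instance() from synchronous startup code may bind the session to the wrong loop or emit a DeprecationWarning. A sketch of a coroutine-based variant (the async classmethod is illustrative, not from the project above):

from aiohttp import ClientSession, TCPConnector
from aiohttp.resolver import AsyncResolver

class RPCClient:
    _instance = None

    @classmethod
    async def instance(cls) -> 'RPCClient':
        # creating the session inside a coroutine binds it
        # to the currently running event loop
        if cls._instance is None:
            cls._instance = cls.__new__(cls)
            cls._instance.connector = TCPConnector(family=0,
                                                   resolver=AsyncResolver())
            cls._instance.session = ClientSession(
                connector=cls._instance.connector)
        return cls._instance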
Example #18
 async def run(self, urls):
     """
     Async method which creates one instance of aiohttp.ClientSession and uses it to request multiple urls.
     :return:
     """
     # TODO: better logger info management. Needs statistics, sort of.
     tasks = []
     timeout = ClientTimeout(total=2 * 60, sock_connect=60, sock_read=30)
     connector = TCPConnector(verify_ssl=False, limit=1000)
     async with ClientSession(timeout=timeout, connector=connector) as s:
         for url in urls:
             task = asyncio.ensure_future(self.fetch(url, s))
             tasks.append(task)
         responses = await asyncio.gather(*tasks)
     for response in responses:
         if response[0]:
             self.succ_c += 1
         else:
             # TODO: print out the status code. Or save the response as html and analyze. Something like that.
             self.fail_c += 1
     #info('Made ' + str(len(urls)) + ' requests')
     #info('Valid response count: ' + str(self.succ_c))
     #info('Invalid response count: ' + str(self.fail_c))
     self.fail_c = 0
     self.succ_c = 0
     return responses
Example #19
    async def fetch(self, request: Request) -> Response:
        session, resp = None, None
        try:
            session = request.session or aiohttp.ClientSession(
                connector=TCPConnector(limit=1))
            resp = await session.request(request.method,
                                         request.url,
                                         timeout=request.timeout,
                                         headers=request.header or {},
                                         cookies=request.cookies or {},
                                         data=request.data or {},
                                         **request.extras or {}
                                         )
            byte_content = await resp.read()
            headers = {}
            if resp.headers:
                headers = {k: v for k, v in resp.headers.items()}
            response = Response(body=byte_content,
                                status=resp.status,
                                headers=headers,
                                cookies=resp.cookies
                                )
        finally:
            if resp:
                resp.release()
            if request.session is None and session:
                await session.close()

        return response
Example #20
    async def check_status(args):
        import datetime
        import aiohttp
        from aiohttp import TCPConnector
        domain_list = args.get('domain_list', [])
        start_time = 1571136600
        end_time = 1571223000

        client_name = 'ccemea'

        for channel in domain_list:
            headers = CCAPI.get_sign_header()

            code_url = (
                'http://openapi.chinacache.com/imp/api/v1.0/status_code/'
                'open/count?channel_name={}&start_time={}&end_time={}'
                '&cloud_curr_client_name={}').format(channel, start_time,
                                                     end_time, client_name)

            print(code_url)
            print(headers)
            print(datetime.datetime.now())
            # response = requests.get(code_url, headers=headers, timeout=60)
            # response_json = response.json()

            timeout_obj = aiohttp.ClientTimeout(total=60)
            async with aiohttp.ClientSession(
                    timeout=timeout_obj,
                    headers=headers,
                    connector=TCPConnector(verify_ssl=False)) as session:
                async with session.get(code_url) as response:
                    response_json = await response.json()

            print(response_json)
            print(datetime.datetime.now())
Example #21
async def scan_url(url, high, low, num_requests, sem=None):
    '''
    Scans the "url" for "num_requests" times.
    Half of those requests have a sleep command with a "high" sleep delay
    The other half have a sleep command with a "low" sleep delay
    '''

    tasks = []
    con = TCPConnector(ssl=False)  # In case of SSL type errors
    get_url_rtt.start_time = {}

    if sem is not None:
        # in case a semaphore is used to throttle calls to this function
        await sem.acquire()

    # Fetch all responses within one Client session, keep connection alive for all requests.
    async with ClientSession(connector=con) as session:
        for i in range(1, num_requests + 1):

            if i % 2 == 0:  # Even Number gets assigned High Delay
                task = asyncio.ensure_future(
                    get_url_rtt(url + 'SLEEP({})'.format(high), session, i))
                tasks.append(task)

            else:  # Odd Number gets assigned Low Delay
                task = asyncio.ensure_future(
                    get_url_rtt(url + 'SLEEP({})'.format(low), session, i))
                tasks.append(task)

        # All RTTs from responses are in this variable
        responses = await asyncio.gather(*tasks)

    if sem is not None:
        sem.release()

    return responses
Example #22
 def http_session(self) -> ClientSession:
     session: Optional[ClientSession] = self.lookup.get("http_session")
     if not session:
         connector = TCPConnector(limit=0, ssl=False, ttl_dns_cache=300)
         session = ClientSession(connector=connector)
         self.lookup["http_session"] = session
     return session
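Because the session is cached in self.lookup, whoever owns that lookup should close it on shutdown; an unclosed ClientSession logs an "Unclosed client session" warning. A hedged sketch of a matching cleanup hook (the function name is illustrative):

async def close_http_session(lookup: dict) -> None:
    # close and discard the cached session, if one was ever created
    session = lookup.pop("http_session", None)
    if session is not None:
        await session.close()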
Example #23
File: monitor.py Project: theanalyst/task
  async def multi_fetch(self):
    """A method that reuses a session to fire off multiple requests and
    wait for a specified time after to fire the next batch. The number of parallel requests is governed     by req_count & time to wait by sleep_time """
    reqs = []
    timeout = ClientTimeout(total=300)
    while True: 
      try:
        async with ClientSession(connector=TCPConnector(keepalive_timeout=600), timeout=timeout) as session:
          req_ctr = 0
          while True:
            start = time.perf_counter()
            for i in range(self.req_count):
              self.logger.debug('firing off req %d', req_ctr)
              reqs.append(asyncio.ensure_future(
                self.fetch(session)
              ))
              req_ctr = req_ctr+1

            resps = await asyncio.gather(*reqs)
            reqs.clear()  # reset the batch; otherwise old requests are re-gathered every loop
            end = time.perf_counter()
            t = end - start
            self.logger.debug('%d reqs took %s s', self.req_count, t)
            await asyncio.sleep(self.sleep_time)
      except ClientConnectorError as e:
          self.logger.error('An exception has occurred with client: %s', e)

      self.logger.info('Waiting for 30s before retrying')
      time.sleep(30)  # note: this blocks the event loop; asyncio.sleep(30) would not
Example #24
File: request.py Project: ydssx/pyspider
 def __init__(
     self,
     url,
     method="GET",
     headers=None,
     params=None,
     data=None,
     proxy=None,
     session=None,
     timeout=5,
     logger=None,
     meta=None,
     callback=None,
 ) -> None:
     self.close_request_session = False
     if not session:
         self.session = ClientSession(connector=TCPConnector(ssl=False))
         self.close_request_session = True
     else:
         self.session = session
     self.timeout = timeout
     self.url = url
     self.method = method
     self.logger = logger or loguru.logger
     self.meta = meta
     self.callback = callback
     self.headers = headers
     self.params = params
     if isinstance(data, dict):
         self.data = ujson.dumps(data)
     else:
         self.data = data
     self.proxy = proxy
Example #25
async def run(r):
    tasks = []
    url_count = 1
    # Fetch all responses within one Client session,
    # keep connection alive for all requests.
    # for my test 150 was the optimal number
    conn = TCPConnector(limit=300)
    async with ClientSession(connector=conn) as session:
        for i in r:
            task = asyncio.ensure_future(fetch(i, session, url_count))
            tasks.append(task)
            url_count += 1

        bob = []
        for f in tqdm(
                asyncio.as_completed(tasks),
                total=len(tasks),
                desc="Async Calls",
                unit=" request",
        ):
            bob.append(await f)

        # the tasks have already finished via as_completed above;
        # gather just collects their results in submission order
        responses = await asyncio.gather(*tasks)

        # you now have all response bodies in this variable
        # print_responses(responses)
        return responses
Example #26
async def getimg(url, s, t):
    # download the images
    global img
    titles = ''
    try:
        timeout = aiohttp.ClientTimeout(total=10)
        async with aiohttp.ClientSession(connector=TCPConnector(ssl=False),
                                         timeout=timeout) as session:
            async with await session.get(url) as reponse:
                html = await reponse.text()
        # the "\n" characters must be stripped here before matching
        memlist = re.findall(mem, html.replace("\n", ""))
        # print(memlist)
        # print(url)
        # print(html)
        for i in memlist:
            # print i
            imglist = re.findall(img, i)
            titles = re.findall(detail,
                                i.replace('&nbsp; ', '').replace('&nbsp;', ''))
            # print(imglist)
            # if imglist == ['https://img1.gamersky.com/image2020/11/20201105_ls_red_141_3/gamersky_027small_054_20201151826435.jpg']:
            #     print(memlist)
            #     print(url)
            ####
            for j in imglist:
                # download each image
                tit = titles[0] if len(titles) > 0 else random.choice(
                    ["Make", "Love", "No", "War", "by", "John", "Lenon"])
                # strip the font tag; replace slashes with full-width forms
                font = re.search(r'">(.*?)<', tit)
                tit = tit.replace('/', '／').replace(
                    '\\', '＼') if font is None else font.group(1)
                # print(('./youmin/'+t+'/'+str(s)+"--"+tit+j[-4:]))
                async with aiofiles.open(
                    './youmin/' + t + '/' + str(s) + "--" + tit + j[-4:],
                    'wb') as f:
                    async with aiohttp.ClientSession(connector=TCPConnector(
                            ssl=False)) as session:
                        async with await session.get(j) as reponse:
                            page_text = await reponse.read()
                    await f.write(page_text)
    except Exception as ex:
        print(url)
        print(imglist)
        print("title=", titles)
        print("\n出现如下异常%s" % ex, '\n')
    async def get_access_token(self) -> str:
        """Retrieves CrowdStrike Falcon access token required for API requests.

        Runs for max retries of 3 attempts in case of API rate limit hit.

        Returns:
            str: The access token retrieved.

        Raises:
            RuntimeError: An error occurred (json.decoder.JSONDecodeError) trying to deserialize the API response.
        """
        token = None
        body = None
        max_retries = 3
        async with ClientSession(connector=TCPConnector(ssl=self.verify_ssl),
                                 headers=TOKEN_RETRIEVAL_HEADERS,
                                 trust_env=self.proxy) as session:
            for _ in range(max_retries):
                data = {
                    'client_id': self.client_id,
                    'client_secret': self.client_secret
                }
                async with session.post(f'{self.base_url}/oauth2/token',
                                        data=data) as res:
                    if res.status == TOO_MANY_REQUESTS_STATUS_CODE:
                        demisto.debug(
                            'Token retrieval requests status: rate limit exceeded, will retry in 5 seconds.'
                        )
                        await sleep(5)
                    elif res.status == CREATED_STATUS_CODE:
                        try:
                            body = await res.json()
                            break
                        except json.decoder.JSONDecodeError:
                            raise RuntimeError(
                                f'Failed to decode successful token retrieval response: {str(res.content)}'
                            )
                    else:
                        try:
                            body = await res.json()
                            error = body.get('errors', [{}])
                            error_message = error[0].get('message', '')
                            raise RuntimeError(
                                f'Failed to retrieve token, verify client details are correct: {error_message}'
                            )
                        except json.decoder.JSONDecodeError:
                            raise RuntimeError(
                                f'Failed to decode token retrieval failure response: {str(res.content)}'
                            )
            if not body:
                raise RuntimeError(
                    f'Failed to retrieve token - got empty response: {str(res.content)}'
                )
            token = body.get('access_token')
            self.expiry_time = body.get('expires_in',
                                        MINUTES_30) - TIME_BUFFER_1_MINUTE
        if not token:
            raise RuntimeError('Failed to retrieve token')
        return token
Example #28
File: bot.py Project: AtieP/seasonalbot
 def __init__(self, redis_session: RedisSession, **kwargs):
     super().__init__(**kwargs)
     self.http_session = ClientSession(connector=TCPConnector(
         resolver=AsyncResolver(), family=socket.AF_INET))
     self._guild_available = asyncio.Event()
     self.redis_session = redis_session
     self.loop.create_task(self.check_channels())
     self.loop.create_task(self.send_log(self.name, "Connected!"))
Example #29
 def __init__(self, **kwargs):
     super().__init__(**kwargs)
     self.http_session = ClientSession(
         connector=TCPConnector(
             resolver=AsyncResolver(),
             family=socket.AF_INET,
         )
     )
Example #30
File: aerial.py Project: hackerswe/Aerial
 def __init__(self, details: dict):
     super().__init__(platform=fortnitepy.Platform.XBOX,
                      connector=TCPConnector(limit=None),
                      auth=fortnitepy.AdvancedAuth(**details),
                      default_party_config=fortnitepy.DefaultPartyConfig(
                          privacy=fortnitepy.PartyPrivacy.PUBLIC,
                          team_change_allowed=False,
                          chat_enabled=False))
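A pattern common to nearly all of these examples: build one TCPConnector with the connection limits, DNS behavior, and SSL policy you need, wrap it in a single long-lived ClientSession, and share that session across all requests. A minimal self-contained sketch combining the options seen above (the URL and limits are placeholders):

import asyncio

from aiohttp import ClientSession, ClientTimeout, TCPConnector

async def main() -> None:
    connector = TCPConnector(
        limit=100,           # total concurrent connections
        limit_per_host=10,   # cap per destination host
        ttl_dns_cache=300,   # cache DNS answers for 5 minutes
    )
    timeout = ClientTimeout(total=60)
    async with ClientSession(connector=connector, timeout=timeout) as session:
        async with session.get('https://example.com') as resp:
            print(resp.status, len(await resp.read()))

asyncio.run(main())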