from aiohttp_retry import ExponentialRetry, RetryClient
from influxdb_client import Point
from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync


async def main():
    """
    Configure Retries - for more info see https://github.com/inyutin/aiohttp_retry
    """
    retry_options = ExponentialRetry(attempts=3)
    async with InfluxDBClientAsync(
            url="http://localhost:8086",
            token="my-token",
            org="my-org",
            client_session_type=RetryClient,
            client_session_kwargs={"retry_options": retry_options}) as client:
        """
        Write data:
        """
        print(f"\n------- Written data: -------\n")
        write_api = client.write_api()
        _point1 = Point("async_m").tag("location",
                                       "Prague").field("temperature", 25.3)
        _point2 = Point("async_m").tag("location",
                                       "New York").field("temperature", 24.3)
        successfully = await write_api.write(bucket="my-bucket",
                                             record=[_point1, _point2])
        print(f" > successfully: {successfully}")
        """
        Query: Stream of FluxRecords
        """
        print(f"\n------- Query: Stream of FluxRecords -------\n")
        query_api = client.query_api()
        records = await query_api.query_stream(
            'from(bucket:"my-bucket") '
            '|> range(start: -10m) '
            '|> filter(fn: (r) => r["_measurement"] == "async_m")')
        async for record in records:
            print(record)
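The coroutine above would normally be driven from a script entry point, for example:

import asyncio

asyncio.run(main())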
    def get_client_session(self, config: SeekerConfig) -> ClientSession:
        async def _on_request_start(
            session: ClientSession,
            trace_config_ctx: SimpleNamespace,
            params: TraceRequestStartParams
        ) -> None:
            current_attempt = \
                trace_config_ctx.trace_request_ctx['current_attempt']
            if current_attempt > 1:
                logger.warning(
                    f'::warn ::Retry Attempt #{current_attempt} '
                    f'of {config.max_tries}: {params.url}')
        trace_config = TraceConfig()
        trace_config.on_request_start.append(_on_request_start)
        limit_per_host = max(0, config.connect_limit_per_host)
        connector = TCPConnector(
            limit_per_host=limit_per_host,
            ttl_dns_cache=600  # 10-minute DNS cache
        )
        retry_options = ExponentialRetry(
            attempts=config.max_tries,
            max_timeout=config.max_time,
            exceptions=[
                aiohttp.ClientError,
                asyncio.TimeoutError
            ])
        return RetryClient(
            raise_for_status=True,
            connector=connector,
            timeout=ClientTimeout(total=config.timeout),
            headers={'User-Agent': config.agent},
            retry_options=retry_options,
            trace_configs=[trace_config])
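A minimal usage sketch for the factory above, assuming a hypothetical seeker instance exposing it and a populated SeekerConfig; RetryClient supports the same async context-manager protocol as aiohttp.ClientSession:

async def url_is_reachable(seeker, config: SeekerConfig, url: str) -> bool:
    # The factory sets raise_for_status=True, so any non-2xx response
    # surfaces as aiohttp.ClientResponseError once retries are exhausted.
    try:
        async with seeker.get_client_session(config) as session:
            async with session.get(url):
                return True
    except aiohttp.ClientError:
        return False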
Example #3
async def _download_from_asyncgen(
    items: AsyncGenerator,
    params: DownloadParams,
    tcp_connections: int = 64,
    nb_workers: int = 64,
    batch_size: int = 16,
    retries: int = 1,
    logger: logging.Logger = None,
):
    """Asynchronous downloader that takes an interable and downloads it

    Args:
        items (Union[Generator, AsyncGenerator]): (async/sync) generator that yields a standardized dict of urls
        params (DownloadParams): Download parameter dict
        tcp_connections (int, optional): Maximum number of concurrent TCP connections. Defaults to 64.
        nb_workers (int, optional): Maximum number of workers. Defaults to 64.
        batch_size (int, optional): Maximum queue batch size. Defaults to 16.
        retries (int, optional): Maximum number of attempts. Defaults to 1.
        logger (logging.Logger, optional): Logger object. Defaults to None.
    Raises:
        NotImplementedError: If generator turns out to be invalid.
    """

    queue = asyncio.Queue(nb_workers)
    # Guard against the default logger=None before querying its level.
    progressbar = tqdm(
        smoothing=0,
        unit=" Downloads",
        disable=logger is not None and logger.getEffectiveLevel() > logging.INFO)
    stats = {"failed": 0, "skipped": 0, "success": 0}

    retry_options = ExponentialRetry(attempts=retries)

    async with RetryClient(
            connector=aiohttp.TCPConnector(limit=tcp_connections),
            raise_for_status=True,
            retry_options=retry_options,
            trust_env=True,
    ) as session:

        workers = [
            asyncio.create_task(
                _download_queue(queue,
                                session,
                                stats,
                                params=params,
                                progressbar=progressbar,
                                logger=logger)) for _ in range(nb_workers)
        ]

        # get chunks from async generator and add to async queue
        async with aiostream.stream.chunks(items, batch_size).stream() as chnk:
            async for batch in chnk:
                await queue.put(batch)

        await queue.join()

    for w in workers:
        w.cancel()

    return stats
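await queue.join() above only returns once every batch put on the queue has been matched by a queue.task_done() call, so the project-specific _download_queue worker is assumed to look roughly like this sketch (the item["url"] key and the error handling are illustrative):

import aiohttp

async def _download_queue(queue, session, stats, *, params, progressbar, logger):
    while True:
        batch = await queue.get()
        try:
            for item in batch:
                async with session.get(item["url"]) as response:
                    await response.read()
                stats["success"] += 1
                progressbar.update(1)
        except aiohttp.ClientError:
            stats["failed"] += 1
        finally:
            # Without task_done(), queue.join() would never return.
            queue.task_done()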
Example #4
async def generate_link() -> Optional[str]:
    settings = Settings()
    assert settings.SPELLTABLE_AUTH_KEY

    headers = {
        "user-agent": f"spellbot/{__version__}",
        "key": settings.SPELLTABLE_AUTH_KEY,
    }

    data: Optional[dict[str, Any]] = None
    raw_data: Optional[bytes] = None
    try:
        async with RetryClient(
                raise_for_status=False,
                retry_options=ExponentialRetry(attempts=5),
        ) as client:
            async with client.post(settings.SPELLTABLE_CREATE,
                                   headers=headers) as resp:
                # Rather than use `resp.json()`, which respects the response
                # mimetype, grab the raw bytes and decode them ourselves.
                # https://github.com/inyutin/aiohttp_retry/issues/55
                raw_data = await resp.read()
                data = json.loads(raw_data)
                if not data or "gameUrl" not in data:
                    logger.warning(
                        "warning: gameUrl missing from SpellTable API response (%s): %s",
                        resp.status,
                        data,
                    )
                    return None
                assert data is not None
                returned_url = str(data["gameUrl"])
                wizards_url = returned_url.replace(
                    "www.spelltable.com",
                    "spelltable.wizards.com",
                )
                return wizards_url
    except ClientError as ex:
        add_span_error(ex)
        logger.warning(
            "warning: SpellTable API failure: %s, data: %s, raw: %s",
            ex,
            data,
            raw_data,
            exc_info=True,
        )
        return None
    except Exception as ex:
        add_span_error(ex)
        logger.error(
            "error: unexpected exception: %s, data: %s, raw: %s",
            ex,
            data,
            raw_data,
            exc_info=True,
        )
        return None
Example #5
async def test_override_options(aiohttp_client, loop):
    test_app = App()
    app = test_app.get_app()

    client = await aiohttp_client(app)
    retry_options = ExponentialRetry(attempts=1)
    retry_client = RetryClient(retry_options=retry_options)
    retry_client._client = client

    retry_options = ExponentialRetry(attempts=5)
    async with retry_client.get('/sometimes_error', retry_options) as response:
        text = await response.text()
        assert response.status == 200
        assert text == 'Ok!'

        assert test_app.counter == 3

    await retry_client.close()
    await client.close()
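The test passes because the per-request options take precedence over the client-level ones (attempts=5 wins over attempts=1). Outside the test harness, the same override might look like this sketch (URL illustrative):

async with RetryClient(retry_options=ExponentialRetry(attempts=1)) as client:
    async with client.get('https://example.com/flaky',
                          retry_options=ExponentialRetry(attempts=5)) as response:
        body = await response.text()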
Example #6
async def test_not_found_error(aiohttp_client, loop):
    test_app = App()
    app = test_app.get_app()

    client = await aiohttp_client(app)
    retry_client = RetryClient()
    retry_client._client = client

    retry_options = ExponentialRetry(attempts=5, statuses={404})
    async with retry_client.get('/not_found_error', retry_options) as response:
        assert response.status == 404
        assert test_app.counter == 5

    await retry_client.close()
    await client.close()
Example #7
    async def fetch_elos(self, steam_ids: list[SteamId], *, headers: Optional[dict[str, str]] = None):
        if len(steam_ids) == 0:
            return None

        formatted_steam_ids = "+".join([str(steam_id) for steam_id in steam_ids])
        request_url = f"{self.url_base}{self.balance_api}/{formatted_steam_ids}"
        retry_options = ExponentialRetry(attempts=3, factor=0.1,
                                         statuses={500, 502, 504},
                                         exceptions={aiohttp.ClientResponseError, aiohttp.ClientPayloadError})
        async with RetryClient(raise_for_status=False, retry_options=retry_options,
                               timeout=ClientTimeout(total=5, connect=3, sock_connect=3, sock_read=5)) as retry_client:
            async with retry_client.get(request_url, headers=headers) as result:
                if result.status != 200:
                    return None
                return await result.json()
Example #8
async def test_sometimes_error_with_raise_for_status(aiohttp_client, loop):
    test_app = App()
    app = test_app.get_app()

    client = await aiohttp_client(app, raise_for_status=True)
    retry_client = RetryClient()
    retry_client._client = client

    retry_options = ExponentialRetry(attempts=5, exceptions={ClientResponseError})
    async with retry_client.get('/sometimes_error', retry_options) \
            as response:
        text = await response.text()
        assert response.status == 200
        assert text == 'Ok!'

        assert test_app.counter == 3

    await retry_client.close()
    await client.close()
Example #9
    async def _get_rate_limited(self, full_url, **kwargs):
        """Query API, respecting 60 requests per minute rate limit."""
        LOG.info('_get_rate_limited("%s", %s)', full_url, repr(kwargs))
        async with self.api_v1_limiter:
            # i.e. wait 0.1s, 0.2s, 0.4s, 0.8s, and finally give up
            retry_options = ExponentialRetry(
                attempts=4,
                exceptions=RETRY_EXCEPTIONS,
            )
            try:
                async with self.session.get(
                        full_url, params=kwargs,
                        retry_options=retry_options) as response:
                    if response.status == 200:
                        return await response.json()
                    else:
                        try:
                            json = await response.json()
                            msg = f"{json.get('error')} ({json.get('status')})"
                        except ContentTypeError:
                            data = await response.text()
                            document = BeautifulSoup(data, "html.parser")
                            # Only use the body, if present
                            if document.body:
                                text = document.body.find().text
                            else:
                                text = document
                            # Treat as much as we can as markdown
                            markdown = html2markdown.convert(text)
                            # Punt the rest back to bs4 to drop unhandled tags
                            msg = BeautifulSoup(markdown, "html.parser").text
                        lookup_failed_msg = f"Lookup failed: {msg}"
                        LOG.error(lookup_failed_msg)
                        raise LookupError(lookup_failed_msg)
            except Exception as e:  # pylint: disable=broad-except,invalid-name
                if any(isinstance(e, exc) for exc in retry_options.exceptions):
                    attempts = retry_options.attempts
                    msg = f"iNat not responding after {attempts} attempts. Please try again later."
                    LOG.error(msg)
                    raise LookupError(msg) from e
                raise e

        return None
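The api_v1_limiter entered at the top of the method is assumed to be an async context-manager rate limiter enforcing the 60-requests-per-minute limit from the docstring; one common choice is aiolimiter.AsyncLimiter (a sketch, not necessarily the project's implementation):

from aiolimiter import AsyncLimiter

# Allow at most 60 acquisitions in any 60-second window.
api_v1_limiter = AsyncLimiter(60, 60)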
Example #10
    @classmethod
    async def create(cls,
                     epsagon_token,
                     retry_attempts=DEFAULT_RETRY_ATTEMPTS):
        """
        Creates a new EpsagonClient instance
        :param epsagon_token: used for authorization
        :param retry_attempts: maximum number of retry attempts per request
        """
        self = cls()
        if not epsagon_token:
            raise ValueError("Epsagon token must be given")
        self.epsagon_token = epsagon_token
        retry_options = ExponentialRetry(attempts=retry_attempts,
                                         exceptions=(ClientError,))
        self.client = RetryClient(auth=BasicAuth(login=self.epsagon_token),
                                  headers={
                                      "Content-Type": "application/json",
                                  },
                                  retry_options=retry_options,
                                  raise_for_status=True)
        return self
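Because create is an async factory, instantiation is awaited; a usage sketch with an illustrative token:

client = await EpsagonClient.create(epsagon_token="my-epsagon-token")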
Example #11
    async def get_client(self, **kwargs):
        import aiohttp
        from aiohttp_retry import ExponentialRetry, RetryClient

        kwargs["retry_options"] = ExponentialRetry(
            attempts=self.SESSION_RETRIES,
            factor=self.SESSION_BACKOFF_FACTOR,
            max_timeout=self.REQUEST_TIMEOUT,
        )

        # aiohttp's default total timeout is 300 seconds, which is
        # too low for DVC's interactions (especially on the read)
        # when dealing with large data blobs. We remove the limit
        # on the total time to read, and only limit the time spent
        # connecting to the remote server.
        kwargs["timeout"] = aiohttp.ClientTimeout(
            total=None,
            connect=self.REQUEST_TIMEOUT,
            sock_connect=self.REQUEST_TIMEOUT,
            sock_read=None,
        )

        return RetryClient(**kwargs)
Example #12
    'Accept-Encoding': 'gzip, deflate, br',
}
FEED_ACCEPT: Final = 'application/rss+xml, application/rdf+xml, application/atom+xml, ' \
                     'application/xml;q=0.9, text/xml;q=0.8, text/*;q=0.7, application/*;q=0.6'

EXCEPTIONS_SHOULD_RETRY: Final = (
    asyncio.TimeoutError,
    # aiohttp.ClientPayloadError,
    # aiohttp.ClientResponseError,
    # aiohttp.ClientConnectionError,
    aiohttp.ServerConnectionError,
    TimeoutError,
    ConnectionError)

RETRY_OPTION: Final = ExponentialRetry(attempts=2,
                                       start_timeout=1,
                                       exceptions=set(EXCEPTIONS_SHOULD_RETRY))

PIL.ImageFile.LOAD_TRUNCATED_IMAGES = True

logger = log.getLogger('RSStT.web')

_feedparser_thread_pool = ThreadPoolExecutor(1, 'feedparser_')

contentDispositionFilenameParser = re.compile(
    r'(?<=filename=").+?(?=")', re.I).search


class WebError(Exception):
    def __init__(self,
                 error_name: str,
Example #13
    def __init__(self, api_base_url=__API_URL_BASE):
        self.api_base_url = api_base_url
        self.request_timeout = 120
        self.retry_options = ExponentialRetry(attempts=5,
                                              statuses=[502, 503, 504])
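The stored options would then be handed to a RetryClient whenever a request is issued; a hedged sketch of such a helper (the method name and endpoint handling are illustrative, and aiohttp.ClientTimeout is assumed to be imported):

    async def _request(self, endpoint):
        # Each request gets a RetryClient wired with the options from __init__.
        async with RetryClient(retry_options=self.retry_options) as client:
            async with client.get(self.api_base_url + endpoint,
                                  timeout=ClientTimeout(total=self.request_timeout)) as resp:
                return await resp.json()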
Example #14
def test_retry_timeout_exponential_backoff():
    retry = ExponentialRetry(attempts=10)
    timeouts = [retry.get_timeout(x) for x in range(10)]
    assert timeouts == [0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6, 30.0]
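For reference, the expected values follow ExponentialRetry's default backoff (start_timeout=0.1, factor=2.0) clamped at max_timeout=30.0; a minimal sketch of the formula:

def expected_timeout(attempt, start_timeout=0.1, factor=2.0, max_timeout=30.0):
    # The delay doubles with each attempt and is capped at max_timeout.
    return min(start_timeout * factor ** attempt, max_timeout)

assert [expected_timeout(n) for n in range(10)] == \
       [0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6, 30.0]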