Example #1
def test_connector_timeout(loop):
    server = AIOServer()
    session = AioSession(loop=loop)
    config = AioConfig(max_pool_connections=1, connect_timeout=1,
                       retries={'max_attempts': 0})
    s3_client = session.create_client('s3', config=config,
                                      endpoint_url=server.endpoint_url,
                                      aws_secret_access_key='xxx',
                                      aws_access_key_id='xxx')

    try:
        server.wait_until_up()

        @asyncio.coroutine
        def get_and_wait():
            yield from s3_client.get_object(Bucket='foo', Key='bar')
            yield from asyncio.sleep(100)

        # this should not raise as we won't have any issues connecting to the server
        task1 = asyncio.Task(get_and_wait(), loop=loop)
        task2 = asyncio.Task(get_and_wait(), loop=loop)

        try:
            done, pending = yield from asyncio.wait([task1, task2],
                                                    timeout=3, loop=loop)

            # second request should not timeout just because there isn't a
            # connector available
            assert len(pending) == 2
        finally:
            task1.cancel()
            task2.cancel()
    finally:
        s3_client.close()
        yield from server.stop()
Example #2
async def test_connector_timeout(event_loop):
    session = AioSession(loop=event_loop)
    config = AioConfig(max_pool_connections=1, connect_timeout=1,
                       retries={'max_attempts': 0})
    async with AIOServer() as server, \
            session.create_client('s3', config=config,
                                  endpoint_url=server.endpoint_url,
                                  aws_secret_access_key='xxx',
                                  aws_access_key_id='xxx') as s3_client:

        async def get_and_wait():
            await s3_client.get_object(Bucket='foo', Key='bar')
            await asyncio.sleep(100)

        task1 = asyncio.Task(get_and_wait(), loop=event_loop)
        task2 = asyncio.Task(get_and_wait(), loop=event_loop)

        try:
            done, pending = await asyncio.wait([task1, task2],
                                               timeout=3, loop=event_loop)

            # second request should not timeout just because there isn't a
            # connector available
            assert len(pending) == 2
        finally:
            task1.cancel()
            task2.cancel()
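
Both connector-timeout tests above are excerpts from the aiobotocore test suite and omit their imports. A minimal sketch of the scaffolding they assume (the module path for the AIOServer test helper is an assumption; it is a helper server defined in that test suite, not part of the public API):

import asyncio

from aiobotocore.config import AioConfig
from aiobotocore.session import AioSession

# AIOServer is a local test server helper from the aiobotocore test suite;
# its exact module path is an assumption here.
from tests.mock_server import AIOServer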
Example #3
async def async_get_service(hass, config, discovery_info=None):
    """Get the AWS notification service."""
    if discovery_info is None:
        _LOGGER.error("Please config aws notify platform in aws component")
        return None

    session = None

    conf = discovery_info

    service = conf[CONF_SERVICE]
    region_name = conf[CONF_REGION]

    available_regions = await get_available_regions(hass, service)
    if region_name not in available_regions:
        _LOGGER.error(
            "Region %s is not available for %s service, must in %s",
            region_name,
            service,
            available_regions,
        )
        return None

    aws_config = conf.copy()

    del aws_config[CONF_SERVICE]
    del aws_config[CONF_REGION]
    if CONF_PLATFORM in aws_config:
        del aws_config[CONF_PLATFORM]
    if CONF_NAME in aws_config:
        del aws_config[CONF_NAME]
    if CONF_CONTEXT in aws_config:
        del aws_config[CONF_CONTEXT]

    if not aws_config:
        # no platform config, use the first aws component credential instead
        if hass.data[DATA_SESSIONS]:
            session = next(iter(hass.data[DATA_SESSIONS].values()))
        else:
            _LOGGER.error("Missing aws credential for %s", config[CONF_NAME])
            return None

    if session is None:
        credential_name = aws_config.get(CONF_CREDENTIAL_NAME)
        if credential_name is not None:
            session = hass.data[DATA_SESSIONS].get(credential_name)
            if session is None:
                _LOGGER.warning("No available aws session for %s", credential_name)
            del aws_config[CONF_CREDENTIAL_NAME]

    if session is None:
        if (profile := aws_config.get(CONF_PROFILE_NAME)) is not None:
            session = AioSession(profile=profile)
            del aws_config[CONF_PROFILE_NAME]
        else:
            session = AioSession()
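
For context, the discovery_info mapping consumed above names the AWS service and region and optionally selects a stored credential or a local profile; the remaining keys are kept in aws_config for client creation. A rough sketch of such a mapping, using the same CONF_* constants (the values are placeholders, and which optional keys appear depends on the user's configuration):

conf = {
    CONF_SERVICE: "sns",                    # which AWS service backs the notifier
    CONF_REGION: "us-east-1",               # must be in get_available_regions()
    CONF_NAME: "my_sns_notifier",           # optional, stripped before client creation
    CONF_CREDENTIAL_NAME: "my_credential",  # optional, picks a session from hass.data[DATA_SESSIONS]
    # CONF_PROFILE_NAME: "default",         # optional, builds an AioSession from a local profile
}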
Example #4
async def test_connector_timeout2(event_loop):
    session = AioSession(loop=event_loop)
    config = AioConfig(max_pool_connections=1, connect_timeout=1,
                       read_timeout=1, retries={'max_attempts': 0})
    async with AIOServer() as server, \
            session.create_client('s3', config=config,
                                  endpoint_url=server.endpoint_url,
                                  aws_secret_access_key='xxx',
                                  aws_access_key_id='xxx') as s3_client:

        with pytest.raises(asyncio.TimeoutError):
            resp = await s3_client.get_object(Bucket='foo', Key='bar')
            await resp["Body"].read()
Example #5
async def test_retry(session: AioSession, caplog):
    caplog.set_level(logging.DEBUG)

    config = AioConfig(
        connect_timeout=1,
        read_timeout=1,

        # this goes through a slightly different codepath than regular retries
        retries={
            "mode": "standard",
            "total_max_attempts": 3,
        },
    )

    async with session.create_client(
            's3',
            config=config,
            aws_secret_access_key="xxx",
            aws_access_key_id="xxx",
            endpoint_url='http://localhost:7878') as client:

        with pytest.raises(EndpointConnectionError):
            await client.get_object(Bucket='foo', Key='bar')

        assert 'sleeping for' in caplog.text
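
test_retry receives session as a pytest fixture rather than building one inline; a minimal sketch of such a fixture (the fixture in the real test suite may be defined differently):

import pytest

from aiobotocore.session import AioSession


@pytest.fixture
def session() -> AioSession:
    # Bare session; credentials and endpoint are supplied per-test.
    return AioSession()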
Example #6
async def test_session_credentials():
    with mock.patch('aiobotocore.credentials.AioCredential'
                    'Resolver.load_credentials') as mock_obj:
        mock_obj.return_value = 'somecreds'

        session = AioSession()
        creds = await session.get_credentials()
        assert creds == 'somecreds'
Example #7
async def _validate_aws_credentials(hass, credential):
    """Validate AWS credential config."""
    aws_config = credential.copy()
    del aws_config[CONF_NAME]
    del aws_config[CONF_VALIDATE]

    if (profile := aws_config.get(CONF_PROFILE_NAME)) is not None:
        session = AioSession(profile=profile)
        del aws_config[CONF_PROFILE_NAME]
        if CONF_ACCESS_KEY_ID in aws_config:
            del aws_config[CONF_ACCESS_KEY_ID]
        if CONF_SECRET_ACCESS_KEY in aws_config:
            del aws_config[CONF_SECRET_ACCESS_KEY]
    else:
        session = AioSession()

    if credential[CONF_VALIDATE]:
        async with session.create_client("iam", **aws_config) as client:
            await client.get_user()

    return session
Example #8
    def __init__(
        self,
        stream_name,
        session=None,
        endpoint_url=None,
        region_name=None,
        retry_limit=None,
        expo_backoff=None,
        expo_backoff_limit=120,
        skip_describe_stream=False,
        create_stream=False,
        create_stream_shards=1,
    ):

        self.stream_name = stream_name

        if session:
            assert isinstance(session, AioSession)
            self.session = session
        else:
            self.session = AioSession()

        self.endpoint_url = endpoint_url
        self.region_name = region_name

        self.client = None
        self.shards = None

        self.stream_status = None

        self.retry_limit = retry_limit
        self.expo_backoff = expo_backoff
        self.expo_backoff_limit = expo_backoff_limit

        # connection states of kinesis client
        self.RECONNECT = "RECONNECT"
        self.ACTIVE = "ACTIVE"
        self.INITIALIZE = "INITIALIZE"

        self.stream_status = self.INITIALIZE
        # Short Lived producer might want to skip describing stream on startup
        self.skip_describe_stream = skip_describe_stream
        self._conn_lock = asyncio.Lock()
        self._reconnect_timeout = time.monotonic()
        self.create_stream = create_stream
        self.create_stream_shards = create_stream_shards
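
This __init__ appears to belong to an async Kinesis producer/consumer base class; the class name in the sketch below is a hypothetical stand-in, shown only to illustrate how the constructor arguments fit together:

# Hypothetical usage; "KinesisBase" stands in for whatever class defines
# the __init__ above.
client = KinesisBase(
    stream_name="my-stream",
    region_name="us-east-1",
    retry_limit=5,             # stop retrying after 5 attempts
    expo_backoff=2,            # exponential backoff base, capped by expo_backoff_limit (120s)
    create_stream=True,        # create the stream on startup if it does not exist
    create_stream_shards=1,    # with a single shard
)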
Example #9
async def get_available_regions(hass, service):
    """Get available regions for a service."""
    session = AioSession()
    return await session.get_available_regions(service)