Exemplo n.º 1
0
async def aio_sts_assume_role(src_role_arn, dest_role_arn, dest_external_id=None):
    """Assume ``src_role_arn``, then use its credentials to assume ``dest_role_arn``.

    Args:
        src_role_arn: ARN of the intermediate role assumed first.
        dest_role_arn: ARN of the final role assumed with the intermediate credentials.
        dest_external_id: optional ExternalId forwarded on the second assume-role call.

    Returns:
        aioboto3.Session backed by the destination role's temporary credentials.
    """
    # Random session name so concurrent assumptions are distinguishable.
    session_name = ''.join(random.choice('0123456789ABCDEF') for i in range(16))
    # aioboto3 >= 8 removed the module-level client() helper; go through a
    # Session, consistent with every other call site in this file.
    async with aioboto3.Session().client('sts') as sts:
        src_role = await sts.assume_role(
            RoleArn=src_role_arn, RoleSessionName=session_name
        )
        async with aioboto3.Session(
            aws_access_key_id=src_role['Credentials']['AccessKeyId'],
            aws_secret_access_key=src_role['Credentials']['SecretAccessKey'],
            aws_session_token=src_role['Credentials']['SessionToken'],
        ).client('sts') as sts_client:
            # Only pass ExternalId when one was supplied.
            assume_kwargs = {'RoleArn': dest_role_arn,
                             'RoleSessionName': session_name}
            if dest_external_id:
                assume_kwargs['ExternalId'] = dest_external_id
            sts_role = await sts_client.assume_role(**assume_kwargs)

            return aioboto3.Session(
                aws_access_key_id=sts_role['Credentials']['AccessKeyId'],
                aws_secret_access_key=sts_role['Credentials']['SecretAccessKey'],
                aws_session_token=sts_role['Credentials']['SessionToken'],
            )
Exemplo n.º 2
0
async def test_getting_resource_cm(event_loop):
    """A resource obtained via the session context manager wraps an async client."""
    dynamodb_session = aioboto3.Session()

    resource_cm = dynamodb_session.resource('dynamodb',
                                            region_name='eu-central-1')
    async with resource_cm as dynamodb:
        assert isinstance(dynamodb.meta.client, AioBaseClient)
async def _fetch_data_via_aioboto(
    r_clone_settings: RCloneSettings,
    dir_tag: str,
    temp_dir: Path,
    node_id: NodeID,
    project_id: ProjectID,
) -> Path:
    """Download every S3 object under ``{project_id}/{node_id}/{r-clone dir}/``
    into a fresh local directory and return that directory.

    Args:
        r_clone_settings: S3 credentials, endpoint and bucket name.
        dir_tag: label embedded in the local download directory's name.
        temp_dir: parent directory for the download target.
        node_id / project_id: used to build the S3 key prefix to filter on.

    Returns:
        Path of the directory the objects were saved into.
    """
    save_to = temp_dir / f"aioboto_{dir_tag}_{uuid4()}"
    save_to.mkdir(parents=True, exist_ok=True)

    session = aioboto3.Session(
        aws_access_key_id=r_clone_settings.S3_ACCESS_KEY,
        aws_secret_access_key=r_clone_settings.S3_SECRET_KEY,
    )
    async with session.resource("s3",
                                endpoint_url=r_clone_settings.endpoint) as s3:
        bucket = await s3.Bucket(r_clone_settings.S3_BUCKET_NAME)
        key_path = f"{project_id}/{node_id}/{DY_SERVICES_R_CLONE_DIR_NAME}/"
        async for s3_object in bucket.objects.all():
            if s3_object.key.startswith(key_path):
                file_object = await s3_object.get()
                file_path = save_to / s3_object.key.replace(key_path, "")
                # Keys may contain sub-directories; create them first,
                # otherwise write_bytes raises FileNotFoundError.
                file_path.parent.mkdir(parents=True, exist_ok=True)
                print(f"Saving file to {file_path}")
                file_content = await file_object["Body"].read()
                file_path.write_bytes(file_content)

    return save_to
Exemplo n.º 4
0
    async def establish_client_resource(self, service_dict: Dict[str, Any], item: str, region: str='', reestablish: bool=False):
        """Establish the AioSession client or resource, then re-establish every
        self.sleep_interval seconds.

        Args:
            service_dict (Dict[str, Any]): dict containing info about the service requested
            item (str): either 'client' or 'resource' depending on the aws service and python package
            region (str, optional): AWS region. Defaults to ''.
            reestablish (bool, optional): should async context manager be reinstantiated. Defaults to False.
        """

        # SSL verification is disabled for every client/resource created here.
        kwargs = {'service_name': self.service, 'verify': False}
        if region:
            kwargs['region_name'] = region

        if reestablish:
            # Mark busy so callers don't use the object while it is swapped,
            # then close the old async context manager before opening a new one.
            service_dict['busy'] = True
            await service_dict['obj'].__aexit__(None, None, None)

        if self.module == 'aiobotocore':
            service_dict['session'] = aiobotocore.session.get_session()
            # __aenter__ is invoked manually; the __aexit__ above is its
            # matching close on the next re-establish.
            service_dict['obj'] = await service_dict['session'].create_client(**kwargs).__aenter__()
        elif self.module == 'aioboto3':
            service_dict['session'] = aioboto3.Session()
            func = service_dict['session'].client if item == 'client' else service_dict['session'].resource
            service_dict['obj'] = await func(**kwargs).__aenter__()

        service_dict['busy'] = False
Exemplo n.º 5
0
    def __init__(
        self,
        bucket: str,
        s3_path: str,
        region_name=None,
        endpoint_url=None,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        aws_session_token=None,
        config=None,
    ):
        """Store the S3 location and credentials and build an aioboto3 session.

        Raises:
            ValueError: if ``bucket``/``s3_path`` are not strings, or
                ``s3_path`` is empty after stripping trailing slashes.
        """
        if not isinstance(bucket, str):
            raise ValueError("bucket_name must be str")
        if not isinstance(s3_path, str):
            raise ValueError("s3_path must be str")

        self._bucket = bucket
        # str.rstrip returns a new string; the original discarded the result,
        # leaving trailing slashes in place and making the emptiness check
        # below ineffective for paths like "/".
        self._s3_path = s3_path.rstrip("/")
        if len(self._s3_path) == 0:
            raise ValueError(f"s3 path {self._s3_path} invalid")
        self._session = aioboto3.Session(
            region_name=region_name,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            aws_session_token=aws_session_token,
        )
        self._region_name = region_name
        self._endpoint_url = endpoint_url
        self._aws_access_key_id = aws_access_key_id
        self._aws_secret_access_key = aws_secret_access_key
        self._aws_session_token = aws_session_token
        self._config = config
Exemplo n.º 6
0
async def main():
    """Audit every account in the hub OU: Security Hub status and AWS Config
    rule/aggregator presence, then print one line per account/criterion."""
    session = aioboto3.Session()
    ou = config.hub.ou
    role = config.hub.iam_role

    results = {}

    async for context in run.account_iterator(session, ou, role):
        acct_sess = context['session']

        # describe_hub raises InvalidAccessException when the hub is disabled.
        async with acct_sess.client('securityhub') as sechub:
            try:
                await sechub.describe_hub()
            except sechub.exceptions.InvalidAccessException:
                log(context, results, 'Security Hub enabled', False)
            else:
                log(context, results, 'Security Hub enabled', True)

        async with acct_sess.client('config') as confserv:
            rules = await confserv.describe_config_rules()
            log(context, results, 'Has config rules',
                len(rules['ConfigRules']) > 0)

            aggregators = await confserv.describe_configuration_aggregators()
            log(context, results, 'Has config aggregators',
                len(aggregators['ConfigurationAggregators']) > 0)

    for account, info in sorted(results.items(), key=lambda item: item[0]):
        for criteria, status in info.items():
            print(f'{account}: {criteria}: {status}')
Exemplo n.º 7
0
async def test_kms_crypto_context_success(event_loop, moto_patch, region,
                                          bucket_name, kms_key_alias):
    """KMSCryptoContext resolves a key alias and yields a KMS-decryptable AES key."""
    session = aioboto3.Session()

    async with session.client('kms', region_name=region) as kms_client:
        # Create a CMK and point the alias under test at it.
        resp = await kms_client.create_key(KeyUsage='ENCRYPT_DECRYPT',
                                           Origin='AWS_KMS')
        key_id = resp['KeyMetadata']['KeyId']

        await kms_client.create_alias(AliasName=kms_key_alias,
                                      TargetKeyId=key_id)

        # Create context
        kms_context = cse.KMSCryptoContext(
            kms_key_alias, kms_client_args={'region_name': region})
        assert kms_context.kms_key == kms_key_alias

        # setup() is expected to create the context's own KMS client.
        await kms_context.setup()
        assert kms_context._kms_client is not None

        aes_key, material_description, encrypted_aes_key = await kms_context.get_encryption_aes_key(
        )

        # Material description should denote what key is used
        assert material_description['kms_cmk_id'] == kms_key_alias

        # Round-trip: KMS must decrypt the wrapped key back to the AES key.
        resp = await kms_client.decrypt(CiphertextBlob=encrypted_aes_key,
                                        EncryptionContext=material_description)
        assert aes_key == resp['Plaintext']

        await kms_context.close()
Exemplo n.º 8
0
 async def GetObjects(self, stream: Stream):
     """Stream one ObjectReply (key + ETag) for every object in the bucket
     named by the incoming request message.
     """
     request = await stream.recv_message()
     session = aioboto3.Session()
     async with session.resource('s3') as s3resource:
         bucket = await s3resource.Bucket(request.bucket)
         async for obj in bucket.objects.all():
             # e_tag is an awaitable attribute on aioboto3 resource objects.
             etag = await obj.e_tag
             await stream.send_message(
                 AwsAPI_pb2.ObjectReply(name=obj.key, etag=etag))
Exemplo n.º 9
0
def test_chalice_async_http(moto_patch, region, bucket_name):
    """GET /hello/<name> echoes the name back in the JSON body."""
    app.aioboto3 = aioboto3.Session()

    with Client(app) as chalice_client:
        resp = chalice_client.http.get('/hello/myname')
        assert resp.status_code == 200
        assert resp.json_body['hello'] == 'myname'
Exemplo n.º 10
0
async def test_client_retries_disabled():
    """Verify that created S3 clients have retries disabled in config."""

    async with s3_client("test-profile-123"):
        # The mocked Session should have been asked for a client with a
        # config object whose retries are capped at a single attempt.
        first_call = aioboto3.Session().client.mock_calls[0]
        assert first_call.kwargs["config"].retries == {"max_attempts": 1}
Exemplo n.º 11
0
 async def GetObjects(self, request, context):
     """Yield an ObjectReply (key + ETag) for up to 50 objects in
     request.bucket, using the region named in request.region.
     """
     session = aioboto3.Session()
     async with session.resource('s3',
                                 region_name=request.region) as s3resource:
         bucket = await s3resource.Bucket(request.bucket)
         # limit(50) caps the listing; e_tag is an awaitable attribute.
         async for obj in bucket.objects.limit(50):
             etag = await obj.e_tag
             yield AwsAPI_pb2.ObjectReply(name=obj.key, etag=etag)
         print('request done')
Exemplo n.º 12
0
async def test_asymmetric_cse_encrypt_decrypt_aes_cbc(event_loop, moto_patch,
                                                      region, bucket_name,
                                                      s3_key_name):
    """Round-trip DATA through S3CSE with an RSA AsymmetricCryptoContext and
    check the stored object is encrypted with the expected x-amz metadata.
    """
    session = aioboto3.Session()

    async with session.client('s3', region_name=region) as s3_client:
        await s3_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': region})

        # Hard-coded 1024-bit RSA keypair in DER form (test fixture only).
        private_key = b'0\x82\x02w\x02\x01\x000\r\x06\t*\x86H\x86\xf7\r\x01\x01\x01\x05\x00\x04\x82\x02a0\x82\x02]\x02\x01\x00\x02\x81\x81\x00\xbb x \x88x\xa6\x1b\x94\r\x93\x82\x9bU4j\x90//4\x97\xfd\x0c\xdf\xd3\x10\xab}\x99\x19\xe4\xfe\xf1=\x8aM\xca\x06\xa6\xf3\xa5\xce8\x19Q\xcc\x12\x1a\xc2\xc4\xd9w\xeex\xf6\xbc\x1f\xb2u\xb3Z\x0e!fsLJ>\x7fi\xdcc\xb9:\xee2\xf8h5h\x1f\x96\xab\xa4\xfc\x02\x12=D\xde\xde}i~\xe8\xe1y\x16\xc0\xe1\xeb\xca\x16\xbde@+\x00\x9e\xbf\x12\xe7\x0c\xa7#\x88\x80\xa04\xe2M\xc2\x1f\xc2\x8a\xfc\x08M\x02\x03\x01\x00\x01\x02\x81\x81\x00\x92\x1d\x0fO\xaf\xe0-+\xd9\x96$9VZ\xd8\x9b\xe0\xcb\xc7\x1bU\x16UH,\x01\x976r&\xa3\x05b\x8f?\xff\xef\xa0\xf4\x19\xc9\xbc\xd5W\x07\xe4\xc5\xba9\x9d\x05\x85\xbd"\x9c\xdeV\r\xbe\x13\xf6\\\x94<\x99\xa0/\xa8\x8f\xd8\x14\xa3\x88\x88\x1b\xdf\xee\xbb\xaf\xcd\xc7k{\xb2\x9e\x90B\x05)\x7f\xedo\x95\xb9[\xf4\x8fQ\xc0\xee\xd0\xc9\xb9\x1e\xbfP\xe7\x8c\x87\xab\x87\n\xfd\xcb\x04\xe5\x9bEv\x0f)8\x94R;\xf8B\xc1\x02A\x00\xe8D\x96\xdd\x1f\xd4\xd1\xbc\xd2p\xd0\x11\x99pkp\xa9\xb5\xdd:\xa7\xdfn\xd6%\x82\xaeK\xb20\xd2\x03\xf2\r\x06\x1as\xc3_\x95\xf3\xab`>\xaa\x1c\xc1\x19]\xa3\xf2]Q+\xf9\xebi\x9feQ\xd6\xf4\xe3\x11\x02A\x00\xce? \xe6=\xad\x14\xf5\x96PY\xf8\xc1\xaa\xb8y\x9f{\xd8\xf4\x94\x8b}\x9c\\\xec\x10\x7f\xfbD"\xbbd\xa3g\x85\xbd\x97\x18\xd7\xde\x99\xb7\x1dw\xbfwb\xbb\xaa\x01\xaf~\x8aW K\xed;{\xf6t\x99}\x02A\x00\x9b\x13\xf8\x9a\x89?B\x0eM\x7fo\x1c\xe1\x12\xd3Yt\xa6m\xa0U\'tL\\\xdd$\xdc{\x8b\xe7\x1d%F\x96\xd5\xa0\x87H\xd1\xc8\xd0\x9a\xc1\x1c9x\xa0$\nk\xae\xec\x9cm\x10F\x04[\xd4\xc9\xad\xd5\xd1\x02@I\xf9V\x81~I\xa0$\xdd\xbf\x00&:\xc0R\xde<\x97\x9d\x1fLP#\xc3{\x88\xa7\xfa_R\xf6\xea#\x94\x80B\xf5\xd7E\xef\xd7Ef\xeaH\xd3\x01\xad\x06\x06Z\x08i\xe8\x90\x8bb\xf09\xcf\xa2{\xfb\xb9\x02@D\xbaAV\x03\x94,\xc7\xf3/\xbd\xf3I\xc2\x0fAI\xcd\x9e\xa1\xce\xdf\xa7\x19S\x86\xf3\xc2\x854]\xac\xab\xc8\x8f@\x03_-?{>\x1f\xcc\x1a@\xdb\n\xf0v5\xe4tL\xf3\x16kD\xb5\x83L(3\xd2'
        public_key = b'0\x81\x9f0\r\x06\t*\x86H\x86\xf7\r\x01\x01\x01\x05\x00\x03\x81\x8d\x000\x81\x89\x02\x81\x81\x00\xbb x \x88x\xa6\x1b\x94\r\x93\x82\x9bU4j\x90//4\x97\xfd\x0c\xdf\xd3\x10\xab}\x99\x19\xe4\xfe\xf1=\x8aM\xca\x06\xa6\xf3\xa5\xce8\x19Q\xcc\x12\x1a\xc2\xc4\xd9w\xeex\xf6\xbc\x1f\xb2u\xb3Z\x0e!fsLJ>\x7fi\xdcc\xb9:\xee2\xf8h5h\x1f\x96\xab\xa4\xfc\x02\x12=D\xde\xde}i~\xe8\xe1y\x16\xc0\xe1\xeb\xca\x16\xbde@+\x00\x9e\xbf\x12\xe7\x0c\xa7#\x88\x80\xa04\xe2M\xc2\x1f\xc2\x8a\xfc\x08M\x02\x03\x01\x00\x01'

        private_key = cse.AsymmetricCryptoContext.from_der_private_key(
            private_key)
        public_key = cse.AsymmetricCryptoContext.from_der_public_key(
            public_key)

        symmetric_crypto_context = cse.AsymmetricCryptoContext(
            public_key=public_key, private_key=private_key)
        s3_cse = cse.S3CSE(symmetric_crypto_context,
                           s3_client_args={'region_name': region})

        async with s3_cse:
            # Upload file
            await s3_cse.put_object(Body=DATA,
                                    Bucket=bucket_name,
                                    Key=s3_key_name)

            # Fetch with a plain client to inspect the raw encrypted object.
            encrypted_resp = await s3_client.get_object(Bucket=bucket_name,
                                                        Key=s3_key_name)
            encrypted_resp['Body'] = await encrypted_resp['Body'].read()

            # Check it doesnt start with lorem ipsum
            assert not encrypted_resp['Body'].startswith(DATA[:10])

            # Check metadata for KMS encryption
            assert len(
                base64.b64decode(encrypted_resp['Metadata']
                                 ['x-amz-key'])) == 128  # 1024bit key
            assert encrypted_resp['Metadata'][
                'x-amz-unencrypted-content-length'] == str(len(DATA))
            assert encrypted_resp['Metadata']['x-amz-matdesc'] == '{}'

            # Asymmetric (v1-style) encryption must not emit the v2/KMS keys.
            assert 'x-amz-iv' in encrypted_resp['Metadata']
            assert 'x-amz-cek-alg' not in encrypted_resp['Metadata']
            assert 'x-amz-key-v2' not in encrypted_resp['Metadata']
            assert 'x-amz-wrap-alg' not in encrypted_resp['Metadata']
            assert 'x-amz-tag-len' not in encrypted_resp['Metadata']

            # This is a quick test to ensure decryption works, and resp['Body'] looks like an aiohttp obj
            unencrypted_resp = await s3_cse.get_object(Bucket=bucket_name,
                                                       Key=s3_key_name)
            unencrypted_resp['Body'] = await unencrypted_resp['Body'].read()

            assert unencrypted_resp['Body'] == DATA
Exemplo n.º 13
0
def s3_client(profile: str):
    """Return an (unentered) async context manager yielding an S3 client for
    the given profile.

    Note: Session creation will fail if provided profile cannot be found.
    """
    # Certain aspects of the boto client can be tweaked by environment variables
    # for development.
    from botocore.config import Config

    session = aioboto3.Session(profile_name=profile)

    return session.client(
        "s3",
        endpoint_url=os.environ.get("EXODUS_GW_S3_ENDPOINT_URL") or None,
        # Retries must be disabled: request bodies are streamed straight to
        # S3 without buffering, so a failed attempt cannot be replayed.
        # Keeps this factory consistent with the class-based S3 client and
        # with test_client_retries_disabled.
        config=Config(retries={"max_attempts": 1}),
    )
Exemplo n.º 14
0
 def __init__(self, host: str, space: str, access_key: str,
              secret_key: str):
     """Store DigitalOcean Spaces connection details for the given space.

     NOTE(review): in aioboto3 >= 8, Session.client(...) returns an async
     context manager rather than a ready client - presumably entered
     elsewhere before use; verify against callers.
     """
     self.host = f"https://i.{host}/"
     self.space = space
     self.session = aioboto3.Session()
     self.client = self.session.client(
         "s3",
         region_name="sfo2",
         endpoint_url="https://sfo2.digitaloceanspaces.com",
         aws_access_key_id=access_key,
         aws_secret_access_key=secret_key,
     )
Exemplo n.º 15
0
async def test_kms_cse_encrypt_decrypt_aes_gcm(event_loop, moto_patch, region,
                                               bucket_name, s3_key_name):
    """Round-trip DATA through S3CSE with a mocked KMS context using
    authenticated (AES-GCM) encryption, and verify the v2 x-amz metadata.
    """
    session = aioboto3.Session()

    async with session.client('s3', region_name=region) as s3_client:
        await s3_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': region})

        # Fixed AES key / wrapped-key bytes so the mock KMS is deterministic.
        aes_key = b'O\x8b\xdc\x92\x87k\x9aJ{m\x82\xb3\x96\xf7\x93]\xa1\xb2Cl\x86<5\xbe\x13\xaf\xa8\x94\xa2O3\xef'
        encrypted_aes_key = b'encrypted_aes_key'
        material_descrition = {'kms_cmk_id': 'alias/cmk_id'}

        # authenticated_encryption=True selects the AES-GCM path under test.
        kms_crypto_context = cse.MockKMSCryptoContext(
            aes_key,
            material_descrition,
            encrypted_aes_key,
            authenticated_encryption=True)
        s3_cse = cse.S3CSE(kms_crypto_context,
                           s3_client_args={'region_name': region})

        async with s3_cse:
            # Upload file
            await s3_cse.put_object(Body=DATA,
                                    Bucket=bucket_name,
                                    Key=s3_key_name)

            # Fetch with a plain client to inspect the raw encrypted object.
            encrypted_resp = await s3_client.get_object(Bucket=bucket_name,
                                                        Key=s3_key_name)
            encrypted_resp['Body'] = await encrypted_resp['Body'].read()

            # Check it doesnt start with lorem ipsum
            assert not encrypted_resp['Body'].startswith(DATA[:10])

            # Check metadata for KMS encryption
            assert encrypted_resp['Metadata'][
                'x-amz-cek-alg'] == 'AES/GCM/NoPadding'
            assert encrypted_resp['Metadata']['x-amz-tag-len'] == '128'
            assert encrypted_resp['Metadata']['x-amz-wrap-alg'] == 'kms'
            assert base64.b64decode(encrypted_resp['Metadata']
                                    ['x-amz-key-v2']) == encrypted_aes_key
            assert encrypted_resp['Metadata'][
                'x-amz-unencrypted-content-length'] == str(len(DATA))
            assert encrypted_resp['Metadata']['x-amz-matdesc'] == json.dumps(
                material_descrition)
            assert 'x-amz-iv' in encrypted_resp['Metadata']

            # This is a quick test to ensure decryption works, and resp['Body'] looks like an aiohttp obj
            unencrypted_resp = await s3_cse.get_object(Bucket=bucket_name,
                                                       Key=s3_key_name)
            unencrypted_resp['Body'] = await unencrypted_resp['Body'].read()

            assert unencrypted_resp['Body'] == DATA
Exemplo n.º 16
0
    async def GetObjects(self, stream):
        """Count every object in the requested bucket and reply with the total."""
        request = await stream.recv_message()
        session = aioboto3.Session()
        count = 0
        async with session.resource('s3') as s3:
            bucket = await s3.Bucket(request.bucket)
            async for s3_obj in bucket.objects.all():
                print(s3_obj.key, await s3_obj.e_tag)
                count += 1

        print(count)
        await stream.send_message(AwsAPI_pb2.ObjectReply(count=count))
Exemplo n.º 17
0
    def get_client(self):
        """Return a cognito-idp client, or whatever the configured callback yields."""
        if self.client_callback:
            return self.client_callback()

        client_kwargs = {}
        # Only forward explicit credentials when both halves are present.
        if self.access_key and self.secret_key:
            client_kwargs.update(
                aws_access_key_id=self.access_key,
                aws_secret_access_key=self.secret_key,
            )
        if self.user_pool_region:
            client_kwargs['region_name'] = self.user_pool_region

        self.session = aioboto3.Session()
        return self.session.client('cognito-idp', **client_kwargs)
Exemplo n.º 18
0
    def __init__(self, profile: str):
        """Prepare a client for the given profile. This object must be used
        via 'async with' in order to obtain access to the client.

        Note: Session creation will fail if provided profile cannot be found.
        """

        # Endpoint override is for development; falls back to AWS default.
        endpoint = os.environ.get("EXODUS_GW_DYNAMODB_ENDPOINT_URL") or None
        self._client_context = aioboto3.Session(profile_name=profile).client(
            "dynamodb",
            endpoint_url=endpoint,
        )
Exemplo n.º 19
0
async def main():
    """Delete the configured stack from every account in the configured OU."""
    session = aioboto3.Session()

    security_account = config.hub.security_account_name

    # # Config rules
    async for context in run.account_iterator(session, config.config.ou,
                                              config.config.iam_role):
        stack_name = config.config.stack_name
        account_name = context['account']['Name']
        print(f'Deleting {stack_name} from {account_name}')
        await delete(context['session'], config.config)
Exemplo n.º 20
0
    async def GetObjects(self, request, context):
        """Read the real request off the stream, count the bucket's objects,
        and write the total back."""
        n_objects = 0
        session = aioboto3.Session()
        print(request, context)
        # The actual request message comes from the stream, shadowing the arg.
        request = await context.read()
        async with session.resource('s3') as s3:
            bucket = await s3.Bucket(request.bucket)
            async for s3_obj in bucket.objects.all():
                print(s3_obj.key, await s3_obj.e_tag)
                n_objects += 1

        print(n_objects)
        await context.write(AwsAPI_pb2.ObjectReply(count=n_objects))
Exemplo n.º 21
0
async def test_symmetric_cse_encrypt_decrypt_aes_cbc(event_loop, moto_patch,
                                                     region, bucket_name,
                                                     s3_key_name):
    """Round-trip DATA through S3CSE with a SymmetricCryptoContext and check
    the stored object is encrypted with the expected v1 x-amz metadata.
    """
    session = aioboto3.Session()

    async with session.client('s3', region_name=region) as s3_client:
        await s3_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': region})

        # Fixed 256-bit AES key (test fixture only).
        aes_key = b'O\x8b\xdc\x92\x87k\x9aJ{m\x82\xb3\x96\xf7\x93]\xa1\xb2Cl\x86<5\xbe\x13\xaf\xa8\x94\xa2O3\xef'

        symmetric_crypto_context = cse.SymmetricCryptoContext(aes_key)
        s3_cse = cse.S3CSE(symmetric_crypto_context,
                           s3_client_args={'region_name': region})

        async with s3_cse:
            # Upload file
            await s3_cse.put_object(Body=DATA,
                                    Bucket=bucket_name,
                                    Key=s3_key_name)

            # Fetch with a plain client to inspect the raw encrypted object.
            encrypted_resp = await s3_client.get_object(Bucket=bucket_name,
                                                        Key=s3_key_name)
            encrypted_resp['Body'] = await encrypted_resp['Body'].read()

            # Check it doesnt start with lorem ipsum
            assert not encrypted_resp['Body'].startswith(DATA[:10])

            # Check metadata for KMS encryption
            assert len(
                base64.b64decode(
                    encrypted_resp['Metadata']['x-amz-key'])) == 48
            assert encrypted_resp['Metadata'][
                'x-amz-unencrypted-content-length'] == str(len(DATA))
            assert encrypted_resp['Metadata']['x-amz-matdesc'] == '{}'

            # Symmetric (v1-style) encryption must not emit the v2/KMS keys.
            assert 'x-amz-iv' in encrypted_resp['Metadata']
            assert 'x-amz-cek-alg' not in encrypted_resp['Metadata']
            assert 'x-amz-key-v2' not in encrypted_resp['Metadata']
            assert 'x-amz-wrap-alg' not in encrypted_resp['Metadata']
            assert 'x-amz-tag-len' not in encrypted_resp['Metadata']

            # This is a quick test to ensure decryption works, and resp['Body'] looks like an aiohttp obj
            unencrypted_resp = await s3_cse.get_object(Bucket=bucket_name,
                                                       Key=s3_key_name)
            unencrypted_resp['Body'] = await unencrypted_resp['Body'].read()

            assert unencrypted_resp['Body'] == DATA
Exemplo n.º 22
0
def test_chalice_async_http_s3_client(event_loop, moto_patch, region,
                                      bucket_name):
    """The /list_buckets route reports the same buckets the sync client sees."""
    app.aioboto3 = aioboto3.Session()

    sync_s3 = boto3.client('s3', region_name=region)
    sync_s3.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={'LocationConstraint': region})
    expected = [b['Name'] for b in sync_s3.list_buckets()['Buckets']]

    with Client(app) as chalice_client:
        resp = chalice_client.http.get('/list_buckets')
        assert resp.status_code == 200
        assert resp.json_body['buckets'] == expected
Exemplo n.º 23
0
    def __init__(self, profile: str):
        """Prepare a client for the given profile. This object must be used
        via 'async with' in order to obtain access to the client.

        Note: Session creation will fail if provided profile cannot be found.
        """

        session = aioboto3.Session(profile_name=profile)

        # Stored as an (unentered) async context manager; the endpoint
        # override exists for development/testing.
        self._client_context = session.client(
            "s3",
            endpoint_url=os.environ.get("EXODUS_GW_S3_ENDPOINT_URL") or None,
            # We don't allow any retries - it's not possible since we're streaming
            # request bodies directly to S3, we don't buffer it anywhere, so we
            # can't send it more than once.
            config=Config(retries={"max_attempts": 1}),
        )
Exemplo n.º 24
0
 def __init__(
     self,
     bucket: str,
     aws_access_key_id: str,
     aws_secret_access_key: str,
     region_name: str = None,
     profile_name: str = None,
     endpoint_url: str = None,
 ) -> None:
     """Hold the target bucket/endpoint and build an aioboto3 session.

     Args:
         bucket: name of the S3 bucket operated on.
         aws_access_key_id: AWS access key id.
         aws_secret_access_key: AWS secret access key.
         region_name: optional AWS region.
         profile_name: optional shared-credentials profile name.
         endpoint_url: optional custom endpoint (e.g. S3-compatible stores).
     """
     self.bucket = bucket
     self.endpoint_url = endpoint_url
     self.session = aioboto3.Session(
         aws_access_key_id=aws_access_key_id,
         aws_secret_access_key=aws_secret_access_key,
         region_name=region_name,
         profile_name=profile_name,
     )
async def upload():
    """Upload the local fox image to S3 under 'fox/fox-wild.jpg'.

    Returns 'WORK DONE' on success, '' on any failure (the error is printed).
    """
    session = aioboto3.Session()
    async with session.client(
        service_name="s3",
        aws_access_key_id=access_key,
        aws_secret_access_key=accrss_secret,  # sic: module-level name
        region_name=region_name
    ) as s3:
        try:
            source_path = os.path.join(image_upload_folder, 'fox23456.jpg')
            print(source_path)
            async with aiofiles.open(source_path, mode='rb') as handle:
                await s3.upload_fileobj(handle, bucket_name, 'fox/fox-wild.jpg')
        except Exception as exc:
            print(exc)
            return ""

    return 'WORK DONE'
Exemplo n.º 26
0
async def main():
    """List EC2 instances tagged 'Pangaea Node' in every configured region
    and pretty-print the per-region results."""
    async with contextlib.AsyncExitStack() as stack:
        aws_config = load_aws_config()
        region_names = [region['ext-name'] for region in aws_config['regions']]
        sess = aioboto3.Session()
        # One entered EC2 client per region, all closed by the exit stack.
        ec2_clients = {
            region_name: await stack.enter_async_context(
                sess.client('ec2', region_name=region_name), )
            for region_name in region_names
        }
        # Fan out one query per region and await them all. The original
        # passed an un-awaited, un-unpacked generator to asyncio.gather and
        # always queried the us-west-2 client regardless of region.
        results = await asyncio.gather(*(
            ec2_instances(
                ec2_clients[region_name],
                Filters=[dict(Name='tag:Name', Values=['Pangaea Node'])],
            )
            for region_name in region_names
        ))
        all_instances = dict(zip(region_names, results))
        from pprint import pprint
        pprint(all_instances)
Exemplo n.º 27
0
async def write_json_to_disk(seed, spoiler_type='spoiler'):
    """Gzip the seed's spoiler JSON, upload it to S3, and return its public URL.

    Args:
        seed: seed object providing hash/code and spoiler formatting.
        spoiler_type: 'progression' for a progression-only spoiler, anything
            else for the full formatted spoiler.

    Returns:
        Public URL of the uploaded spoiler log.
    """
    filename = f"{spoiler_type}__{seed.hash}__{'-'.join(seed.code).replace(' ', '')}__{''.join(random.choices(string.ascii_letters + string.digits, k=4))}.txt"

    if spoiler_type == 'progression':
        sorteddict = create_progression_spoiler(seed)
    else:
        sorteddict = seed.get_formatted_spoiler(translate_dungeon_items=True)

    payload = gzip.compress(json.dumps(sorteddict, indent=4).encode('utf-8'))

    session = aioboto3.Session()
    async with session.client('s3') as s3:
        await s3.put_object(Bucket=os.environ.get('AWS_SPOILER_BUCKET_NAME'),
                            Key=filename,
                            Body=payload,
                            ACL='public-read',
                            ContentEncoding='gzip',
                            ContentDisposition='attachment')

    # Point at the object just uploaded; the original returned a scrape
    # placeholder ("(unknown)") instead of the filename.
    return f"{os.environ.get('SpoilerLogUrlBase')}/{filename}"
Exemplo n.º 28
0
 def __init__(self):
     """Read S3 connection settings from the environment; fail fast if any
     required variable is missing.
     """
     super().__init__()
     self._fix_loggers_level()
     # One shared aioboto3 session for all uploads.
     self.s3_session: aioboto3.Session = aioboto3.Session()
     self.s3_api_key: str = os.getenv(S3Uploader.ENV_S3_API_KEY, None)
     self.s3_api_secret_key: str = os.getenv(
         S3Uploader.ENV_S3_API_SECRET_KEY, None)
     self.s3_endpoint_url: str = os.getenv(S3Uploader.ENV_S3_ENDPOINT_URL,
                                           None)
     self.s3_bucket_name: str = os.getenv(S3Uploader.ENV_S3_BUCKET_NAME,
                                          None)
     self.s3_region_name: str = os.getenv(S3Uploader.ENV_S3_REGION_NAME,
                                          None)
     # Fail at construction with a descriptive error rather than on first use.
     if None in (self.s3_api_key, self.s3_api_secret_key,
                 self.s3_endpoint_url, self.s3_bucket_name,
                 self.s3_region_name):
         raise TypeError(
             "Some s3 environment variables are missing, please ensure that "
             "S3_API_KEY, S3_API_SECRET_KEY, S3_BUCKET_NAME, S3_REGION_NAME "
             "and S3_ENDPOINT_URL are defined.")
Exemplo n.º 29
0
    async def _download_keys(self, keys_to_download):
        """Fetch records for every key concurrently.

        Returns a list (one entry per key) of lists of record dicts.
        """
        # Mirror the synchronous client's region and config on the async one.
        session = aioboto3.Session()
        sync_meta = self._s3_client.meta
        async with session.client(
                's3',
                region_name=sync_meta.region_name,
                config=sync_meta.config) as async_s3_client:

            # One download coroutine per key, run concurrently.
            key_records = await asyncio.gather(*(
                self._get_records_from_key(async_s3_client, key)
                for key in keys_to_download
            ))

        return key_records
Exemplo n.º 30
0
    def __init__(
        self,
        table_name: str,
        namespace: str,
        key_attr_name: str = 'k',
        val_attr_name: str = 'v',
        create_if_not_exists: bool = False,
        context: ResourceCreatorContext = None,
        **kwargs,
    ):
        """DynamoDB-backed store.

        Args:
            table_name: DynamoDB table to use.
            namespace: logical namespace for keys within the table.
            key_attr_name: attribute name used for keys. Defaults to 'k'.
            val_attr_name: attribute name used for values. Defaults to 'v'.
            create_if_not_exists: whether the table should be created if missing.
            context: pre-built resource-creator context; if omitted, one is
                built from a fresh aioboto3 Session.
            **kwargs: forwarded to the superclass; only the subset valid for
                AWSSession.resource is passed to the resource factory.
        """
        super().__init__(**kwargs)
        self.table_name = table_name
        self.namespace = namespace
        self.key_attr_name = key_attr_name
        self.val_attr_name = val_attr_name
        self.create_if_not_exists = create_if_not_exists

        # Filter kwargs down to arguments AWSSession.resource accepts.
        resource_kwargs = get_valid_kwargs(AWSSession.resource, kwargs)
        self.context = context or aioboto3.Session().resource(
            'dynamodb', **resource_kwargs)
        # Table handle is initialized lazily.
        self._table = None