async def setup(s3_cfg):
    """Open an S3 client whose lifetime outlives this function.

    Returns ``(session, s3, s3_ctx)`` — the caller keeps ``s3_ctx`` so it can
    later close the client via ``await s3_ctx.__aexit__(...)``.
    """
    session = get_session()
    # NOTE(review): `region_name` is not defined in this scope — presumably a
    # module-level constant; confirm it exists at import time.
    s3_ctx = session.create_client("s3", region_name=region_name, config=s3_cfg)
    # Enter the client's async context manager manually; cleanup is the
    # caller's responsibility (the returned s3_ctx must be exited).
    s3 = await s3_ctx.__aenter__()
    return (session, s3, s3_ctx)
async def main():
    """Entry point: prune stale backup files from the object store.

    Reads connection settings via ``config`` and delegates to the sibling
    helpers ``fetch_file_list``, ``decide_files_to_delete`` and
    ``delete_files``.
    """
    # Open connection to object store.
    server = config('AWS_HOSTNAME')
    user_id = config('AWS_ACCESS_KEY')
    secret_key = config('AWS_SECRET_KEY')
    bucket = config('AWS_BUCKET')

    session = get_session()
    async with session.create_client('s3',
                                     endpoint_url=f'https://{server}',
                                     aws_secret_access_key=secret_key,
                                     aws_access_key_id=user_id) as client:
        # Get list of backup files and decide which are stale.
        files = await fetch_file_list(client, bucket)
        files_to_delete = decide_files_to_delete(files)

        # for debugging, list the files to delete
        for file in files_to_delete:
            print(file)

        if files_to_delete:
            await delete_files(client, bucket, files_to_delete)
        # NOTE: the former `try/finally: del client` was removed — it only
        # unbound the local name; the `async with` block already closes the
        # client deterministically.
async def go():
    """Create a randomly named DynamoDB table and block until it exists."""
    session = get_session()
    async with session.create_client('dynamodb', region_name='us-west-2') as client:
        # Random suffix keeps repeated runs from colliding.
        table_name = f'aiobotocore-{uuid.uuid4()}'

        print('Requesting table creation...')
        attribute_definitions = [
            {'AttributeName': 'testKey', 'AttributeType': 'S'},
        ]
        key_schema = [
            {'AttributeName': 'testKey', 'KeyType': 'HASH'},
        ]
        throughput = {'ReadCapacityUnits': 10, 'WriteCapacityUnits': 10}
        await client.create_table(
            TableName=table_name,
            AttributeDefinitions=attribute_definitions,
            KeySchema=key_schema,
            ProvisionedThroughput=throughput,
        )

        print("Waiting for table to be created...")
        waiter = client.get_waiter('table_exists')
        await waiter.wait(TableName=table_name)
        print(f"Table {table_name} created")
async def go():
    """Push numbered messages onto QUEUE_NAME until interrupted."""
    # Boto should get credentials from ~/.aws/credentials or the environment
    session = get_session()
    async with session.create_client('sqs', region_name='us-west-2') as client:
        try:
            response = await client.get_queue_url(QueueName=QUEUE_NAME)
        except botocore.exceptions.ClientError as err:
            code = err.response['Error']['Code']
            if code != 'AWS.SimpleQueueService.NonExistentQueue':
                raise
            print(f"Queue {QUEUE_NAME} does not exist")
            sys.exit(1)

        queue_url = response['QueueUrl']
        print('Putting messages on the queue')

        msg_no = 1
        while True:
            try:
                msg_body = f'Message #{msg_no}'
                await client.send_message(QueueUrl=queue_url,
                                          MessageBody=msg_body)
                msg_no += 1
                print(f'Pushed "{msg_body}" to queue')
                # Random pacing between sends.
                await asyncio.sleep(random.randint(1, 4))
            except KeyboardInterrupt:
                break

        print('Finished')
async def go():
    """Upload a small object to S3, read its ACL, then delete it."""
    bucket = 'dataintake'
    filename = 'dummy.bin'
    folder = 'aiobotocore'
    # Compose the object key from the folder prefix and the file name —
    # previously the key embedded a literal placeholder and `filename`
    # was never used.
    key = f'{folder}/{filename}'

    session = get_session()
    async with session.create_client(
            's3', region_name='us-west-2',
            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
            aws_access_key_id=AWS_ACCESS_KEY_ID) as client:
        # upload object to amazon s3
        data = b'\x01' * 1024
        resp = await client.put_object(Bucket=bucket, Key=key, Body=data)
        print(resp)

        # getting s3 object properties of file we just uploaded
        resp = await client.get_object_acl(Bucket=bucket, Key=key)
        print(resp)

        # delete object from s3
        resp = await client.delete_object(Bucket=bucket, Key=key)
        print(resp)
def _create_s3_client():
    """Build an (unentered) aiobotocore S3 client context manager."""
    session = get_session()
    core_config = BotoCoreConfig(region_name=AWS_REGION_NAME)
    return session.create_client(
        's3',
        region_name=AWS_REGION_NAME,
        aws_access_key_id=AWS_KEY,
        aws_secret_access_key=AWS_SECRET,
        config=core_config,
    )
async def s3client():
    """Yield an S3 client, honoring an optional S3_ENDPOINT_URL override."""
    endpoint_url = os.getenv('S3_ENDPOINT_URL')
    # Only pass endpoint_url when explicitly configured.
    kwargs = {'endpoint_url': endpoint_url} if endpoint_url else {}

    from aiobotocore.session import get_session
    session = get_session()
    async with session.create_client('s3', **kwargs) as client:
        yield client
def create_client_async(self):
    """Return an S3 client context manager built from this instance's
    endpoint and credential attributes (region hard-wired to "EU")."""
    session = get_session()
    return session.create_client(
        "s3",
        endpoint_url=self.endpoint_url,
        aws_access_key_id=self.access_key_id,
        aws_secret_access_key=self.secret_access_key,
        region_name="EU",
    )
async def __aenter__(self):
    """Enter: open an S3 client and bind it to ``self._client``.

    Returns ``self`` so the wrapper can be used in ``async with``.
    """
    session = get_session()
    # Manually enter the client's own async context manager. NOTE(review):
    # this class's __aexit__ (not visible here) must call
    # `await self._client.__aexit__(...)`, otherwise the connections leak —
    # confirm it does.
    self._client = await session.create_client(
        's3',
        endpoint_url=self._endpoint_url,
        region_name=self._region_name,
        aws_secret_access_key=self._aws_secret_access_key,
        aws_access_key_id=self._aws_access_key_id).__aenter__()
    return self
async def go():
    """Write items to DynamoDB in batches of 25, backing off and resubmitting
    unprocessed items when the provisioned write limit is hit, then verify the
    last inserted item exists."""
    session = get_session()
    async with session.create_client('dynamodb', region_name='us-west-2') as client:
        table_name = 'test'

        print('Writing to dynamo')
        start = 0
        while True:
            # Loop adding 25 items to dynamo at a time
            request_items = create_batch_write_structure(table_name, start, 25)
            response = await client.batch_write_item(
                RequestItems=request_items
            )
            if len(response['UnprocessedItems']) == 0:
                print('Wrote 25 items to dynamo')
            else:
                # Hit the provisioned write limit
                print('Hit write limit, backing off then retrying')
                await asyncio.sleep(5)

                # Items left over that haven't been inserted
                unprocessed_items = response['UnprocessedItems']
                print('Resubmitting items')
                # Loop until unprocessed items are written
                while len(unprocessed_items) > 0:
                    response = await client.batch_write_item(
                        RequestItems=unprocessed_items)
                    # If any items are still left over, add them to the
                    # list to be written
                    unprocessed_items = response['UnprocessedItems']

                    # If there are items left over, we could do with
                    # sleeping some more
                    if len(unprocessed_items) > 0:
                        print('Backing off for 5 seconds')
                        await asyncio.sleep(5)

                # Inserted all the unprocessed items, exit loop
                print('Unprocessed items successfully inserted')
                break

            start += 25

        # See if DynamoDB has the last item we inserted
        final_item = 'item' + str(start + 24)
        print(f'Item "{final_item}" should exist')

        response = await client.get_item(TableName=table_name,
                                         Key={'pk': {
                                             'S': final_item
                                         }})
        print(f'Response: {response["Item"]}')
async def get_client() -> Generator[Tuple[AioBaseClient, str], None, None]:
    """Yield an ``(AioBaseClient, bucket)`` pair for the object store.

    NOTE(review): this is an *async* generator, so the annotation should
    really be ``AsyncGenerator[Tuple[AioBaseClient, str], None]`` — left
    unchanged to avoid requiring a new import in this module.
    """
    server = config.get('OBJECT_STORE_SERVER')
    user_id = config.get('OBJECT_STORE_USER_ID')
    secret_key = config.get('OBJECT_STORE_SECRET')
    bucket = config.get('OBJECT_STORE_BUCKET')

    session = get_session()
    async with session.create_client('s3',
                                     endpoint_url=f'https://{server}',
                                     aws_secret_access_key=secret_key,
                                     aws_access_key_id=user_id) as client:
        # The `async with` closes the client when the generator finishes;
        # the former `try/finally: del client` only unbound the local name
        # and has been removed.
        yield client, bucket
async def go():
    """Create a queue, list all queue URLs, then delete the queue."""
    session = get_session()
    async with session.create_client('sqs', region_name='us-west-2') as client:
        print('Creating test_queue1')
        created = await client.create_queue(QueueName='test_queue1')
        queue_url = created['QueueUrl']

        listing = await client.list_queues()
        print('Queue URLs:')
        for url in listing.get('QueueUrls', []):
            print(f' {url}')

        print(f'Deleting queue {queue_url}')
        await client.delete_queue(QueueUrl=queue_url)
        print('Done')
async def go():
    """Pull messages off QUEUE_NAME (deleting each one) until interrupted."""
    # Boto should get credentials from ~/.aws/credentials or the environment
    session = get_session()
    async with session.create_client('sqs', region_name='us-west-2') as client:
        try:
            response = await client.get_queue_url(QueueName=QUEUE_NAME)
        except botocore.exceptions.ClientError as err:
            if err.response['Error']['Code'] == \
                    'AWS.SimpleQueueService.NonExistentQueue':
                # f-string for consistency with the producer example.
                print(f"Queue {QUEUE_NAME} does not exist")
                sys.exit(1)
            else:
                raise

        queue_url = response['QueueUrl']

        print('Pulling messages off the queue')
        while True:
            try:
                # This loop won't spin really fast as there is
                # essentially a sleep in the receive_message call
                response = await client.receive_message(
                    QueueUrl=queue_url,
                    WaitTimeSeconds=2,
                )
                if 'Messages' in response:
                    for msg in response['Messages']:
                        print(f'Got msg "{msg["Body"]}"')
                        # Need to remove msg from queue or else it'll reappear
                        await client.delete_message(
                            QueueUrl=queue_url,
                            ReceiptHandle=msg['ReceiptHandle'])
                else:
                    print('No messages in queue')
            except KeyboardInterrupt:
                break
        print('Finished')
def __init__(self, table_name, region=None):
    """Remember the table coordinates; the client requires async init."""
    self.table_name = table_name
    self.region = region
    self.session = get_session()
    self.client = None  # populated later by an async initializer
def _create_sqs_client():
    """Build an (unentered) aiobotocore SQS client context manager."""
    session = get_session()
    return session.create_client(
        'sqs',
        region_name=AWS_REGION_NAME,
        aws_access_key_id=AWS_KEY,
        aws_secret_access_key=AWS_SECRET,
    )
def get_aiosession():
    """Return a fresh aiobotocore session."""
    session = get_session()
    return session
def __init__(self):
    """Read S3 connection settings from the environment."""
    self.endpoint_url = os.getenv("AWS_S3_ENDPOINT_URL")  # optional override
    self.bucket = os.environ["AWS_S3_BUCKET"]  # required; KeyError if unset
    self.botocore_session = get_session()
async def test_get_session():
    """get_session() must hand back an AioSession instance."""
    assert isinstance(get_session(), AioSession)
from typing import AsyncGenerator

from aiobotocore.client import AioBaseClient
from aiobotocore.session import get_session
from sqlmodel.ext.asyncio.session import AsyncSession

from app.core.config import settings
from app.core.db import ASYNC_ENGINE

# One aiobotocore session shared by every client this module creates.
session = get_session()


async def get_boto() -> AsyncGenerator[AioBaseClient, None]:
    """Create a boto client which can be shared.

    This is an async generator (dependency-style), so it is annotated as
    ``AsyncGenerator`` — the previous ``-> AioBaseClient`` annotation was
    wrong and left the ``AsyncGenerator`` import unused.
    """
    async with session.create_client(
        's3',
        region_name=settings.AWS_REGION,
        aws_secret_access_key=settings.AWS_S3_SECRET_ACCESS_KEY,
        aws_access_key_id=settings.AWS_S3_ACCESS_KEY_ID,
    ) as client:
        yield client


async def get_db_session() -> AsyncSession:
    """Return a session to the database."""
    return AsyncSession(ASYNC_ENGINE, expire_on_commit=False)
def get_client(self):
    """Return a client context manager for the configured boto service."""
    return get_session().create_client(
        self.boto_service_name, **self._client_options
    )
async def _initialize(self):
    """Initialize our async resources: aioboto3"""
    boto_session = get_session()
    # Enter the client's context manager manually so it outlives this method.
    # NOTE(review): nothing here calls __aexit__ — the owner must close this
    # client at shutdown or its connections leak; confirm a teardown exists.
    self._dynamo = await boto_session.create_client(
        'dynamodb', region_name=DYNAMO_REGION).__aenter__()
    log.info("DDB client initialized")
def __init__(
    self,
    region=None,
    availability_zone=None,
    bootstrap=None,
    auto_shutdown=None,
    ami=None,
    instance_type=None,
    vpc=None,
    subnet_id=None,
    security_groups=None,
    filesystem_size=None,
    key_name=None,
    iam_instance_profile=None,
    docker_image=None,
    debug=False,
    **kwargs,
):
    """Collect EC2 cluster settings.

    Every explicit keyword argument overrides the matching key in the
    ``cloudprovider.ec2`` dask config block; ``None`` means "use config".
    The merged settings are copied into ``self.options`` for both the
    scheduler and worker factories.
    """
    self.boto_session = get_session()
    self.config = dask.config.get("cloudprovider.ec2", {})
    self.scheduler_class = EC2Scheduler
    self.worker_class = EC2Worker
    # Each option prefers the explicit argument, falling back to dask config.
    self.region = region if region is not None else self.config.get("region")
    self.availability_zone = (
        availability_zone
        if availability_zone is not None
        else self.config.get("availability_zone")
    )
    self.bootstrap = (
        bootstrap if bootstrap is not None else self.config.get("bootstrap")
    )
    self.auto_shutdown = (
        auto_shutdown
        if auto_shutdown is not None
        else self.config.get("auto_shutdown")
    )
    self.ami = ami if ami is not None else self.config.get("ami")
    self.instance_type = (
        instance_type
        if instance_type is not None
        else self.config.get("instance_type")
    )
    # Heuristic: EC2 GPU instance families start with "p" or "g".
    # NOTE(review): raises AttributeError if instance_type resolves to None
    # (argument omitted and key missing from config) — confirm the config
    # always supplies one.
    self.gpu_instance = self.instance_type.startswith(("p", "g"))
    self.vpc = vpc if vpc is not None else self.config.get("vpc")
    self.subnet_id = (
        subnet_id if subnet_id is not None else self.config.get("subnet_id")
    )
    self.security_groups = (
        security_groups
        if security_groups is not None
        else self.config.get("security_groups")
    )
    self.filesystem_size = (
        filesystem_size
        if filesystem_size is not None
        else self.config.get("filesystem_size")
    )
    self.key_name = (
        key_name if key_name is not None else self.config.get("key_name")
    )
    self.iam_instance_profile = (
        iam_instance_profile
        if iam_instance_profile is not None
        else self.config.get("iam_instance_profile")
    )
    self.debug = debug
    # Shared option bag handed to both scheduler and worker instances.
    self.options = {
        "cluster": self,
        "config": self.config,
        "region": self.region,
        "availability_zone": self.availability_zone,
        "bootstrap": self.bootstrap,
        "ami": self.ami,
        "docker_image": docker_image or self.config.get("docker_image"),
        "instance_type": self.instance_type,
        "gpu_instance": self.gpu_instance,
        "vpc": self.vpc,
        "subnet_id": self.subnet_id,
        "security_groups": self.security_groups,
        "filesystem_size": self.filesystem_size,
        "key_name": self.key_name,
        "iam_instance_profile": self.iam_instance_profile,
    }
    self.scheduler_options = {**self.options}
    self.worker_options = {**self.options}
    super().__init__(debug=debug, **kwargs)