def receive_message(
        queue_url: str,
        wait_seconds: Optional[int] = 1) -> Optional[Dict[str, Any]]:
    """Receive at most one message from an SQS queue.

    :param queue_url: URL of the queue to poll.
    :param wait_seconds: long-poll wait time handed to SQS.
    :returns: None when no message arrived within the wait window,
        otherwise a dict with 'message' (the body), 'message_id' and
        'receipt_handle'.
    :raises botocore.exceptions.ClientError: re-raised after logging.
    """
    sqs_client = common.get_client('sqs')
    try:
        response = sqs_client.receive_message(
            QueueUrl=queue_url,
            MaxNumberOfMessages=1,
            WaitTimeSeconds=wait_seconds,
            AttributeNames=['All'],
            MessageAttributeNames=['All'],
        )
    except botocore.exceptions.ClientError:
        logger.exception(f'Failed to receive message from {queue_url}')
        raise
    messages = response.get('Messages', [])
    if not messages:
        return None
    received = messages[0]
    message_id = received['MessageId']
    logger.info(
        f'Successfully received message {message_id} from {queue_url}')
    return {
        'message': received['Body'],
        'message_id': message_id,
        'receipt_handle': received['ReceiptHandle']
    }
def upload_file(s3_file: dict) -> dict:
    """Upload a local file to S3.

    :param s3_file: dict with 'filename' (local path), 'bucket' and 'key'.
    :returns: the same *s3_file* dict on success.
    :raises botocore.exceptions.ClientError: re-raised after logging.
        Previously the error branch fell through and implicitly returned
        None despite the declared `-> dict` return type; re-raising makes
        this consistent with the other helpers in this file that log and
        re-raise.
    """
    client = common.get_client('s3')
    filename = s3_file.get('filename')
    key = s3_file.get('key')
    bucket = s3_file.get('bucket')
    try:
        client.upload_file(
            Filename=filename,
            Bucket=bucket,
            Key=key,
        )
    except botocore.exceptions.ClientError as error:
        common.print_resource('Failed to upload file to S3', meta_dict={
            'error': error,
            'filename': filename,
            'bucket': bucket,
            'key': key,
        })
        raise
    common.print_resource('Successfully uploaded file to S3', meta_dict={
        'filename': filename,
        'bucket': bucket,
        'key': key
    })
    return s3_file
def get_queue_arn(queue_url: str) -> str:
    """Resolve the ARN of the SQS queue at *queue_url*."""
    sqs_client = common.get_client('sqs')
    attributes = sqs_client.get_queue_attributes(
        QueueUrl=queue_url,
        AttributeNames=['QueueArn'],
    )['Attributes']
    return attributes['QueueArn']
def _create_table(table_definition: Dict[str, Any]) -> str:
    """Create a DynamoDB table from a shorthand definition.

    :param table_definition: shorthand spec; keys used are 'name',
        'attributes', 'key_schema', 'throughput',
        'global_secondary_indexes' and 'local_secondary_indexes'.
    :returns: the table name.
    """
    client = common.get_client('dynamodb')
    table_name = table_definition.get('name')
    create_args = dict(
        TableName=table_name,
        AttributeDefinitions=_map_attributes(
            table_definition.get('attributes')),
        KeySchema=_map_key_schema(
            table_definition.get('key_schema')),
        ProvisionedThroughput=_map_provisioned_throughput(
            table_definition.get('throughput')),
        GlobalSecondaryIndexes=_map_global_secondary_indexes(
            table_definition.get('global_secondary_indexes')),
        LocalSecondaryIndexes=_map_local_secondary_indexes(
            table_definition.get('local_secondary_indexes')),
    )
    # Sections absent from the definition map to None; strip them so the
    # API call only receives the parameters that were actually provided.
    client.create_table(**common.clear_nones(create_args))
    common.print_resource(
        'Successfully created DynamoDB table',
        meta_dict={'name': table_name}
    )
    return table_name
def create_queue(queue_name: str) -> str:
    """Return the URL for *queue_name*, creating the queue when absent.

    :param queue_name: name of the SQS queue.
    :returns: the queue URL.
    :raises botocore.exceptions.ClientError: for any failure other than
        "queue does not exist". Previously ANY ClientError (for example a
        permission error) silently fell through to a create attempt.
    """
    client = common.get_client('sqs')
    try:
        queue_url = client.get_queue_url(QueueName=queue_name)['QueueUrl']
    except botocore.exceptions.ClientError as error:
        error_code = error.response.get('Error', {}).get('Code')
        # SQS signals a missing queue with one of these codes depending on
        # the wire protocol in use; anything else is a real failure.
        if error_code not in ('AWS.SimpleQueueService.NonExistentQueue',
                              'QueueDoesNotExist'):
            raise
        queue_url = client.create_queue(QueueName=queue_name)['QueueUrl']
        common.print_resource('Created queue', meta_dict={
            'queue_name': queue_name,
            'queue_url': queue_url
        })
        return queue_url
    common.print_resource('Queue already exists, not creating it', meta_dict={
        'queue_name': queue_name,
        'queue_url': queue_url
    })
    return queue_url
def _update_table(table_definition: Dict[str, Any]) -> str:
    """Apply in-place updates to an existing DynamoDB table.

    Compares the desired shorthand definition against the live table and
    only sends the sections that actually changed; becomes a logged no-op
    when nothing differs.

    :param table_definition: shorthand spec; keys used are 'name',
        'throughput', 'attributes' and 'global_secondary_indexes'.
    :returns: the table name.
    """
    client = common.get_client('dynamodb')
    table_name = table_definition.get('name')
    existing_table = client.describe_table(
        TableName=table_name,
    )['Table']
    provisioned_throughput = _map_provisioned_throughput(
        table_definition.get('throughput')
    )
    current_provisioned_throughput = common.pick_from_dict(
        existing_table['ProvisionedThroughput'],
        ['WriteCapacityUnits', 'ReadCapacityUnits']
    )
    # Null out any section that matches the live table so clear_nones()
    # drops it from the update_table parameters below.
    if current_provisioned_throughput == provisioned_throughput:
        provisioned_throughput = None
    attribute_definitions = _map_attributes(
        table_definition.get('attributes')
    )
    if common.compare_unordered_lists(attribute_definitions, existing_table['AttributeDefinitions']):
        attribute_definitions = None
    index_updates = _get_global_secondary_updates(
        table_definition.get('global_secondary_indexes'),
        table_name,
    )
    if len(index_updates) == 0:
        index_updates = None
    parameters = common.clear_nones(dict(
        TableName=table_name,
        AttributeDefinitions=attribute_definitions,
        ProvisionedThroughput=provisioned_throughput,
        GlobalSecondaryIndexUpdates=index_updates,
    ))
    # Only TableName surviving means nothing changed; skip the API call.
    if parameters == {'TableName': table_name}:
        common.print_resource(
            'Not updating DynamoDB table, nothing to do',
            meta_dict={'name': table_name}
        )
        return table_name
    client.update_table(**parameters)
    common.print_resource(
        'Successfully updated DynamoDB table',
        meta_dict={'name': table_name}
    )
    return table_name
def table_exists(table_name: str) -> bool:
    """Report whether DynamoDB knows a table called *table_name*.

    Any ClientError from describe_table is treated as "does not exist",
    matching the original best-effort behavior.
    """
    client = common.get_client('dynamodb')
    try:
        description = client.describe_table(
            TableName=table_name,
        )
    except botocore.exceptions.ClientError:
        return False
    return 'Table' in description
def delete_message(queue_url: str, receipt_handle: str, message_id: str):
    """Delete a previously received message from its SQS queue.

    :param queue_url: URL of the queue the message came from.
    :param receipt_handle: handle returned when the message was received.
    :param message_id: used only for log context on failure.
    :raises botocore.exceptions.ClientError: re-raised after logging.
    """
    sqs_client = common.get_client('sqs')
    try:
        sqs_client.delete_message(
            QueueUrl=queue_url,
            ReceiptHandle=receipt_handle,
        )
    except botocore.exceptions.ClientError:
        logger.exception(
            f'Failed to delete message {message_id} from {queue_url}')
        raise
def _get_global_secondary_updates(
        shorthand_indexes: List[Dict[str, Any]],
        table_name: str) -> List[Dict[str, Dict[str, Any]]]:
    """Diff desired global secondary indexes against the live table.

    Builds the GlobalSecondaryIndexUpdates payload for update_table:
    a Delete entry for each live index missing from the desired set, an
    Update entry for each index whose provisioned throughput changed, and
    a Create entry for each desired index not yet on the table.

    Fix: the return annotation was the list literal
    ``[Dict[str, Dict[str, Any]]]``, which is not a valid type hint; it is
    now ``List[...]``. Logic is unchanged.

    :param shorthand_indexes: shorthand GSI definitions (may be None).
    :param table_name: name of the existing DynamoDB table.
    :returns: list of {'Create'|'Update'|'Delete': ...} update dicts.
    """
    client = common.get_client('dynamodb')
    global_secondary_indexes = _map_global_secondary_indexes(shorthand_indexes)
    response = client.describe_table(
        TableName=table_name,
    )
    current_global_secondary_indexes = response['Table'].get(
        'GlobalSecondaryIndexes', []
    )
    output = []
    for current_index in current_global_secondary_indexes:
        # NOTE(review): when shorthand_indexes is None this passes None as
        # the haystack to common.find — assumed to behave as "not found";
        # confirm against common.find's implementation.
        new_index = common.find(
            global_secondary_indexes,
            lambda index: current_index['IndexName'] == index['IndexName']
        )
        if new_index is None:
            output.append({
                'Delete': {
                    'IndexName': current_index['IndexName']
                }
            })
            continue
        if current_index['ProvisionedThroughput'] != new_index['ProvisionedThroughput']:
            output.append({
                'Update': common.pick_from_dict(
                    new_index,
                    ['IndexName', 'ProvisionedThroughput'],
                )
            })
    if global_secondary_indexes is not None:
        for modify_index in global_secondary_indexes:
            existing_index = common.find(
                current_global_secondary_indexes,
                lambda index: modify_index['IndexName'] == index['IndexName']
            )
            if existing_index is None:
                output.append({
                    'Create': modify_index,
                })
    return output
def create_topic(topic_name: str) -> str:
    """Create an SNS topic named *topic_name* and return its ARN."""
    sns_client = common.get_client('sns')
    topic_arn = sns_client.create_topic(Name=topic_name)['TopicArn']
    common.print_resource('Created SNS topic', meta_dict={
        'name': topic_name,
        'topic_arn': topic_arn
    })
    return topic_arn
def create_bucket(bucket_name: str):
    """Ensure an S3 bucket named *bucket_name* exists.

    :returns: the bucket name in either case.
    """
    client = common.get_client('s3')
    if bucket_exists(client, bucket_name):
        common.print_resource('Bucket already exists',
                              meta_dict={'name': bucket_name})
        return bucket_name
    client.create_bucket(Bucket=bucket_name)
    common.print_resource('Bucket created', meta_dict={'name': bucket_name})
    return bucket_name
def subscribe_queue_to_bucket(bucket_name: str, queue_url: str, events: list):
    """Point S3 bucket notifications for *events* at an SQS queue.

    Idempotent: when an identical queue configuration (same ARN and same
    event list) is already present on the bucket, nothing is written.

    Fix: the original created the S3 client twice in a row; the redundant
    second ``common.get_client('s3')`` call is removed.

    :param bucket_name: target bucket.
    :param queue_url: URL of the queue to notify.
    :param events: list of S3 event names.
    """
    client = common.get_client('s3')
    queue_arn = sqs.get_queue_arn(queue_url)
    response = client.get_bucket_notification_configuration(Bucket=bucket_name)
    subscription_exists = any(
        queue_arn == item.get('QueueArn') and item.get('Events') == events
        for item in response.get('QueueConfigurations', [])
    )
    if subscription_exists:
        common.print_resource('Queue to bucket subscription already exists',
                              meta_list=[
                                  f'bucket: {bucket_name}',
                                  f'queue: {queue_arn}',
                                  f'events: {events}',
                              ])
        return
    # NOTE(review): put_bucket_notification_configuration replaces the
    # bucket's whole notification configuration — any existing
    # configurations for other queues/topics are dropped; confirm intended.
    client.put_bucket_notification_configuration(
        Bucket=bucket_name,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'QueueArn': queue_arn,
                'Events': events
            }]
        })
    common.print_resource('Created queue to bucket subscription', meta_list=[
        f'bucket: {bucket_name}',
        f'queue: {queue_arn}',
        f'events: {events}',
    ])
def send_message(queue_url: str, message: str):
    """Send *message* to an SQS queue.

    :returns: the id SQS assigned to the new message.
    :raises botocore.exceptions.ClientError: re-raised after logging.
    """
    sqs_client = common.get_client('sqs')
    try:
        result = sqs_client.send_message(
            QueueUrl=queue_url,
            MessageBody=message,
        )
    except botocore.exceptions.ClientError:
        logger.exception(f'Failed to send message to {queue_url}')
        raise
    message_id = result['MessageId']
    logger.info(
        f'Successfully sent message {message_id} to queue {queue_url}')
    return message_id
def provision_key(key: str, value: str):
    """Create or update a SecureString parameter in the SSM store.

    The value is censored before being logged. Existence is checked first
    only so the log message can say whether the key was created or updated.
    """
    client = common.get_client('ssm')
    existed_before = _key_exists(key)
    client.put_parameter(
        Name=key,
        Value=value,
        Type='SecureString',
        Overwrite=True,
    )
    message = ('Updated key in parameter store' if existed_before
               else 'Created key in parameter store')
    common.print_resource(message, meta_dict={
        'key': key,
        'value': common.censor(value)
    })
def invoke(function_name: str, payload: str, message_id: Optional[str] = 'N/A'):
    """Asynchronously invoke a Lambda function with *payload*.

    :param function_name: name of the Lambda function.
    :param payload: string payload, encoded to bytes before sending.
    :param message_id: used purely for log correlation.
    :raises botocore.exceptions.ClientError: re-raised after logging.
    """
    lambda_client = common.get_client('lambda')
    try:
        lambda_client.invoke(
            FunctionName=function_name,
            InvocationType='Event',
            LogType='Tail',
            Payload=payload.encode(),
        )
    except botocore.exceptions.ClientError:
        logger.exception(f'Failed to invoke {function_name}')
        raise
    logger.info(
        f'Successfully invoked {function_name} with message {message_id}')
def list_objects(bucket: str,
                 prefix: str,
                 ignore_empty: Optional[bool] = True) -> List[Dict[str, Any]]:
    """List objects under *prefix* in *bucket*, walking every result page.

    :param bucket: bucket to list.
    :param prefix: key prefix to filter on.
    :param ignore_empty: skip zero-byte objects when True.
    :returns: one dict per object with bucket, key, region, etag and size.
    :raises botocore.exceptions.ClientError: re-raised after logging.
    """
    client = common.get_client('s3')
    found = []
    try:
        pages = client.get_paginator('list_objects_v2').paginate(
            Bucket=bucket,
            MaxKeys=1000,
            Prefix=prefix,
        )
        for page in pages:
            for entry in page.get('Contents', []):
                if ignore_empty and entry.get('Size', 0) == 0:
                    continue
                found.append({
                    'bucket': page['Name'],
                    'key': entry['Key'],
                    'region': page['ResponseMetadata']['HTTPHeaders']
                    ['x-amz-bucket-region'],
                    'etag': entry['ETag'],
                    'size': entry['Size'],
                })
    except botocore.exceptions.ClientError:
        logger.exception(f'Failed to list objects in bucket {bucket}')
        raise
    return found
def create_stream(stream_name: str, shard_count: int = 1) -> str:
    """Ensure a Kinesis stream named *stream_name* exists.

    Any ClientError from describe_stream is treated as "stream absent",
    matching the original behavior.

    :param shard_count: shard count used only when the stream is created.
    :returns: the stream name.
    """
    client = common.get_client('kinesis')
    try:
        exists = 'StreamDescription' in client.describe_stream(
            StreamName=stream_name)
    except botocore.exceptions.ClientError:
        exists = False
    if exists:
        common.print_resource('Kinesis stream already exists',
                              meta_dict={'name': stream_name})
    else:
        client.create_stream(
            StreamName=stream_name,
            ShardCount=shard_count,
        )
        common.print_resource('Kinesis stream created',
                              meta_dict={'name': stream_name})
    return stream_name
def subscribe_queue_to_topic(topic_arn: str, queue_url: str):
    """Subscribe an SQS queue to an SNS topic (idempotent).

    Fix: the log messages misspelled 'queue' as 'qeue'; corrected in both
    branches. Logic is unchanged.

    :param topic_arn: ARN of the SNS topic.
    :param queue_url: URL of the queue; resolved to an ARN for the
        subscription endpoint.
    """
    client = common.get_client('sns')
    queue_arn = sqs.get_queue_arn(queue_url)
    # NOTE(review): list_subscriptions_by_topic returns at most one page of
    # subscriptions; topics with very many subscriptions could be
    # re-subscribed needlessly — confirm expected scale.
    response = client.list_subscriptions_by_topic(TopicArn=topic_arn)
    subscription_exists = any(
        queue_arn == item['Endpoint'] for item in response['Subscriptions'])
    if subscription_exists:
        common.print_resource(
            'Queue to topic subscription already exists',
            meta_list=[f'topic: {topic_arn}', f'queue: {queue_arn}'])
    else:
        client.subscribe(
            TopicArn=topic_arn,
            Protocol='sqs',
            Endpoint=queue_arn,
        )
        common.print_resource(
            'Added queue subscription on topic',
            meta_list=[f'topic: {topic_arn}', f'queue: {queue_arn}'])
def _key_exists(key: str) -> bool:
    """Return True when *key* is already present in the SSM parameter store."""
    ssm_client = common.get_client('ssm')
    parameters = ssm_client.get_parameters(Names=[key]).get('Parameters')
    return len(parameters) > 0