def create_queue(queue_name: str) -> str:
    """Return the URL of the SQS queue *queue_name*, creating it if missing.

    Idempotent: if the queue already exists its URL is returned unchanged.

    :param queue_name: name of the SQS queue to look up or create
    :return: the queue URL
    :raises botocore.exceptions.ClientError: for any SQS failure other than
        the queue not existing (e.g. access denied, throttling)
    """
    client = common.get_client('sqs')
    try:
        queue_url = client.get_queue_url(QueueName=queue_name)['QueueUrl']
    except botocore.exceptions.ClientError as error:
        # Only a missing queue should fall through to creation. The original
        # caught every ClientError, so e.g. a permissions failure silently
        # triggered a create attempt; anything else now propagates.
        error_code = error.response.get('Error', {}).get('Code')
        if error_code not in ('AWS.SimpleQueueService.NonExistentQueue',
                              'QueueDoesNotExist'):
            raise
        queue_url = client.create_queue(QueueName=queue_name)['QueueUrl']
        common.print_resource('Created queue', meta_dict={
            'queue_name': queue_name,
            'queue_url': queue_url
        })
        return queue_url
    common.print_resource('Queue already exists, not creating it', meta_dict={
        'queue_name': queue_name,
        'queue_url': queue_url
    })
    return queue_url
def _create_table(table_definition: Dict[str, Any]) -> str:
    """Create a DynamoDB table described by *table_definition*.

    Each section of the definition dict is mapped to the boto3 request shape
    by the module's ``_map_*`` helpers; ``common.clear_nones`` then drops the
    optional sections that were not supplied.

    :param table_definition: dict with ``name`` plus optional ``attributes``,
        ``key_schema``, ``throughput``, ``global_secondary_indexes`` and
        ``local_secondary_indexes`` sections
    :return: the created table's name
    """
    dynamodb = common.get_client('dynamodb')
    name = table_definition.get('name')
    request = dict(
        TableName=name,
        AttributeDefinitions=_map_attributes(
            table_definition.get('attributes')),
        KeySchema=_map_key_schema(
            table_definition.get('key_schema')),
        ProvisionedThroughput=_map_provisioned_throughput(
            table_definition.get('throughput')),
        GlobalSecondaryIndexes=_map_global_secondary_indexes(
            table_definition.get('global_secondary_indexes')),
        LocalSecondaryIndexes=_map_local_secondary_indexes(
            table_definition.get('local_secondary_indexes')),
    )
    dynamodb.create_table(**common.clear_nones(request))
    common.print_resource(
        'Successfully created DynamoDB table',
        meta_dict={'name': name}
    )
    return name
def upload_file(s3_file: dict) -> dict:
    """Upload a local file to S3 and log the outcome.

    :param s3_file: dict with ``filename`` (local path), ``bucket`` and
        ``key`` entries
    :return: the *s3_file* dict on success
    """
    # NOTE(review): on ClientError this function logs and implicitly returns
    # None despite the ``-> dict`` annotation — callers must handle that.
    client = common.get_client('s3')
    filename = s3_file.get('filename')
    key = s3_file.get('key')
    bucket = s3_file.get('bucket')
    try:
        client.upload_file(
            Filename=filename,
            Bucket=bucket,
            Key=key,
        )
    except botocore.exceptions.ClientError as error:
        common.print_resource('Failed to upload file to S3', meta_dict={
            'error': error,
            'filename': filename,
            'bucket': bucket,
            'key': key,
        })
    else:
        common.print_resource('Successfully uploaded file to S3', meta_dict={
            'filename': filename,
            'bucket': bucket,
            'key': key
        })
        return s3_file
def _update_table(table_definition: Dict[str, Any]) -> str:
    """Bring an existing DynamoDB table in line with *table_definition*.

    Diffs the desired throughput, attribute definitions and global secondary
    indexes against the live table and only sends ``update_table`` with the
    sections that actually changed; a no-op is logged and skipped entirely.

    :param table_definition: desired table definition (see ``_create_table``)
    :return: the table name
    """
    dynamodb = common.get_client('dynamodb')
    name = table_definition.get('name')
    live_table = dynamodb.describe_table(
        TableName=name,
    )['Table']

    # Throughput: drop from the request when it already matches the table.
    throughput = _map_provisioned_throughput(
        table_definition.get('throughput')
    )
    live_throughput = common.pick_from_dict(
        live_table['ProvisionedThroughput'],
        ['WriteCapacityUnits', 'ReadCapacityUnits']
    )
    if live_throughput == throughput:
        throughput = None

    # Attributes: order-insensitive comparison against the live schema.
    attributes = _map_attributes(table_definition.get('attributes'))
    if common.compare_unordered_lists(
            attributes, live_table['AttributeDefinitions']):
        attributes = None

    # GSIs: an empty update list collapses to None so clear_nones drops it.
    gsi_updates = _get_global_secondary_updates(
        table_definition.get('global_secondary_indexes'),
        name,
    )
    if len(gsi_updates) == 0:
        gsi_updates = None

    parameters = common.clear_nones(dict(
        TableName=name,
        AttributeDefinitions=attributes,
        ProvisionedThroughput=throughput,
        GlobalSecondaryIndexUpdates=gsi_updates,
    ))
    if parameters == {'TableName': name}:
        common.print_resource(
            'Not updating DynamoDB table, nothing to do',
            meta_dict={'name': name}
        )
        return name

    dynamodb.update_table(**parameters)
    common.print_resource(
        'Successfully updated DynamoDB table',
        meta_dict={'name': name}
    )
    return name
def create_topic(topic_name: str) -> str:
    """Create an SNS topic and return its ARN.

    :param topic_name: name of the topic to create
    :return: the topic's ARN
    """
    sns = common.get_client('sns')
    topic_arn = sns.create_topic(Name=topic_name)['TopicArn']
    common.print_resource('Created SNS topic', meta_dict={
        'name': topic_name,
        'topic_arn': topic_arn
    })
    return topic_arn
def create_bucket(bucket_name: str) -> str:
    """Create an S3 bucket if it does not already exist.

    Idempotent: an existing bucket is left untouched and only logged.

    :param bucket_name: name of the bucket to create
    :return: the bucket name, whether it was created or already present
    """
    client = common.get_client('s3')
    # bucket_exists is a sibling helper in this module; it decides whether a
    # create call is needed at all.
    if not bucket_exists(client, bucket_name):
        client.create_bucket(Bucket=bucket_name)
        common.print_resource('Bucket created', meta_dict={'name': bucket_name})
    else:
        common.print_resource('Bucket already exists', meta_dict={'name': bucket_name})
    return bucket_name
def provision_key(key: str, value: str):
    """Store *value* under *key* in SSM Parameter Store as a SecureString.

    Overwrites any existing value; the log message distinguishes between a
    fresh create and an update of an existing key. The value is censored
    before being logged.

    :param key: parameter store key name
    :param value: secret value to store
    """
    ssm = common.get_client('ssm')
    # Check existence first, purely so we can report create vs. update below.
    already_present = _key_exists(key)
    ssm.put_parameter(
        Name=key,
        Value=value,
        Type='SecureString',
        Overwrite=True,
    )
    if already_present:
        message = 'Updated key in parameter store'
    else:
        message = 'Created key in parameter store'
    common.print_resource(message, meta_dict={
        'key': key,
        'value': common.censor(value)
    })
def create_stream(stream_name: str, shard_count: int = 1) -> str:
    """Create a Kinesis stream unless one with the same name already exists.

    :param stream_name: name of the stream
    :param shard_count: number of shards for a newly created stream
    :return: the stream name
    """
    kinesis = common.get_client('kinesis')
    try:
        description = kinesis.describe_stream(StreamName=stream_name)
        exists = 'StreamDescription' in description
    except botocore.exceptions.ClientError:
        # describe_stream raises for a missing stream; treat that as absent.
        exists = False
    if exists:
        common.print_resource('Kinesis stream already exists', meta_dict={'name': stream_name})
    else:
        kinesis.create_stream(
            StreamName=stream_name,
            ShardCount=shard_count,
        )
        common.print_resource('Kinesis stream created', meta_dict={'name': stream_name})
    return stream_name
def subscribe_queue_to_bucket(bucket_name: str, queue_url: str, events: list):
    """Subscribe an SQS queue to S3 event notifications on a bucket.

    Idempotent: if a configuration for the same queue ARN and the same event
    list already exists it is only logged, not re-created.

    :param bucket_name: bucket to attach the notification to
    :param queue_url: URL of the target SQS queue (resolved to an ARN)
    :param events: list of S3 event names to subscribe to
    """
    client = common.get_client('s3')
    queue_arn = sqs.get_queue_arn(queue_url)
    existing = client.get_bucket_notification_configuration(Bucket=bucket_name)
    queue_configurations = existing.get('QueueConfigurations', [])
    subscription_exists = any(
        queue_arn == item.get('QueueArn') and item.get('Events') == events
        for item in queue_configurations
    )
    if subscription_exists:
        common.print_resource('Queue to bucket subscription already exists', meta_list=[
            f'bucket: {bucket_name}',
            f'queue: {queue_arn}',
            f'events: {events}',
        ])
        return
    # put_bucket_notification_configuration REPLACES the bucket's whole
    # notification configuration, so carry over every existing configuration
    # instead of clobbering it (the original dropped all prior subscriptions).
    notification_configuration = {
        'QueueConfigurations': queue_configurations + [{
            'QueueArn': queue_arn,
            'Events': events
        }],
    }
    for section in ('TopicConfigurations',
                    'LambdaFunctionConfigurations',
                    'EventBridgeConfiguration'):
        if section in existing:
            notification_configuration[section] = existing[section]
    client.put_bucket_notification_configuration(
        Bucket=bucket_name,
        NotificationConfiguration=notification_configuration)
    common.print_resource('Created queue to bucket subscription', meta_list=[
        f'bucket: {bucket_name}',
        f'queue: {queue_arn}',
        f'events: {events}',
    ])
def subscribe_queue_to_topic(topic_arn: str, queue_url: str):
    """Subscribe an SQS queue to an SNS topic.

    Idempotent: if the queue is already subscribed (on any results page) it
    is only logged, not re-subscribed.

    :param topic_arn: ARN of the SNS topic
    :param queue_url: URL of the SQS queue (resolved to an ARN)
    """
    client = common.get_client('sns')
    queue_arn = sqs.get_queue_arn(queue_url)
    # list_subscriptions_by_topic is paginated (100 per page); the original
    # only inspected the first page and could duplicate subscriptions.
    subscription_exists = False
    kwargs = {'TopicArn': topic_arn}
    while True:
        response = client.list_subscriptions_by_topic(**kwargs)
        if any(queue_arn == item['Endpoint']
               for item in response['Subscriptions']):
            subscription_exists = True
            break
        next_token = response.get('NextToken')
        if not next_token:
            break
        kwargs['NextToken'] = next_token
    if subscription_exists:
        # Typo fix: the original logged 'qeue' instead of 'queue'.
        common.print_resource(
            'Queue to topic subscription already exists',
            meta_list=[f'topic: {topic_arn}', f'queue: {queue_arn}'])
    else:
        client.subscribe(
            TopicArn=topic_arn,
            Protocol='sqs',
            Endpoint=queue_arn,
        )
        common.print_resource(
            'Added queue subscription on topic',
            meta_list=[f'topic: {topic_arn}', f'queue: {queue_arn}'])