def get_aws_resource(resourceType, accountId, awsRegion, roleName, sessionName):
    """
    This function assumes a role in the target account and returns a resource object
    Args:
        resourceType (string): Resource type to initialize (Ex: ec2, s3)
        accountId (string): Target account Id to assume role
        awsRegion (string): AWS region to initialize the service in
        roleName (string): Role name to assume
        sessionName (string): Assume role session name
    Returns:
        serviceResource (ServiceResource): boto3 service resource
    """
    stsClient = boto3.client('sts')
    try:
        # Build the role ARN from the accountId and roleName
        roleArn = f'arn:aws:iam::{accountId}:role/{roleName}'
        role = stsClient.assume_role(RoleArn=roleArn, RoleSessionName=sessionName)
        accessKey = role['Credentials']['AccessKeyId']
        secretKey = role['Credentials']['SecretAccessKey']
        sessionToken = role['Credentials']['SessionToken']
        # Create the resource using the temporary credentials from the assumed role
        serviceResource = boto3.resource(resourceType,
                                         region_name=awsRegion,
                                         aws_access_key_id=accessKey,
                                         aws_secret_access_key=secretKey,
                                         aws_session_token=sessionToken)
        return serviceResource
    except Exception as error:
        logger.error('Unexpected error occurred while assuming role for '
                     f'Account: {accountId}, Error: {error}')
        raise error
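# Example usage of get_aws_resource (a minimal sketch; the account ID, role name,
# and session name below are hypothetical placeholders):
#
#   s3Resource = get_aws_resource('s3', '111122223333', 'us-east-1',
#                                 'S3PolicyEnforcerRole', 'bucket-policy-lambda')
#   for bucket in s3Resource.buckets.all():
#       print(bucket.name)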
def existing_bucket(bucketName, s3, subscriberAccountId, awsRegion):
    """
    This function handles the bucket policy check for an existing bucket and
    identifies exceptional buckets that are allowed to remain publicly accessible
    """
    try:
        exceptionalBucket = bucketList['publiclyAccessibleBuckets']
        if bucketName in exceptionalBucket:
            logger.debug(
                f'Found the bucket: {bucketName} in {awsRegion} in the exceptions bucket list'
            )
    except Exception as e:
        logger.error(f'Got error: {e} while fetching exceptional bucket')
        raise e
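# Assumed shape of the per-account configuration that bucketList is loaded from
# (an illustrative sketch only; the real document is the bucketList configuration
# stored in CONF_S3BUCKET, and the bucket names below are hypothetical):
#
#   {
#       "publiclyAccessibleBuckets": [
#           "public-website-assets",
#           "open-data-downloads"
#       ]
#   }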
def replace_resource_arn_with_bucketName(bucketName, defaultDenyPolicy):
    """
    This function takes the default deny policy (loaded from CONF_S3BUCKET) and
    replaces the example resource ARNs in its statements with the actual bucket name
    """
    try:
        resources = defaultDenyPolicy['statement']['resource']
        for index, resource in enumerate(resources):
            if resource in ['arn:aws:s3:::examplebucket/*',
                            'arn:aws:s3:::examplebucket']:
                # Swap the placeholder bucket name for the real one in place
                resources[index] = resource.replace('examplebucket', bucketName)
                logger.info(
                    f'Replacing examplebucket in the defaultDenyPolicy with {bucketName}')
        return defaultDenyPolicy
    except Exception as e:
        logger.error(
            f'Got error: {e} while replacing the resource ARN with bucket name')
        raise e
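# A minimal sketch of the deny-policy configuration shape this function assumes
# (illustrative only; the real document lives at CONF_S3BUCKET/CONF_DenyPolicy):
#
#   defaultDenyPolicy = {
#       'statement': {
#           'resource': [
#               'arn:aws:s3:::examplebucket/*',
#               'arn:aws:s3:::examplebucket'
#           ]
#       }
#   }
#
# Calling replace_resource_arn_with_bucketName('my-new-bucket', defaultDenyPolicy)
# would rewrite both ARNs to point at 'my-new-bucket'.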
def get_config(bucket, key):
    """
    Read a JSON configuration stored on S3
    Args:
        bucket (string): Bucket name
        key (string): Configuration file key/path
    Returns:
        configuration (dict): Parsed configuration
    """
    try:
        s3 = boto3.resource('s3')
        obj = s3.Object(bucket, key)
        configuration = json.loads(obj.get()['Body'].read())
        return configuration
    except Exception as error:
        logger.error('Unexpected error occurred while reading object from S3, '
                     f'Error: {error}')
        raise error
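# Example usage of get_config (hypothetical bucket and keys, for illustration only):
#
#   denyPolicy = get_config('central-config-bucket', 'deny-policy.json')
#   exceptions = get_config('central-config-bucket', '111122223333.json')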
def notify_email(toEmail, fromEmail, message):
    """
    This function sends an email notification
    Args:
        toEmail (string): Recipient address
        fromEmail (string): Sender address (should be verified in SES)
        message (string): Email body
    """
    emailRegion = os.environ.get('EmailRegion', 'us-east-1')
    notification = os.environ.get('Notification', 'True')
    try:
        if notification == 'True':
            sesClient = boto3.client('ses', region_name=emailRegion)
            sesClient.send_email(Source=fromEmail,
                                 Destination={'ToAddresses': [toEmail]},
                                 Message={
                                     'Subject': {'Data': os.environ['Email_Subject']},
                                     'Body': {'Text': {'Data': message}}
                                 })
            logger.debug(f'Email notification was sent to {toEmail}')
            return True
        else:
            logger.info('Email notifications are disabled')
    except Exception as error:
        logger.error('Unexpected error occurred while sending email '
                     f'notifications, Error is {error}')
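# Environment variables read by notify_email (names taken from the code above;
# example values are illustrative):
#
#   EmailRegion   = 'us-east-1'        # SES region, defaults to us-east-1 if unset
#   Notification  = 'True'             # emails are sent only when this equals 'True'
#   Email_Subject = 'S3 policy alert'  # subject line for the notification
#
# Example call (hypothetical addresses):
#   notify_email('ops@example.com', 'no-reply@example.com', 'Bucket policy applied')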
def apply_policy_to_new_bucket(bucketName, s3, newBucketDenyPolicy,
                               subscriberAccountId, awsRegion):
    """
    This function enforces the deny bucket policy on the newly created bucket
    """
    try:
        # put_bucket_policy expects the policy document as a JSON string
        s3.put_bucket_policy(Bucket=bucketName,
                             Policy=json.dumps(newBucketDenyPolicy))
        message = (f'Applied preventPublicAccessPolicy to the bucket: {bucketName} '
                   f'in the {subscriberAccountId} account in the {awsRegion} region')
        logger.debug(
            'Bucket policy applied successfully, sending an email notification')
        notify_email(toEmail, fromEmail, message)
    except Exception as e:
        message = (f'Got error: {e} while applying the bucket policy to the bucket: '
                   f'{bucketName} in the {subscriberAccountId} account in the {awsRegion} region')
        logger.debug(message)
        notify_email(toEmail, fromEmail, message)
        logger.debug('Notifying the recipient of the error')
        raise e
def lambda_handler(event, context):
    """
    This is the main Lambda handler
    """
    subscriberAccountId = event['account']
    logger.debug(f'Found the subscriber account: {subscriberAccountId}')
    awsRegion = event['detail']['awsRegion']
    logger.debug(f'Found the region: {awsRegion}')
    sessionName = context.function_name
    logger.debug(f'Found the session: {sessionName}')
    eventName = event['detail']['eventName']
    logger.debug(f'Found the event: {eventName}')
    bucketName = event['detail']['requestParameters']['bucketName']
    logger.debug(f'Found the bucket: {bucketName}')
    logger.debug(f'Default deny policy ({type(defaultDenyPolicy)}): {defaultDenyPolicy}')
    s3 = get_aws_client('s3', subscriberAccountId, awsRegion, roleName,
                        sessionName)
    if eventName == 'CreateBucket':
        logger.debug(
            f'Executing the Lambda function as the event is {eventName}')
        # newBucketDenyPolicy = replace_resource_arn_with_bucketName(bucketName, defaultDenyPolicy)
        apply_policy_to_new_bucket(bucketName, s3, defaultDenyPolicy,
                                   subscriberAccountId, awsRegion)
    elif eventName in ['PutBucketPolicy', 'DeleteBucketPolicy']:
        logger.info(
            f'Executing the Lambda function as the event is {eventName}')
        existing_bucket(bucketName, s3, subscriberAccountId, awsRegion)
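# A minimal sketch of the CloudTrail event shape the handler expects
# (field values below are illustrative placeholders):
#
#   event = {
#       'account': '111122223333',
#       'detail': {
#           'awsRegion': 'us-east-1',
#           'eventName': 'CreateBucket',
#           'requestParameters': {
#               'bucketName': 'example-new-bucket'
#           }
#       }
#   }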
import json
import os

import botocore
from utils.common import get_config, notify_email, get_aws_client
from utils.logger import LoggerUtils as logger

# global variables
s3Bucket = os.environ['CONF_S3BUCKET']
s3Key = os.environ['CONF_DenyPolicy']
defaultDenyPolicy = get_config(s3Bucket, s3Key)
bucketList = get_config(s3Bucket, "095139704753.json")
toEmail = os.environ['ToEmail']
fromEmail = os.environ['FromEmail']
roleName = os.environ['ROLE_NAME']
notification = os.environ['Notifications'] == 'True'
logger.setLevel()
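# Environment variables the module reads at import time (names taken from the code
# above; example values are illustrative):
#
#   CONF_S3BUCKET   = 'central-config-bucket'   # bucket holding the JSON configuration
#   CONF_DenyPolicy = 'deny-policy.json'        # key of the default deny policy document
#   ToEmail         = 'ops@example.com'         # notification recipient
#   FromEmail       = 'no-reply@example.com'    # SES-verified sender address
#   ROLE_NAME       = 'S3PolicyEnforcerRole'    # role assumed in subscriber accounts
#   Notifications   = 'True'                    # enables email notifications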