def test_emr_launch_function():
    app = core.App()
    stack = core.Stack(app, 'test-stack')
    vpc = ec2.Vpc(stack, 'Vpc')
    success_topic = sns.Topic(stack, 'SuccessTopic')
    failure_topic = sns.Topic(stack, 'FailureTopic')

    profile = emr_profile.EMRProfile(
        stack, 'test-profile',
        profile_name='test-profile',
        vpc=vpc)
    configuration = cluster_configuration.ClusterConfiguration(
        stack, 'test-configuration',
        configuration_name='test-configuration')

    function = emr_launch_function.EMRLaunchFunction(
        stack, 'test-function',
        launch_function_name='test-function',
        emr_profile=profile,
        cluster_configuration=configuration,
        cluster_name='test-cluster',
        success_topic=success_topic,
        failure_topic=failure_topic,
        allowed_cluster_config_overrides=configuration.override_interfaces['default'],
        wait_for_cluster_start=False
    )

    resolved_function = stack.resolve(function.to_json())
    print(default_function)
    print(resolved_function)
    assert default_function == resolved_function

def test_emr_secure_launch_function(self):
    stack = core.Stack(core.App(), 'test-stack')
    vpc = ec2.Vpc(stack, 'Vpc')
    success_topic = sns.Topic(stack, 'SuccessTopic')
    failure_topic = sns.Topic(stack, 'FailureTopic')

    profile = emr_profile.EMRProfile(
        stack, 'test-profile',
        profile_name='test-profile',
        vpc=vpc)
    configuration = cluster_configuration.ClusterConfiguration(
        stack, 'test-configuration',
        configuration_name='test-configuration',
        secret_configurations={
            'SecretConfiguration': secretsmanager.Secret(stack, 'Secret')
        })

    function = emr_launch_function.EMRLaunchFunction(
        stack, 'test-function',
        description='test description',
        launch_function_name='test-function',
        emr_profile=profile,
        cluster_configuration=configuration,
        cluster_name='test-cluster',
        success_topic=success_topic,
        failure_topic=failure_topic,
        allowed_cluster_config_overrides=configuration.override_interfaces['default'],
        wait_for_cluster_start=False
    )

    self.print_and_assert(self.default_function, function)

def create_all_topics(self) -> None:
    """Create all stack topics."""

    # Internal topics
    # General alarm topic to signal problems in stack execution
    # and e-mail subscription
    self.topics_["alarm_topic"] = sns.Topic(self, "alarm_topic")
    self.topics_["alarm_topic"].add_subscription(
        sns_subscriptions.EmailSubscription(settings.operator_email))

    # Public STAC item topic for new STAC items
    self.topics_["stac_item_topic"] = sns.Topic(self, "stac_item_topic")
    core.CfnOutput(
        self,
        "stac_item_topic_output",
        value=self.topics_["stac_item_topic"].topic_arn,
        description="STAC item topic",
    )
    sit_policy = iam.PolicyDocument(
        assign_sids=True,
        statements=[
            iam.PolicyStatement(
                actions=["SNS:Subscribe", "SNS:Receive"],
                principals=[iam.AnyPrincipal()],
                resources=[self.topics_["stac_item_topic"].topic_arn],
            )
        ],
    )
    sit_policy.add_statements(
        iam.PolicyStatement(
            actions=[
                "SNS:GetTopicAttributes",
                "SNS:SetTopicAttributes",
                "SNS:AddPermission",
                "SNS:RemovePermission",
                "SNS:DeleteTopic",
                "SNS:Subscribe",
                "SNS:ListSubscriptionsByTopic",
                "SNS:Publish",
                "SNS:Receive",
            ],
            principals=[iam.AccountPrincipal(self.account)],
            resources=[self.topics_["stac_item_topic"].topic_arn],
        ))
    # We could add the document directly to stac_item_policy
    sns.TopicPolicy(
        self,
        "sns_public_topic_policy",
        topics=[self.topics_["stac_item_topic"]],
        policy_document=sit_policy,
    )

    # Reconcile topic, used internally for reconciliation operations
    self.topics_["reconcile_stac_item_topic"] = sns.Topic(
        self, "reconcile_stac_item_topic")

def __init__(self, app: cdk.App, id: str, **kwargs) -> None:
    super().__init__(app, id)

    jsonFileBucket = s3.Bucket(self, 'jsonFileBucket')
    newActionTopic = sns.Topic(self, 'newActions')
    lambdaErrorSnsTopic = sns.Topic(self, 'lambdaError')

    awslib.BucketCleanupFunction(self, 'jsonFileBucketCleanup',
                                 bucket=jsonFileBucket)
    bucketResource = jsonFileBucket.node.find_child('Resource')
    bucketResource.add_override('DeletionPolicy', 'Delete')

    with open("aws_action_watchdog/lambda-handler.js", encoding="utf8") as fp:
        handler_code = fp.read()
    handler_code = handler_code.replace('{{S3BucketName}}', jsonFileBucket.bucket_name)
    handler_code = handler_code.replace('{{SnsTopicArn}}', newActionTopic.topic_arn)

    actionWatchdogLambda = lambda_.Function(
        self, 'watchdog_Lambda',
        runtime=lambda_.Runtime.NODEJS810,
        timeout=15,
        code=lambda_.Code.inline(handler_code),
        handler='index.handler')

    schedule = events.Rule(self, 'lambdaScheduleEvent',
                           schedule=events.Schedule.rate(
                               interval=24, unit=events.TimeUnit.Hour))
    schedule.add_target(
        target=targets.LambdaFunction(actionWatchdogLambda))

    alarm = actionWatchdogLambda.metric_all_errors().create_alarm(
        self, 'lambdaErrorAlarm',
        threshold=0,
        evaluation_periods=1,
        comparison_operator=cloudwatch.ComparisonOperator.GreaterThanThreshold,
        treat_missing_data=cloudwatch.TreatMissingData.Missing,
        alarm_name='Lambda Error',
        datapoints_to_alarm=1,
    )
    alarm.add_alarm_action(actions.SnsAction(lambdaErrorSnsTopic))

    newActionTopic.grant_publish(actionWatchdogLambda)
    jsonFileBucket.grant_read_write(actionWatchdogLambda)

def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    super().__init__(scope, construct_id, **kwargs)

    # The code that defines your stack goes here
    sns_topic = sns.Topic(self, 'Topic')
    snsEmail = core.CfnParameter(
        self, 'SNSEmail',
        default='PAnong@automation_rocks.com',
        description='Email Endpoint for SNS Notifications',
        type='String')
    email = sns_topic.add_subscription(
        subscriptions.EmailSubscription(snsEmail.value_as_string))

    cwAlarm = cw.CfnAlarm(
        self, 'VPCAlarm',
        actions_enabled=True,
        alarm_actions=[sns_topic.topic_arn],
        alarm_description="A CloudWatch Alarm that triggers when changes are made to the VPC.",
        comparison_operator="GreaterThanOrEqualToThreshold",
        evaluation_periods=1,
        treat_missing_data="notBreaching",
        threshold=1,
        metric_name="VpcEventCount",
        namespace="CloudTrailMetrics",
        period=300,
        statistic="Sum",
    )

def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
    super().__init__(scope, construct_id, **kwargs)

    # SNS Topic
    MySnsTopic = sns.Topic(self, "MySnsTopic")

    # Custom EventBridge Bus
    custom_bus = events.EventBus(self, "bus",
                                 event_bus_name="test-bus-cdk")

    # EventBridge Rule
    rule = events.Rule(self, "rule",
                       event_bus=custom_bus)

    # Event Pattern to filter events
    rule.add_event_pattern(source=["my-application"],
                           detail_type=["message"])

    # SNS topic as target for EventBridge Rule
    rule.add_target(targets.SnsTopic(MySnsTopic))

    # CDK Outputs
    CfnOutput(self, "SNS topic name",
              description="SNS topic name",
              value=MySnsTopic.topic_name)
    CfnOutput(self, "SNS topic ARN",
              description="SNS topic ARN",
              value=MySnsTopic.topic_arn)

def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    queue = sqs.Queue(
        self, "PycdkworkshopQueue",
        visibility_timeout=core.Duration.seconds(300),
    )
    topic = sns.Topic(self, "PycdkworkshopTopic")
    bucket = s3.Bucket(self, id='s3cdkbucket', versioned=True)

    lambdafunction = _lambda.Function(
        self, id='lambdafunction',
        runtime=_lambda.Runtime.PYTHON_3_7,
        handler='hello.handler',
        code=_lambda.Code.from_asset(path='lambdacode'))
    lambdaapi = api.LambdaRestApi(self, id='restapi', handler=lambdafunction)

    topic.add_subscription(subs.SqsSubscription(queue))

def __init__(self, scope: core.Construct, id: str, bank_account_service: lambda_.Function,
             stage: Optional[str] = 'prod', **kwargs) -> None:
    super().__init__(scope, id+'-'+stage, **kwargs)

    # create SNS topic
    topic = sns.Topic(self, "BankTopic",
                      display_name="SMSOutbound",
                      topic_name="SMSOutbound")
    topic.add_subscription(subs.EmailSubscription(email_address="*****@*****.**"))

    # create the EventBridge stuff
    bus_name = 'banking-demo-events-'+stage
    bus = events.EventBus(self, id, event_bus_name=bus_name)
    events.Rule(self, "HUMAN_REVIEWED_APPLICATION",
                event_bus=bus,
                event_pattern=events.EventPattern(
                    detail_type=["HUMAN_REVIEWED_APPLICATION"]),
                rule_name="HUMAN_REVIEWED_APPLICATION",
                enabled=True,
                targets=[targets.SnsTopic(topic)])
    events.Rule(self, "APPLICATION_SUBMITTED",
                event_bus=bus,
                event_pattern=events.EventPattern(
                    detail_type=["APPLICATION_SUBMITTED"]),
                rule_name="APPLICATION_SUBMITTED",
                enabled=True)
    events.Rule(self, "APPLICATION_APPROVED",
                event_bus=bus,
                event_pattern=events.EventPattern(
                    detail_type=["APPLICATION_APPROVED"]),
                rule_name="APPLICATION_APPROVED",
                enabled=True,
                targets=[targets.LambdaFunction(lambda_.Function.from_function_arn(
                    self, "func", bank_account_service.function_arn))])

    self._event_bus_arn = bus.event_bus_arn

def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    queue = sqs.Queue(
        self, "CbsappQueue",
        visibility_timeout=core.Duration.seconds(300),
    )
    topic = sns.Topic(self, "CbsappTopic")

    table = dynamodb.Table(self, id='dynamoTable',
                           table_name='testcdktable',
                           partition_key=dynamodb.Attribute(
                               name='lastname',
                               type=dynamodb.AttributeType.STRING))

    function = _lambda.Function(scope=self,
                                id='lambdafunction',
                                function_name='_lambda',
                                code=_lambda.Code.asset('lambdacode'),
                                handler='lambdahandler.main',
                                runtime=_lambda.Runtime.PROVIDED,
                                memory_size=512,
                                timeout=core.Duration.seconds(120))

    topic.add_subscription(subs.SqsSubscription(queue))

def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    super().__init__(scope, construct_id, **kwargs)

    queue = sqs.Queue(self, "sqsqueue",
                      visibility_timeout=core.Duration.seconds(300))
    topic = sns.Topic(self, "snstopic")
    topic.add_subscription(subs.SqsSubscription(queue))

    bucket = s3.Bucket(
        self, "s3Bucket",
        encryption=s3.BucketEncryption.KMS_MANAGED,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    bucket.add_event_notification(s3.EventType.OBJECT_CREATED_PUT,
                                  s3n.SnsDestination(topic))

    s3deploy.BucketDeployment(
        self, "DeployFile",
        sources=[s3deploy.Source.asset("./assets")],
        destination_bucket=bucket,
        retain_on_delete=False,
    )

def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    # Setup SNS topic we will be sending messages to on failure
    sns_topic = sns.Topic(self, "ServiceStatusTopic",
                          display_name="Service Status Topic")

    # Create lambda resource using code from local disk
    lambda_handler = _lambda.Function(
        self, "SimpleHealthCheckLambda",
        code=_lambda.Code.from_asset(LAMBDA_FUNC_PATH),
        runtime=_lambda.Runtime.PYTHON_3_7,
        handler="simple_health_check.simple_health_check",
        memory_size=256,
        timeout=core.Duration.seconds(5))

    # Subscribe an HTTPS endpoint to our SNS topic and grant the lambda function permissions to use it
    sns_topic.add_subscription(
        sns_subscriptions.UrlSubscription(
            "https://endpointstatus.snagajob.com"))
    sns_topic.grant_publish(lambda_handler)

    # Create a CloudWatch Event trigger for every minute
    rule = events.Rule(
        self, "SimpleHealthCheckLambdaRule",
        schedule=events.Schedule.expression("cron(0/1 * * * ? *)"))

    # Add lambda function as target of event rule
    rule.add_target(targets.LambdaFunction(lambda_handler))

def __init__(self, app: App, id: str) -> None:
    super().__init__(app, id)

    # Lambda Function
    lambdaFn = _lambda.Function(self, "SNSEventHandler",
                                runtime=_lambda.Runtime.PYTHON_3_9,
                                code=_lambda.Code.from_asset("lambda"),
                                handler="handler.main",
                                timeout=Duration.seconds(10))

    # Set Lambda Logs Retention and Removal Policy
    logs.LogGroup(self, 'logs',
                  log_group_name=f"/aws/lambda/{lambdaFn.function_name}",
                  removal_policy=RemovalPolicy.DESTROY,
                  retention=logs.RetentionDays.ONE_DAY)

    # SNS topic
    topic = sns.Topic(self, 'sns-to-lambda-topic-test',
                      display_name='My SNS topic')

    # subscribe Lambda to SNS topic
    topic.add_subscription(subs.LambdaSubscription(lambdaFn))

    # Output information about the created resources
    CfnOutput(self, 'snsTopicArn',
              value=topic.topic_arn,
              description='The arn of the SNS topic')
    CfnOutput(self, 'functionName',
              value=lambdaFn.function_name,
              description='The name of the handler function')

def __init__(self, scope: core.Construct, id: str, props, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    ################################################################################
    # Create a Lambda function to process the CodeBuild state change events
    # and send out appropriate Slack messages

    # Permissions for the Lambda
    lambda_role = _iam.Role(
        self,
        id='UmccriseCodeBuildSlackLambdaRole',
        assumed_by=_iam.ServicePrincipal('lambda.amazonaws.com'),
        managed_policies=[
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonSSMReadOnlyAccess'),
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AWSLambdaBasicExecutionRole'),
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonEC2ContainerRegistryReadOnly')
        ])

    # The Lambda function itself
    function = _lambda.Function(
        self,
        id='UmccriseCodeBuildSlackLambda',
        handler='notify_slack.lambda_handler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.asset('lambdas/slack'),
        environment={
            'SLACK_HOST': 'hooks.slack.com',
            'SLACK_CHANNEL': props['slack_channel'],
            'ECR_NAME': props['ecr_name'],
            'AWS_ACCOUNT': props['aws_account']  # TODO: get from kwargs (env)
        },
        role=lambda_role)

    ################################################################################
    # Create a reference to the UMCCRise CodeBuild project
    # TODO: should probably use cross-stack resource references
    cb_project = cb.Project.from_project_name(
        self,
        id='UmccriseCodeBuildProject',
        project_name=props['codebuild_project_name'])

    ################################################################################
    # Create an SNS topic to receive CodeBuild state change events
    sns_topic = _sns.Topic(self,
                           id='UmccriseCodeBuildSnsTopic',
                           display_name='UmccriseCodeBuildSnsTopic',
                           topic_name='UmccriseCodeBuildSnsTopic')
    sns_topic.grant_publish(cb_project)
    sns_topic.add_subscription(_sns_subs.LambdaSubscription(function))

    # Send state change events to SNS topic
    cb_project.on_state_change(
        id='UmccriseCodebuildStateChangeRule',
        rule_name='UmccriseCodebuildStateChangeRule',
        target=targets.SnsTopic(sns_topic))

def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    prj_name = self.node.try_get_context('project_name')
    env_name = self.node.try_get_context('env')

    lambda_function = lb.Function(self, "notifiaction_lambda",
                                  runtime=lb.Runtime.PYTHON_3_8,
                                  code=lb.Code.asset('lambda'),
                                  handler='hello.handler')

    cw_rule = events.Rule(self, 'cwrule',
                          schedule=events.Schedule.cron(minute='0',
                                                        hour='5',
                                                        month='*',
                                                        week_day='*',
                                                        year='*'))
    cw_rule.add_target(targets.LambdaFunction(lambda_function))

    lambda_topic = sns.Topic(self, 'lambdatopic',
                             topic_name='serverless-lambda-topic')
    lambda_topic.add_subscription(subs.LambdaSubscription(lambda_function))

def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
    super().__init__(scope, construct_id, **kwargs)

    # Create SNS topic
    topic = sns.Topic(self, 'sns-to-lambda-topic',
                      display_name='My SNS topic')

    # Create Lambda function
    lambdaFn = _lambda.Function(self, "SNSPublisher",
                                runtime=_lambda.Runtime.PYTHON_3_9,
                                code=_lambda.Code.from_asset("lambda"),
                                handler="handler.main",
                                timeout=cdk.Duration.seconds(10))

    # Set Lambda Logs Retention and Removal Policy
    logs.LogGroup(self, 'logs',
                  log_group_name=f"/aws/lambda/{lambdaFn.function_name}",
                  removal_policy=cdk.RemovalPolicy.DESTROY,
                  retention=logs.RetentionDays.ONE_DAY)

    # Grant publish to lambda function
    topic.grant_publish(lambdaFn)

    cdk.CfnOutput(self, 'snsTopicArn',
                  value=topic.topic_arn,
                  description='The arn of the SNS topic')
    cdk.CfnOutput(self, 'functionName',
                  value=lambdaFn.function_name,
                  description='The name of the handler function')

def __init__(self, scope: core.Construct, id: str, ec2_params: dict,
             lambda_params: dict, sns_params: dict, **kwargs) -> None:
    """
    Deploys all AWS resources for the plus environment.

    Resources:
        AWS::EC2::Instance with your details
        AWS::Lambda::Function with your policies
        AWS::Cloudwatch::Alarm for EC2 and Lambda
        AWS::Cloudwatch::Dashboard for EC2 and Lambda
        AWS::SNS::Topic for EC2 and Lambda alarms
    """
    super().__init__(scope, id, ec2_params, lambda_params, **kwargs)

    # sns
    topic = sns.Topic(self, "Topic",
                      topic_name=sns_params['topic_name'],
                      display_name=sns_params['display_name'])
    topic.add_subscription(
        subscriptions.EmailSubscription(sns_params['endpoint']))

    # ec2
    self.ec2_alarm.add_alarm_action(cw_actions.SnsAction(topic))
    self.ec2_alarm.add_ok_action(cw_actions.SnsAction(topic))

    # lambda
    self.lambda_alarm.add_alarm_action(cw_actions.SnsAction(topic))
    self.lambda_alarm.add_ok_action(cw_actions.SnsAction(topic))

def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    topic = sns.Topic(
        self, "MySecondTopic",
        display_name="My Second Topic"
    )

def __init__(self, scope: core.Construct, id: str, sns_topic_arn: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    # SNS Topic creation
    topic = sns.Topic(self, 'TheXRayTracerSnsTopic',
                      display_name='The XRay Tracer CDK Pattern Topic')

    sns_lambda = _lambda.Function(
        self, "snsLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="sns_publish.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE,
        environment={"TOPIC_ARN": topic.topic_arn})
    topic.grant_publish(sns_lambda)

    apigw_topic = sns.Topic.from_topic_arn(self, 'SNSTopic', sns_topic_arn)
    apigw_topic.add_subscription(
        subscriptions.LambdaSubscription(sns_lambda))

    sns_subscriber_lambda = _lambda.Function(
        self, "snsSubscriptionLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="sns_subscribe.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE)
    topic.add_subscription(
        subscriptions.LambdaSubscription(sns_subscriber_lambda))

def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
    super().__init__(scope, construct_id, **kwargs)

    # Create the queue
    MySqsQueue = sqs.Queue(self, "MySqsQueue")

    # Create the Topic
    MySnsTopic = sns.Topic(self, "MySnsTopic")

    # Create an SQS topic subscription object
    sqsSubscription = snssubs.SqsSubscription(MySqsQueue)

    # Add the SQS subscription to the sns topic
    MySnsTopic.add_subscription(sqsSubscription)

    # Add policy statement to the SQS queue policy that is created as part of the new queue,
    # allowing the topic (matched via the aws:SourceArn condition) to send messages
    MySqsQueue.add_to_resource_policy(
        iam.PolicyStatement(actions=['SQS:SendMessage'],
                            effect=iam.Effect.ALLOW,
                            conditions={'ArnEquals': {'aws:SourceArn': MySnsTopic.topic_arn}},
                            resources=[MySqsQueue.queue_arn],
                            principals=[
                                iam.ServicePrincipal('sns.amazonaws.com')
                            ]))

    CfnOutput(self, "SQS queue name",
              description="SQS queue name",
              value=MySqsQueue.queue_name)
    CfnOutput(self, "SQS queue ARN",
              description="SQS queue arn",
              value=MySqsQueue.queue_arn)
    CfnOutput(self, "SQS queue URL",
              description="SQS queue URL",
              value=MySqsQueue.queue_url)
    CfnOutput(self, "SNS topic name",
              description="SNS topic name",
              value=MySnsTopic.topic_name)
    CfnOutput(self, "SNS topic ARN",
              description="SNS topic ARN",
              value=MySnsTopic.topic_arn)

def __init__(self, app: core.App, id: str, **kwargs):
    super().__init__(app, id, **kwargs)

    # [ SNS ] Topic:
    #
    #   - The error topic for all issues.
    topic = sns.Topic(self, 'Topic', display_name='Pipeline Alert')

    # [ SNS ] Target:
    #
    #   - Wraps the error topic so it can be used as an event target.
    sns_target = targets.SnsTopic(topic)

    # [ SNS ] Subscription:
    #
    #   - Takes all emails in the list and creates email subscriptions for each.
    for email in notification_emails:
        topic.add_subscription(
            sns_subscriptions.EmailSubscription(email_address=email))

    # [ SNS ]
    #
    #
    self.sns_target = sns_target

def __init__(self, scope: core.Construct, id: str, slack_channel: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    lambda_role = _iam.Role(
        self, 'SlackLambdaRole',
        assumed_by=_iam.ServicePrincipal('lambda.amazonaws.com'),
        managed_policies=[
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonSSMReadOnlyAccess'),
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AWSLambdaBasicExecutionRole')
        ])

    function = _lambda.Function(self, 'IapSlackLambda',
                                handler='notify_slack.lambda_handler',
                                runtime=_lambda.Runtime.PYTHON_3_7,
                                code=_lambda.Code.asset('lambdas/iap'),
                                environment={
                                    "SLACK_HOST": "hooks.slack.com",
                                    "SLACK_CHANNEL": slack_channel
                                },
                                role=lambda_role)

    sns_topic = _sns.Topic(self, 'IapSnsTopic',
                           display_name='IapSnsTopic',
                           topic_name='IapSnsTopic')
    sns_topic.grant_publish(
        _iam.AccountPrincipal(self.illumina_iap_account))
    sns_topic.add_subscription(_sns_subs.LambdaSubscription(function))

def create(self):
    self.sns_topic = aws_sns.Topic(
        scope=self,
        id=self.unique_name,
        display_name=self.unique_name,
        topic_name=self.unique_name,
    )
    self.add_email_subscription()

def test_get_function(self):
    stack = core.Stack(core.App(), 'test-stack',
                       env=core.Environment(account='123456789012', region='us-east-1'))
    vpc = ec2.Vpc.from_lookup(stack, 'test-vpc', vpc_id='vpc-12345678')
    success_topic = sns.Topic(stack, 'SuccessTopic')
    failure_topic = sns.Topic(stack, 'FailureTopic')

    profile = emr_profile.EMRProfile(
        stack, 'test-profile',
        profile_name='test-profile',
        vpc=vpc)
    configuration = cluster_configuration.ClusterConfiguration(
        stack, 'test-configuration',
        configuration_name='test-configuration')

    function = emr_launch_function.EMRLaunchFunction(
        stack, 'test-function',
        launch_function_name='test-function',
        emr_profile=profile,
        cluster_configuration=configuration,
        cluster_name='test-cluster',
        description='test description',
        success_topic=success_topic,
        failure_topic=failure_topic,
        allowed_cluster_config_overrides=configuration.override_interfaces['default'],
        wait_for_cluster_start=False
    )

    ssm = boto3.client('ssm')
    ssm.put_parameter(
        Name=f'{emr_profile.SSM_PARAMETER_PREFIX}/{profile.namespace}/{profile.profile_name}',
        Value=json.dumps(profile.to_json()))
    ssm.put_parameter(
        Name=f'{cluster_configuration.SSM_PARAMETER_PREFIX}/'
             f'{configuration.namespace}/{configuration.configuration_name}',
        Value=json.dumps(configuration.to_json()))
    ssm.put_parameter(
        Name=f'{emr_launch_function.SSM_PARAMETER_PREFIX}/{function.namespace}/{function.launch_function_name}',
        Value=json.dumps(function.to_json()))

    restored_function = emr_launch_function.EMRLaunchFunction.from_stored_function(
        stack, 'test-restored-function',
        namespace=function.namespace,
        launch_function_name=function.launch_function_name,
    )

    self.assertEqual(function.to_json(), restored_function.to_json())

def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    super().__init__(scope, construct_id, **kwargs)

    # template = cfn_inc.CfnInclude(self, id='Template', template_file='template.yaml')
    # The code that defines your stack goes here
    bucket_names = 'config-1' + str(core.Aws.ACCOUNT_ID)

    sns_topic = _sns.Topic(self, id='topic-config', topic_name='config-topic')
    sns_topic.add_subscription(subscriptions.EmailSubscription("*****@*****.**"))

    bucket = s3.Bucket(self, id='s3cdkbuckets', bucket_name=bucket_names, versioned=True)
    bucket_arn2 = str(bucket.bucket_arn) + "/AWSLogs/" + str(core.Aws.ACCOUNT_ID) + "/Config/*"
    bucket_policy = bucket.add_to_resource_policy(iam.PolicyStatement(
        effect=iam.Effect.ALLOW,
        resources=[bucket.bucket_arn],
        actions=["s3:GetBucketAcl"],
        sid="AWSConfigBucketPermissionsCheck",
        principals=[iam.ServicePrincipal("config.amazonaws.com")]
    ))
    bucket_policy2 = bucket.add_to_resource_policy(iam.PolicyStatement(
        effect=iam.Effect.ALLOW,
        resources=[bucket_arn2],
        actions=["s3:PutObject"],
        sid="AWSConfigBucketDelivery",
        principals=[iam.ServicePrincipal("config.amazonaws.com")],
        conditions={"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}
    ))

    recorder = config.CfnConfigurationRecorder(
        self, id='recorder',
        role_arn='arn:aws:iam::306646308112:role/aws-service-role/config.amazonaws.com/AWSServiceRoleForConfig',
        recording_group=None)
    channel = config.CfnDeliveryChannel(
        self, id='channel',
        s3_bucket_name=bucket.bucket_name,
        sns_topic_arn=sns_topic.topic_arn)
    time.sleep(20)

    srule = config.CfnConfigRule(
        self, id='rule1',
        source=config.CfnConfigRule.SourceProperty(owner="AWS", source_identifier="REQUIRED_TAGS"),
        input_parameters={"tag1Key": "tagVal"})
    srule2 = config.CfnConfigRule(
        self, id='rule2',
        source=config.CfnConfigRule.SourceProperty(owner="AWS", source_identifier="S3_BUCKET_LEVEL_PUBLIC_ACCESS_PROHIBITED"))
    srule3 = config.CfnConfigRule(
        self, id='rule3',
        source=config.CfnConfigRule.SourceProperty(owner="AWS", source_identifier="VPC_SG_OPEN_ONLY_TO_AUTHORIZED_PORTS"))
    srule.add_depends_on(recorder)
    srule2.add_depends_on(recorder)
    srule3.add_depends_on(recorder)

    event_rule = _events.Rule(self, id='event_rule', event_pattern={
        "source": ["aws.config"],
        "detail": {
            "messageType": ["ConfigurationItemChangeNotification"],
            "newEvaluationResult": {
                "compliance_type": ["NON_COMPLIANT"]
            }
        }
    })
    event_rule.add_target(targets.SnsTopic(sns_topic))

def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    def get_userdata():
        with open('bootstrap.sh', 'r') as userdata:
            return userdata.read()

    kratos_role = aws_iam.Role.from_role_arn(
        self, 'KratosXL',
        role_arn="arn:aws:iam::88888888:role/KratosRole")
    lambda_role = aws_iam.Role.from_role_arn(
        self, 'LambdaXL',
        role_arn="arn:aws:iam::999999999:role/Lambda_Kratos")

    sns_topic = aws_sns.Topic(self, "Topic", display_name="cdk-sns-trigger")

    lambda_function = aws_lambda.Function(
        self, "FetchAtopLogs",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        role=lambda_role,
        handler="lambda_handler.lambda_handler",
        code=aws_lambda.Code.from_asset('myfunc'))
    lambda_function.add_event_source(
        aws_lambda_event_sources.SnsEventSource(sns_topic))
    sns_subscription = aws_sns_subscriptions.LambdaSubscription(lambda_function)

    def generate_instances(count=1):
        amazon_linux_2 = aws_ec2.GenericLinuxImage(
            {"us-east-1": "ami-0fc61db8544a617ed"})
        ec2_objects = []
        for i in range(count):
            ec2_instance = aws_ec2.Instance(
                self, f"CDK-Instance-{i + 1}",
                instance_type=aws_ec2.InstanceType('t2.micro'),
                role=kratos_role,
                machine_image=amazon_linux_2,
                security_group=aws_ec2.CfnSecurityGroup(
                    self, id=f"SG{i + 1}", group_description=f"SG-CDK-{i}"),
                vpc=aws_ec2.Vpc.from_lookup(self, f'CDK-VPC-{i + 1}',
                                            vpc_id="vpc-eeeee3"),
                user_data=aws_ec2.UserData.custom(get_userdata()),
                key_name="covidQuarantine")
            ec2_objects.append(ec2_instance)
        return ec2_objects

    generate_instances()

def _create_slack_notify_sns_topic(self) -> Resource:
    """Create SNS topic for slack notification."""
    topic_id = f"{self.name_prefix}-sns_slack_notify_topic"
    topic = sns.Topic(scope=self,
                      id=topic_id,
                      fifo=False,
                      topic_name=f"{self.name_prefix}-slack_notify")
    return topic

def test_fail_chain():
    default_fragment_json = {
        'Type': 'Parallel',
        'End': True,
        'Branches': [{
            'StartAt': 'test-fragment: Failure Notification',
            'States': {
                'test-fragment: Failure Notification': {
                    'Next': 'test-fragment: Execution Failed',
                    'InputPath': '$',
                    'Parameters': {
                        'TopicArn': {
                            'Ref': 'testtopicB3D54793'
                        },
                        'Message': 'TestMessage',
                        'Subject': 'TestSubject'
                    },
                    'OutputPath': '$',
                    'Type': 'Task',
                    'Resource': {
                        'Fn::Join': [
                            '',
                            [
                                'arn:',
                                {
                                    'Ref': 'AWS::Partition'
                                },
                                ':states:::sns:publish'
                            ]
                        ]
                    },
                    'ResultPath': '$.PublishResult'
                },
                'test-fragment: Execution Failed': {
                    'Type': 'Fail',
                    'Comment': 'TestComment',
                    'Error': 'TestError',
                    'Cause': 'TestCause'
                }
            }
        }]
    }

    stack = core.Stack(core.App(), 'test-stack')

    fragment = emr_chains.Fail(stack, 'test-fragment',
                               message=sfn.TaskInput.from_text('TestMessage'),
                               subject='TestSubject',
                               topic=sns.Topic(stack, 'test-topic'),
                               cause='TestCause',
                               comment='TestComment',
                               error='TestError')

    print_and_assert(default_fragment_json, fragment)

def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    # Create lambda function
    lambda_pipeline_alerts = _lambda.Function(
        self,
        id="lambda_pipeline_alerts_asset",
        function_name='lambda_pipeline_alerts_asset',
        code=_lambda.Code.asset("lambda_pipeline_alerts_asset"),
        runtime=_lambda.Runtime.PYTHON_3_7,
        handler="index.lambda_handler")

    # add Env Vars
    lambda_pipeline_alerts.add_environment(
        'SLACK_WEB_HOOK_URL',
        'https://hooks.slack.com/services/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'
    )
    lambda_pipeline_alerts.add_environment('SLACK_CHANNEL', '#tech-pay-deploys')

    # Create sns topic for the pipeline events
    sns_topic_pipeline_alerts = _sns.Topic(self,
                                           id='sns_pipeline_alerts',
                                           display_name='pipelines-events',
                                           topic_name='pipelines-events')

    # add lambda to sns subscription
    sns_topic_pipeline_alerts.add_subscription(
        _sns_subscription.LambdaSubscription(lambda_pipeline_alerts))

    # Create the event rule
    event_rule = _events.Rule(
        self,
        id='pipeline_alerts',
        rule_name='pipeline_alerts',
        description='Cloud Watch Event Rule to check pipeline events')

    # Cloud watch event configuration
    event_source = ["aws.codepipeline"]
    event_detail_type = ["CodePipeline Pipeline Execution State Change"]
    event_detail = {"state": ["FAILED"]}

    # add event pattern to send to target
    event_rule.add_event_pattern(detail=event_detail,
                                 detail_type=event_detail_type,
                                 source=event_source)

    # add target
    pipeline_name = _events.EventField.from_path('$.detail.pipeline')
    event_rule.add_target(
        _targets.SnsTopic(
            sns_topic_pipeline_alerts,
            message=_events.RuleTargetInput.from_text(
                f':rotating_light:The Pipeline `{pipeline_name}` has failed.:rotating_light:'
            )))

def __init__(self, scope: core.Construct, id: str, UserName="******", EmailAddress="default", **kwargs):
    super().__init__(scope, id, **kwargs)

    self.SNSTopicList = {}

    self.Topic_Batch_Job_Notification = _sns.Topic(
        self, "Batch_Job_Notification",
        display_name="BatchJobNotification_" + UserName,
        topic_name="BatchJobNotification_" + UserName
    )
    self.Topic_Batch_Job_Notification.add_subscription(_subs.EmailSubscription(EmailAddress))
    self.SNSTopicList["Topic_Batch_Job_Notification"] = self.Topic_Batch_Job_Notification

def __init__(self, scope: core.Construct, id: str, notification_email):
    super().__init__(scope, id)

    stack_name = core.Stack.of(self).stack_name

    self.topic = sns.Topic(
        self, "topic",
        topic_name=f"{stack_name}-{names.NOTIFICATIONS_TOPIC}")
    self.topic.add_subscription(
        sns_subs.EmailSubscription(notification_email))

    self.custom_metric_namespace = f"{stack_name}-{names.METRIC_NAMESPACE}"