def __init__(self, scope: core.App, id: str, **kwargs) -> None:
        """Stack wiring two Lambdas into a polling Step Functions state machine.

        Flow: start task -> wait -> poll status -> Choice that ends on
        FAILED/SUCCEEDED or loops back to the wait state.
        """
        super().__init__(scope, id, **kwargs)

        # Lambda invoked once when the execution starts.
        pass_through_lambda = _lambda.Function(
            self,
            'PassThroughLambda',
            runtime=_lambda.Runtime.PYTHON_3_7,
            code=_lambda.Code.asset('lambda'),
            handler='pass_through_lambda.handler')

        # Lambda polled repeatedly to report build status.
        loop_count_lambda = _lambda.Function(
            self,
            'LoopCountLambda',
            runtime=_lambda.Runtime.PYTHON_3_7,
            code=_lambda.Code.asset('lambda'),
            handler='loop_count_lambda.handler')

        start_state_machine = sfn.Task(
            self,
            "Start CodeBuild Lambda",
            task=sfn_tasks.InvokeFunction(pass_through_lambda))

        # Wait duration is read from the execution state at $.wait_time.
        wait_x = sfn.Wait(
            self,
            "Wait X Seconds",
            time=sfn.WaitTime.seconds_path('$.wait_time'),
        )

        get_state_machine_status = sfn.Task(
            self,
            "Get Build Status",
            task=sfn_tasks.InvokeFunction(loop_count_lambda))

        is_complete = sfn.Choice(self, "Job Complete?")

        state_machine_failed = sfn.Fail(self,
                                        "Build Failed",
                                        cause="AWS Batch Job Failed",
                                        error="DescribeJob returned FAILED")

        # Fix: state name was misspelled "Build Successs".
        state_machine_success = sfn.Pass(self, "Build Success")

        # Any status other than FAILED/SUCCEEDED loops back to the wait state.
        definition = start_state_machine\
            .next(wait_x)\
            .next(get_state_machine_status)\
            .next(is_complete
                  .when(sfn.Condition.string_equals(
                      "$.status", "FAILED"), state_machine_failed)
                  .when(sfn.Condition.string_equals(
                      "$.status", "SUCCEEDED"), state_machine_success)
                  .otherwise(wait_x))

        sfn.StateMachine(
            self,
            "StateMachine",
            definition=definition,
            timeout=core.Duration.seconds(60),
        )
# --- Beispiel #2 (example separator from the scraped source) ---
    def build(
            scope: core.Construct,
            id: str,
            *,
            default_fail_if_cluster_running: bool,
            cluster_configuration_path: str = '$.ClusterConfiguration.Cluster',
            output_path: str = '$',
            result_path: str = '$.ClusterConfiguration.Cluster') -> sfn.Task:
        """Build the 'Fail If Cluster Running' Step Functions Task."""
        # Nested Construct keeps Lambda/Task ids from colliding with siblings.
        scoped = core.Construct(scope, id)

        checker_lambda = emr_lambdas.FailIfClusterRunningBuilder.get_or_build(
            scoped)

        invoke_payload = {
            'ExecutionInput':
            sfn.TaskInput.from_context_at('$$.Execution.Input').value,
            'DefaultFailIfClusterRunning':
            default_fail_if_cluster_running,
            'ClusterConfiguration':
            sfn.TaskInput.from_data_at(cluster_configuration_path).value,
        }

        return sfn.Task(
            scoped,
            'Fail If Cluster Running',
            output_path=output_path,
            result_path=result_path,
            task=sfn_tasks.InvokeFunction(checker_lambda,
                                          payload=invoke_payload))
# --- Beispiel #3 (example separator from the scraped source) ---
    def build(
            scope: core.Construct,
            id: str,
            *,
            cluster_configuration_path: str = '$.ClusterConfiguration.Cluster',
            output_path: str = '$',
            result_path: str = '$.ClusterConfiguration.Cluster') -> sfn.Task:
        """Build the 'Update Cluster Tags' Step Functions Task."""
        # Nested Construct keeps Lambda/Task ids from colliding with siblings.
        scoped = core.Construct(scope, id)

        tagger_lambda = emr_lambdas.UpdateClusterTagsBuilder.get_or_build(
            scoped)

        invoke_payload = {
            'ExecutionInput':
            sfn.TaskInput.from_context_at('$$.Execution.Input').value,
            'ClusterConfiguration':
            sfn.TaskInput.from_data_at(cluster_configuration_path).value,
        }

        return sfn.Task(
            scoped,
            'Update Cluster Tags',
            output_path=output_path,
            result_path=result_path,
            task=sfn_tasks.InvokeFunction(tagger_lambda,
                                          payload=invoke_payload))
# --- Beispiel #4 (example separator from the scraped source) ---
    def build(
            scope: core.Construct,
            id: str,
            *,
            override_cluster_configs_lambda: Optional[
                aws_lambda.Function] = None,
            allowed_cluster_config_overrides: Optional[Dict[str, str]] = None,
            cluster_configuration_path: str = '$.ClusterConfiguration.Cluster',
            output_path: str = '$',
            result_path: str = '$.ClusterConfiguration.Cluster') -> sfn.Task:
        """Build the 'Override Cluster Configs' Task, creating the default
        Lambda when the caller does not supply one."""
        # Nested Construct keeps Lambda/Task ids from colliding with siblings.
        scoped = core.Construct(scope, id)

        if override_cluster_configs_lambda is None:
            override_cluster_configs_lambda = (
                emr_lambdas.OverrideClusterConfigsBuilder.get_or_build(scoped))

        invoke_payload = {
            'ExecutionInput':
            sfn.TaskInput.from_context_at('$$.Execution.Input').value,
            'ClusterConfiguration':
            sfn.TaskInput.from_data_at(cluster_configuration_path).value,
            'AllowedClusterConfigOverrides':
            allowed_cluster_config_overrides,
        }

        return sfn.Task(
            scoped,
            'Override Cluster Configs',
            output_path=output_path,
            result_path=result_path,
            task=sfn_tasks.InvokeFunction(override_cluster_configs_lambda,
                                          payload=invoke_payload))
# --- Beispiel #5 (example separator from the scraped source) ---
    def _lambda_glue_crawler_task(self):
        """Create a Lambda that starts the Glue crawler, wrapped as an SFN Task."""
        here = Path(os.path.dirname(os.path.abspath(__file__)))
        handler_path = here.joinpath('lambdas',
                                     'trigger_glue_crawler').as_posix()

        trigger_fn = lambda_.Function(
            self,
            "TriggerGlueCrawlerLambdaHandler",
            handler="lambda.lambda_handler",
            code=lambda_.AssetCode(handler_path),
            environment={"crawlerName": f"{self.glue_crawler.name}"},
            initial_policy=[
                # Crawler name is not known as an ARN here, hence resource "*".
                iam.PolicyStatement(
                    actions=["glue:StartCrawler"],
                    resources=["*"],
                ),
            ],
            timeout=core.Duration.seconds(30),
            runtime=lambda_.Runtime.PYTHON_3_7,
        )

        # Wrap the Lambda as a Step Functions task for use in the state machine.
        return sfn.Task(
            self,
            "TriggerGlueCrawlerLambda",
            task=sfnt.InvokeFunction(trigger_fn),
        )
# --- Beispiel #6 (example separator from the scraped source) ---
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 *,
                 emr_step: emr_code.EMRStep,
                 cluster_id: str,
                 result_path: Optional[str] = None,
                 output_path: Optional[str] = None,
                 fail_chain: Optional[sfn.IChainable] = None,
                 wait_for_step_completion: bool = True):
        """Chain fragment: override the step's args via Lambda, then submit the
        EMR step to the cluster. Exposes _start/_end for chaining."""
        super().__init__(scope, id)

        args_lambda = emr_lambdas.OverrideStepArgsBuilder.get_or_build(self)

        args_payload = {
            'ExecutionInput':
            sfn.TaskInput.from_context_at('$$.Execution.Input').value,
            'StepName':
            emr_step.name,
            'Args':
            emr_step.args,
        }
        args_task = sfn.Task(
            self,
            f'{emr_step.name} - Override Args',
            result_path=f'$.{id}ResultArgs',
            task=sfn_tasks.InvokeFunction(args_lambda, payload=args_payload))

        # Feed the overridden args (stored at $.{id}ResultArgs) back into the
        # resolved step definition.
        step_definition = emr_step.resolve(self)
        step_definition['HadoopJarStep']['Args'] = sfn.TaskInput.from_data_at(
            f'$.{id}ResultArgs').value

        if wait_for_step_completion:
            integration_pattern = sfn.ServiceIntegrationPattern.SYNC
        else:
            integration_pattern = sfn.ServiceIntegrationPattern.FIRE_AND_FORGET

        submit_task = sfn.Task(self,
                               emr_step.name,
                               output_path=output_path,
                               result_path=result_path,
                               task=emr_tasks.EmrAddStepTask(
                                   cluster_id=cluster_id,
                                   step=step_definition,
                                   integration_pattern=integration_pattern))

        if fail_chain:
            # Route any error from either task into the caller's failure chain.
            args_task.add_catch(fail_chain,
                                errors=['States.ALL'],
                                result_path='$.Error')
            submit_task.add_catch(fail_chain,
                                  errors=['States.ALL'],
                                  result_path='$.Error')

        args_task.next(submit_task)

        self._start = args_task
        self._end = submit_task
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Pizza-ordering demo: a state machine that rejects pineapple orders,
        fronted by a Lambda behind API Gateway."""
        super().__init__(scope, id, **kwargs)

        # Lambda that inspects the order for pineapple.
        pineapple_checker = _lambda.Function(self, "pineappleCheckLambdaHandler",
                                             runtime=_lambda.Runtime.NODEJS_12_X,
                                             handler="orderPizza.handler",
                                             code=_lambda.Code.from_asset("lambdas"),
                                             )

        # First state: run the checker against $.flavour.
        order_task = step_fn.Task(self, 'Order Pizza Job',
                                  task=step_fn_tasks.InvokeFunction(pineapple_checker),
                                  input_path='$.flavour',
                                  result_path='$.pineappleAnalysis',
                                  )

        # Terminal failure state for pineapple orders.
        rejection = step_fn.Fail(self, 'Sorry, We Dont add Pineapple',
                                 cause='Failed To Make Pizza',
                                 error='They asked for Pineapple')

        # Happy path: no pineapple, so the pizza gets made.
        cook_state = step_fn.Pass(self, 'Lets make your pizza')

        pineapple_choice = step_fn.Choice(self, 'With Pineapple?')
        pineapple_choice.when(
            step_fn.Condition.boolean_equals('$.pineappleAnalysis.containsPineapple', True),
            rejection)
        pineapple_choice.otherwise(cook_state)

        definition = step_fn.Chain.start(order_task).next(pineapple_choice)

        state_machine = step_fn.StateMachine(self, 'StateMachine',
                                             definition=definition,
                                             timeout=core.Duration.minutes(5))

        # Dead-letter queue for the fronting Lambda.
        dlq = sqs.Queue(self, 'stateMachineLambdaDLQ',
                        visibility_timeout=core.Duration.seconds(300))

        # Lambda that API Gateway invokes; it starts the state machine.
        starter_fn = _lambda.Function(self, "stateMachineLambdaHandler",
                                      runtime=_lambda.Runtime.NODEJS_12_X,
                                      handler="stateMachineLambda.handler",
                                      code=_lambda.Code.from_asset("lambdas"),
                                      environment={
                                          'statemachine_arn': state_machine.state_machine_arn
                                      },
                                      )

        state_machine.grant_start_execution(starter_fn)

        # REST API backed by the starter Lambda.
        api_gw.LambdaRestApi(self, 'Endpoint',
                             handler=starter_fn,
                             )
# --- Beispiel #8 (example separator from the scraped source) ---
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Sample stack: DynamoDB table, S3 bucket, three Lambdas, a one-task
        state machine, and an SNS topic.

        The table, bucket, and the unreferenced Lambdas are still created as
        stack resources even though only process_purchase is wired into the
        state machine.
        """
        super().__init__(scope, id, **kwargs)

        table = _dynamodb.Table(self,
                                id='dynamoTable',
                                table_name='testcdktabe',
                                partition_key=_dynamodb.Attribute(
                                    name='lastname',
                                    type=_dynamodb.AttributeType.STRING))
        bucket = _s3.Bucket(self,
                            id='s3bucket',
                            bucket_name='mynpbsample3bucket')

        hello_fn = _lambda.Function(
            self,
            id='lambdafunction',
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler='hello.handler',
            code=_lambda.Code.asset('lambdacode'))

        purchase_fn = _lambda.Function(
            self,
            id='process_purchase',
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler='process_purchase.handler',
            code=_lambda.Code.asset('lambdacode'))

        refund_fn = _lambda.Function(
            self,
            id='process_refund',
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler='process_refund.handler',
            code=_lambda.Code.asset('lambdacode'))

        # Single-task state machine around the purchase Lambda.
        definition = sfn.Task(
            self,
            'Get Process Type',
            task=tasks.InvokeFunction(purchase_fn))

        sfn.StateMachine(
            self,
            "MyStateMachine",
            definition=definition,
            timeout=core.Duration.seconds(30),
        )

        topic = sns.Topic(self,
                          "MyTopic",
                          display_name="Customer Subscription")
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Two-step state machine: logging Lambda followed by a second Lambda."""
        super().__init__(scope, id, **kwargs)

        def make_function(name: str, entry: str) -> lambda_func.Function:
            # Both functions share the runtime and the ./code asset bundle.
            return lambda_func.Function(
                scope=self,
                id=name,
                function_name=name,
                handler=entry,
                runtime=lambda_func.Runtime.PYTHON_3_7,
                code=lambda_func.Code.from_asset("./code"))

        logging_fn = make_function("logging_lambda", "logging-lambda.main")
        second_fn = make_function("second_lambda", "second-lambda.main")

        logging_step = stepfunctions.Task(
            scope=self,
            id="invoke_logging_function",
            task=tasks.InvokeFunction(logging_fn))
        second_step = stepfunctions.Task(
            scope=self,
            id="invoke_second_function",
            task=tasks.InvokeFunction(second_fn))

        stepfunctions.StateMachine(
            scope=self,
            id="state_machine",
            state_machine_name="state_machine",
            definition=logging_step.next(second_step),
        )
# --- Beispiel #10 (example separator from the scraped source) ---
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Witness project stack: feeder and saver Lambdas chained in a
        two-step state machine, with the feeder triggered by S3 PUT events.

        NOTE(review): `archive` (used below for the S3 event notification) is
        not defined anywhere in this snippet — presumably an S3 bucket created
        earlier in the original file. This will raise NameError as-is; confirm
        against the full source.
        """
        super().__init__(scope, id, **kwargs)

        # space for feeder Lambda function
        feeder = aws_lambda.Function(self,
                                                    id='_feeder',
                                                    code=aws_lambda.Code.asset('./code'),
                                                    handler='feeder.handler',
                                                    runtime=aws_lambda.Runtime.PYTHON_3_7,
                                                    description='Feeder function for the Witness project')

        # space for saver Lambda function
        saver = aws_lambda.Function(self,
                                                    id='_saver',
                                                    code=aws_lambda.Code.asset('./code'),
                                                    handler='saver.handler',
                                                    runtime=aws_lambda.Runtime.PYTHON_3_7,
                                                    description='Saver function for the Witness project')
        # space for feeder lambda trigger
        # Invoke the feeder whenever an object is PUT into the archive bucket.
        archive.add_event_notification(aws_s3.EventType.OBJECT_CREATED_PUT, s3n.LambdaDestination(feeder))


        # space for stepfunction
        feederTask = aws_stepfunctions.Task(        self,
                                                    id='_feederTask',
                                                    task=aws_tasks.InvokeFunction(feeder))

        saverTask = aws_stepfunctions.Task(         self,
                                                    id='_saverTask',
                                                    task=aws_tasks.InvokeFunction(saver))

        definition = feederTask.next(saverTask)

        orchestrator = aws_stepfunctions.StateMachine(self,
                                                    id='_orchestrator',
                                                    state_machine_name='witness_orchestrator',
                                                    definition=definition)
# --- Beispiel #11 (example separator from the scraped source) ---
    def build(scope: core.Construct,
              id: str,
              *,
              cluster_name: str,
              cluster_tags: List[core.Tag],
              profile_namespace: str,
              profile_name: str,
              configuration_namespace: str,
              configuration_name: str,
              output_path: str = '$',
              result_path: str = '$.ClusterConfiguration') -> sfn.Task:
        """Build the 'Load Cluster Configuration' Step Functions Task."""
        # Nested Construct keeps Lambda/Task ids from colliding with siblings.
        scoped = core.Construct(scope, id)

        loader_lambda = emr_lambdas.LoadClusterConfigurationBuilder.build(
            scoped,
            profile_namespace=profile_namespace,
            profile_name=profile_name,
            configuration_namespace=configuration_namespace,
            configuration_name=configuration_name)

        # Tags are flattened into the {Key, Value} shape the Lambda expects.
        invoke_payload = {
            'ClusterName':
            cluster_name,
            'ClusterTags': [{
                'Key': tag.key,
                'Value': tag.value
            } for tag in cluster_tags],
            'ProfileNamespace':
            profile_namespace,
            'ProfileName':
            profile_name,
            'ConfigurationNamespace':
            configuration_namespace,
            'ConfigurationName':
            configuration_name,
        }

        return sfn.Task(scoped,
                        'Load Cluster Configuration',
                        output_path=output_path,
                        result_path=result_path,
                        task=sfn_tasks.InvokeFunction(loader_lambda,
                                                      payload=invoke_payload))
# --- Beispiel #12 (example separator from the scraped source) ---
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 name: str,
                 state_machine: sfn.StateMachine,
                 input: Optional[Mapping[str, any]] = None,
                 fail_chain: Optional[sfn.IChainable] = None):
        """Chain fragment: run a nested state machine synchronously, then parse
        its JSON string output. Exposes _start/_end for chaining."""
        super().__init__(scope, id)

        execute_task = sfn.Task(
            self,
            name,
            task=emr_tasks.StartExecutionTask(
                state_machine=state_machine,
                input=input,
                integration_pattern=sfn.ServiceIntegrationPattern.SYNC))

        parser_lambda = emr_lambdas.ParseJsonStringBuilder.get_or_build(self)

        # The nested execution's Output arrives as a JSON string; this task
        # parses it and replaces the whole state ($) with the result.
        parse_task = sfn.Task(
            self,
            f'{name} - Parse JSON Output',
            result_path='$',
            task=sfn_tasks.InvokeFunction(
                parser_lambda,
                payload={
                    'JsonString': sfn.TaskInput.from_data_at('$.Output').value
                }))

        if fail_chain:
            # Route any error from either task into the caller's failure chain.
            execute_task.add_catch(fail_chain,
                                   errors=['States.ALL'],
                                   result_path='$.Error')
            parse_task.add_catch(fail_chain,
                                 errors=['States.ALL'],
                                 result_path='$.Error')

        execute_task.next(parse_task)

        self._start = execute_task
        self._end = parse_task
# --- Beispiel #13 (example separator from the scraped source) ---
    def _lambda_quality_check_task(self):
        """Create the Athena quality-check Lambda, wrapped as an SFN Task."""
        # Role granting the Lambda S3 and Athena access for the check queries.
        quality_role = iam.Role(
            self,
            id=f"QualityLambdaRole",
            assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AmazonS3FullAccess"),
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AmazonAthenaFullAccess"),
            ],
        )

        here = Path(os.path.dirname(os.path.abspath(__file__)))
        handler_path = here.joinpath('lambdas', 'quality_check').as_posix()

        check_fn = lambda_.Function(
            self,
            "QualityCheckAthenaLambdaHandler",
            handler="lambda.lambda_handler",
            code=lambda_.AssetCode(handler_path),
            environment={"athenaDatabase": f"{self.glue_db_name}"},
            role=quality_role,
            timeout=core.Duration.seconds(30),
            runtime=lambda_.Runtime.PYTHON_3_7,
        )

        # Wrap the Lambda as a Step Functions task for use in the state machine.
        return sfn.Task(
            self,
            "QualityCheckAthenaLambda",
            task=sfnt.InvokeFunction(check_fn),
        )
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 id_checker: str,
                 event_bus: str,
                 stage: Optional[str] = 'prod',
                 **kwargs) -> None:
        """Account-application stack: DynamoDB tables, service/publisher
        Lambdas behind API Gateway, and a Step Functions review workflow.

        Args:
            id_checker: ARN of an existing identity-checker Lambda (imported
                below via Function.from_function_arn).
            event_bus: value exposed to the event-publisher Lambda as
                EVENT_BRIDGE_ARN.
            stage: deployment-stage suffix appended to resource names.
        """
        super().__init__(scope, id + '-' + stage, **kwargs)

        # Applications table: simple string-keyed table, on-demand billing.
        app_table_name = id + '-applications-table-' + stage
        app_table = ddb.Table(self,
                              id=app_table_name,
                              table_name=app_table_name,
                              partition_key=ddb.Attribute(
                                  name='id', type=ddb.AttributeType.STRING),
                              billing_mode=ddb.BillingMode.PAY_PER_REQUEST)

        # Events table additionally streams new images so the publisher Lambda
        # can react to inserts.
        events_table_name = id + '-events-table-' + stage
        events_table = ddb.Table(self,
                                 id=events_table_name,
                                 table_name=events_table_name,
                                 partition_key=ddb.Attribute(
                                     name='id', type=ddb.AttributeType.STRING),
                                 billing_mode=ddb.BillingMode.PAY_PER_REQUEST,
                                 stream=ddb.StreamViewType.NEW_IMAGE)

        self._table_stream_arn = events_table.table_stream_arn

        # create our Lambda function for the bank account service
        func_name = id + '-' + stage + '-' + 'account-application'
        lambda_assets = lambda_.Code.from_asset('account_application_service')
        handler = lambda_.Function(self,
                                   func_name,
                                   code=lambda_assets,
                                   runtime=lambda_.Runtime.NODEJS_10_X,
                                   handler='main.handler',
                                   environment={
                                       'ACCOUNTS_TABLE_NAME':
                                       app_table.table_name,
                                       'EVENTS_TABLE_NAME':
                                       events_table.table_name,
                                       'REGION': core.Aws.REGION
                                   })

        # REST API fronting the account-application service Lambda.
        gw.LambdaRestApi(self, id=stage + '-' + id, handler=handler)

        # grant main Lambda function access to DynamoDB tables
        app_table.grant_read_write_data(handler.role)
        events_table.grant_read_write_data(handler.role)

        # Broad SSM/EventBridge/StepFunctions permissions shared by both
        # Lambdas (resources='*').
        p_statement = iam.PolicyStatement(actions=[
            'ssm:Describe*', 'ssm:Get*', 'ssm:List*', 'events:*', 'states:*'
        ],
                                          effect=iam.Effect.ALLOW,
                                          resources=['*'])
        handler.add_to_role_policy(statement=p_statement)

        # create the Lambda function for the event publisher
        # (same code asset as the service Lambda, different handler entry point;
        # consumes the events-table stream).
        evt_publisher = id + '-' + stage + '-' + 'event-publisher'
        evt_handler = lambda_.Function(
            self,
            evt_publisher,
            code=lambda_assets,
            runtime=lambda_.Runtime.NODEJS_10_X,
            handler='event-publisher.handler',
            events=[
                lambda_es.DynamoEventSource(
                    table=events_table,
                    starting_position=lambda_.StartingPosition.LATEST)
            ],
            environment={
                'EVENT_BRIDGE_ARN': event_bus,
                'REGION': core.Aws.REGION
            })

        evt_handler.add_to_role_policy(statement=p_statement)

        # set up StepFunctions
        # Both approve/reject states invoke the service Lambda with a command
        # payload, pulling the application id from the execution state.
        approve_application = sf.Task(
            self,
            'Approve Application',
            task=sft.InvokeFunction(handler,
                                    payload={
                                        'body': {
                                            'command':
                                            'APPROVE_ACCOUNT_APPLICATION',
                                            'data': {
                                                'id.$': '$.application.id'
                                            }
                                        }
                                    }),
            result_path='$.approveApplication')

        reject_application = sf.Task(self,
                                     'Reject Application',
                                     task=sft.InvokeFunction(
                                         handler,
                                         payload={
                                             'body': {
                                                 'command':
                                                 'REJECT_ACCOUNT_APPLICATION',
                                                 'data': {
                                                     'id.$': '$.application.id'
                                                 }
                                             }
                                         }),
                                     result_path='$.rejectApplication')

        # Identity check runs against the externally supplied Lambda ARN.
        id_checker_handler = lambda_.Function.from_function_arn(
            self, 'IdentityChecker', function_arn=id_checker)
        check_identity = sf.Task(self,
                                 'Check Identity',
                                 task=sft.InvokeFunction(
                                     id_checker_handler,
                                     payload={
                                         'body': {
                                             'command': 'CHECK_IDENTITY',
                                             'data': {
                                                 'application.$':
                                                 '$.application'
                                             }
                                         }
                                     }))

        # Human-review step: pauses on a task token (WAIT_FOR_TASK_TOKEN) until
        # a reviewer calls back, then branches on the APPROVE/REJECT decision.
        # NOTE: because of the trailing .next(...), this variable holds the
        # resulting Chain, not the Task itself.
        wait_for_human_review = sf.Task(self, 'Wait for Human Review',
                                        task=sft.RunLambdaTask(handler,
                                                               integration_pattern=sf.ServiceIntegrationPattern.WAIT_FOR_TASK_TOKEN,
                                                               payload={
                                                                   'body': {
                                                                       'command': 'FLAG_ACCOUNT_APPLICATION_FOR_HUMAN_REVIEW',
                                                                       'data': {
                                                                           'id.$': '$.application.id',
                                                                           'taskToken': sf.Context.task_token
                                                                       }
                                                                   }
                                                               }), result_path='$.humanReview') \
            .next(
            sf.Choice(self, 'Human Approval Choice')
            .when(sf.Condition.string_equals('$.humanReview.decision', 'APPROVE'), next=approve_application)
            .when(sf.Condition.string_equals('$.humanReview.decision', 'REJECT'), next=reject_application))

        # Overall flow: run identity + fraud checks in parallel, then either
        # auto-approve or escalate to human review if any branch flagged.
        sm_definition = sf.Parallel(self, 'Perform Automated Checks', result_path='$.checks') \
            .branch(check_identity) \
            .branch(sf.Pass(self, 'Check Fraud Model', result=sf.Result({'flagged': False}))) \
            .next(
            sf.Choice(self, 'Automated Checks Choice')
                .when(sf.Condition.boolean_equals('$.checks[0].flagged', True), next=wait_for_human_review)
                .when(sf.Condition.boolean_equals('$.checks[1].flagged', True), next=wait_for_human_review)
                .otherwise(approve_application))

        state_machine = sf.StateMachine(self,
                                        'OpenAccountStateMachine' + stage,
                                        definition=sm_definition)
        # Publish the state machine ARN to SSM so other stacks/services can
        # look it up by name.
        ssm.CfnParameter(self,
                         id='StateMachineArnSSM',
                         type='String',
                         value=state_machine.state_machine_arn,
                         name='StateMachineArnSSM')
    def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
        """IAP TES showcase stack.

        Creates the Lambda functions that launch IAP TES tasks (completed
        asynchronously via Step Functions task tokens), wraps them in SFN
        tasks, and chains them into a state machine:
        samplesheet mapping -> BCL convert -> fastq mapping -> gather
        samples -> scattered DRAGEN runs -> MultiQC -> copy report to S3.

        :param app: parent CDK scope
        :param id: construct id of this stack
        :param props: deploy-time settings; keys read here:
            ``iap_api_base_url``, ``task_id``, ``ssm_param_name``,
            ``ssm_param_version``, ``gds_log_folder``, ``gds_run_volume``,
            ``s3_run_bucket``
        """
        super().__init__(app, id, **kwargs)

        # TODO(review): empty bucket name (also used as the construct id) is
        # a placeholder and will fail synthesis — fill in the real run-data
        # bucket name before deploying.
        run_data_bucket_name = ''
        run_data_bucket = s3.Bucket.from_bucket_name(
            self, run_data_bucket_name, bucket_name=run_data_bucket_name)

        # --- IAM roles for the lambda functions -------------------------

        # Basic execution (CloudWatch Logs) only.
        lambda_role = iam.Role(
            self,
            'EchoTesLambdaRole',
            assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    'service-role/AWSLambdaBasicExecutionRole')
            ])

        # Basic execution plus write access to the run-data bucket.
        copy_lambda_role = iam.Role(
            self,
            'CopyToS3LambdaRole',
            assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    'service-role/AWSLambdaBasicExecutionRole')
            ])
        run_data_bucket.grant_write(copy_lambda_role)

        # Needs Step Functions access to send task success/failure callbacks.
        callback_role = iam.Role(
            self,
            'CallbackTesLambdaRole',
            assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    'service-role/AWSLambdaBasicExecutionRole'),
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    'AWSStepFunctionsFullAccess')
            ])

        # Lambda function to call back and complete SFN async tasks.
        lmbda.Function(self,
                       'CallbackLambda',
                       function_name='callback_iap_tes_lambda_dev',
                       handler='callback.lambda_handler',
                       runtime=lmbda.Runtime.PYTHON_3_7,
                       code=lmbda.Code.from_asset('lambdas'),
                       role=callback_role,
                       timeout=core.Duration.seconds(20))

        # --- TES launcher Lambdas (shared launch_tes_task handler) ------
        # Only task version, container image and TES task name differ
        # between these five; the shared setup lives in the helper.
        samplesheet_mapper_function = self._create_tes_launcher(
            'SampleSheetMapperTesLambda',
            'showcase_ss_mapper_iap_tes_lambda_dev',
            props,
            lambda_role,
            task_version='tvn.0ee81865bf514b7bb7b7ea305c88191f',
            image_name='umccr/alpine_pandas',
            image_tag='1.0.1',
            tes_task_name='SampleSheetMapper')

        bcl_convert_function = self._create_tes_launcher(
            'BclConvertTesLambda',
            'showcase_bcl_convert_iap_tes_lambda_dev',
            props,
            lambda_role,
            task_version='tvn.ab3e85f9aaf24890ad169fdab3825c0d',
            image_name=
            '699120554104.dkr.ecr.us-east-1.amazonaws.com/public/dragen',
            image_tag='3.5.2',
            tes_task_name='BclConvert')

        fastq_mapper_function = self._create_tes_launcher(
            'FastqMapperTesLambda',
            'showcase_fastq_mapper_iap_tes_lambda_dev',
            props,
            lambda_role,
            task_version='tvn.f90aa88da2fe490fb6e6366b65abe267',
            image_name='umccr/alpine_pandas',
            image_tag='1.0.1',
            tes_task_name='FastqMapper')

        # Not a TES launcher: different handler and a smaller environment.
        gather_samples_function = lmbda.Function(
            self,
            'GatherSamplesTesLambda',
            function_name='showcase_gather_samples_iap_tes_lambda_dev',
            handler='gather_samples.lambda_handler',
            runtime=lmbda.Runtime.PYTHON_3_7,
            code=lmbda.Code.from_asset('lambdas'),
            role=lambda_role,
            timeout=core.Duration.seconds(20),
            environment={
                'IAP_API_BASE_URL': props['iap_api_base_url'],
                'SSM_PARAM_JWT': props['ssm_param_name']
            })

        dragen_function = self._create_tes_launcher(
            'DragenTesLambda',
            'showcase_dragen_iap_tes_lambda_dev',
            props,
            lambda_role,
            task_version='tvn.096b39e90e4443abae0333e23fcabc61',
            image_name=
            '699120554104.dkr.ecr.us-east-1.amazonaws.com/public/dragen',
            image_tag='3.5.2',
            tes_task_name='Dragen')

        multiqc_function = self._create_tes_launcher(
            'MultiQcTesLambda',
            'showcase_multiqc_iap_tes_lambda_dev',
            props,
            lambda_role,
            task_version='tvn.983a0239483d4253a8a0531fa1de0376',
            image_name='umccr/multiqc_dragen',
            image_tag='1.1',
            tes_task_name='MultiQC')

        copy_report_to_s3 = lmbda.Function(
            self,
            'CopyReportToS3Lambda',
            function_name='showcase_copy_report_lambda_dev',
            handler='copy_to_s3.lambda_handler',
            runtime=lmbda.Runtime.PYTHON_3_7,
            code=lmbda.Code.from_asset('lambdas'),
            role=copy_lambda_role,
            timeout=core.Duration.seconds(20),
            environment={
                'IAP_API_BASE_URL': props['iap_api_base_url'],
                'SSM_PARAM_JWT': props['ssm_param_name'],
                'GDS_RUN_VOLUME': props['gds_run_volume'],
                'S3_RUN_BUCKET': props['s3_run_bucket']
            })

        # IAP JWT access token stored in SSM Parameter Store; every Lambda
        # that talks to IAP needs read access to it.
        secret_value = ssm.StringParameter.from_secure_string_parameter_attributes(
            self,
            "JwtToken",
            parameter_name=props['ssm_param_name'],
            version=props['ssm_param_version'])
        for fn in (samplesheet_mapper_function, bcl_convert_function,
                   fastq_mapper_function, gather_samples_function,
                   dragen_function, multiqc_function, copy_report_to_s3):
            secret_value.grant_read(fn)

        # --- SFN task definitions ---------------------------------------
        task_samplesheet_mapper = self._create_callback_task(
            "SampleSheetMapper",
            samplesheet_mapper_function,
            payload={
                "taskCallbackToken": sfn.Context.task_token,
                "runId.$": "$.runfolder"
            },
            result_path="$.guid")

        task_bcl_convert = self._create_callback_task(
            "BclConvert",
            bcl_convert_function,
            payload={
                "taskCallbackToken": sfn.Context.task_token,
                "runId.$": "$.runfolder"
            },
            result_path="$.guid")

        task_fastq_mapper = self._create_callback_task(
            "FastqMapper",
            fastq_mapper_function,
            payload={
                "taskCallbackToken": sfn.Context.task_token,
                "runId.$": "$.runfolder"
            },
            result_path="$.guid")

        # Synchronous invoke: gather_samples returns the sample id list.
        task_gather_samples = sfn.Task(self,
                                       "GatherSamples",
                                       task=sfn_tasks.InvokeFunction(
                                           gather_samples_function,
                                           payload={"runId.$": "$.runfolder"}),
                                       result_path="$.sample_ids")

        task_dragen = self._create_callback_task(
            "DragenTask",
            dragen_function,
            payload={
                "taskCallbackToken": sfn.Context.task_token,
                "runId.$": "$.runId",
                "index.$": "$.index",
                "item.$": "$.item"
            },
            result_path="$.exit_status")

        task_multiqc = self._create_callback_task(
            "MultiQcTask",
            multiqc_function,
            payload={
                "taskCallbackToken": sfn.Context.task_token,
                "runId.$": "$.runfolder",
                "samples.$": "$.sample_ids"
            })

        task_copy_report_to_s3 = sfn.Task(
            self,
            "CopyReportToS3",
            task=sfn_tasks.InvokeFunction(copy_report_to_s3,
                                          payload={"runId.$": "$.runfolder"}),
            result_path="$.copy_report")

        # Fan out one DRAGEN task per sample id, up to 20 in parallel.
        scatter = sfn.Map(self,
                          "Scatter",
                          items_path="$.sample_ids",
                          parameters={
                              "index.$": "$$.Map.Item.Index",
                              "item.$": "$$.Map.Item.Value",
                              "runId.$": "$.runfolder"
                          },
                          result_path="$.mapresults",
                          max_concurrency=20).iterator(task_dragen)

        definition = task_samplesheet_mapper \
            .next(task_bcl_convert) \
            .next(task_fastq_mapper) \
            .next(task_gather_samples) \
            .next(scatter) \
            .next(task_multiqc) \
            .next(task_copy_report_to_s3)

        sfn.StateMachine(
            self,
            "ShowcaseSfnStateMachine",
            definition=definition,
        )

    def _create_tes_launcher(self, construct_id, function_name, props, role, *,
                             task_version, image_name, image_tag,
                             tes_task_name):
        """Create one Lambda that launches an IAP TES task.

        All launchers share the ``launch_tes_task.lambda_handler`` handler,
        runtime, code asset, role, timeout and common environment; only the
        TES task version, container image and TES task name vary.

        :returns: the created ``lmbda.Function``
        """
        return lmbda.Function(
            self,
            construct_id,
            function_name=function_name,
            handler='launch_tes_task.lambda_handler',
            runtime=lmbda.Runtime.PYTHON_3_7,
            code=lmbda.Code.from_asset('lambdas'),
            role=role,
            timeout=core.Duration.seconds(20),
            environment={
                'IAP_API_BASE_URL': props['iap_api_base_url'],
                'TASK_ID': props['task_id'],
                'TASK_VERSION': task_version,
                'SSM_PARAM_JWT': props['ssm_param_name'],
                'GDS_LOG_FOLDER': props['gds_log_folder'],
                'IMAGE_NAME': image_name,
                'IMAGE_TAG': image_tag,
                'TES_TASK_NAME': tes_task_name
            })

    def _create_callback_task(self, construct_id, function, payload,
                              result_path=None):
        """Create an SFN task that invokes *function* with a task token.

        Uses the WAIT_FOR_TASK_TOKEN integration pattern: the state machine
        pauses until the callback Lambda completes the task via the token
        embedded in *payload*.

        :param result_path: JSON path for the task result; ``None`` keeps
            the CDK default (same as omitting the argument).
        :returns: the created ``sfn.Task``
        """
        return sfn.Task(
            self,
            construct_id,
            task=sfn_tasks.RunLambdaTask(
                function,
                integration_pattern=sfn.ServiceIntegrationPattern.
                WAIT_FOR_TASK_TOKEN,
                payload=payload),
            result_path=result_path)
# Beispiel #16
    def __init__(self, app: core.App, id: str, **kwargs) -> None:
        """SageMaker batch-transform stack.

        Wires two Lambdas (one submits a batch transform job, one polls its
        status) into a Step Functions state machine that submits the job,
        then waits and polls in a loop until the job reports ``Completed``
        or ``Failed``.
        """
        super().__init__(app, id, **kwargs)

        # Both Lambdas are inlined from local source files so the stack is
        # self-contained.
        with open("lambda-submit.py", encoding="utf8") as src:
            submit_code = src.read()

        submit_fn = lambda_.Function(
            self,
            "submitsmbatch",
            code=lambda_.InlineCode(submit_code),
            handler="index.lambda_handler",
            timeout=core.Duration.seconds(300),
            runtime=lambda_.Runtime.PYTHON_3_7,
            environment={
                "transform_job_name": transform_job_name,
                "model_name": model_name,
                "max_concurrent": max_concurrent,
                "max_payload_size": max_payload_size,
                "s3_uri_in": s3_uri_in,
                "s3_uri_out": s3_uri_out,
                "instance_type": instance_type,
                "instance_count": instance_count,
            },
        )

        # Both policies target the same job-name prefix in this
        # account/region, so build the ARN once.
        transform_job_arn = 'arn:aws:sagemaker:{}:{}:transform-job/{}*'.format(
            my_region, my_acc_id, transform_job_name)

        # Allow the submit Lambda to create the transform job.
        submit_fn.add_to_role_policy(
            aws_iam.PolicyStatement(
                actions=['sagemaker:CreateTransformJob'],
                resources=[transform_job_arn],
            ))

        with open("lambda-check.py", encoding="utf8") as src:
            check_code = src.read()

        check_fn = lambda_.Function(
            self,
            "checksmbatch",
            code=lambda_.InlineCode(check_code),
            handler="index.lambda_handler",
            timeout=core.Duration.seconds(300),
            runtime=lambda_.Runtime.PYTHON_3_7,
            environment={
                # model_name: change to your endpoint name!
                "model_name": model_name,
                "content_type": "text/csv",
            },
        )
        # Allow the polling Lambda to read the transform job status.
        check_fn.add_to_role_policy(
            aws_iam.PolicyStatement(
                actions=['sagemaker:DescribeTransformJob'],
                resources=[transform_job_arn],
            ))

        # State machine states.
        submit_job = sfn.Task(
            self,
            "Submit Job",
            task=sfn_tasks.InvokeFunction(submit_fn),
        )
        wait_one_minute = sfn.Wait(
            self,
            "Wait 1 minute",
            time=sfn.WaitTime.duration(core.Duration.minutes(1)),
        )
        poll_status = sfn.Task(
            self,
            "Get Job Status",
            task=sfn_tasks.InvokeFunction(check_fn),
        )
        job_complete_choice = sfn.Choice(self, "Job Complete?")
        job_failed = sfn.Fail(
            self,
            "Job Failed",
            cause="AWS Batch Job Failed",
            error="DescribeJob returned FAILED",
        )
        final_status = sfn.Task(
            self,
            "Get Final Job Status",
            task=sfn_tasks.InvokeFunction(check_fn),
        )

        # submit -> wait -> poll -> (failed | completed | loop back to wait)
        job_complete_choice.when(
            sfn.Condition.string_equals("$.status", "Failed"), job_failed)
        job_complete_choice.when(
            sfn.Condition.string_equals("$.status", "Completed"), final_status)
        job_complete_choice.otherwise(wait_one_minute)

        definition = submit_job.next(wait_one_minute).next(poll_status).next(
            job_complete_choice)

        sfn.StateMachine(
            self,
            "SMbatchInference",
            definition=definition,
        )
# Beispiel #17
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Lets create couple of instances to test):
        vpc = _ec2.Vpc(self,
                       "abacVPC",
                       cidr="10.13.0.0/21",
                       max_azs=2,
                       nat_gateways=0,
                       subnet_configuration=[
                           _ec2.SubnetConfiguration(
                               name="pubSubnet",
                               cidr_mask=24,
                               subnet_type=_ec2.SubnetType.PUBLIC)
                       ])
        core.Tag.add(vpc,
                     key="ServiceProvider",
                     value="KonStone",
                     include_resource_types=[])

        weak_sg = _ec2.SecurityGroup(
            self,
            "web_sec_grp",
            vpc=vpc,
            description="Allow internet access from the world",
            allow_all_outbound=True)
        # vpc_cidr_block
        # weak_sg.add_ingress_rule(_ec2.Peer.any_ipv4(),
        weak_sg.add_ingress_rule(_ec2.Peer.ipv4(vpc.vpc_cidr_block),
                                 _ec2.Port.tcp(22),
                                 "Allow SSH access from the VPC Only.")

        # We are using the latest AMAZON LINUX AMI
        # Benefit of having SSM Agent pre-installed
        ami_id = _ec2.AmazonLinuxImage(generation=_ec2.AmazonLinuxGeneration.
                                       AMAZON_LINUX_2).get_image(self).image_id

        # https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_iam/Role.html
        instace_profile_role = _iam.Role(
            self,
            'ec2ssmroleid',
            assumed_by=_iam.ServicePrincipal('ec2.amazonaws.com'),
            role_name="instace_profile_role")

        instace_profile_role.add_managed_policy(
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonSSMManagedInstanceCore'))

        instance_profile_role_additional_perms = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=[
                "arn:aws:logs:*:*:*",
            ],
            actions=["logs:Create*", "logs:PutLogEvents"])
        instance_profile_role_additional_perms.sid = "PutBucketPolicy"
        instace_profile_role.add_to_policy(
            instance_profile_role_additional_perms)

        inst_profile_01 = _iam.CfnInstanceProfile(
            self,
            "instProfile01Id",
            roles=[instace_profile_role.role_name],
        )

        # Let us bootstrap the server with the required agents
        try:
            with open("./bootstrap_scripts/install_agents.sh",
                      mode='rb') as file:
                bootstrap_data = file.read()
        except OSError:
            print('Failed to get UserData script')

        install_agents = _ec2.UserData.for_linux()
        install_agents.add_commands(str(bootstrap_data, 'utf-8'))

        # The EC2 Instance to monitor for failed SSH Logins
        ssh_monitored_inst_01 = _ec2.CfnInstance(
            self,
            "sshMonitoredInstance01",
            image_id=ami_id,
            instance_type="t2.micro",
            monitoring=False,
            tags=[{
                "key": "ServiceProvider",
                "value": "KonStone"
            }],
            iam_instance_profile=inst_profile_01.ref,
            network_interfaces=[{
                "deviceIndex": "0",
                "associatePublicIpAddress": True,
                "subnetId": vpc.public_subnets[0].subnet_id,
                "groupSet": [weak_sg.security_group_id]
            }],  #https: //github.com/aws/aws-cdk/issues/3419
            user_data=core.Fn.base64(install_agents.render()),
        )
        """
        linux_ami = _ec2.GenericLinuxImage({ "cn-northwest-1": "ami-0f62e91915e16cfc2","eu-west-1": "ami-12345678"})
        ssh_monitored_inst_01_02 = _ec2.Instance(self,
            "monitoredInstance02",
            instance_type=_ec2.InstanceType(instance_type_identifier="t2.micro"),
            instance_name="monitoredInstance02",
            machine_image=linux_ami,
            vpc=vpc,
            security_group=[weak_sg.security_group_id],
            # vpc_subnets=_ec2.SubnetSelection(subnet_type=_ec2.SubnetType.PUBLIC)
            vpc_subnets=vpc.public_subnets[0].subnet_id,
            # user_data=_ec2.UserData.custom(t_user_data)
            )
        """

        # The log group name to store logs
        info_sec_ops_log_group = _logs.LogGroup(
            self,
            "infoSecOpsLogGroupId",
            log_group_name=(f"/Mystique/InfoSec/Automation/"
                            f"{ssh_monitored_inst_01.ref}"),
            retention=_logs.RetentionDays.ONE_WEEK)

        # Defines an AWS Lambda resource

        with open("lambda_src/quarantine_ec2_instance.py",
                  encoding="utf8") as fp:
            quarantine_ec2_instance_fn_handler_code = fp.read()

        quarantine_ec2_instance_fn = _lambda.Function(
            self,
            id='quarantineEc2InstanceFnId',
            function_name="quarantine_ec2_instance",
            runtime=_lambda.Runtime.PYTHON_3_7,
            code=_lambda.InlineCode(quarantine_ec2_instance_fn_handler_code),
            handler='index.lambda_handler',
            timeout=core.Duration.seconds(5))
        quarantine_ec2_instance_fn_perms = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=[
                "*",
            ],
            actions=[
                "ec2:RevokeSecurityGroupIngress",
                "ec2:DescribeSecurityGroupReferences",
                "ec2:RevokeSecurityGroupEgress",
                "ec2:ApplySecurityGroupsToClientVpnTargetNetwork",
                "ec2:DescribeSecurityGroups", "ec2:CreateSecurityGroup",
                "ec2:DescribeInstances", "ec2:CreateTags", "ec2:StopInstances",
                "ec2:CreateVolume", "ec2:CreateSnapshots",
                "ec2:CreateSnapshot", "ec2:DescribeSnapshots",
                "ec2:ModifyInstanceAttribute"
            ])
        quarantine_ec2_instance_fn_perms.sid = "AllowLambdaToQuarantineEC2"
        quarantine_ec2_instance_fn.add_to_role_policy(
            quarantine_ec2_instance_fn_perms)

        info_sec_ops_topic = _sns.Topic(self,
                                        "infoSecOpsTopicId",
                                        display_name="InfoSecTopic",
                                        topic_name="InfoSecOpsTopic")

        # Ref: https://docs.aws.amazon.com/cdk/api/latest/docs/aws-stepfunctions-readme.html
        ###############################################################################
        ################# STEP FUNCTIONS EXPERIMENTAL CODE - UNSTABLE #################
        ###############################################################################

        quarantine_ec2_instance_task = _sfn.Task(
            self,
            "Quarantine EC2 Instance",
            task=_tasks.InvokeFunction(quarantine_ec2_instance_fn),
            result_path="$")

        notify_secops_task = _sfn.Task(
            self,
            "Notify InfoSecOps",
            task=_tasks.PublishToTopic(
                info_sec_ops_topic,
                integration_pattern=_sfn.ServiceIntegrationPattern.
                FIRE_AND_FORGET,
                message=_sfn.TaskInput.from_data_at("$.message"),
                subject="SSH Error Response Notification"))

        ssh_error_response_failure = _sfn.Fail(
            self,
            "SSH Error Response Actions Failed",
            cause="All Response Actions were NOT completed",
            error="Check Logs")

        ssh_error_response_success = _sfn.Succeed(
            self,
            "SSH Error Response Actions Succeeded",
            comment="All Response Action Completed Successfully",
        )

        ssh_error_response_sfn_definition = quarantine_ec2_instance_task\
            .next(notify_secops_task\
                .next(_sfn.Choice(self, "SSH Errors Response Complete?")\
                    .when(_sfn.Condition.number_equals("$.SdkHttpMetadata.HttpStatusCode", 200),ssh_error_response_success)\
                    .when(_sfn.Condition.not_(
                        _sfn.Condition.number_equals("$.SdkHttpMetadata.HttpStatusCode", 200)), ssh_error_response_failure)\
                    .otherwise(ssh_error_response_failure)
                    )
            )

        ssh_error_response_statemachine = _sfn.StateMachine(
            self,
            "stateMachineId",
            definition=ssh_error_response_sfn_definition,
            timeout=core.Duration.minutes(5))

        ###############################################################################
        ################# STEP FUNCTIONS EXPERIMENTAL CODE - UNSTABLE #################
        ###############################################################################

        # LAMBDA TO TRIGGER STATE MACHINE - since state cannot be invoked by SNS
        with open("lambda_src/trigger_state_machine.py",
                  encoding="utf8") as fp:
            trigger_state_machine_fn_handler_code = fp.read()

        trigger_state_machine_fn = _lambda.Function(
            self,
            id='sshErrorResponseFnId',
            function_name="trigger_ssh_error_response_state_machine_fn",
            runtime=_lambda.Runtime.PYTHON_3_7,
            code=_lambda.InlineCode(trigger_state_machine_fn_handler_code),
            # code=_lambda.Code.asset("lambda_src/is_policy_permissive.py"),
            # code=_lambda.Code.asset('lambda_src'),
            # code=_lambda.InlineCode(code_body),
            handler='index.lambda_handler',
            timeout=core.Duration.seconds(5),
            environment={
                "STATE_MACHINE_ARN":
                f"{ssh_error_response_statemachine.state_machine_arn}",
            })

        trigger_state_machine_fn_perms = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=[
                f"{ssh_error_response_statemachine.state_machine_arn}",
            ],
            actions=["states:StartExecution"])
        trigger_state_machine_fn_perms.sid = "PutBucketPolicy"
        trigger_state_machine_fn.add_to_role_policy(
            trigger_state_machine_fn_perms)
        """
        version = trigger_state_machine_fn.add_version(name=datetime.now().isoformat())
        trigger_state_machine_fn_alias = _lambda.Alias(self, 
            'lmdaAliasId',
            alias_name='MystiqueTestAlias',
            version=version
            )
        """

        # Lets add permission to SNS to trigger our lambda function
        trigger_lambda_perms = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=[
                trigger_state_machine_fn.function_arn,
            ],
            actions=[
                "lambda:InvokeFunction",
            ])
        trigger_lambda_perms.sid = "TriggerLambaFunction"
        # info_sec_ops_topic.add_to_resource_policy( trigger_lambda_perms )

        # Subscribe InfoSecOps Email to topic
        info_sec_ops_topic.add_subscription(
            _subs.EmailSubscription(global_args.INFO_SEC_OPS_EMAIL))
        # info_sec_ops_topic.add_subscription(_subs.LambdaSubscription(trigger_state_machine_fn))

        trigger_state_machine_fn_alarm = trigger_state_machine_fn.metric_all_errors(
        ).create_alarm(
            self,
            "fn-error-alarm",
            threshold=5,
            alarm_name="trigger_state_machine_fn_error_alarm",
            evaluation_periods=5,
            period=core.Duration.minutes(1),
        )

        subscribe_trigger_state_machine_fn_to_logs = _logs.SubscriptionFilter(
            self,
            "sshErrorLogSubscriptionId",
            log_group=info_sec_ops_log_group,
            destination=_logs_destination.LambdaDestination(
                trigger_state_machine_fn),
            filter_pattern=_logs.FilterPattern.space_delimited(
                "Mon", "day", "timestamp", "ip", "id", "status",
                "...").where_string("status", "=", "Invalid"),
        )

        # https://pypi.org/project/aws-cdk.aws-logs/
        # We are creating three filter
        # tooManySshDisconnects, invalidSshUser and invalidSshKey:
        # When a user tries to SSH with invalid username the next line is logged in the SSH log file:
        # Apr 20 02:39:35 ip-172-31-63-56 sshd[17136]: Received disconnect from xxx.xxx.xxx.xxx: 11:  [preauth]
        too_many_ssh_disconnects_metric = _cloudwatch.Metric(
            namespace=f"{global_args.OWNER}",
            metric_name="tooManySshDisconnects")
        too_many_ssh_disconnects_filter = _logs.MetricFilter(
            self,
            "tooManySshDisconnectsFilterId",
            log_group=info_sec_ops_log_group,
            metric_namespace=too_many_ssh_disconnects_metric.namespace,
            metric_name=too_many_ssh_disconnects_metric.metric_name,
            filter_pattern=_logs.FilterPattern.space_delimited(
                "Mon", "day", "timestamp", "ip", "id", "msg1", "msg2",
                "...").where_string("msg2", "=", "disconnect"),
            metric_value="1")

        invalid_ssh_user_metric = _cloudwatch.Metric(
            namespace=f"{global_args.OWNER}",
            metric_name="invalidSshUser",
        )
        invalid_ssh_user_filter = _logs.MetricFilter(
            self,
            "invalidSshUserFilterId",
            log_group=info_sec_ops_log_group,
            metric_namespace=invalid_ssh_user_metric.namespace,
            metric_name=invalid_ssh_user_metric.metric_name,
            filter_pattern=_logs.FilterPattern.space_delimited(
                "Mon", "day", "timestamp", "ip", "id", "status",
                "...").where_string("status", "=", "Invalid"),
            metric_value="1")

        invalid_ssh_key_metric = _cloudwatch.Metric(
            namespace=f"{global_args.OWNER}", metric_name="invalidSshKey")

        invalid_ssh_key_filter = _logs.MetricFilter(
            self,
            "invalidSshKeyFilterId",
            log_group=info_sec_ops_log_group,
            metric_namespace=invalid_ssh_key_metric.namespace,
            metric_name=invalid_ssh_key_metric.metric_name,
            filter_pattern=_logs.FilterPattern.space_delimited(
                "Mon", "day", "timestamp", "ip", "id", "msg1", "msg2",
                "...").where_string("msg1", "=", "Connection").where_string(
                    "msg2", "=", "closed"),
            metric_value="1")

        # Now let us create alarms
        # alarm is raised there are more than 5(threshold) of the measured metrics in two(datapoint) of the last three seconds(evaluation):
        # Period=60Seconds, Eval=3, Threshold=5
        too_many_ssh_disconnects_alarm = _cloudwatch.Alarm(
            self,
            "tooManySshDisconnectsAlarmId",
            alarm_name="too_many_ssh_disconnects_alarm",
            alarm_description=
            "The number disconnect requests is greater then 5, even 1 time in 3 minutes",
            metric=too_many_ssh_disconnects_metric,
            actions_enabled=True,
            period=core.Duration.minutes(1),
            threshold=5,
            evaluation_periods=3,
            datapoints_to_alarm=1,
            statistic="sum",
            comparison_operator=_cloudwatch.ComparisonOperator.
            GREATER_THAN_OR_EQUAL_TO_THRESHOLD)

        invalid_ssh_user_alarm = _cloudwatch.Alarm(
            self,
            "invalidSshUserAlarmId",
            alarm_name="too_many_invalid_ssh_users_alarm",
            alarm_description=
            "The number of invalid ssh users connecting is greater then 5, even 1 time in 3 minutes",
            metric=invalid_ssh_user_metric,
            actions_enabled=True,
            period=core.Duration.minutes(1),
            threshold=5,
            evaluation_periods=3,
            datapoints_to_alarm=1,
            statistic="sum",
            comparison_operator=_cloudwatch.ComparisonOperator.
            GREATER_THAN_THRESHOLD)
        invalid_ssh_user_alarm.add_alarm_action(
            _cloudwatch_actions.SnsAction(info_sec_ops_topic))

        invalid_ssh_key_alarm = _cloudwatch.Alarm(
            self,
            "invalidSshKeyAlarmId",
            alarm_name="too_many_invalid_ssh_key_alarm",
            alarm_description=
            "The number of invalid ssh keys connecting is greater then 5, even 1 time in 3 minutes",
            metric=invalid_ssh_key_metric,
            actions_enabled=True,
            period=core.Duration.minutes(1),
            threshold=5,
            evaluation_periods=3,
            datapoints_to_alarm=1,
            statistic="sum",
            comparison_operator=_cloudwatch.ComparisonOperator.
            GREATER_THAN_OR_EQUAL_TO_THRESHOLD)
        invalid_ssh_key_alarm.add_alarm_action(
            _cloudwatch_actions.SnsAction(info_sec_ops_topic))

        ###########################################
        ################# OUTPUTS #################
        ###########################################

        output0 = core.CfnOutput(
            self,
            "SecuirtyAutomationFrom",
            value=f"{global_args.SOURCE_INFO}",
            description=
            "To know more about this automation stack, check out our github page."
        )

        output1_1 = core.Fn.get_att(
            logical_name_of_resource="sshMonitoredInstance01",
            attribute_name="PublicIp")
        output1 = core.CfnOutput(self,
                                 "MonitoredInstance",
                                 value=output1_1.to_string(),
                                 description="Web Server Public IP to attack")

        output2 = core.CfnOutput(
            self,
            "SSHAlarms",
            value=
            (f"https://console.aws.amazon.com/cloudwatch/home?region="
             f"{core.Aws.REGION}"
             f"#/configuration/"
             f"#alarmsV2:?search=ssh&alarmStateFilter=ALL&alarmTypeFilter=ALL"
             ),
            description="Check out the cloudwatch Alarms")

        output3 = core.CfnOutput(
            self,
            "SubscribeToNotificationTopic",
            value=(f"https://console.aws.amazon.com/sns/v3/home?"
                   f"{core.Aws.REGION}"
                   f"#/topic/"
                   f"{info_sec_ops_topic.topic_arn}"),
            description=
            "Add your email to subscription and confirm subscription")

        output_test_1 = core.CfnOutput(
            self,
            "ToGenInvalidKeyErrors",
            value=
            (f"for i in {{1..30}}; do ssh -i $RANDOM ec2-user@{output1_1.to_string()}; sleep 2; done &"
             ),
            description=
            "Generates random key names and connects to server 30 times over 60 seconds"
        )

        output_test_2 = core.CfnOutput(
            self,
            "ToGenInvalidUserErrors",
            value=
            (f"for i in {{1..30}}; do ssh ec2-user$RANDOM@{output1_1.to_string()}; sleep 2; done &"
             ),
            description=
            "Generates random user names and connects to server 30 times over 60 seconds"
        )
        """
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision a Saga-pattern trip-booking stack.

        Creates a single-table DynamoDB store for all booking records,
        eight Lambda functions (reserve/confirm/cancel for flights and
        hotel, plus take/refund payment), a Step Functions state machine
        that chains the happy path with compensating rollback steps, and
        an API Gateway REST API backed by a Lambda that starts an
        execution of that state machine.

        NOTE(review): relies on ``self.create_lambda`` defined elsewhere
        in the enclosing class -- assumed to return an
        ``aws_lambda.Function`` with access to ``table``; confirm there.
        """
        super().__init__(scope, id, **kwargs)

        ###
        # DynamoDB Table
        ###
        # We store Flight, Hotel and Rental Car bookings in the same table.
        #
        # For more help with single table DB structures see - https://www.dynamodbbook.com/
        # pk - the trip_id e.g. 1234
        # sk - bookingtype#booking_id e.g. HOTEL#345634, FLIGHT#574576, PAYMENT#45245
        table = dynamo_db.Table(self, "Bookings",
                                partition_key=dynamo_db.Attribute(name="pk", type=dynamo_db.AttributeType.STRING),
                                sort_key=dynamo_db.Attribute(name="sk", type=dynamo_db.AttributeType.STRING)
                                )

        ###
        # Lambda Functions
        ###
        # We need Booking and Cancellation functions for our 3 services
        #
        # All functions need access to our DynamoDB table above.
        # We also need to take payment for this trip
        #
        # 1) Flights
        # 2) Hotel
        # 3) Payment

        # 1) Flights
        reserve_flight_lambda = self.create_lambda(scope=self, lambda_id="reserveFlightLambdaHandler",
                                                   handler='flights/reserveFlight.handler', table=table)
        confirm_flight_lambda = self.create_lambda(scope=self, lambda_id="confirmFlightLambdaHandler",
                                                   handler='flights/confirmFlight.handler', table=table)
        cancel_flight_lambda = self.create_lambda(scope=self, lambda_id="cancelFlightLambdaHandler",
                                                  handler='flights/cancelFlight.handler', table=table)

        # 2) Hotel
        reserve_hotel_lambda = self.create_lambda(scope=self, lambda_id="reserveHotelLambdaHandler",
                                                  handler='hotel/reserveHotel.handler', table=table)
        confirm_hotel_lambda = self.create_lambda(scope=self, lambda_id="confirmHotelLambdaHandler",
                                                  handler='hotel/confirmHotel.handler', table=table)
        cancel_hotel_lambda = self.create_lambda(scope=self, lambda_id="cancelHotelLambdaHandler",
                                                 handler='hotel/cancelHotel.handler', table=table)

        # 3) Payment For Holiday
        take_payment_lambda = self.create_lambda(scope=self, lambda_id="takePaymentLambdaHandler",
                                                 handler='payment/takePayment.handler', table=table)
        refund_payment_lambda = self.create_lambda(scope=self, lambda_id="refundPaymentLambdaHandler",
                                                   handler='payment/refundPayment.handler', table=table)

        ###
        # Saga Pattern Step Function
        ###
        # Follows a strict order:
        # 1) Reserve Flights and Hotel
        # 2) Take Payment
        # 3) Confirm Flight and Hotel booking

        # Our two end states
        booking_succeeded = step_fn.Succeed(self, 'We have made your booking!')
        booking_failed = step_fn.Fail(self, "Sorry, We Couldn't make the booking")

        # 1) Reserve Flights and Hotel
        # The compensation (rollback) tasks are declared before the forward
        # tasks so the forward tasks can reference them in add_catch().
        # Rollback chain: refund payment -> cancel flight -> cancel hotel -> Fail.
        cancel_hotel_reservation = step_fn.Task(self, 'CancelHotelReservation',
                                                task=step_fn_tasks.InvokeFunction(cancel_hotel_lambda),
                                                result_path='$.CancelHotelReservationResult'
                                                ).add_retry(max_attempts=3).next(booking_failed)

        reserve_hotel = step_fn.Task(self, 'ReserveHotel',
                                     task=step_fn_tasks.InvokeFunction(reserve_hotel_lambda),
                                     result_path='$.ReserveHotelResult'
                                     ).add_catch(cancel_hotel_reservation, result_path="$.ReserveHotelError")

        cancel_flight_reservation = step_fn.Task(self, 'CancelFlightReservation',
                                                 task=step_fn_tasks.InvokeFunction(cancel_flight_lambda),
                                                 result_path='$.CancelFlightReservationResult'
                                                 ).add_retry(max_attempts=3).next(cancel_hotel_reservation)

        reserve_flight = step_fn.Task(self, 'ReserveFlight',
                                      task=step_fn_tasks.InvokeFunction(reserve_flight_lambda),
                                      result_path='$.ReserveFlightResult'
                                      ).add_catch(cancel_flight_reservation, result_path="$.ReserveFlightError")

        # 2) Take Payment
        refund_payment = step_fn.Task(self, 'RefundPayment',
                                      task=step_fn_tasks.InvokeFunction(refund_payment_lambda),
                                      result_path='$.RefundPaymentResult'
                                      ).add_retry(max_attempts=3).next(cancel_flight_reservation)

        take_payment = step_fn.Task(self, 'TakePayment',
                                    task=step_fn_tasks.InvokeFunction(take_payment_lambda),
                                    result_path='$.TakePaymentResult'
                                    ).add_catch(refund_payment, result_path="$.TakePaymentError")

        # 3) Confirm Flight and Hotel Booking
        # Failures after payment route through refund_payment, which unwinds
        # the full rollback chain declared above.
        confirm_hotel = step_fn.Task(self, 'ConfirmHotelBooking',
                                     task=step_fn_tasks.InvokeFunction(confirm_hotel_lambda),
                                     result_path='$.ConfirmHotelBookingResult'
                                     ).add_catch(refund_payment, result_path="$.ConfirmHotelBookingError")

        confirm_flight = step_fn.Task(self, 'ConfirmFlight',
                                      task=step_fn_tasks.InvokeFunction(confirm_flight_lambda),
                                      result_path='$.ConfirmFlightResult'
                                      ).add_catch(refund_payment, result_path="$.ConfirmFlightError")

        # Happy path: reserve both, pay, confirm both, then succeed.
        definition = step_fn.Chain \
            .start(reserve_hotel) \
            .next(reserve_flight) \
            .next(take_payment) \
            .next(confirm_hotel) \
            .next(confirm_flight) \
            .next(booking_succeeded)

        saga = step_fn.StateMachine(self, 'BookingSaga', definition=definition, timeout=core.Duration.minutes(5))

        # defines an AWS Lambda resource to connect to our API Gateway and kick
        # off our step function
        saga_lambda = _lambda.Function(self, "sagaLambdaHandler",
                                       runtime=_lambda.Runtime.NODEJS_12_X,
                                       handler="sagaLambda.handler",
                                       code=_lambda.Code.from_asset("lambdas"),
                                       environment={
                                           'statemachine_arn': saga.state_machine_arn
                                       }
                                       )
        # Allow the API-facing Lambda to call StartExecution on the saga.
        saga.grant_start_execution(saga_lambda)

        # defines an API Gateway REST API resource backed by our "stateMachineLambda" function.
        api_gw.LambdaRestApi(self, 'SagaPatternSingleTable',
                             handler=saga_lambda
                             )
# Beispiel #19
# 0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision an S3 object-ACL auto-remediation stack.

        Creates a monitored private S3 bucket and a CloudTrail trail for
        its data events, two Lambda functions (one to check whether an
        object's ACL is private, one to make it private), an SNS topic
        for InfoSecOps notifications, a Step Functions state machine that
        checks and remediates object ACLs, and a CloudWatch Events rule
        that triggers the state machine on PutObject/PutObjectAcl calls.

        Bug fix: the ``sid`` for the object-ACL-check permission was being
        assigned to the Lambda *function* object instead of the IAM
        ``PolicyStatement`` (compare the correct pattern used for the
        remediation permission below).
        """
        super().__init__(scope, id, **kwargs)

        # The code that defines your stack goes here
        pvt_bkt = _s3.Bucket(self, "s3bucket")
        core.Tag.add(pvt_bkt, key="isMonitoredBucket", value="True")

        # Lets create a cloudtrail to track s3 data events
        s3_data_event_trail = _cloudtrail.Trail(
            self,
            "s3DataEventTrailId",
            is_multi_region_trail=False,
            include_global_service_events=False,
            enable_file_validation=True)

        # Lets capture S3 Data Events only for our bucket- TO REDUCE COST
        s3_data_event_trail.add_s3_event_selector(
            prefixes=[f"{pvt_bkt.bucket_arn}/"],
            include_management_events=True,
            read_write_type=_cloudtrail.ReadWriteType.ALL)

        # Defines an AWS Lambda resource
        # NOTE: the triple-quoted block below is deliberately disabled code
        # kept for reference; the active definitions follow it.
        """
        with open("lambda_src/make_object_private.py", encoding="utf8") as fp:
            make_object_private_fn_handler_code = fp.read()

        remediate_object_acl_fn = _lambda.Function(
            self,
            id='remediateObjAclFn',
            function_name="remediate_object_acl_fn",
            runtime=_lambda.Runtime.PYTHON_3_7,
            code=_lambda.InlineCode(make_object_private_fn_handler_code),
            handler='index.lambda_handler',
            timeout=core.Duration.seconds(10)
            )

        # Lets add the necessary permission for the lambda function
        remediate_object_acl_fn_perms=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=[
                "arn:aws:s3:::*",
                ],
            actions=[
                "s3:GetObjectAcl",
                "s3:PutObjectAcl"
            ]
            )
        remediate_object_acl_fn_perms.sid="PutBucketPolicy"
        remediate_object_acl_fn.add_to_role_policy( remediate_object_acl_fn_perms )
        """

        # Lambda that reports whether a given S3 object's ACL is private.
        with open("lambda_src/is_object_private.py", encoding="utf8") as fp:
            is_object_private_fn_handler_code = fp.read()

        is_object_private_fn = _lambda.Function(
            self,
            id='isObjPrivateFn',
            function_name="is_object_private_fn",
            runtime=_lambda.Runtime.PYTHON_3_7,
            code=_lambda.InlineCode(is_object_private_fn_handler_code),
            handler='index.lambda_handler',
            timeout=core.Duration.seconds(3))

        # Lets add the necessary permission for the lambda function
        is_object_private_fn_perms = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=[
                "arn:aws:s3:::*",
            ],
            actions=["s3:GetObjectAcl"])
        # FIX: set the statement id on the PolicyStatement, not on the Lambda
        # function object (the original assigned `is_object_private_fn.sid`,
        # which merely attached a stray attribute to the Function construct).
        is_object_private_fn_perms.sid = "CheckObjectAcl"
        is_object_private_fn.add_to_role_policy(is_object_private_fn_perms)

        # Lambda that rewrites a non-compliant object's ACL to private.
        with open("lambda_src/make_object_private.py", encoding="utf8") as fp:
            make_object_private_fn_handler_code = fp.read()

        remediate_object_acl_fn = _lambda.Function(
            self,
            id='remediateObjAclFn',
            function_name="remediate_object_acl_fn",
            runtime=_lambda.Runtime.PYTHON_3_7,
            code=_lambda.InlineCode(make_object_private_fn_handler_code),
            handler='index.lambda_handler',
            timeout=core.Duration.seconds(10))

        # Lets add the necessary permission for the lambda function
        remediate_object_acl_fn_perms = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=[
                "arn:aws:s3:::*",
            ],
            actions=["s3:PutObjectAcl"])
        remediate_object_acl_fn_perms.sid = "PutObjectAcl"
        remediate_object_acl_fn.add_to_role_policy(
            remediate_object_acl_fn_perms)

        # Topic used to alert InfoSecOps when remediation fails.
        info_sec_ops_topic = _sns.Topic(self,
                                        "infoSecOpsTopicId",
                                        display_name="InfoSecTopic",
                                        topic_name="InfoSecOpsTopic")

        # Subscribe InfoSecOps Email to topic
        info_sec_ops_topic.add_subscription(
            _subs.EmailSubscription(global_args.INFO_SEC_OPS_EMAIL))

        # Grant Lambda permission to publish to topic
        # info_sec_ops_topic.grant_publish(lambda_notifier)

        # State Machine for notifying failed ACLs
        # Ref: https://docs.aws.amazon.com/cdk/api/latest/docs/aws-stepfunctions-readme.html
        ###############################################################################
        ################# STEP FUNCTIONS EXPERIMENTAL CODE - UNSTABLE #################
        ###############################################################################

        is_object_private_task = _sfn.Task(
            self,
            "isObjectPrivate?",
            task=_tasks.InvokeFunction(is_object_private_fn),
            result_path="$",
            output_path="$")

        remediate_object_acl_task = _sfn.Task(
            self,
            "RemediateObjectAcl",
            task=_tasks.InvokeFunction(remediate_object_acl_fn),
            result_path="$",
            output_path="$")

        notify_secops_task = _sfn.Task(
            self,
            "Notify InfoSecOps",
            task=_tasks.PublishToTopic(
                info_sec_ops_topic,
                integration_pattern=_sfn.ServiceIntegrationPattern.
                FIRE_AND_FORGET,
                message=_sfn.TaskInput.from_data_at("$.sns_message"),
                subject="Object Acl Remediation"))

        acl_remediation_failed_task = _sfn.Fail(self,
                                                "Acl Remediation Failed",
                                                cause="Acl Remediation Failed",
                                                error="Check Logs")

        acl_compliant_task = _sfn.Succeed(self,
                                          "Object Acl Compliant",
                                          comment="Object Acl is Compliant")

        # Workflow: check ACL -> if private, succeed; otherwise remediate and
        # re-check; notify InfoSecOps and fail when remediation does not stick.
        remediate_object_acl_sfn_definition = is_object_private_task\
            .next(_sfn.Choice(self, "Is Object Private?")\
                .when(_sfn.Condition.boolean_equals("$.is_private", True), acl_compliant_task)\
                .when(_sfn.Condition.boolean_equals("$.is_private", False), remediate_object_acl_task\
                    .next(_sfn.Choice(self, "Object Remediation Complete?")\
                        .when(_sfn.Condition.boolean_equals("$.status", True),acl_compliant_task)\
                        .when(_sfn.Condition.boolean_equals("$.status", False), notify_secops_task.next(acl_remediation_failed_task))\
                        .otherwise(acl_remediation_failed_task)\
                        )
                    )
                .otherwise(acl_remediation_failed_task)
            )

        remediate_object_acl_statemachine = _sfn.StateMachine(
            self,
            "stateMachineId",
            definition=remediate_object_acl_sfn_definition,
            timeout=core.Duration.minutes(3))

        # Cloudwatch Event Triggers
        put_object_acl_event_targets = []
        """
        put_object_acl_event_targets.append(
            _targets.LambdaFunction( 
                handler=remediate_object_acl_fn
                )
            )
        """
        put_object_acl_event_targets.append(
            _targets.SfnStateMachine(
                machine=remediate_object_acl_statemachine))

        # Match CloudTrail-delivered PutObject/PutObjectAcl calls on our bucket.
        put_object_acl_event_pattern = _events.EventPattern(
            source=["aws.s3"],
            detail_type=["AWS API Call via CloudTrail"],
            detail={
                "eventSource": ["s3.amazonaws.com"],
                "eventName": ["PutObjectAcl", "PutObject"],
                "requestParameters": {
                    "bucketName": [f"{pvt_bkt.bucket_name}"]
                }
            })

        put_object_acl_event_pattern_rule = _events.Rule(
            self,
            "putObjectAclEventId",
            event_pattern=put_object_acl_event_pattern,
            rule_name=f"put_s3_policy_event_{global_args.OWNER}",
            enabled=True,
            description="Trigger an event for S3 PutObjectAcl or PutObject",
            targets=put_object_acl_event_targets)

        ###########################################
        ################# OUTPUTS #################
        ###########################################

        output0 = core.CfnOutput(
            self,
            "SecuirtyAutomationFrom",
            value=f"{global_args.SOURCE_INFO}",
            description=
            "To know more about this automation stack, check out our github page."
        )

        output1 = core.CfnOutput(
            self,
            "MonitoredS3Bucket",
            value=(f"https://console.aws.amazon.com/s3/buckets/"
                   f"{pvt_bkt.bucket_name}"),
            description=f"S3 Bucket for testing purposes")

        output2 = core.CfnOutput(
            self,
            "Helpercommands",
            value=
            (f"aws s3api get-object-acl  --bucket ${pvt_bkt.bucket_name} --key OBJECT-KEY-NAME"
             ),
            description=
            f"Commands to set object to public, Update OBJECT-KEY-NAME to your needs"
        )
# Beispiel #20
# 0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision a Twitter-driven session-scheduling stack.

        Creates a DynamoDB session cache, Lambda layers for shared
        dependencies, Lambdas for Twitter webhook/CRC handling and for
        parsing tweets, fetching session info, building a schedule, and
        tweeting it back, an API Gateway webhook endpoint, a Step
        Functions state machine that chains the scheduling steps, and an
        EventBridge rule that starts the machine on matching tweets.

        NOTE(review): requires the TWITTER_SECRET_ARN environment variable
        at synth time, and uses the PYTHON_2_7 Lambda runtime for the CRC
        function -- both look dated; confirm before reuse.
        """
        super().__init__(scope, id, **kwargs)

        # A cache to temporarily hold session info
        # (pk = session code; entries expire via the 'expires' TTL attribute)
        session_cache_table = aws_dynamodb.Table(
            self,
            'session_cache_table',
            partition_key={
                'name': 'code',
                'type': aws_dynamodb.AttributeType.STRING
            },
            billing_mode=aws_dynamodb.BillingMode.PAY_PER_REQUEST,
            time_to_live_attribute='expires')

        #--
        #  Secrets
        #--------------------#

        # Twitter secrets are stored external to this stack
        twitter_secret = aws_secretsmanager.Secret.from_secret_attributes(
            self,
            'twitter_secret',
            secret_arn=os.environ['TWITTER_SECRET_ARN'])

        #--
        #  Layers
        #--------------------#

        # Each of these dependencies is used in 2 or more functions, extracted to layer for ease of use
        twitter_layer = aws_lambda.LayerVersion(
            self,
            'twitter_layer',
            code=aws_lambda.AssetCode('layers/twitter_layer'),
            compatible_runtimes=[
                aws_lambda.Runtime.PYTHON_2_7, aws_lambda.Runtime.PYTHON_3_6
            ])

        boto_layer = aws_lambda.LayerVersion(
            self,
            'boto_layer',
            code=aws_lambda.AssetCode('layers/boto_layer'),
            compatible_runtimes=[aws_lambda.Runtime.PYTHON_3_6])

        #--
        #  Functions
        #--------------------#

        # Handles CRC validation requests from Twitter
        twitter_crc_func = aws_lambda.Function(
            self,
            "twitter_crc_func",
            code=aws_lambda.AssetCode('functions/twitter_crc_func'),
            handler="lambda.handler",
            layers=[twitter_layer],
            runtime=aws_lambda.Runtime.PYTHON_2_7,
            environment={'twitter_secret': twitter_secret.secret_arn})

        # Grant this function the ability to read Twitter credentials
        twitter_secret.grant_read(twitter_crc_func.role)

        # Handle schedule requests from Twitter
        twitter_webhook_func = aws_lambda.Function(
            self,
            "twitter_webhook_func",
            code=aws_lambda.AssetCode('functions/twitter_webhook_func'),
            handler="lambda.handler",
            layers=[boto_layer, twitter_layer],
            runtime=aws_lambda.Runtime.PYTHON_3_6,
            environment={'twitter_secret': twitter_secret.secret_arn})

        # Grant this function permission to read Twitter credentials
        twitter_secret.grant_read(twitter_webhook_func.role)

        # Grant this function permission to publish tweets to EventBridge
        twitter_webhook_func.add_to_role_policy(
            aws_iam.PolicyStatement(actions=["events:PutEvents"],
                                    resources=["*"]))

        # Use API Gateway as the webhook endpoint
        twitter_api = aws_apigateway.LambdaRestApi(
            self, 'twitter_api', handler=twitter_webhook_func, proxy=False)

        # Tweets are POSTed to the endpoint
        twitter_api.root.add_method('POST')

        # Handles twitter CRC validation requests via GET to the webhook
        twitter_api.root.add_method(
            'GET', aws_apigateway.LambdaIntegration(twitter_crc_func))

        # Extract relevant info from the tweet, including session codes
        parse_tweet_func = aws_lambda.Function(
            self,
            "parse_tweet_func",
            code=aws_lambda.AssetCode('functions/parse_tweet_func'),
            handler="lambda.handler",
            runtime=aws_lambda.Runtime.PYTHON_3_6)

        # Get session information for requested codes
        get_sessions_func = aws_lambda.Function(
            self,
            "get_sessions_func",
            code=aws_lambda.AssetCode('functions/get_sessions_func'),
            handler="lambda.handler",
            runtime=aws_lambda.Runtime.PYTHON_3_6,
            timeout=core.Duration.seconds(60),
            layers=[boto_layer],
            environment={
                'CACHE_TABLE': session_cache_table.table_name,
                'LOCAL_CACHE_TTL':
                str(1 * 60 * 60),  # Cache sessions locally for 1 hour
                'REMOTE_CACHE_TTL': str(12 * 60 * 60)
            })  # Cache sessions remotely for 12 hours

        # This functions needs permissions to read and write to the table
        session_cache_table.grant_write_data(get_sessions_func)
        session_cache_table.grant_read_data(get_sessions_func)

        # Create a schedule without conflicts
        create_schedule_func = aws_lambda.Function(
            self,
            "create_schedule_func",
            code=aws_lambda.AssetCode('functions/create_schedule_func'),
            handler="lambda.handler",
            runtime=aws_lambda.Runtime.PYTHON_3_6,
            timeout=core.Duration.seconds(60))

        # Tweet the response to the user
        tweet_schedule_func = aws_lambda.Function(
            self,
            "tweet_schedule_func",
            code=aws_lambda.AssetCode('functions/tweet_schedule_func'),
            handler="lambda.handler",
            layers=[boto_layer, twitter_layer],
            runtime=aws_lambda.Runtime.PYTHON_3_6,
            environment={'twitter_secret': twitter_secret.secret_arn})
        twitter_secret.grant_read(tweet_schedule_func.role)

        #--
        #  States
        #--------------------#
        # States are declared in reverse so each step can .next() the one
        # already defined after it.

        # Step 4
        tweet_schedule_job = aws_stepfunctions.Task(
            self,
            'tweet_schedule_job',
            task=aws_stepfunctions_tasks.InvokeFunction(tweet_schedule_func))

        # Step 3
        create_schedule_job = aws_stepfunctions.Task(
            self,
            'create_schedule_job',
            task=aws_stepfunctions_tasks.InvokeFunction(create_schedule_func),
            input_path="$.sessions",
            result_path="$.schedule")
        create_schedule_job.next(tweet_schedule_job)

        # Step 2 - Get associated sessions (scrape or cache)
        get_sessions_job = aws_stepfunctions.Task(
            self,
            'get_sessions_job',
            task=aws_stepfunctions_tasks.InvokeFunction(get_sessions_func))

        # Prepare to get session info in parallel using the Map state
        get_sessions_map = aws_stepfunctions.Map(self,
                                                 'get_sessions_map',
                                                 items_path="$.codes",
                                                 result_path="$.sessions")
        get_sessions_map.iterator(get_sessions_job)
        get_sessions_map.next(create_schedule_job)

        # Shortcut if no session codes are supplied
        check_num_codes = aws_stepfunctions.Choice(self, 'check_num_codes')
        check_num_codes.when(
            aws_stepfunctions.Condition.number_greater_than('$.num_codes', 0),
            get_sessions_map)
        check_num_codes.otherwise(aws_stepfunctions.Succeed(self, "no_codes"))

        # Step 1 - Parse incoming tweet and prepare for scheduling
        parse_tweet_job = aws_stepfunctions.Task(
            self,
            'parse_tweet_job',
            task=aws_stepfunctions_tasks.InvokeFunction(parse_tweet_func))
        parse_tweet_job.next(check_num_codes)

        #--
        #  State Machines
        #--------------------#

        schedule_machine = aws_stepfunctions.StateMachine(
            self, "schedule_machine", definition=parse_tweet_job)

        # A rule to filter reInventSched tweet events
        reinvent_sched_rule = aws_events.Rule(
            self,
            "reinvent_sched_rule",
            event_pattern={"source": ["reInventSched"]})

        # Matching events start the image pipline
        reinvent_sched_rule.add_target(
            aws_events_targets.SfnStateMachine(
                schedule_machine,
                input=aws_events.RuleTargetInput.from_event_path("$.detail")))
# Beispiel #21
# 0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision an IAM Access Analyzer finding-triage stack.

        Creates an ACCOUNT-scope Access Analyzer with one archive rule,
        four Lambda functions (context enrichment, SNS notification,
        finding archival, risk evaluation), a KMS-encrypted SNS topic
        with an email subscription, a Step Functions state machine that
        enriches a finding, evaluates it, and then archives or notifies,
        and an EventBridge rule that triggers on ACTIVE findings.
        """
        super().__init__(scope, id, **kwargs)

        # Email address is supplied at deploy time as a CloudFormation parameter.
        email_subscription_parameter = core.CfnParameter(
            self,
            "EmailSubscriptionParameter",
            description="Email Address for Notification Subscription")
        email_subscription = email_subscription_parameter.value_as_string

        # Archive rule: auto-archive findings whose principal is this account id.
        ar1 = accessanalyzer.CfnAnalyzer.ArchiveRuleProperty(
            rule_name="test",
            filter=[
                accessanalyzer.CfnAnalyzer.FilterProperty(
                    property="principal.AWS", eq=["123456789123"])
            ])
        analyzer = accessanalyzer.CfnAnalyzer(
            self,
            id="accessanalyzer",
            type="ACCOUNT",
            tags=[core.CfnTag(key="AccessAnalyzerType", value="ACCOUNT")],
            archive_rules=[ar1])

        # Shared runtime for every Lambda in this stack.
        runtime = aws_lambda.Runtime.PYTHON_3_8

        boto3_lambda_layer = aws_lambda.LayerVersion(
            self,
            "Boto3LambdaLayer",
            code=aws_lambda.AssetCode("./layers/boto3"),
            compatible_runtimes=[runtime],
            description="Boto3 Lambda Layer")

        # Enriches a finding with tags from the implicated resources.
        context_enrichment = aws_lambda.Function(
            self,
            "context_enrichment",
            runtime=runtime,
            handler="app.handler",
            code=aws_lambda.AssetCode("./functions/context-enrichment"),
            layers=[boto3_lambda_layer])
        handler_statement = iam.PolicyStatement(actions=[
            "iam:ListRoleTags", "s3:GetBucketTagging", "lambda:ListTags",
            "sqs:ListQueueTags", "kms:ListAliases", "kms:ListResourceTags"
        ],
                                                effect=iam.Effect.ALLOW,
                                                resources=["*"])
        context_enrichment.add_to_role_policy(handler_statement)

        # CMK so the SNS topic is encrypted at rest.
        cmk_key = kms.Key(
            self,
            "SNSEncryptionAtRestKey",
            description="SNS Encryption at rest key",
            alias="sns-encryption-at-rest",
            enable_key_rotation=True,
        )

        email_topic = sns.Topic(
            self,
            "AccessAnalyzerNotificationTopic",
            display_name="Access Analyzer Finding Notification Topic",
            master_key=cmk_key)
        email_topic.add_subscription(
            subscriptions.EmailSubscription(email_subscription))

        # Publishes finding notifications to the email topic.
        notification = aws_lambda.Function(
            self,
            "notification",
            runtime=runtime,
            handler="app.handler",
            code=aws_lambda.AssetCode("./functions/notification"),
            layers=[boto3_lambda_layer],
            environment={"SNS_TOPIC_ARN": email_topic.topic_arn})
        notification_statement = iam.PolicyStatement(actions=[
            "sns:Publish",
        ],
                                                     effect=iam.Effect.ALLOW,
                                                     resources=["*"])
        notification.add_to_role_policy(notification_statement)
        # Needed because the topic is CMK-encrypted.
        cmk_key.grant_encrypt_decrypt(notification)

        # Archives a finding via the Access Analyzer API.
        archive_access_analyzer_finding = aws_lambda.Function(
            self,
            "archive-access-analyzer-finding",
            runtime=runtime,
            handler="app.handler",
            code=aws_lambda.AssetCode(
                "./functions/archive-access-analyzer-finding"),
            layers=[boto3_lambda_layer])
        archive_statement = iam.PolicyStatement(actions=[
            "access-analyzer:UpdateFindings",
        ],
                                                effect=iam.Effect.ALLOW,
                                                resources=["*"])
        archive_access_analyzer_finding.add_to_role_policy(archive_statement)

        # Decides whether a finding should be archived or notified on.
        evaluate_access_analyzer_finding = aws_lambda.Function(
            self,
            "evaluate-access-analyzer-finding",
            runtime=runtime,
            handler="app.handler",
            code=aws_lambda.AssetCode(
                "./functions/evaluate-access-analyzer-finding"),
            layers=[boto3_lambda_layer])

        #https://docs.aws.amazon.com/cdk/api/latest/docs/aws-stepfunctions-readme.html
        access_analyzer_handler_task = sfn.Task(
            self,
            "Context Enrichment",
            task=sfn_tasks.InvokeFunction(context_enrichment),
            result_path="$.guid",
        )

        notification_task = sfn.Task(
            self,
            "Send Notification",
            task=sfn_tasks.InvokeFunction(notification),
            result_path="$.guid",
        )

        archive_task = sfn.Task(
            self,
            "Archive Finding",
            task=sfn_tasks.InvokeFunction(archive_access_analyzer_finding),
            result_path="$.guid",
        )

        evaluate_task = sfn.Task(
            self,
            "Evaluate Risk Level",
            task=sfn_tasks.InvokeFunction(evaluate_access_analyzer_finding),
            result_path="$.guid",
        )

        # Enrich -> evaluate -> branch on the evaluated status.
        # NOTE(review): the Choice has no .otherwise() default; a status other
        # than ARCHIVE/NOTIFY would fail the execution -- confirm intended.
        definition=access_analyzer_handler_task. \
          next(evaluate_task). \
          next(sfn.Choice(self, "Archive?"). \
            when(sfn.Condition.string_equals("$.guid.status", "ARCHIVE"), archive_task). \
            when(sfn.Condition.string_equals("$.guid.status", "NOTIFY"), notification_task) \
          )

        state_machine = sfn.StateMachine(
            self,
            "Access-Analyzer-Automatic-Finding-Archive-State-Machine",
            definition=definition,
            timeout=core.Duration.minutes(5),
        )

        #https://docs.aws.amazon.com/step-functions/latest/dg/tutorial-cloudwatch-events-s3.html
        # NOTE(review): the rule targets both the state machine and the
        # context_enrichment Lambda directly, so enrichment runs twice per
        # event (once standalone, once inside the machine) -- confirm intended.
        access_analyzer_finding_rule = aws_events.Rule(
            self,
            "AccessAnalzyerFindingActiveEventRule",
            description="Access Analyzer Finding Event Active",
            enabled=True,
            event_pattern=aws_events.EventPattern(
                source=["aws.access-analyzer"],
                detail_type=["Access Analyzer Finding"],
                detail={"status": ["ACTIVE"]}),
            targets=[
                aws_events_targets.SfnStateMachine(state_machine),
                aws_events_targets.LambdaFunction(context_enrichment)
            ])
Beispiel #22
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Build a serverless image-processing pipeline.

        Object writes to an S3 bucket are captured via CloudTrail, matched
        by an event rule, and fed into a Step Functions state machine that
        copies the image, renders a thumbnail, and records page metadata.
        """
        super().__init__(scope, id, **kwargs)

        # Source bucket: every pipeline run starts with an object landing here.
        image_bucket = aws_s3.Bucket(self, "imageBucket")

        # CloudTrail records the S3 API activity we want to react to.
        image_bucket_trail = aws_cloudtrail.Trail(self,
                                                  "imageBucketTrail",
                                                  is_multi_region_trail=False)

        # Only data-plane *write* events for objects under this bucket.
        image_bucket_trail.add_s3_event_selector(
            include_management_events=False,
            prefixes=[f"{image_bucket.bucket_arn}/"],
            read_write_type=aws_cloudtrail.ReadWriteType.WRITE_ONLY)

        # Narrow the event stream down to PutObject/CopyObject on our bucket.
        image_bucket_rule = aws_events.Rule(
            self,
            "imageBucketRule",
            event_pattern={
                "source": ["aws.s3"],
                "detail": {
                    "eventSource": ["s3.amazonaws.com"],
                    "eventName": ["PutObject", "CopyObject"],
                    "requestParameters": {
                        "bucketName": [image_bucket.bucket_name]
                    }
                }
            })

        # -- Lambda layers ------------------------------------------------

        opencv_layer = aws_lambda.LayerVersion(
            self,
            'opencvLayer',
            code=aws_lambda.AssetCode('layers/opencvLayer'),
            compatible_runtimes=[aws_lambda.Runtime.PYTHON_3_6])

        boto3_layer = aws_lambda.LayerVersion(
            self,
            'boto3Layer',
            code=aws_lambda.AssetCode('layers/boto3Layer'),
            compatible_runtimes=[aws_lambda.Runtime.PYTHON_3_6])

        # -- Lambda functions -----------------------------------------------

        # Extracts name, extension, etc. from the uploaded object.
        get_image_info_func = aws_lambda.Function(
            self,
            "getImageInfoFunc",
            code=aws_lambda.AssetCode('functions/getImageInfoFunc'),
            handler="lambda.handler",
            runtime=aws_lambda.Runtime.PYTHON_3_6)

        # Destination bucket doubling as the static website host.
        web_bucket = aws_s3.Bucket(self,
                                   "webBucket",
                                   website_index_document='index.html')

        # Copies the original image into the web bucket.
        copy_image_func = aws_lambda.Function(
            self,
            "copyImageFunc",
            code=aws_lambda.AssetCode('functions/copyImageFunc'),
            handler="lambda.handler",
            runtime=aws_lambda.Runtime.PYTHON_3_6,
            layers=[boto3_layer],
            environment={
                'OUTPUTBUCKET': web_bucket.bucket_name,
                'OUTPUTPREFIX': 'images/'
            })

        # Needs read on the source bucket and write on the destination.
        image_bucket.grant_read(copy_image_func)
        web_bucket.grant_write(copy_image_func)

        # Renders a thumbnail via OpenCV; heavier timeout/memory than the rest.
        create_thumbnail_func = aws_lambda.Function(
            self,
            "createThumbnailFunc",
            code=aws_lambda.AssetCode('functions/createThumbnailFunc'),
            handler="lambda.handler",
            runtime=aws_lambda.Runtime.PYTHON_3_6,
            layers=[boto3_layer, opencv_layer],
            timeout=core.Duration.seconds(10),
            memory_size=256,
            environment={
                'OUTPUTBUCKET': web_bucket.bucket_name,
                'OUTPUTPREFIX': 'images/'
            })

        # Same read/write grants as the copy function.
        image_bucket.grant_read(create_thumbnail_func)
        web_bucket.grant_write(create_thumbnail_func)

        # DynamoDB table holding per-page metadata, with a change stream.
        page_table = aws_dynamodb.Table(
            self,
            'pageTable',
            partition_key={
                'name': 'pageName',
                'type': aws_dynamodb.AttributeType.STRING
            },
            billing_mode=aws_dynamodb.BillingMode.PAY_PER_REQUEST,
            stream=aws_dynamodb.StreamViewType.NEW_IMAGE)

        # Persists page/image details after the copy + thumbnail steps.
        update_page_info_func = aws_lambda.Function(
            self,
            "updatePageInfoFunc",
            code=aws_lambda.AssetCode('functions/updatePageInfoFunc'),
            handler="lambda.handler",
            runtime=aws_lambda.Runtime.PYTHON_3_6,
            layers=[boto3_layer],
            environment={
                'PAGETABLE': page_table.table_name,
                'PAGEPREFIX': 'posts/'
            })

        page_table.grant_write_data(update_page_info_func)

        # -- Step Functions states ------------------------------------------

        image_pipeline_done = aws_stepfunctions.Succeed(
            self, "Done processing image")

        update_page_info_job = aws_stepfunctions.Task(
            self,
            'Update page info',
            task=aws_stepfunctions_tasks.InvokeFunction(update_page_info_func))
        update_page_info_job.next(image_pipeline_done)

        copy_image_job = aws_stepfunctions.Task(
            self,
            'Copy image',
            task=aws_stepfunctions_tasks.InvokeFunction(copy_image_func))

        create_thumbnail_job = aws_stepfunctions.Task(
            self,
            'Create thumbnail',
            task=aws_stepfunctions_tasks.InvokeFunction(create_thumbnail_func))

        # Copy and thumbnail run concurrently; page info is updated after.
        process_image = aws_stepfunctions.Parallel(self,
                                                   'Process image',
                                                   result_path="$.images")
        process_image.branch(copy_image_job)
        process_image.branch(create_thumbnail_job)
        process_image.next(update_page_info_job)

        # Non-PNG uploads short-circuit to success without processing.
        not_png = aws_stepfunctions.Succeed(self, "Not a PNG")

        check_for_png = aws_stepfunctions.Choice(self, 'Is a PNG?')
        check_for_png.when(
            aws_stepfunctions.Condition.string_equals('$.extension', 'png'),
            process_image)
        check_for_png.otherwise(not_png)

        # Entry state: inspect the object, then branch on its extension.
        get_image_info_job = aws_stepfunctions.Task(
            self,
            'Get image info',
            task=aws_stepfunctions_tasks.InvokeFunction(get_image_info_func))
        get_image_info_job.next(check_for_png)

        image_pipeline = aws_stepfunctions.StateMachine(
            self, "imagePipeline", definition=get_image_info_job)

        # Matching bucket events start the pipeline, passing only the
        # request parameters from the CloudTrail record as input.
        image_bucket_rule.add_target(
            aws_events_targets.SfnStateMachine(
                image_pipeline,
                input=aws_events.RuleTargetInput.from_event_path(
                    "$.detail.requestParameters")))
Beispiel #23
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Wire up automatic triage of IAM Access Analyzer findings.

        ACTIVE findings are routed through a Step Functions state machine
        that enriches the finding with resource tags, evaluates its risk
        level, and either archives it or emails a notification through a
        KMS-encrypted SNS topic.
        """
        super().__init__(scope, id, **kwargs)

        # Raw string literal: the original non-raw pattern relied on invalid
        # escape sequences (e.g. ``\[``, ``\s``), which emit a
        # DeprecationWarning today and are slated to become a SyntaxError.
        # The runtime pattern value is unchanged.
        email_subscription_parameter = core.CfnParameter(
            self,
            "EmailSubscriptionParameter",
            description="Email Address for Notification Subscription",
            allowed_pattern=
            r'^(([^<>()\[\]\.,;:\s@"]+(\.[^<>()\[\]\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$',
            min_length=1,
            constraint_description="Must be a valid email.")
        email_subscription = email_subscription_parameter.value_as_string

        # Shared layer so every function gets a current boto3 SDK.
        boto3_lambda_layer = self.create_dependencies_layer(
            id="boto3layer",
            requirements_path="./layers/boto3/requirements.txt",
            output_dir="./layers/boto3")

        is_inline = False

        # Adds resource context (tags) to the raw finding event.
        context_enrichment = self.create_lambda_function(
            boto3_lambda_layer, "./functions/context-enrichment",
            "context_enrichment", is_inline)
        # Read-only tag lookups across the services a finding can reference.
        handler_statement = iam.PolicyStatement(actions=[
            "iam:ListRoleTags", "s3:GetBucketTagging", "lambda:ListTags",
            "sqs:ListQueueTags", "kms:ListAliases", "kms:ListResourceTags"
        ],
                                                effect=iam.Effect.ALLOW,
                                                resources=["*"])
        context_enrichment.add_to_role_policy(handler_statement)

        # Customer-managed key so SNS messages are encrypted at rest.
        cmk_key = kms.Key(
            self,
            "SNSEncryptionAtRestKey",
            description="SNS Encryption at rest key",
            alias="sns-encryption-at-rest",
            enable_key_rotation=True,
        )

        email_topic = sns.Topic(
            self,
            "AccessAnalyzerNotificationTopic",
            display_name="Access Analyzer Finding Notification Topic",
            master_key=cmk_key)
        email_topic.add_subscription(
            subscriptions.EmailSubscription(email_subscription))

        # Publishes findings that need a human decision to the topic.
        notification = self.create_lambda_function(
            boto3_lambda_layer, "./functions/notification", "notification",
            is_inline, {"SNS_TOPIC_ARN": email_topic.topic_arn})
        notification_statement = iam.PolicyStatement(actions=[
            "sns:Publish",
        ],
                                                     effect=iam.Effect.ALLOW,
                                                     resources=["*"])
        notification.add_to_role_policy(notification_statement)
        # Publishing to the encrypted topic requires use of its CMK.
        cmk_key.grant_encrypt_decrypt(notification)

        # Archives findings the evaluator deems safe.
        archive_access_analyzer_finding = self.create_lambda_function(
            boto3_lambda_layer, "./functions/archive-access-analyzer-finding",
            "archive-access-analyzer-finding", is_inline)
        archive_statement = iam.PolicyStatement(actions=[
            "access-analyzer:UpdateFindings",
        ],
                                                effect=iam.Effect.ALLOW,
                                                resources=["*"])
        archive_access_analyzer_finding.add_to_role_policy(archive_statement)

        # Decides whether a finding should be archived or notified on.
        evaluate_access_analyzer_finding = self.create_lambda_function(
            boto3_lambda_layer, "./functions/evaluate-access-analyzer-finding",
            "evaluate-access-analyzer-finding", is_inline)

        #https://docs.aws.amazon.com/cdk/api/latest/docs/aws-stepfunctions-readme.html
        access_analyzer_handler_task = sfn.Task(
            self,
            "Context Enrichment",
            task=sfn_tasks.InvokeFunction(context_enrichment),
            result_path="$.guid",
        )

        notification_task = sfn.Task(
            self,
            "Send Notification",
            task=sfn_tasks.InvokeFunction(notification),
            result_path="$.guid",
        )

        archive_task = sfn.Task(
            self,
            "Archive Finding",
            task=sfn_tasks.InvokeFunction(archive_access_analyzer_finding),
            result_path="$.guid",
        )

        evaluate_task = sfn.Task(
            self,
            "Evaluate Risk Level",
            task=sfn_tasks.InvokeFunction(evaluate_access_analyzer_finding),
            result_path="$.guid",
        )

        # Enrich -> evaluate -> archive or notify, keyed on $.guid.status.
        # NOTE(review): a status other than ARCHIVE/NOTIFY matches no choice
        # branch and will fail the execution with States.NoChoiceMatched —
        # confirm that is the intended behavior.
        definition = access_analyzer_handler_task \
            .next(evaluate_task) \
            .next(sfn.Choice(self, "Archive?")
                  .when(sfn.Condition.string_equals(
                      "$.guid.status", "ARCHIVE"), archive_task)
                  .when(sfn.Condition.string_equals(
                      "$.guid.status", "NOTIFY"), notification_task))

        state_machine = sfn.StateMachine(
            self,
            "Access-Analyzer-Automatic-Finding-Archive-State-Machine",
            definition=definition,
            timeout=core.Duration.minutes(5),
        )

        #https://docs.aws.amazon.com/step-functions/latest/dg/tutorial-cloudwatch-events-s3.html
        # ACTIVE findings trigger both the state machine and, directly, the
        # context-enrichment function.
        access_analyzer_finding_rule = aws_events.Rule(
            self,
            "AccessAnalzyerFindingActiveEventRule",
            description="Access Analyzer Finding Event Active",
            enabled=True,
            event_pattern=aws_events.EventPattern(
                source=["aws.access-analyzer"],
                detail_type=["Access Analyzer Finding"],
                detail={"status": ["ACTIVE"]}),
            targets=[
                aws_events_targets.SfnStateMachine(state_machine),
                aws_events_targets.LambdaFunction(context_enrichment)
            ])