Example #1
    def __init__(self, scope: Construct, stack_id: str, *,
                 props: StorageTierDocDBProps, **kwargs):
        """
        Initializes a new instance of StorageTier
        :param scope: The scope of this construct.
        :param stack_id: the ID of this construct.
        :param props: The properties for the storage tier.
        :param kwargs: Any kwargs that need to be passed on to the parent class.
        """
        super().__init__(scope, stack_id, props=props, **kwargs)

        doc_db = DatabaseCluster(
            self,
            'DocDBCluster',
            vpc=props.vpc,
            vpc_subnets=SubnetSelection(subnet_type=SubnetType.PRIVATE),
            instance_type=props.database_instance_type,
            # TODO - To keep costs down, this example uses only one database instance.
            # When creating your render farm, it is recommended to use at least two instances for redundancy.
            instances=1,
            master_user=Login(username='******'),
            engine_version='3.6.0',
            backup=BackupProps(
                # We recommend setting the retention of your backups to 15 days
                # for security reasons. The default retention is just one day.
                # Please note that changing this value will affect cost.
                retention=Duration.days(15)),
            # TODO - Evaluate this removal policy for your own needs. This is set to DESTROY to
            # cleanly remove everything when this stack is destroyed. If you would like to ensure
            # that your data is not accidentally deleted, you should modify this value.
            removal_policy=RemovalPolicy.DESTROY)

        self.database = DatabaseConnection.for_doc_db(database=doc_db,
                                                      login=doc_db.secret)
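A possible call site for the construct above, shown only as a sketch: the exact fields of StorageTierDocDBProps and the module it lives in are assumptions, inferred from the attributes the example reads (props.vpc and props.database_instance_type).

from aws_cdk.core import App, Stack
from aws_cdk.aws_ec2 import InstanceClass, InstanceSize, InstanceType, Vpc

app = App()
network = Stack(app, 'Network')
vpc = Vpc(network, 'Vpc', max_azs=2)
# Hypothetical: StorageTierDocDB and StorageTierDocDBProps are assumed to be
# importable from the application's own storage tier module.
StorageTierDocDB(
    app, 'StorageTier',
    props=StorageTierDocDBProps(
        vpc=vpc,
        database_instance_type=InstanceType.of(InstanceClass.MEMORY5, InstanceSize.LARGE)))
app.synth()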
Example #2
 def _create_buckets(self):
     self.anidb_titles_bucket = Bucket(
         self,
         "anidb_titles_bucket",
         block_public_access=BlockPublicAccess(
             block_public_acls=True,
             block_public_policy=True,
         ),
         removal_policy=core.RemovalPolicy.DESTROY,
         lifecycle_rules=[
             LifecycleRule(expiration=Duration.days(3)),
         ]
     )
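If the intent is to block every form of public access rather than only ACLs and bucket policies, the same call could use the built-in preset instead; a sketch of that variant, keeping everything else from the example unchanged:

     self.anidb_titles_bucket = Bucket(
         self,
         "anidb_titles_bucket",
         # BLOCK_ALL also sets ignore_public_acls and restrict_public_buckets
         block_public_access=BlockPublicAccess.BLOCK_ALL,
         removal_policy=core.RemovalPolicy.DESTROY,
         lifecycle_rules=[
             LifecycleRule(expiration=Duration.days(3)),
         ]
     )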
Example #3
    def create_cloudfront_distribution(self):

        return Distribution(self,
                            "image-resize-distribution",
                            default_behavior=BehaviorOptions(
                                origin=S3Origin(bucket=self.image_bucket),
                                cache_policy=CachePolicy(
                                    self,
                                    "image-resize-cache-policy",
                                    default_ttl=Duration.seconds(0),
                                    min_ttl=Duration.seconds(0),
                                    max_ttl=Duration.days(365))),
                            default_root_object="index.html")
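The method only builds the distribution; a caller might also surface its domain name as a stack output. A sketch, assuming CfnOutput is imported from aws_cdk.core and that self is the same stack:

        distribution = self.create_cloudfront_distribution()
        CfnOutput(self,
                  "image-resize-domain-name",
                  value=distribution.distribution_domain_name)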
Example #4
 def _create_user_pool(self) -> cognito.UserPool:
     pool = cognito.UserPool(
         scope=self,
         id="orbit-user-pool",
         account_recovery=cognito.AccountRecovery.EMAIL_ONLY,
         auto_verify=cognito.AutoVerifiedAttrs(email=True, phone=False),
         custom_attributes=None,
         email_settings=None,
         lambda_triggers=None,
         mfa=cognito.Mfa.OFF,
         mfa_second_factor=None,
         password_policy=cognito.PasswordPolicy(
             min_length=8,
             require_digits=True,
             require_lowercase=True,
             require_symbols=True,
             require_uppercase=True,
             temp_password_validity=Duration.days(5),
         ),
         self_sign_up_enabled=False,
         sign_in_aliases=cognito.SignInAliases(email=True,
                                               phone=False,
                                               preferred_username=False,
                                               username=True),
         sign_in_case_sensitive=True,
         sms_role=None,
         sms_role_external_id=None,
         standard_attributes=cognito.StandardAttributes(
             email=cognito.StandardAttribute(required=True, mutable=True)),
         user_invitation=cognito.UserInvitationConfig(
             email_subject="Invite to join Orbit Workbench!",
             email_body=
             "Hello, you have been invited to join Orbit Workbench!<br/><br/>"
             "Username: {username}<br/>"
             "Temporary password: {####}<br/><br/>"
             "Regards",
         ),
         user_pool_name=f"orbit-{self.env_name}-user-pool",
     )
     pool.apply_removal_policy(policy=core.RemovalPolicy.DESTROY)
     pool.add_domain(
         id="orbit-user-pool-domain",
         cognito_domain=cognito.CognitoDomainOptions(
             domain_prefix=f"orbit-{self.context.account_id}-{self.env_name}"
         ),
     )
     return pool
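The pool above is created without an app client; one could be attached with add_client so a frontend can authenticate against it. A sketch only - the client id and auth flows below are assumptions, not taken from the source:

     client = pool.add_client(
         "orbit-user-pool-client",
         auth_flows=cognito.AuthFlow(user_password=True, user_srp=True),
         generate_secret=False,
     )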
Example #5
class Config:
    # Data bucket settings
    data_bucket_name = 'epg-data-s3-bucket-42'
    data_bucket_noncurrent_version_expiration = Duration.days(30)

    # Out bucket settings
    out_bucket_name = 'epg-out-s3-bucket-42'

    # Notifications
    email_recipient = '*****@*****.**'

    # Update function
    update_function_rate = Duration.minutes(5)
    error_count_to_notify = 12

    @staticmethod
    def period_to_check_error_count() -> Duration:
        return Duration.minutes(Config.update_function_rate.to_minutes() *
                                Config.error_count_to_notify * 2)
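With the defaults above, the window returned by period_to_check_error_count works out to 120 minutes; a quick check (not part of the source):

# 5-minute update rate * 12 errors * 2 = 120 minutes
assert Config.period_to_check_error_count().to_minutes() == 120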
Example #6
    def __init__(self, app: App, id: str, **kwargs) -> None:
        super().__init__(app, id, **kwargs)

        self.template_options.description = "(SO0123) Improving Forecast Accuracy with Machine Learning %%VERSION%% - This solution provides a mechanism to automate Amazon Forecast predictor and forecast generation and visualize it via an Amazon SageMaker Jupyter Notebook"

        # set up the template parameters
        email = CfnParameter(
            self,
            id="Email",
            type="String",
            description="Email to notify with forecast results",
            default="",
            max_length=50,
            allowed_pattern=
            r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$|^$)",
            constraint_description="Must be a valid email address or blank",
        )

        lambda_log_level = CfnParameter(
            self,
            id="LambdaLogLevel",
            type="String",
            description="Change the verbosity of the logs output to CloudWatch",
            default="WARNING",
            allowed_values=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        )

        notebook_deploy = CfnParameter(
            self,
            id="NotebookDeploy",
            type="String",
            description="Deploy an Amazon SageMaker Jupyter Notebook instance",
            default="No",
            allowed_values=["Yes", "No"],
        )

        notebook_volume_size = CfnParameter(
            self,
            id="NotebookVolumeSize",
            type="Number",
            description=
            "Enter the size of the notebook instance EBS volume in GB",
            default=10,
            min_value=5,
            max_value=16384,
            constraint_description=
            "Must be an integer between 5 (GB) and 16384 (16 TB)",
        )

        notebook_instance_type = CfnParameter(
            self,
            id="NotebookInstanceType",
            type="String",
            description="Enter the type of the notebook instance",
            default="ml.t2.medium",
            allowed_values=[
                "ml.t2.medium",
                "ml.t3.medium",
                "ml.r5.large",
                "ml.c5.large",
            ],
        )

        quicksight_analysis_owner = CfnParameter(
            self,
            id="QuickSightAnalysisOwner",
            description=
            "With QuickSight Enterprise enabled, provide a QuickSight ADMIN user ARN to automatically create QuickSight analyses",
            default="",
            allowed_pattern="(^arn:.*:quicksight:.*:.*:user.*$|^$)",
        )

        # set up the metadata/ cloudformation interface
        template_options = TemplateOptions()
        template_options.add_parameter_group(
            label=
            "Improving Forecast Accuracy with Machine Learning Configuration",
            parameters=[email],
        )
        template_options.add_parameter_group(
            label="Visualization Options",
            parameters=[
                quicksight_analysis_owner,
                notebook_deploy,
                notebook_instance_type,
                notebook_volume_size,
            ],
        )
        template_options.add_parameter_group(label="Deployment Configuration",
                                             parameters=[lambda_log_level])
        template_options.add_parameter_label(email, "Email")
        template_options.add_parameter_label(lambda_log_level,
                                             "CloudWatch Log Level")
        template_options.add_parameter_label(notebook_deploy,
                                             "Deploy Jupyter Notebook")
        template_options.add_parameter_label(notebook_volume_size,
                                             "Jupyter Notebook volume size")
        template_options.add_parameter_label(notebook_instance_type,
                                             "Jupyter Notebook instance type")
        template_options.add_parameter_label(quicksight_analysis_owner,
                                             "Deploy QuickSight Dashboards")
        self.template_options.metadata = template_options.metadata

        solution_mapping = CfnMapping(
            self,
            "Solution",
            mapping={
                "Data": {
                    "ID": "SO0123",
                    "Version": "%%VERSION%%",
                    "SendAnonymousUsageData": "Yes",
                }
            },
        )

        source_mapping = CfnMapping(
            self,
            "SourceCode",
            mapping={
                "General": {
                    "S3Bucket": "%%BUCKET_NAME%%",
                    "KeyPrefix": "%%SOLUTION_NAME%%/%%VERSION%%",
                    "QuickSightSourceTemplateArn": "%%QUICKSIGHT_SOURCE%%",
                }
            },
        )

        # conditions
        create_notebook = CfnCondition(
            self,
            "CreateNotebook",
            expression=Fn.condition_equals(notebook_deploy, "Yes"),
        )
        email_provided = CfnCondition(
            self,
            "EmailProvided",
            expression=Fn.condition_not(Fn.condition_equals(email, "")),
        )
        send_anonymous_usage_data = CfnCondition(
            self,
            "SendAnonymousUsageData",
            expression=Fn.condition_equals(
                Fn.find_in_map("Solution", "Data", "SendAnonymousUsageData"),
                "Yes"),
        )
        create_analysis = CfnCondition(
            self,
            "CreateAnalysis",
            expression=Fn.condition_not(
                Fn.condition_equals(quicksight_analysis_owner, ""), ),
        )

        # Step function and state machine
        fns = LambdaFunctions(self, "Functions", log_level=lambda_log_level)

        # SNS
        notifications = Notifications(
            self,
            "NotificationConfiguration",
            lambda_function=fns.functions["SNS"],
            email=email,
            email_provided=email_provided,
        )

        # Custom Resources
        unique_name = CfnResource(
            self,
            "UniqueName",
            type="Custom::UniqueName",
            properties={
                "ServiceToken":
                fns.functions["CfnResourceUniqueName"].function_arn
            },
        )
        unique_name.override_logical_id("UniqueName")

        data_bucket_name_resource = CfnResource(
            self,
            "DataBucketName",
            type="Custom::BucketName",
            properties={
                "ServiceToken":
                fns.functions["CfnResourceBucketName"].function_arn,
                "BucketPurpose": "data-bucket",
                "StackName": Aws.STACK_NAME,
                "Id": unique_name.get_att("Id"),
            },
        )
        data_bucket_name_resource.override_logical_id("DataBucketName")

        # Buckets
        access_logs_bucket = self.secure_bucket(
            "AccessLogsBucket",
            suppressions=[
                CfnNagSuppression(
                    "W35",
                    "This bucket is used as the logging destination for forecast datasets and exports",
                )
            ],
            access_control=BucketAccessControl.LOG_DELIVERY_WRITE,
        )

        athena_bucket = self.secure_bucket(
            "AthenaBucket",
            server_access_logs_bucket=access_logs_bucket,
            server_access_logs_prefix="athena-bucket-access-logs/",
        )

        data_bucket = self.secure_bucket(
            "ForecastBucket",
            lifecycle_rules=[
                LifecycleRule(
                    abort_incomplete_multipart_upload_after=Duration.days(3),
                    enabled=True,
                ),
                LifecycleRule(expiration=Duration.days(1),
                              prefix="raw/",
                              enabled=True),
            ],
            bucket_name=data_bucket_name_resource.get_att("Name").to_string(),
            server_access_logs_bucket=access_logs_bucket,
            server_access_logs_prefix="forecast-bucket-access-logs/",
        )
        data_bucket.node.default_child.add_property_override(
            "NotificationConfiguration",
            {
                "LambdaConfigurations": [{
                    "Function":
                    fns.functions["S3NotificationLambda"].function_arn,
                    "Event":
                    "s3:ObjectCreated:*",
                    "Filter": {
                        "S3Key": {
                            "Rules": [
                                {
                                    "Name": "prefix",
                                    "Value": "train/"
                                },
                                {
                                    "Name": "suffix",
                                    "Value": ".csv"
                                },
                            ]
                        }
                    },
                }]
            },
        )

        # Glue and Athena
        glue = Glue(self, "GlueResources", unique_name)
        athena = Athena(self, "AthenaResources", athena_bucket=athena_bucket)

        # Configure permissions for functions
        fns.set_s3_notification_permissions(data_bucket_name_resource)
        fns.set_forecast_s3_access_permissions(
            name="DatasetImport",
            function=fns.functions["CreateDatasetImportJob"],
            data_bucket_name_resource=data_bucket_name_resource,
        )
        fns.set_forecast_s3_access_permissions(
            name="ForecastExport",
            function=fns.functions["CreateForecast"],
            data_bucket_name_resource=data_bucket_name_resource,
        )
        fns.set_forecast_etl_permissions(
            function=fns.functions["PrepareForecastExport"],
            database=glue.database,
            workgroup=athena.workgroup,
            quicksight_principal=quicksight_analysis_owner,
            quicksight_source=source_mapping,
            athena_bucket=athena_bucket,
            data_bucket_name_resource=data_bucket_name_resource,
        )
        fns.set_forecast_permissions(
            "CreateDatasetGroup",
            data_bucket_name_resource=data_bucket_name_resource)
        fns.set_forecast_permissions(
            "CreateDatasetImportJob",
            data_bucket_name_resource=data_bucket_name_resource,
        )
        fns.set_forecast_permissions(
            "CreateForecast",
            data_bucket_name_resource=data_bucket_name_resource)
        fns.set_forecast_permissions(
            "CreatePredictor",
            data_bucket_name_resource=data_bucket_name_resource)
        fns.set_forecast_permissions(
            "PrepareForecastExport",
            data_bucket_name_resource=data_bucket_name_resource)

        # notebook (conditional on 'create_notebook')
        notebook = Notebook(
            self,
            "Notebook",
            buckets=[data_bucket],
            instance_type=notebook_instance_type.value_as_string,
            instance_volume_size=notebook_volume_size.value_as_number,
            notebook_path=Path(__file__).parent.parent.parent.joinpath(
                "notebook", "samples", "notebooks"),
            notebook_destination_bucket=data_bucket,
            notebook_destination_prefix="notebooks",
        )
        Aspects.of(notebook).add(ConditionalResources(create_notebook))

        # solutions metrics (conditional on 'send_anonymous_usage_data')
        metrics = Metrics(
            self,
            "SolutionMetrics",
            metrics_function=fns.functions["CfnResourceSolutionMetrics"],
            metrics={
                "Solution":
                solution_mapping.find_in_map("Data", "ID"),
                "Version":
                solution_mapping.find_in_map("Data", "Version"),
                "Region":
                Aws.REGION,
                "NotebookDeployed":
                Fn.condition_if(create_notebook.node.id, "Yes", "No"),
                "NotebookType":
                Fn.condition_if(
                    create_notebook.node.id,
                    notebook_instance_type.value_as_string,
                    Aws.NO_VALUE,
                ),
                "QuickSightDeployed":
                Fn.condition_if(create_analysis.node.id, "Yes", "No"),
            },
        )
        Aspects.of(metrics).add(
            ConditionalResources(send_anonymous_usage_data))

        # outputs
        CfnOutput(self, "ForecastBucketName", value=data_bucket.bucket_name)
        CfnOutput(self, "AthenaBucketName", value=athena_bucket.bucket_name)
        CfnOutput(self,
                  "StepFunctionsName",
                  value=fns.state_machine.state_machine_name)
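The example wires the S3-to-Lambda notification by overriding the raw NotificationConfiguration property on the L1 bucket resource. For comparison, the same trigger is normally expressed through the L2 API; a sketch, assuming aws_cdk.aws_s3 is imported as s3 and aws_cdk.aws_s3_notifications as s3n (the solution may prefer the override to keep full control of the generated template):

        data_bucket.add_event_notification(
            s3.EventType.OBJECT_CREATED,
            s3n.LambdaDestination(fns.functions["S3NotificationLambda"]),
            s3.NotificationKeyFilter(prefix="train/", suffix=".csv"),
        )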
Example #7
    def create_jwt_secret(
        self,
        master_secret: secretsmanager.Secret,
        ica_base_url: str,
        key_name: str,
        project_ids: Union[str, List[str]],
    ) -> Tuple[secretsmanager.Secret, lambda_.Function]:
        """
        Create a JWT holding secret - that will use the master secret for JWT making - and which will have
        broad permissions to be read by all roles.

        Args:
            master_secret: the master secret to read for the API key for JWT making
            ica_base_url: the base url of ICA to be passed on to the rotators
            key_name: a unique string that we use to name this JWT secret
            project_ids: *either* a single string or a list of string - the choice of type *will* affect
                         the resulting secret output i.e a string input will end up different to a list with one string!

        Returns:
            the JWT secret
        """
        dirname = os.path.dirname(__file__)
        filename = os.path.join(dirname, "runtime/jwt_producer")

        env = {
            "MASTER_ARN": master_secret.secret_arn,
            "ICA_BASE_URL": ica_base_url,
        }

        # tailor the instructions to our single lambda, which handles either a single JWT or a
        # dictionary of JWTs
        if isinstance(project_ids, list):
            env["PROJECT_IDS"] = " ".join(project_ids)
        else:
            env["PROJECT_ID"] = project_ids

        jwt_producer = lambda_.Function(
            self,
            "JwtProduce" + key_name,
            runtime=lambda_.Runtime.PYTHON_3_8,
            code=lambda_.AssetCode(filename),
            handler="lambda_entrypoint.main",
            timeout=Duration.minutes(1),
            environment=env,
        )

        # grant the JWT producer's lambda role permission to read the master secret
        # (this is only one part of the permission decision - the secret's resource policy must allow it too)
        master_secret.grant_read(jwt_producer)

        # secret itself - no default value as it will eventually get replaced by the JWT
        jwt_secret = secretsmanager.Secret(
            self,
            "Jwt" + key_name,
            secret_name=key_name,
            description="JWT(s) providing access to ICA projects",
        )

        # the rotation function that creates JWTs
        jwt_secret.add_rotation_schedule(
            "JwtSecretRotation",
            automatically_after=Duration.days(ROTATION_DAYS),
            rotation_lambda=jwt_producer,
        )

        return jwt_secret, jwt_producer
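A hypothetical call site for create_jwt_secret; the URL, key name, and project IDs below are placeholders rather than values from the source:

        jwt_secret, jwt_producer = self.create_jwt_secret(
            master_secret=master_secret,
            ica_base_url="https://ica.example.invalid",
            key_name="IcaJwtSecret",
            project_ids=["project-a", "project-b"],
        )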