def get_file_system(scope: Construct) -> FileSystem:
    """Import the shared EFS file system exported by the volume stack.

    Resolves the exporting stack's name from the volume config, then
    imports the security-group and file-system IDs from that stack's
    CloudFormation exports.
    """
    exports_prefix = get_volume_config().stack_name

    nfs_sg = SecurityGroup.from_security_group_id(
        scope,
        'nfs_security_group',
        security_group_id=Fn.import_value(exports_prefix + 'SecurityGroupId'),
    )
    return FileSystem.from_file_system_attributes(
        scope,
        'filesystem',
        file_system_id=Fn.import_value(exports_prefix + 'FileSystemId'),
        security_group=nfs_sg,
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Create two S3 buckets, an SSM parameter built from a cross-stack
    import, and CloudFormation exports for both bucket names."""
    super().__init__(scope, id, **kwargs)

    env = "dev"
    project = "testproject1"
    service = "api"
    component = "buckets"

    # Stack-wide tags for cost/ownership tracking.
    Tag.add(self, "Service", service)
    Tag.add(self, "Component", component)

    bucket_a = aws_s3.Bucket(
        self,
        "BucketA",
        bucket_name=generate_resource_name(project, env, service, component, "bucketa"),
        removal_policy=RemovalPolicy.DESTROY,
    )
    bucket_b = aws_s3.Bucket(
        self,
        "BucketB",
        bucket_name=generate_resource_name(project, env, service, component, "bucketb"),
        removal_policy=RemovalPolicy.DESTROY,
    )

    # Pull a value exported by the "etl" service's stack and embed it
    # in a string resolved at deploy time.
    imported = Fn.import_value(
        generate_resource_name(project, env, "etl", component, "bucketb"))
    value = Fn.sub("test: ${value_to_import}", {"value_to_import": imported})

    aws_ssm.StringParameter(
        self,
        "SSMParam",
        parameter_name=generate_resource_name(project, env, service, component, "ssmparam"),
        string_value=value,
    )

    # Export both bucket names for consumption by other stacks.
    core.CfnOutput(
        self,
        id="OutputBucketA",
        value=bucket_a.bucket_name,
        export_name=generate_resource_name(project, env, service, component, "bucketa"),
    )
    core.CfnOutput(
        self,
        id="OutputBucketB",
        value=bucket_b.bucket_name,
        export_name=generate_resource_name(project, env, service, component, "bucketb"),
    )
def get_cluster(scope: Construct, vpc: Vpc) -> Cluster:
    """Look up the shared ECS cluster exported by the cluster stack.

    Args:
        scope: Construct the imported cluster is attached to.
        vpc: The VPC the cluster runs in (imported separately).
    """
    exports_prefix = get_cluster_config().stack_name

    return Cluster.from_cluster_attributes(
        scope,
        'cluster',
        cluster_name=Fn.import_value(exports_prefix + 'ClusterName'),
        vpc=vpc,
        # Fargate-only cluster: no EC2 capacity, no attached security groups.
        has_ec2_capacity=False,
        security_groups=[],
    )
def __init__(self, scope: App, id: str, envs: EnvSettings, components: ComponentsStack):
    """Wire the worker lambdas against resources exported by the API and
    image-resize stacks (buckets, auth-token secret, SQS queues)."""
    super().__init__(scope, id)

    # Backend URL is derived from the domain name stored in SSM.
    domain_param = StringParameter.from_string_parameter_name(
        self, "DomainNameParameter",
        string_parameter_name="/schema-cms-app/DOMAIN_NAME")
    self.backend_domain_name = domain_param.string_value
    self.backend_url = f"https://{self.backend_domain_name}/api/v1/"

    self.job_processing_queues = components.data_processing_queues

    # Buckets and the lambda auth-token secret are imported via
    # cross-stack CloudFormation exports.
    app_bucket_arn = Fn.import_value(
        ApiStack.get_app_bucket_arn_output_export_name(envs))
    self.app_bucket = Bucket.from_bucket_arn(self, id="App", bucket_arn=app_bucket_arn)

    resize_bucket_arn = Fn.import_value(
        ImageResizeStack.get_image_resize_bucket_arn_output_export_name(envs))
    self.resize_lambda_image_bucket = Bucket.from_bucket_arn(
        self, id="Images", bucket_arn=resize_bucket_arn)

    auth_token_arn = Fn.import_value(
        ApiStack.get_lambda_auth_token_arn_output_export_name(envs))
    self.lambda_auth_token = Secret.from_secret_arn(
        self, id="lambda-auth-token", secret_arn=auth_token_arn)

    # One lambda per (memory size, queue) pair, paired positionally.
    self.functions = []
    for memory_size, queue in zip(envs.lambdas_sizes, self.job_processing_queues):
        self.functions.append(self._create_lambda_fn(envs, memory_size, queue))
def get_vpc(scope: Construct) -> Vpc:
    """Import the shared VPC from the cluster stack's CloudFormation exports.

    Reconstructs the VPC (two AZs, public + isolated subnets) from the
    individual exported attribute values.
    """
    exports_prefix = get_cluster_config().stack_name

    def imported(suffix: str):
        # All attributes are exported under the cluster stack's name prefix.
        return Fn.import_value(exports_prefix + suffix)

    return Vpc.from_vpc_attributes(
        scope,
        'vpc',
        vpc_id=imported('VpcId'),
        vpc_cidr_block=imported('VpcCidrBlock'),
        availability_zones=[
            imported('AvailabilityZone0'),
            imported('AvailabilityZone1'),
        ],
        public_subnet_ids=[
            imported('PublicSubnetId0'),
            imported('PublicSubnetId1'),
        ],
        isolated_subnet_ids=[
            imported('IsolatedSubnet0'),
            imported('IsolatedSubnet1'),
        ],
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Create an SSM parameter whose value mixes a cross-stack import
    with a CloudFormation template parameter (the DWH host)."""
    super().__init__(scope, id, **kwargs)

    env = "dev"
    project = "testproject1"
    service = "etl"
    component = "workflow"

    # Stack-wide tags for cost/ownership tracking.
    Tag.add(self, "Service", service)
    Tag.add(self, "Component", component)

    # Template parameter: the DWH host, overridable at deploy time.
    param_dwh = CfnParameter(
        self,
        "ParamDWH",
        type="String",
        description="The domain of the DWH to connect to. | team=data,service=dwh",
        default="fakedwh.host",
    )

    # Substitute both the imported bucket export and the parameter Ref
    # into the value string at deploy time.
    substitutions = {
        "value_to_import": Fn.import_value(
            generate_resource_name(project, env, service, "buckets", "bucketb")),
        "param_dwh": Fn.ref(param_dwh.logical_id),
    }
    value = Fn.sub("import: ${value_to_import}, param: ${param_dwh}", substitutions)

    aws_ssm.StringParameter(
        self,
        "SSMParam",
        parameter_name=generate_resource_name(project, env, service, component, "ssmparam"),
        string_value=value,
    )
def __init__(
    self,
    scope: App,
    id: str,
    envs: EnvSettings,
    components: ComponentsStack,
    base_resources: BaseResources,
):
    """Build the API stack: app/pages buckets, secrets, and the Fargate
    service (nginx front container + Django backend container) with its
    IAM grants and task-role policies.

    Args:
        scope: Parent CDK app.
        id: Stack identifier.
        envs: Environment settings (project name, DB name, lambda sizes, ...).
        components: Stack providing the data-processing SQS queues.
        base_resources: Stack providing the VPC and database.
    """
    super().__init__(scope, id)

    # DB credentials secret ARN exported by the base-resources stack.
    self.db_secret_arn = Fn.import_value(
        BaseResources.get_database_secret_arn_output_export_name(envs))
    self.job_processing_queues = components.data_processing_queues
    self.vpc = base_resources.vpc
    self.db = base_resources.db

    # Versioned bucket for app storage; its ARN is exported for other stacks.
    self.app_bucket = Bucket(self, "App", versioned=True)
    if self.app_bucket.bucket_arn:
        CfnOutput(
            self,
            id="AppBucketOutput",
            export_name=self.get_app_bucket_arn_output_export_name(envs),
            value=self.app_bucket.bucket_arn,
        )

    # Publicly readable bucket for rendered pages.
    self.pages_bucket = Bucket(self, "Pages", public_read_access=True)

    # Domain name and certificate ARN are read from SSM parameters.
    self.domain_name = StringParameter.from_string_parameter_name(
        self,
        "DomainNameParameter",
        string_parameter_name="/schema-cms-app/DOMAIN_NAME").string_value
    self.certificate_arn = StringParameter.from_string_parameter_name(
        self,
        "CertificateArnParameter",
        string_parameter_name="/schema-cms-app/CERTIFICATE_ARN"
    ).string_value

    # Secrets Manager entries for Django and the lambda auth token;
    # the token's ARN is exported for the worker stacks.
    django_secret = Secret(self, "DjangoSecretKey", secret_name="SCHEMA_CMS_DJANGO_SECRET_KEY")
    lambda_auth_token_secret = Secret(
        self, "LambdaAuthToken", secret_name="SCHEMA_CMS_LAMBDA_AUTH_TOKEN")
    if lambda_auth_token_secret.secret_arn:
        CfnOutput(
            self,
            id="lambdaAuthTokenArnOutput",
            export_name=self.get_lambda_auth_token_arn_output_export_name(
                envs),
            value=lambda_auth_token_secret.secret_arn,
        )

    # Wrap both as ECS secrets so they can be injected into containers.
    self.django_secret_key = EcsSecret.from_secrets_manager(django_secret)
    self.lambda_auth_token = EcsSecret.from_secrets_manager(
        lambda_auth_token_secret)

    # Image tag comes from CDK context; "undefined" means use the default tag.
    tag_from_context = self.node.try_get_context("app_image_tag")
    tag = tag_from_context if tag_from_context != "undefined" else None

    api_image = ContainerImage.from_ecr_repository(
        repository=Repository.from_repository_name(
            self,
            id="BackendRepository",
            repository_name=BaseECR.get_backend_repository_name(envs)),
        tag=tag,
    )
    nginx_image = ContainerImage.from_ecr_repository(
        repository=Repository.from_repository_name(
            self,
            id="NginxRepository",
            repository_name=BaseECR.get_nginx_repository_name(envs)),
        tag=tag,
    )

    # Load-balanced Fargate service fronted by nginx on port 80; TLS via
    # the imported certificate, DNS via a private hosted zone in the VPC.
    self.api = ApplicationLoadBalancedFargateService(
        self,
        "ApiService",
        service_name=f"{envs.project_name}-api-service",
        cluster=Cluster.from_cluster_attributes(
            self,
            id="WorkersCluster",
            cluster_name="schema-ecs-cluster",
            vpc=self.vpc,
            security_groups=[],
        ),
        task_image_options=ApplicationLoadBalancedTaskImageOptions(
            image=nginx_image,
            container_name="nginx",
            container_port=80,
            enable_logging=True,
        ),
        desired_count=1,
        cpu=512,
        memory_limit_mib=1024,
        certificate=Certificate.from_certificate_arn(
            self, "Cert", certificate_arn=self.certificate_arn),
        domain_name=self.domain_name,
        domain_zone=PrivateHostedZone(
            self,
            "zone",
            vpc=self.vpc,
            zone_name=self.domain_name,
        ),
    )

    # Backend container in the same task definition; runs via chamber so
    # SSM-stored config is injected as environment variables.
    self.api.task_definition.add_container(
        "backend",
        image=api_image,
        command=[
            "sh", "-c",
            "/bin/chamber exec $CHAMBER_SERVICE_NAME -- ./scripts/run.sh"
        ],
        logging=AwsLogDriver(stream_prefix="backend-container"),
        environment={
            "POSTGRES_DB": envs.data_base_name,
            "AWS_STORAGE_BUCKET_NAME": self.app_bucket.bucket_name,
            "AWS_STORAGE_PAGES_BUCKET_NAME": self.pages_bucket.bucket_name,
            "SQS_WORKER_QUEUE_URL": self.job_processing_queues[0].queue_url,
            "SQS_WORKER_EXT_QUEUE_URL": self.job_processing_queues[1].queue_url,
            "SQS_WORKER_MAX_QUEUE_URL": self.job_processing_queues[2].queue_url,
            "CHAMBER_SERVICE_NAME": "schema-cms-app",
            "CHAMBER_KMS_KEY_ALIAS": envs.project_name,
        },
        secrets={
            "DB_CONNECTION": EcsSecret.from_secrets_manager(
                Secret.from_secret_arn(self, id="DbSecret", secret_arn=self.db_secret_arn)),
            "DJANGO_SECRET_KEY": self.django_secret_key,
            "LAMBDA_AUTH_TOKEN": self.lambda_auth_token,
        },
        cpu=512,
        memory_limit_mib=1024,
    )

    # Grant the task role access to the secrets, buckets and queues it uses.
    self.django_secret_key.grant_read(
        self.api.service.task_definition.task_role)
    self.app_bucket.grant_read_write(
        self.api.service.task_definition.task_role)
    self.pages_bucket.grant_read_write(
        self.api.service.task_definition.task_role)
    for queue in self.job_processing_queues:
        queue.grant_send_messages(
            self.api.service.task_definition.task_role)

    # Allow the service to reach the database (Postgres port).
    self.api.service.connections.allow_to(self.db.connections, Port.tcp(5432))

    # SES send permissions for outbound email.
    self.api.task_definition.add_to_task_role_policy(
        PolicyStatement(
            actions=["ses:SendRawEmail", "ses:SendBulkTemplatedEmail"],
            resources=["*"],
        ))
    # Decrypt/describe on the project KMS key (imported by export name).
    self.api.task_definition.add_to_task_role_policy(
        PolicyStatement(
            actions=[
                "kms:Get*", "kms:Describe*", "kms:List*", "kms:Decrypt"
            ],
            resources=[
                Fn.import_value(
                    BaseKMS.get_kms_arn_output_export_name(envs))
            ],
        ))
    # SSM access: describe any parameter, read only under /schema-cms-app/.
    self.api.task_definition.add_to_task_role_policy(
        PolicyStatement(actions=["ssm:DescribeParameters"], resources=["*"]))
    self.api.task_definition.add_to_task_role_policy(
        PolicyStatement(
            actions=["ssm:GetParameters*"],
            resources=[
                f"arn:aws:ssm:{self.region}:{self.account}:parameter/schema-cms-app/*"
            ],
        ))