def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision load-testing configuration in SSM and Secrets Manager.

    Creates three SSM StringParameters (a concurrent-user count twice --
    flat and hierarchical -- plus a duration), a plain DB-password secret,
    and a templated user-data secret, with CloudFormation outputs echoing
    the first parameter and the first secret.
    """
    super().__init__(scope, id, **kwargs)

    # Number of concurrent users, flat parameter name.
    # NOTE(review): 'NoOfConCurrentUsers' / '/locus/...' spellings look like
    # typos for 'Concurrent' / 'locust' -- confirm against consumers before
    # renaming, since the parameter names are part of the deployed contract.
    concurrent_users_param = _ssm.StringParameter(
        self,
        "parameter1Id",
        description="Load Testing Configuration",
        parameter_name="NoOfConCurrentUsers",
        string_value="100",
        tier=_ssm.ParameterTier.STANDARD,
    )
    core.CfnOutput(
        self,
        "parameter1Output",
        description="Number of concurrent users",
        value=f"{concurrent_users_param.string_value}",
    )

    # The same user-count setting under a hierarchical path.
    _ssm.StringParameter(
        self,
        "parameter2Id",
        description="Load Testing Configuration",
        parameter_name="/locus/configuration/NoOfConCurrentUsers",
        string_value="100",
        tier=_ssm.ParameterTier.STANDARD,
    )

    # Test duration in seconds.
    _ssm.StringParameter(
        self,
        "parameter3Id",
        description="Load Testing Configuration",
        parameter_name="/locus/configuration/DurationInSec",
        string_value="300",
        tier=_ssm.ParameterTier.STANDARD,
    )

    # Customer DB password; value generated by Secrets Manager.
    db_password_secret = _secretsmanager.Secret(
        self,
        "secret1Id",
        description="Customer DB password",
        secret_name="cust_db_pass",
    )
    # NOTE(review): rendering the secret value into a stack output exposes
    # it in CloudFormation -- confirm this is intentional.
    core.CfnOutput(
        self,
        "secret1Output",
        description="secret 1",
        value=f"{db_password_secret.secret_value}",
    )

    # Templated secret: fixed username, generated 'password' key.
    _secretsmanager.Secret(
        self,
        "secret2Id",
        description="Templated secret for user data",
        secret_name="user_kon_attributes",
        generate_secret_string=_secretsmanager.SecretStringGenerator(
            secret_string_template=json.dumps({"username": "******"}),
            generate_string_key="password",
        ),
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Create load-testing SSM parameters and Secrets Manager secrets.

    Provisions a flat and a hierarchical concurrent-user parameter, a test
    duration parameter, a plain DB-password secret, a templated user-data
    secret, and two CloudFormation outputs.
    """
    super().__init__(scope, id, **kwargs)

    # Concurrent-user count (also used to choose the transaction rate).
    user_count = _ssm.StringParameter(
        self,
        "parameter1",
        description="Load Testing Configuration",
        parameter_name="NoOfConcurrentUsers",
        string_value="100",
        tier=_ssm.ParameterTier.STANDARD,
    )

    # Same setting under the hierarchical locust config path.
    _ssm.StringParameter(
        self,
        "parameter2",
        description="Load Testing Configuration",
        parameter_name="/locust/configs/NoOfConcurrentUsers",
        string_value="100",
        tier=_ssm.ParameterTier.STANDARD,
    )

    # Test duration in seconds.
    _ssm.StringParameter(
        self,
        "parameter3",
        description="Load Testing Configuration",
        parameter_name="/locust/configs/DurationInSec",
        string_value="300",
        tier=_ssm.ParameterTier.STANDARD,
    )

    # Plain secret; value generated by Secrets Manager.
    customer_db_secret = _secretsmanager.Secret(
        self,
        "secret1",
        description="Customer DB password",
        secret_name="cust_db_pass",
    )

    # Templated secret: fixed username plus a generated 'password' key.
    _secretsmanager.Secret(
        self,
        "secret2",
        description="A Templated secret for user data",
        secret_name="user_kon_attributes",
        generate_secret_string=_secretsmanager.SecretStringGenerator(
            secret_string_template=json.dumps({"username": "******"}),
            generate_string_key="password",
        ),
    )

    core.CfnOutput(
        self,
        "param1",
        description="NoOfConcurrentUser",
        value=f"{user_count.string_value}",
    )
    # NOTE(review): this exposes the secret value in the stack outputs --
    # confirm this is intentional.
    core.CfnOutput(
        self,
        "secret1Value",
        description="secret1",
        value=f"{customer_db_secret.secret_value}",
    )
def test_run_job_flow_builder():
    """RunJobFlowBuilder.build emits a waitForTaskToken Lambda-invoke task state."""
    # Expected state-machine task JSON. The CloudFormation logical IDs
    # (RunJobFlow9B18A53F etc.) are the hashes CDK synthesizes for the
    # constructs created below -- they must stay in sync with the stack.
    default_task_json = {
        'End': True,
        'Parameters': {
            'FunctionName': {
                'Ref': 'RunJobFlow9B18A53F'
            },
            'Payload': {
                'ExecutionInput.$': '$$.Execution.Input',
                'ClusterConfiguration.$': '$.ClusterConfiguration',
                'TaskToken.$': '$$.Task.Token',
                'CheckStatusLambda': {
                    'Fn::GetAtt': ['CheckClusterStatusA7C1019E', 'Arn']
                },
                'RuleName': {
                    'Ref': 'testtaskEventRule9A04A93E'
                },
                'FireAndForget': False
            }
        },
        'Type': 'Task',
        'Resource': {
            'Fn::Join': [
                '', [
                    'arn:', {
                        'Ref': 'AWS::Partition'
                    }, ':states:::lambda:invoke.waitForTaskToken'
                ]
            ]
        }
    }
    stack = core.Stack(core.App(), 'test-stack')
    # Build the task with EMR roles, a kerberos secret, and one named
    # secret configuration.
    task = emr_tasks.RunJobFlowBuilder.build(
        stack,
        'test-task',
        roles=emr_profile.EMRRoles(stack, 'test-emr-roles',
                                   role_name_prefix='test-roles'),
        kerberos_attributes_secret=secretsmanager.Secret(
            stack, 'test-kerberos-secret'),
        secret_configurations={
            'Secret': secretsmanager.Secret(stack,
                                            'test-secret-configurations-secret')
        },
    )
    print_and_assert(default_task_json, task)
def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs) -> None:
    """Provision load-testing SSM parameters, secrets, and stack outputs.

    Fix: the original body used bare string literals ("Create Secrets...",
    triple-quoted "Build Secrets..." and "Output:") as section markers.
    Those are no-op expression statements, not comments; they are real
    comments now (no behavior change).
    """
    super().__init__(scope, construct_id, **kwargs)

    # --- SSM parameters -------------------------------------------------
    param1 = _ssm.StringParameter(
        self, "Parameter1",
        description="Load testing configuration",
        parameter_name="No_Of_Concurrent_Users",
        string_value="100",
        tier=_ssm.ParameterTier.STANDARD)
    param2 = _ssm.StringParameter(
        self, "Parameter2",
        description="Load testing configuration",
        parameter_name="/locust/configs/No_Of_Concurrent_Users",
        string_value="100",
        tier=_ssm.ParameterTier.STANDARD)
    param3 = _ssm.StringParameter(
        self, "Parameter3",
        description="Load testing configuration",
        parameter_name="/locust/configs/DurationInSec",
        string_value="300",
        tier=_ssm.ParameterTier.STANDARD)

    # --- Secrets in Secrets Manager ------------------------------------
    secret1 = _secretsmanager.Secret(
        self, "Secret1",
        description="Customer DB password",
        secret_name="Custom_DB_Password")
    # Templated secret: fixed username, generated 'password' key.
    templated_secret = _secretsmanager.Secret(
        self, "Secret2",
        description="A Templated secret for user data",
        secret_name="User_Kon_Attributes",
        generate_secret_string=_secretsmanager.SecretStringGenerator(
            secret_string_template=json.dumps({"username": "******"}),
            generate_string_key="password"))

    # --- Outputs --------------------------------------------------------
    output_1 = cdk.CfnOutput(
        self, "Parameter1Value",
        description="No_Of_Concurrent_Users",
        value=f"{param1.string_value}")
    # NOTE(review): this exposes the secret value in the CloudFormation
    # outputs -- confirm this is intentional.
    output_2 = cdk.CfnOutput(
        self, "Secret1Value",
        value=f"{secret1.secret_value}")
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """Create a test-config SSM parameter, two secrets, and echo outputs."""
    super().__init__(scope, construct_id, **kwargs)

    # SSM parameter: concurrent-user count for tests.
    concurrent_users = aws_ssm.StringParameter(
        self,
        "ssmparameter01",
        description="Test Config",
        parameter_name="ConcurrentUsers",
        string_value="100",
        tier=aws_ssm.ParameterTier.STANDARD,
    )

    # Plain DB password; value generated by Secrets Manager.
    db_password = aws_secrm.Secret(
        self,
        "secret01",
        description="DB password",
        secret_name="db_password",
    )

    # Templated credentials: fixed username plus generated 'password' key.
    aws_secrm.Secret(
        self,
        "secret_templet01",
        description="templatized user credentials",
        secret_name="user_db_auth",
        generate_secret_string=aws_secrm.SecretStringGenerator(
            secret_string_template=json.dumps({"username": "******"}),
            generate_string_key="password",
        ),
    )

    core.CfnOutput(
        self,
        "output_ssm01",
        description="ConcurrentUsersCount",
        value=concurrent_users.string_value,
    )
    # NOTE(review): renders the secret value into a stack output -- confirm
    # this is intentional.
    core.CfnOutput(
        self,
        "output_secret01",
        description="DB Password-secret01",
        value=f"{db_password.secret_value}",
    )
def _setup_sqlserver(self) -> None:
    """Provision a SQL Server Express RDS instance plus its Glue JDBC
    connection, a credentials secret, and stack outputs.

    Reads prerequisites (self.vpc, self.rds_subnet_group,
    self.db_security_group, self.rds_role, self.db_username,
    self.db_password / self.db_password_secret) set elsewhere on this stack.
    """
    port = 1433
    database = "test"
    schema = "dbo"
    # Publicly accessible Express-edition instance on a small burstable class,
    # with S3 import/export going through the shared RDS role.
    sqlserver = rds.DatabaseInstance(
        self,
        "aws-data-wrangler-sqlserver-instance",
        instance_identifier="sqlserver-instance-wrangler",
        engine=rds.DatabaseInstanceEngine.sql_server_ex(version=rds.SqlServerEngineVersion.VER_15),
        instance_type=ec2.InstanceType.of(ec2.InstanceClass.BURSTABLE3, ec2.InstanceSize.SMALL),
        credentials=rds.Credentials.from_password(
            username=self.db_username,
            password=self.db_password_secret,
        ),
        port=port,
        vpc=self.vpc,
        subnet_group=self.rds_subnet_group,
        security_groups=[self.db_security_group],
        publicly_accessible=True,
        s3_import_role=self.rds_role,
        s3_export_role=self.rds_role,
    )
    # JDBC connection so Glue jobs can reach the instance from a private subnet.
    glue.Connection(
        self,
        "aws-data-wrangler-sqlserver-glue-connection",
        description="Connect to SQL Server.",
        type=glue.ConnectionType.JDBC,
        connection_name="aws-data-wrangler-sqlserver",
        properties={
            "JDBC_CONNECTION_URL": f"jdbc:sqlserver://{sqlserver.instance_endpoint.hostname}:{port};databaseName={database}",  # noqa: E501
            "USERNAME": self.db_username,
            "PASSWORD": self.db_password,
        },
        subnet=self.vpc.private_subnets[0],
        security_groups=[self.db_security_group],
    )
    # Credentials secret in the standard RDS JSON shape. The generated
    # 'dummy' key is discarded -- the template already carries the password.
    secrets.Secret(
        self,
        "aws-data-wrangler-sqlserver-secret",
        secret_name="aws-data-wrangler/sqlserver",
        description="SQL Server credentials",
        generate_secret_string=secrets.SecretStringGenerator(
            generate_string_key="dummy",
            secret_string_template=json.dumps(
                {
                    "username": self.db_username,
                    "password": self.db_password,
                    "engine": "sqlserver",
                    "host": sqlserver.instance_endpoint.hostname,
                    "port": port,
                    "dbClusterIdentifier": sqlserver.instance_identifier,
                    "dbname": database,
                }
            ),
        ),
    )
    # Connection details exposed as stack outputs for test harnesses.
    CfnOutput(self, "SqlServerAddress", value=sqlserver.instance_endpoint.hostname)
    CfnOutput(self, "SqlServerPort", value=str(port))
    CfnOutput(self, "SqlServerDatabase", value=database)
    CfnOutput(self, "SqlServerSchema", value=schema)
def __init__(self, scope: core.Construct, id: str, vpc: ec2.Vpc,
             lambdasg: ec2.SecurityGroup, bastionsg: ec2.SecurityGroup,
             kmskey, **kwargs) -> None:
    """Create an encrypted Aurora MySQL cluster with generated credentials,
    open its default port to the Lambda and bastion security groups, and
    publish the endpoint and database name to SSM.

    Fixes: removed the unused ``project_name`` context lookup and the
    placeholder-free f-string around the database name (ruff F541).
    """
    super().__init__(scope, id, **kwargs)
    env_name = self.node.try_get_context('env')

    # Secret template: fixed username; a 12-char password is generated
    # under the 'rds-password' JSON key.
    creds_json_template = {'username': '******'}
    db_creds = sm.Secret(
        self,
        id="db-secret",
        secret_name=f'{env_name}-rds-secret',
        generate_secret_string=sm.SecretStringGenerator(
            include_space=False,  # no space in secret
            password_length=12,
            generate_string_key='rds-password',  # JSON key for the password
            exclude_punctuation=True,
            secret_string_template=json.dumps(creds_json_template)))

    db_name = 'pryancdkdb'
    db_mysql = rds.DatabaseCluster(
        self,
        id=f'{env_name}-mysql',
        default_database_name=db_name,
        engine=rds.DatabaseClusterEngine.aurora_mysql(
            version=rds.AuroraMysqlEngineVersion.VER_5_7_12),
        master_user=rds.Login(
            username='******',
            password=db_creds.secret_value_from_json('rds-password')),
        instance_props=rds.InstanceProps(
            vpc=vpc,
            # will pick one of the isolated subnets from the vpc
            vpc_subnets=ec2.SubnetSelection(
                subnet_type=ec2.SubnetType.ISOLATED),
            instance_type=ec2.InstanceType(
                instance_type_identifier='t3.small')),
        instances=1,
        storage_encrypted=True,
        storage_encryption_key=kmskey,
        removal_policy=core.RemovalPolicy.DESTROY)

    # Ingress rules for the cluster's default port.
    db_mysql.connections.allow_default_port_from(
        lambdasg, 'Access from Lambda Functions')
    db_mysql.connections.allow_default_port_from(
        bastionsg, "Access from bastion host")

    # SSM parameters with connection details for downstream consumers.
    ssm.StringParameter(self, id=f'{env_name}-db-host',
                        parameter_name=f"/{env_name}/db-host",
                        string_value=db_mysql.cluster_endpoint.hostname)
    ssm.StringParameter(self, id=f'{env_name}-db-name',
                        parameter_name=f"/{env_name}/db-name",
                        string_value=db_name)
def _setup_oracle(self) -> None:
    """Provision an Oracle EE RDS instance plus its Glue JDBC connection,
    a credentials secret, and stack outputs.

    Reads prerequisites (self.vpc, self.rds_subnet_group,
    self.db_security_group, self.rds_role, self.db_username,
    self.db_password / self.db_password_secret) set elsewhere on this stack.
    """
    port = 1521
    database = "ORCL"
    schema = "TEST"
    # Publicly accessible EE instance on a small burstable class, with
    # S3 import/export through the shared RDS role.
    oracle = rds.DatabaseInstance(
        self,
        "aws-data-wrangler-oracle-instance",
        instance_identifier="oracle-instance-wrangler",
        engine=rds.DatabaseInstanceEngine.oracle_ee(version=rds.OracleEngineVersion.VER_19_0_0_0_2021_04_R1),
        instance_type=ec2.InstanceType.of(ec2.InstanceClass.BURSTABLE3, ec2.InstanceSize.SMALL),
        credentials=rds.Credentials.from_password(
            username=self.db_username,
            password=self.db_password_secret,
        ),
        port=port,
        vpc=self.vpc,
        subnet_group=self.rds_subnet_group,
        security_groups=[self.db_security_group],
        publicly_accessible=True,
        s3_import_role=self.rds_role,
        s3_export_role=self.rds_role,
    )
    # JDBC connection so Glue jobs can reach the instance from a private subnet.
    glue.Connection(
        self,
        "aws-data-wrangler-oracle-glue-connection",
        description="Connect to Oracle.",
        type=glue.ConnectionType.JDBC,
        connection_name="aws-data-wrangler-oracle",
        properties={
            "JDBC_CONNECTION_URL": f"jdbc:oracle:thin://@{oracle.instance_endpoint.hostname}:{port}/{database}",  # noqa: E501
            "USERNAME": self.db_username,
            "PASSWORD": self.db_password,
        },
        subnet=self.vpc.private_subnets[0],
        security_groups=[self.db_security_group],
    )
    # Credentials secret in the standard RDS JSON shape. The generated
    # 'dummy' key is discarded -- the template already carries the password.
    secrets.Secret(
        self,
        "aws-data-wrangler-oracle-secret",
        secret_name="aws-data-wrangler/oracle",
        description="Oracle credentials",
        generate_secret_string=secrets.SecretStringGenerator(
            generate_string_key="dummy",
            secret_string_template=json.dumps(
                {
                    "username": self.db_username,
                    "password": self.db_password,
                    "engine": "oracle",
                    "host": oracle.instance_endpoint.hostname,
                    "port": port,
                    "dbClusterIdentifier": oracle.instance_identifier,
                    "dbname": database,
                }
            ),
        ),
    )
    # Connection details exposed as stack outputs for test harnesses.
    CfnOutput(self, "OracleAddress", value=oracle.instance_endpoint.hostname)
    CfnOutput(self, "OraclePort", value=str(port))
    CfnOutput(self, "OracleDatabase", value=database)
    CfnOutput(self, "OracleSchema", value=schema)
def __init__(self, scope: core.Construct, id: str, secret_name: str,
             template: str = None, key: str = None) -> None:
    """Create a Secrets Manager secret with a generated 24-char password.

    Fix: the ``exclude_characters`` literal relied on the unrecognized
    escape sequence backslash-dollar being kept literally (a
    DeprecationWarning); it is now a raw string with the identical value.
    The placeholder "[description]" docstring entries are also filled in.

    Args:
        scope (core.Construct): parent construct.
        id (str): construct id; also appended to ``secret_name`` to form
            the secret's name.
        secret_name (str): prefix for the secret's name.
        template (str, optional): JSON template for the secret string.
            Defaults to None.
        key (str, optional): JSON key under which the generated password
            is stored. Defaults to None.
    """
    super().__init__(scope, id)
    self.secret = asm.Secret(
        self,
        id,
        generate_secret_string=asm.SecretStringGenerator(
            secret_string_template=template,
            generate_string_key=key,
            password_length=24,
            exclude_characters=r'"@/\$'),
        secret_name='{}{}'.format(secret_name, id))
def test_emr_secure_launch_function(self):
    """An EMRLaunchFunction with secret configurations matches the default JSON."""
    stack = core.Stack(core.App(), 'test-stack')
    network = ec2.Vpc(stack, 'Vpc')
    on_success = sns.Topic(stack, 'SuccessTopic')
    on_failure = sns.Topic(stack, 'FailureTopic')

    profile = emr_profile.EMRProfile(
        stack, 'test-profile',
        profile_name='test-profile',
        vpc=network,
    )
    # Cluster configuration carrying one named secret.
    configuration = cluster_configuration.ClusterConfiguration(
        stack, 'test-configuration',
        configuration_name='test-configuration',
        secret_configurations={
            'SecretConfiguration': secretsmanager.Secret(stack, 'Secret')
        },
    )

    launch_fn = emr_launch_function.EMRLaunchFunction(
        stack, 'test-function',
        description='test description',
        launch_function_name='test-function',
        emr_profile=profile,
        cluster_configuration=configuration,
        cluster_name='test-cluster',
        success_topic=on_success,
        failure_topic=on_failure,
        allowed_cluster_config_overrides=configuration.override_interfaces['default'],
        wait_for_cluster_start=False,
    )
    self.print_and_assert(self.default_function, launch_fn)
def __init__(self, scope: core.Construct, id: str, config_dict, **kwargs) -> None:
    """Create a Secrets Manager secret holding Comp Reg database credentials.

    The secret string is templated from ``config_dict`` (username, password,
    port, service name, host); Secrets Manager additionally generates a value
    under the ``pass_generated_by_SM`` key.

    Fix: the ``exclude_characters`` literal contained an unrecognized
    backslash-colon escape sequence (DeprecationWarning); the backslash is
    now doubled, which keeps the runtime value byte-identical.
    """
    super().__init__(scope, id, **kwargs)

    # to_json_string keeps CDK tokens in the template resolvable at synth.
    stack = DatalakeSecretManagerStack.of(self)
    createCompRegSecret = sm.Secret(
        self,
        "createCompRegSecret",
        description="Database credentials for Comp Reg Source",
        secret_name=config_dict['comp_reg_secret_name'],
        generate_secret_string=sm.SecretStringGenerator(
            exclude_characters="{`~!@#$%^&*()_-+={[}}|\\:;\"'<,>.?/}",
            generate_string_key="pass_generated_by_SM",
            secret_string_template=stack.to_json_string({
                'db_username': config_dict['comp_reg_user_name'],
                'db_password': config_dict['comp_reg_password'],
                'db_port': config_dict['comp_reg_port'],
                'db_service_name': config_dict['comp_reg_db_name'],
                'db_host': config_dict['comp_reg_host_name']
            })))
def __init__(self, scope: core.Construct, id: str, vpc: VpcStack, **kwargs) -> None:
    """Provision a single-node, encrypted Redshift cluster in isolated subnets.

    Fixes:
    * ``redshift.Cluster`` was passed the ``VpcStack`` wrapper itself as
      ``vpc``, while every other use here goes through ``vpc.instance``
      (the actual ``ec2.Vpc``); the cluster now receives ``vpc.instance``
      as well.
    * the ``exclude_characters`` literal contained an unrecognized
      backslash-slash escape; the backslash is doubled, preserving the
      identical runtime value.
    """
    super().__init__(scope, id, **kwargs)

    # Private subnet group for the cluster.
    subnet_group = redshift.ClusterSubnetGroup(
        self,
        id="RedshiftSubnetGroup",
        description="Redshift private subnet group",
        vpc=vpc.instance,
        vpc_subnets=ec2.SubnetSelection(
            subnet_type=ec2.SubnetType.ISOLATED),
    )
    # Master credentials: fixed username template, 32-char generated password.
    self.redshift_secret = sm.Secret(
        self,
        "redshift-credentials",
        secret_name="redshift-credentials",
        description="Credentials for Amazon Redshift cluster.",
        generate_secret_string=sm.SecretStringGenerator(
            secret_string_template='{"username": "******"}',
            generate_string_key="password",
            password_length=32,
            exclude_characters='"@\\\\/',
            exclude_punctuation=True,
        ),
    )
    redshift_login = redshift.Login(
        master_username="******",
        master_password=self.redshift_secret.secret_value_from_json(
            "password"),
    )
    # Role that lets the cluster read source data from S3 (COPY).
    redshift_s3_read_access_role = iam.Role(
        self,
        "redshiftS3AccessRole",
        role_name="redshiftS3AccessRole",
        assumed_by=iam.ServicePrincipal("redshift.amazonaws.com"),
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonS3ReadOnlyAccess")
        ],
    )
    redshift_cluster = redshift.Cluster(
        self,
        id="redshift-cluster",
        master_user=redshift_login,
        vpc=vpc.instance,  # fixed: was the VpcStack wrapper, not the ec2.Vpc
        cluster_type=redshift.ClusterType.SINGLE_NODE,
        default_database_name="redshift-db",
        encrypted=True,
        node_type=redshift.NodeType.DC2_LARGE,
        port=5439,
        roles=[redshift_s3_read_access_role],
        security_groups=[vpc.redshift_sg],
        subnet_group=subnet_group,
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    self._instance = redshift_cluster
def __init__(self, scope: core.Construct, id: str, *, repo_name: str = None,
             bucket, **kwargs) -> None:
    """CodePipeline that pulls the personal site from GitHub and deploys to S3."""
    super().__init__(scope, id, **kwargs)

    # Placeholder secret for the GitHub OAuth token; the source action
    # below looks it up by this same name.
    sm.Secret(
        self,
        'github_secret_personal_site',
        description=f'{__name__} secret for github',
        secret_name='github_secret_personal_site')

    pipeline = codepipeline.Pipeline(
        self, "Pipeline", pipeline_name="personal_site_github")

    repo_parts = repo_name.split('/')  # "<owner>/<repo>"
    checkout = codepipeline.Artifact()
    fetch_source = codepipeline_actions.GitHubSourceAction(
        action_name="GitHub_Source",
        owner=repo_parts[0],
        repo=repo_parts[1],
        oauth_token=core.SecretValue.secrets_manager(
            "github_secret_personal_site"),
        output=checkout,
        branch="master")
    publish = codepipeline_actions.S3DeployAction(
        action_name="S3Deploy", bucket=bucket, input=checkout)

    # Two stages: fetch from GitHub, then copy into the bucket.
    pipeline.add_stage(stage_name="Source", actions=[fetch_source])
    pipeline.add_stage(stage_name="Deploy", actions=[publish])
def create_placeholders(self, secret_placeholders: list) -> None:
    """Create an empty KMS-encrypted secret per name and register each
    in ``self.undefined_secrets`` keyed by that name."""
    for name in secret_placeholders:
        self.undefined_secrets[name] = sm.Secret(
            self,
            name,
            encryption_key=self.kms_key,
            secret_name=name)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Create the Ameritrade root-access secret, removed with the stack."""
    super().__init__(scope, id, **kwargs)
    # DESTROY so stack teardown also deletes the secret.
    self.tda_secret = sm.Secret(
        self,
        'TDA_SECRET',
        description='Holds the Ameritrade root access document',
        removal_policy=core.RemovalPolicy.DESTROY)
def __init__(self, scope: _core.Construct, id: str, **kwargs) -> None:
    """Cassandra (Keyspaces) demo: keyspace, table, scoped IAM user,
    credentials secret, Lambda handler, and a REST API front-end."""
    super().__init__(scope, id, **kwargs)

    # Keyspace plus a country -> cities table clustered by city_name ASC.
    keyspace_resource, cassandra_keyspace_arn = self.create_cassandra_keyspace(
        'cassandra_demo')
    table_resource, cassandra_table_arn = self.create_cassandra_table(
        table_name='country_cities',
        keyspace_name='cassandra_demo',
        keyspace_ref=keyspace_resource.ref,
        partitionkey_columns=[{
            'ColumnName': 'country',
            'ColumnType': 'TEXT',
        }],
        clustering_key_columns=[{
            'Column': {
                'ColumnName': 'city_name',
                'ColumnType': 'TEXT',
            },
            'OrderBy': 'ASC'
        }],
        regular_columns=[
            {
                'ColumnName': 'population',
                'ColumnType': 'INT'
            },
        ],
    )

    # Demo user limited to read/modify on the demo table only.
    demo_user = _iam.User(self, 'CassandraDemoUser',
                          user_name='CassandraDemoUser')
    data_access_policy = _iam.Policy(self, 'CassandraFullDataAccess')
    data_access_policy.add_statements(
        _iam.PolicyStatement(
            resources=[cassandra_table_arn],
            actions=['cassandra:Select', 'cassandra:Modify']))
    data_access_policy.attach_to_user(demo_user)

    # Connection credentials; the Lambda receives only the ARN and reads
    # the value at runtime.
    credentials_secret = _secretsmanager.Secret(
        self, 'cassandra_demo_creds', secret_name='cassandra_demo_creds')

    bundle = _lambda.Code.asset('lambda/.dist/lambda.zip')
    demo_function = _lambda.Function(
        self,
        'cassandra-demo',
        function_name='cassandra-demo',
        runtime=_lambda.Runtime.PYTHON_3_6,
        memory_size=1024,
        code=bundle,
        handler='demo_handler.handler',
        tracing=_lambda.Tracing.ACTIVE,
        environment={'CASSANDRA_CREDS': credentials_secret.secret_arn},
    )
    credentials_secret.grant_read(demo_function)

    # REST facade in front of the handler.
    _apigateway.LambdaRestApi(self, 'cassandra-demo-api',
                              handler=demo_function)
def _set_db_infra(self) -> None:
    """Create shared database prerequisites: credentials, security group,
    RDS subnet group, the S3 import/export role, and stack outputs."""
    self.db_username = "******"
    # Shared DB password, generated without characters that break
    # connection strings; stored as a SecretValue plus its string form.
    self.db_password_secret = ssm.Secret(
        self,
        "db-password-secret",
        secret_name="aws-data-wrangler/db_password",
        generate_secret_string=ssm.SecretStringGenerator(
            exclude_characters="/@\"\' \\"),
    ).secret_value
    self.db_password = self.db_password_secret.to_string()

    # Security group for the test-arena databases; opens all traffic
    # from anywhere (test environment).
    self.db_security_group = ec2.SecurityGroup(
        self,
        "aws-data-wrangler-database-sg",
        vpc=self.vpc,
        description="AWS Data Wrangler Test Arena - Database security group",
    )
    self.db_security_group.add_ingress_rule(
        ec2.Peer.any_ipv4(), ec2.Port.all_traffic())

    self.rds_subnet_group = rds.SubnetGroup(
        self,
        "aws-data-wrangler-rds-subnet-group",
        description="RDS Database Subnet Group",
        vpc=self.vpc,
        vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PUBLIC),
    )

    # Role assumed by RDS for S3 import/export against this stack's bucket.
    s3_access_policy = iam.PolicyDocument(statements=[
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[
                "s3:Get*",
                "s3:List*",
                "s3:Put*",
                "s3:AbortMultipartUpload",
            ],
            resources=[
                self.bucket.bucket_arn,
                f"{self.bucket.bucket_arn}/*",
            ],
        )
    ])
    self.rds_role = iam.Role(
        self,
        "aws-data-wrangler-rds-role",
        assumed_by=iam.ServicePrincipal("rds.amazonaws.com"),
        inline_policies={"S3": s3_access_policy},
    )

    cdk.CfnOutput(self, "DatabasesUsername", value=self.db_username)
    cdk.CfnOutput(
        self,
        "DatabaseSecurityGroupId",
        value=self.db_security_group.security_group_id,
    )
def __init__(self, scope: core.Construct, id: str, vpc: ec2.Vpc,
             lambdasg: ec2.SecurityGroup, bastionsg: ec2.SecurityGroup,
             kmskey, **kwargs) -> None:
    """Create an encrypted Aurora MySQL cluster, open its default port to
    the Lambda and bastion security groups, and publish host/name to SSM."""
    super().__init__(scope, id, **kwargs)

    prj_name = self.node.try_get_context("project_name")
    env_name = self.node.try_get_context("env")

    # Credentials: fixed username template, generated 12-char password.
    secret_template = {'username': '******'}
    db_creds = sm.Secret(
        self,
        'db-secret',
        secret_name=env_name + '/rds-secret',
        generate_secret_string=sm.SecretStringGenerator(
            include_space=False,
            password_length=12,
            generate_string_key='password',
            exclude_punctuation=True,
            secret_string_template=json.dumps(secret_template)))

    cluster = rds.DatabaseCluster(
        self,
        'mysql',
        default_database_name=prj_name + env_name,
        engine=rds.DatabaseClusterEngine.AURORA_MYSQL,
        engine_version="5.7.12",
        master_user=rds.Login(
            username='******',
            password=db_creds.secret_value_from_json('password')),
        instance_props=rds.InstanceProps(
            vpc=vpc,
            vpc_subnets=ec2.SubnetSelection(
                subnet_type=ec2.SubnetType.ISOLATED),
            instance_type=ec2.InstanceType(
                instance_type_identifier="t3.small")),
        instances=1,
        parameter_group=rds.ClusterParameterGroup.from_parameter_group_name(
            self, 'pg-dev', parameter_group_name='default.aurora-mysql5.7'),
        kms_key=kmskey,
        removal_policy=core.RemovalPolicy.DESTROY)

    # Ingress for the application Lambdas and the bastion host.
    cluster.connections.allow_default_port_from(
        lambdasg, "Access from Lambda functions")
    cluster.connections.allow_default_port_from(
        bastionsg, "Allow from bastion host")

    # SSM parameters with connection details.
    ssm.StringParameter(self, 'db-host',
                        parameter_name='/' + env_name + '/db-host',
                        string_value=cluster.cluster_endpoint.hostname)
    ssm.StringParameter(self, 'db-name',
                        parameter_name='/' + env_name + '/db-name',
                        string_value=prj_name + env_name)
def __init__(self, scope: core.Construct, id: builtins.str,
             landing_zone: IVpcLandingZone) -> None:
    """Provision core FSI trader resources: private DNS zone, KMS key,
    Ameritrade secret, website bucket with alias record, and FinSpace env.

    Fix: ``'trader.fsi'.format(landing_zone.zone_name.lower())`` called
    ``format`` on a placeholder-free literal, so the argument was silently
    discarded; the no-op call is removed (runtime value unchanged). The
    commented-out CfnEnvironment block was dead code and is dropped.
    """
    super().__init__(scope, id)
    self.__landing_zone = landing_zone

    # Setup DNS...
    # NOTE(review): if a per-zone name like 'trader.{}.fsi' was intended,
    # reintroducing the format placeholder would be a behavior change --
    # confirm before doing so.
    self.trader_dns_zone = r53.PrivateHostedZone(
        self,
        'Trader',
        zone_name='trader.fsi',
        vpc=landing_zone.vpc,
        comment='HomeNet Financial Services Domain')

    # Create a key and delegate access to IAM...
    self.key = kms.Key(
        self,
        'Key',
        alias='homenet/fsi',
        enable_key_rotation=True,
        policy=iam.PolicyDocument(statements=[
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                principals=[
                    iam.AccountPrincipal(core.Stack.of(self).account)
                ],
                actions=['kms:*'],
                resources=['*'])
        ]))

    # Create central resources...
    # NOTE(review): `self.landing_zone` (unmangled) must be a property
    # defined elsewhere on this class -- only `__landing_zone` is set here.
    self.tda_secret = sm.Secret(
        self,
        'AmeritradeSecrets',
        removal_policy=core.RemovalPolicy.DESTROY,
        secret_name='HomeNet-{}-Ameritrade-Secrets'.format(
            self.landing_zone.zone_name))

    self.bucket = s3.Bucket(
        self,
        'Bucket',
        bucket_name='homenet-{}.{}.trader.fsi'.format(
            self.landing_zone.zone_name,
            core.Stack.of(self).region).lower(),
        versioned=True)

    r53.ARecord(
        self,
        'BucketAlias',
        zone=self.trader_dns_zone,
        record_name=self.bucket.bucket_domain_name,
        target=r53.RecordTarget.from_alias(
            dns_targets.BucketWebsiteTarget(self.bucket)))

    self.finspace = FinSpaceEnvironment()
    self.key.grant_admin(iam.ServicePrincipal(service='finspace'))
def _setup_mysql_serverless(self) -> None:
    """Provision an Aurora MySQL Serverless cluster (Data API enabled) plus
    a credentials secret and stack outputs.

    Reads prerequisites (self.vpc, self.rds_subnet_group,
    self.db_security_group, self.db_username,
    self.db_password / self.db_password_secret) set elsewhere on this stack.
    """
    port = 3306
    database = "test"
    schema = "test"
    # Serverless cluster pinned to 1 ACU, auto-pausing after 5 idle minutes;
    # DESTROY so test teardown removes it.
    aurora_mysql = rds.ServerlessCluster(
        self,
        "aws-data-wrangler-aurora-cluster-mysql-serverless",
        removal_policy=RemovalPolicy.DESTROY,
        engine=rds.DatabaseClusterEngine.aurora_mysql(
            version=rds.AuroraMysqlEngineVersion.VER_5_7_12,
        ),
        cluster_identifier="mysql-serverless-cluster-wrangler",
        default_database_name=database,
        credentials=rds.Credentials.from_password(
            username=self.db_username,
            password=self.db_password_secret,
        ),
        scaling=rds.ServerlessScalingOptions(
            auto_pause=Duration.minutes(5),
            min_capacity=rds.AuroraCapacityUnit.ACU_1,
            max_capacity=rds.AuroraCapacityUnit.ACU_1,
        ),
        backup_retention=Duration.days(1),
        vpc=self.vpc,
        vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_WITH_NAT),
        subnet_group=self.rds_subnet_group,
        security_groups=[self.db_security_group],
        enable_data_api=True,
    )
    # Credentials secret in the standard RDS JSON shape. The generated
    # 'dummy' key is discarded -- the template already carries the password.
    secret = secrets.Secret(
        self,
        "aws-data-wrangler-mysql-serverless-secret",
        secret_name="aws-data-wrangler/mysql-serverless",
        description="MySQL serverless credentials",
        generate_secret_string=secrets.SecretStringGenerator(
            generate_string_key="dummy",
            secret_string_template=json.dumps(
                {
                    "username": self.db_username,
                    "password": self.db_password,
                    "engine": "mysql",
                    "host": aurora_mysql.cluster_endpoint.hostname,
                    "port": port,
                    "dbClusterIdentifier": aurora_mysql.cluster_identifier,
                    "dbname": database,
                }
            ),
        ),
    )
    # Connection details exposed as stack outputs for test harnesses.
    CfnOutput(self, "MysqlServerlessSecretArn", value=secret.secret_arn)
    CfnOutput(self, "MysqlServerlessClusterArn", value=aurora_mysql.cluster_arn)
    CfnOutput(self, "MysqlServerlessAddress", value=aurora_mysql.cluster_endpoint.hostname)
    CfnOutput(self, "MysqlServerlessPort", value=str(port))
    CfnOutput(self, "MysqlServerlessDatabase", value=database)
    CfnOutput(self, "MysqlServerlessSchema", value=schema)
def __init__(
    self,
    scope: core.Construct,
    id: str,
    bucket_name: str,
    postgres_host: str,
    redis_host: str,
    db_secret: secrets.ISecret,
    full_domain_name: str,
    **kwargs,
) -> None:
    """Assemble the backend service's environment variables, both plain
    (``regular_variables``) and Secrets-Manager-backed (``secret_variables``)."""
    super().__init__(
        scope,
        id,
        **kwargs,
    )

    # Generated Django SECRET_KEY: alphanumeric, no spaces.
    self.django_secret_key = secrets.Secret(
        self,
        "DjangoSecretKey",
        generate_secret_string=secrets.SecretStringGenerator(
            exclude_punctuation=True,
            include_space=False,
        ),
    )

    postgres_password = db_secret.secret_value_from_json("password").to_string()
    self.regular_variables = {
        "DJANGO_SETTINGS_MODULE": "backend.settings.production",
        "DEBUG": "",
        "FULL_DOMAIN_NAME": full_domain_name,
        "FULL_APP_NAME": scope.full_app_name,
        "CELERY_METRICS_TOKEN": "my-secret-token",
        "AWS_STORAGE_BUCKET_NAME": bucket_name,
        "POSTGRES_SERVICE_HOST": postgres_host,
        "POSTGRES_PASSWORD": postgres_password,
        # NOTE(review): SECRET_KEY comes from the process environment with a
        # hard-coded fallback rather than from django_secret_key -- confirm
        # this is intentional.
        "SECRET_KEY": os.environ.get("SECRET_KEY", "mysecretkey123"),
        "REDIS_SERVICE_HOST": redis_host,
    }
    self.secret_variables = {
        "DJANGO_SECRET_KEY":
        ecs.Secret.from_secrets_manager(self.django_secret_key),
    }
def _set_opensearch_infra(self) -> None:
    """Create the OpenSearch admin username and a generated password secret."""
    self.username = "******"
    # Password generated by Secrets Manager; characters that break
    # connection strings are excluded.
    password_generator = secrets.SecretStringGenerator(
        exclude_characters="/@\"\' \\")
    self.password_secret = secrets.Secret(
        self,
        "opensearch-password-secret",
        secret_name="aws-data-wrangler/opensearch_password",
        generate_secret_string=password_generator,
    ).secret_value
    self.password = self.password_secret.to_string()
def _create_secret(self):
    """Create the RDS master-password secret (20 chars, no punctuation)."""
    # Both the construct id and the secret name share the stack/component prefix.
    prefix = f"{self.stack_name}-{self.component_id}"
    self.db_password_secret = sm.Secret(
        scope=self,
        id=f"{prefix}-db_password_secret",
        secret_name=f"{prefix}-dbPassword",
        generate_secret_string=sm.SecretStringGenerator(
            password_length=20, exclude_punctuation=True),
    )
def console_password(self, secret_name: str, template: str = None, key: str = None):
    """Create a secret with a generated 24-char password and return it.

    Fixes:
    * the original passed the bare name ``id`` as the construct id, which
      inside a method resolves to the *builtin* ``id`` function, not a
      string; the construct's own node id is used instead (mirroring the
      sibling constructor that takes ``id`` as a parameter).
    * the ``exclude_characters`` literal relied on the invalid escape
      backslash-dollar; it is now a raw string with the identical value.

    Args:
        secret_name: prefix for the secret's name (the construct id is
            appended).
        template: optional JSON template for the secret string.
        key: optional JSON key under which the generated password is stored.

    Returns:
        core.SecretValue wrapping the generated secret string.
    """
    construct_id = self.node.id
    self.secret = asm.Secret(
        self,
        construct_id,
        generate_secret_string=asm.SecretStringGenerator(
            secret_string_template=template,
            generate_string_key=key,
            password_length=24,
            exclude_characters=r'"@/\$'),
        secret_name='{}{}'.format(secret_name, construct_id))
    return core.SecretValue(self.secret.secret_value.to_string())
def __init__(
    self,
    scope: core.Construct,
    construct_id: str,
    iam_construct: IAMConstruct,
    **kwargs
) -> None:
    """Create the CircleCI/GitHub API-key secrets and grant the rotator
    lambda role read access to exactly these secrets."""
    super().__init__(scope, construct_id, **kwargs)

    # CircleCI key for the AWS iOS SDK project.
    self.circleci_aws_ios_sdk_api_key = aws_secretsmanager.Secret(
        self,
        "circleCI_AWS_iOS_SDK_API_key",
        description="CircleCI API key used by credential rotator lambda for AWS iOS SDK",
        secret_name=constants.CIRCLECI_AWS_IOS_SDK_API_KEY,
    )
    # CircleCI key for the SPM variant of the SDK.
    self.circleci_aws_ios_sdk_spm_api_key = aws_secretsmanager.Secret(
        self,
        "circleCI_AWS_iOS_SDK_SPM_API_key",
        description="CircleCI API key used by credential rotator lambda for AWS iOS SDK SPM",
        secret_name=constants.CIRCLECI_AWS_IOS_SDK_SPM_API_KEY,
    )
    # GitHub token used for release automation on aws-sdk-ios-spm.
    self.github_release_api_key = aws_secretsmanager.Secret(
        self,
        "github_release_api_key",
        description="""GitHub username and API token for CircleCI to access the aws-sdk-ios-spm repo to post PRs, merge from release to main """,
        secret_name=constants.GITHUB_SPM_RELEASE_API_TOKEN,
    )

    # GetSecretValue scoped to the three secrets above.
    readable_secret_arns = [
        self.circleci_aws_ios_sdk_api_key.secret_full_arn,
        self.circleci_aws_ios_sdk_spm_api_key.secret_full_arn,
        self.github_release_api_key.secret_full_arn,
    ]
    iam_construct.add_policy_to_lambda_role(
        aws_iam.PolicyStatement(
            effect=aws_iam.Effect.ALLOW,
            actions=["secretsmanager:GetSecretValue"],
            resources=readable_secret_arns,
        ))
def __init__(self, scope: core.Construct, id: str, landing_zone: ILandingZone,
             **kwargs) -> None:
    """Create placeholder connection secrets for each known camera endpoint,
    keyed by endpoint URL in the private secrets map."""
    super().__init__(scope, id, **kwargs)
    self.__secrets = {}
    camera_endpoints = (
        ('moonbase', 'moonbase.cameras.real.world'),
        ('starbase', 'starbase.cameras.real.world'),
    )
    for name, url in camera_endpoints:
        self.__secrets[url] = sm.Secret(
            self,
            name,
            description=name + ' connection identity',
            removal_policy=core.RemovalPolicy.DESTROY,
            secret_name='homenet-{}-{}-connection-secret'.format(
                landing_zone.zone_name, name).lower())
def __init__(self, scope: core.Construct, id: str, vpc: VpcStack, **kwargs) -> None:
    """Provision the Airflow metadata DB: a credentials secret plus a small
    PostgreSQL RDS instance in the VPC's isolated subnets."""
    super().__init__(scope, id, **kwargs)
    self.db_name = "airflow"

    # Master credentials: fixed username template, 16-char generated password.
    self.rds_secret = sm.Secret(
        self,
        "airflow-rds",
        secret_name="airflow-rds-credentials",
        description="Credentials for RDS PostgreSQL.",
        generate_secret_string=sm.SecretStringGenerator(
            secret_string_template='{"username": "******"}',
            generate_string_key="password",
            password_length=16,
            exclude_characters='"@\\\/',
            exclude_punctuation=True,
        ),
    )

    db_instance = rds.DatabaseInstance(
        self,
        "RDS",
        credentials=rds.Credentials.from_secret(self.rds_secret),
        instance_identifier="airflow-cdk",
        database_name=self.db_name,
        engine=rds.DatabaseInstanceEngine.postgres(
            version=rds.PostgresEngineVersion.VER_9_6_18),
        vpc=vpc.instance,
        vpc_placement=ec2.SubnetSelection(
            subnet_type=ec2.SubnetType.ISOLATED),
        port=5432,
        instance_type=ec2.InstanceType.of(
            ec2.InstanceClass.BURSTABLE2,
            ec2.InstanceSize.MICRO,
        ),
        allocated_storage=20,
        security_groups=[vpc.postgres_sg],
        removal_policy=core.RemovalPolicy.DESTROY,
        parameter_group=rds.ParameterGroup.from_parameter_group_name(
            self,
            "para-group-postgres",
            parameter_group_name="default.postgres9.6"),
        deletion_protection=False,
    )
    self._instance = db_instance
def __init__(self, scope: core.Construct, id: str, password_object: object,
             secret_name: str, **kwargs) -> None:
    """Create a secret from a JSON template with a generated 'Password' key,
    and expose the resolved password as ``self.clear_text_secret``."""
    super().__init__(scope, id, **kwargs)
    self.password_object = password_object

    generator = secretsmanager.SecretStringGenerator(
        secret_string_template=json.dumps(password_object),
        generate_string_key='Password',
        exclude_punctuation=True,
    )
    self.Secret = secretsmanager.Secret(
        self,
        id=id,
        generate_secret_string=generator,
        secret_name=secret_name)
    # NOTE(review): resolving the generated password to a string here can
    # embed it in the synthesized template -- confirm this is intentional.
    self.clear_text_secret = self.Secret.secret_value_from_json(
        'Password').to_string()
def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs) -> None:
    """Wire up a secret, a Python Lambda that reads it, and the read grant."""
    super().__init__(scope, construct_id, **kwargs)

    # Secret the Lambda fetches at runtime; only its ARN is injected.
    big_secret = secretsmanager.Secret(
        self, "DocBuilderSecret", secret_name="docbuilder/bigsecret")

    # Lambda packaged from ./resources with my_lambda.py as the entry point.
    builder_fn = lambda_python.PythonFunction(
        self,
        "DocBuilderLambda",
        runtime=lambda_.Runtime.PYTHON_3_8,
        entry='./resources',
        index='my_lambda.py',
        environment={"SECRET_ID": big_secret.secret_arn})

    # Let the function's execution role read the secret value.
    big_secret.grant_read(builder_fn.role)
def create_master_secret(self) -> secretsmanager.Secret:
    """Create the master ICA API-key secret.

    The secret starts with a Secrets-Manager-generated random value; the
    first operational step is to replace that with an API key from Illumina
    ICA. It is consumed only by the key-rotation lambdas of other secrets,
    never used directly.

    Returns:
        the master secret
    """
    return secretsmanager.Secret(
        self,
        "MasterApiKeySecret",
        description=
        "Master ICA API key - not for direct use - use corresponding JWT secrets",
    )