def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Stand up the Windows Docker image build environment.

    Registers an SSM command document rendered from a YAML template,
    creates an S3 bucket for Docker files and build scripts, defines an
    EC2 instance role with ECR/S3 access, and launches the Windows EC2
    instance on which the SSM document will be executed.
    """
    super().__init__(scope, id, **kwargs)

    # SSM command document: render the YAML template with the
    # account/region-specific values substituted for its placeholders.
    ecr_repo = "{}.dkr.ecr.{}.amazonaws.com/{}".format(AWS_ACCOUNT, AWS_REGION, WINDOWS_X86_ECR_REPO)
    placeholders = {
        "ECR_PLACEHOLDER": ecr_repo,
        "GITHUB_OWNER_PLACEHOLDER": GITHUB_REPO_OWNER,
        "REGION_PLACEHOLDER": AWS_REGION,
        "GITHUB_SOURCE_VERSION_PLACEHOLDER": GITHUB_SOURCE_VERSION,
    }
    document_content = YmlLoader.load("./cdk/ssm/windows_docker_build_ssm_document.yaml", placeholders)
    ssm.CfnDocument(
        scope=self,
        id="{}-ssm-document".format(id),
        name=SSM_DOCUMENT_NAME,
        content=document_content,
        document_type="Command",
    )

    # S3 bucket holding the Windows Docker files and build scripts.
    s3.Bucket(
        scope=self,
        id="{}-s3".format(id),
        bucket_name=S3_BUCKET_NAME,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
    )

    # Role assumed by the build instance: ECR push/pull, S3 read/write,
    # plus the AWS-managed policy required for SSM management.
    ec2_role = iam.Role(
        scope=self,
        id="{}-role".format(id),
        assumed_by=iam.ServicePrincipal("ec2.amazonaws.com"),
        inline_policies={
            "ecr_power_user_policy": iam.PolicyDocument.from_json(
                ecr_power_user_policy_in_json([WINDOWS_X86_ECR_REPO])),
            "s3_read_write_policy": iam.PolicyDocument.from_json(
                s3_read_write_policy_in_json(S3_BUCKET_NAME)),
        },
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name("AmazonSSMManagedInstanceCore"),
        ],
    )

    # Windows EC2 instance on which the SSM document is executed.
    windows_ami = ec2.MachineImage.latest_windows(
        ec2.WindowsVersion.WINDOWS_SERVER_2019_ENGLISH_FULL_CONTAINERSLATEST)
    build_vpc = ec2.Vpc(scope=self, id="{}-vpc".format(id))
    root_volume = ec2.BlockDeviceVolume.ebs(volume_size=200, delete_on_termination=True)
    root_device = ec2.BlockDevice(device_name="/dev/sda1", volume=root_volume)
    build_instance = ec2.Instance(
        scope=self,
        id="{}-instance".format(id),
        instance_type=ec2.InstanceType(instance_type_identifier="m5d.xlarge"),
        vpc=build_vpc,
        role=ec2_role,
        block_devices=[root_device],
        vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PUBLIC),
        machine_image=windows_ami,
    )
    # Tag the instance so the SSM document can target it.
    core.Tags.of(build_instance).add(WIN_EC2_TAG_KEY, WIN_EC2_TAG_VALUE)
def __init__(
    self,
    scope: core.Construct,
    construct_id: str,
    run_document_name: str,
    _doc_desc: str,
    bash_commands_to_run: str,
    enable_log: bool,
    **kwargs
) -> None:
    """Create an SSM Run Command document wrapping the given shell commands.

    :param run_document_name: intended document name; currently unused —
        see the note on ``name`` below.
    :param _doc_desc: description embedded in the SSM document.
    :param bash_commands_to_run: default value for the document's
        ``commands`` parameter.
    :param enable_log: accepted for interface compatibility; not used by
        the visible code — TODO confirm whether logging wiring is missing.
    """
    super().__init__(scope, construct_id, **kwargs)
    # SSM Run Command Document should be JSON Syntax
    # Ref: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-content
    # Ref: https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_ssm/CfnDocument.html
    _run_cmds = {
        "schemaVersion": "2.2",
        # Both values below are annotated as str; the original wrapped
        # them in redundant f-strings (f"{x}"), removed here.
        "description": _doc_desc,
        "parameters": {
            "commands": {
                "type": "String",
                "description": "The commands to run or the path to an existing script on the instance.",
                "default": bash_commands_to_run
            }
        },
        "mainSteps": [{
            "action": "aws:runShellScript",
            "name": "runCommands",
            "inputs": {
                "timeoutSeconds": "60",
                "runCommand": ["{{ commands }}"]
            }
        }]
    }
    # Create Linux Shell Script Document
    self.ssm_linux_document = _ssm.CfnDocument(
        self,
        "ssmLinuxDocument",
        document_type="Command",
        # If we name in Cfn, Updating becomes a problem
        # name=f"{run_document_name}",
        content=_run_cmds)
def __init__(self, scope: core.Construct, id: str, landing_zone: ILandingZone, **kwargs):
    """Configure Dns Resolver.

    Creates an AWS Managed Microsoft AD inside the landing zone's VPC,
    an SSM document that performs the domain join, and an association
    that applies the document to instances tagged with the zone name.
    """
    super().__init__(scope, id, **kwargs)

    self.admin = 'admin'
    # Placeholder credential as written; presumably replaced or injected
    # elsewhere — NOTE(review): confirm the real password source.
    self.password = '******'

    # Managed Microsoft AD placed in the landing zone's Hadoop subnets.
    hadoop_subnets = landing_zone.vpc.select_subnets(subnet_group_name='Hadoop')
    self.mad = ds.CfnMicrosoftAD(
        self, 'ActiveDirectory',
        name='virtual.world',
        password=self.password,
        short_name='virtualworld',
        enable_sso=False,
        edition='Standard',
        vpc_settings=ds.CfnMicrosoftAD.VpcSettingsProperty(
            vpc_id=landing_zone.vpc.vpc_id,
            subnet_ids=hadoop_subnets.subnet_ids))

    document_name = 'JoinDomain_' + self.mad.ref

    # SSM document that joins an instance to the directory above.
    self.domain_join_document = ssm.CfnDocument(
        self, 'JoinDomainDocument',
        name=document_name,
        content={
            "schemaVersion": "1.0",
            "description": "Domain Join {}".format(self.mad.ref),
            "runtimeConfig": {
                "aws:domainJoin": {
                    "properties": {
                        "directoryId": self.mad.ref,
                        "directoryName": "virtual.world",
                        "dnsIpAddresses": [self.mad.attr_dns_ip_addresses]
                    }
                }
            }
        })

    # Apply the join document to every instance carrying the zone's
    # ``domain`` tag.
    self.association = ssm.CfnAssociation(
        self, 'JoinTagAssociation',
        association_name='joindomain_by_tags_' + self.mad.ref,
        name=document_name,
        targets=[
            ssm.CfnAssociation.TargetProperty(
                key='tag:domain',
                values=[landing_zone.zone_name])
        ])
    # The association must not be created before the document exists.
    self.association.add_depends_on(self.domain_join_document)
def __init__(self, scope: core.Construct, id: str, mad: ad.CfnMicrosoftAD, targets: List[str], **kwargs) -> None:
    """Join tagged instances to the given Managed Microsoft AD via SSM.

    :param mad: the directory the instances should join.
    :param targets: tag values matched against ``tag:domain``.
    """
    super().__init__(scope, id, **kwargs)
    # BUG FIX: the original stored the directory as ``self.__mad`` (which
    # name-mangles to ``_<Class>__mad``) but then read it back through
    # ``self.mad``, raising AttributeError at synth time. Store it under
    # the attribute name that is actually used.
    self.mad = mad
    document_name = 'Join_HomeNet_Domain_' + self.mad.ref
    # SSM document performing the domain join.
    self.domain_join_document = ssm.CfnDocument(
        self,
        'JoinDomainDocument',
        name=document_name,
        content={
            "schemaVersion": "1.0",
            "description": "Domain Join {}".format(self.mad.ref),
            "runtimeConfig": {
                "aws:domainJoin": {
                    "properties": {
                        "directoryId": self.mad.ref,
                        # NOTE(review): "virtual.world" is hard-coded here
                        # rather than derived from ``mad`` — confirm it
                        # matches the directory's actual name.
                        "directoryName": "virtual.world",
                        "dnsIpAddresses": self.mad.attr_dns_ip_addresses
                    }
                }
            }
        })
    # Associate the document with every instance whose ``domain`` tag
    # matches one of ``targets``.
    self.association = ssm.CfnAssociation(
        self,
        'JoinTagAssociation',
        association_name='joindomain_by_tags_' + self.mad.ref,
        name=document_name,
        targets=[
            ssm.CfnAssociation.TargetProperty(key='tag:domain', values=targets)
        ])
    # Document requires the directory; association requires the document.
    self.domain_join_document.add_depends_on(mad)
    self.association.add_depends_on(self.domain_join_document)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision AWS Managed Microsoft AD with supporting resources.

    Creates the directory, a Route 53 private hosted zone resolving the
    domain name to the directory's DNS servers, IAM roles for EC2/SSM
    and WorkSpaces, a Windows admin EC2 instance that is domain-joined
    through an SSM document + association, and a Lambda-backed custom
    resource that registers the directory with Amazon WorkSpaces.
    """
    super().__init__(scope, id, **kwargs)
    # Global variables to import cdk contexts from cdk.json
    _vpcID = self.node.try_get_context("VpcId")
    _sm_password = self.node.try_get_context("Secret_domain_password_arn")
    _dname = self.node.try_get_context("Domain_name")
    # NOTE(review): Subnet1/Subnet2 appear to be [subnet_id, availability_zone]
    # pairs, given the [0]/[1] indexing below — confirm against cdk.json.
    _subnet1 = self.node.try_get_context("Subnet1")
    _subnet2 = self.node.try_get_context("Subnet2")
    _sm_ec2keypair = self.node.try_get_context("Secret_keypair_arn")
    _ec2instance = self.node.try_get_context("Instance_type")
    # Import Vpc from the existing one in the AWS Account
    Vpc = _ec2.Vpc.from_lookup(self, "ImportVPC", vpc_id=_vpcID)
    Subnet1 = _ec2.Subnet.from_subnet_attributes(
        self,
        "subnetfromADManagedAD",
        subnet_id=_subnet1[0],
        availability_zone=_subnet1[1])
    # Import a Secrets Manager secret holding the domain password
    secret_adpassword = _sm.Secret.from_secret_arn(self,
                                                   "AdPasswordSecretStore",
                                                   secret_arn=_sm_password)
    # Import a Secrets Manager secret holding the EC2 key pair name
    secret_ec2keypair = _sm.Secret.from_secret_arn(
        self, "ImportEC2KeyPairSecretStore", secret_arn=_sm_ec2keypair)
    # Create an AWS Managed AD Service in STANDARD Version
    ad = _ds.CfnMicrosoftAD(
        self,
        "ManagedAD",
        name=_dname,
        # Admin password is read from the "Key" field of the secret.
        password=secret_adpassword.secret_value_from_json(
            "Key").to_string(),
        edition="Standard",
        vpc_settings={
            "vpcId": _vpcID,
            "subnetIds": [_subnet1[0], _subnet2[0]]
        })
    # Expose the directory to other constructs/stacks.
    self.directory = ad
    # Create r53 hosted Zone for DNS DomainName
    hostedzone = _r53.HostedZone(self,
                                 "HostedZoneforAD",
                                 zone_name=_dname,
                                 vpcs=[Vpc])
    # Get the DNS IPs from AWS Managed AD
    targetip = _r53.RecordTarget(values=ad.attr_dns_ip_addresses)
    # Create A Record on Route 53 pointing at the AWS Managed AD IPs so
    # that EC2 instances can later resolve the domain to join it.
    r53Arecord = _r53.ARecord(self,
                              "RecordAforAD",
                              target=targetip,
                              zone=hostedzone)
    # Policy for the EC2JoinDomain role: SSM agent traffic plus read
    # access restricted to the domain-password secret.
    ec2ssmpolicy = _iam.PolicyDocument(statements=[
        _iam.PolicyStatement(actions=[
            "ssm:DescribeAssociation", "ssm:GetDocument",
            "ssm:DescribeDocument", "ssm:GetManifest",
            "ssm:GetParameters", "ssm:ListAssociations",
            "ssm:ListInstanceAssociations", "ssm:UpdateAssociationStatus",
            "ssm:UpdateInstanceAssociationStatus",
            "ssm:UpdateInstanceInformation"
        ],
                             resources=["*"]),
        _iam.PolicyStatement(actions=[
            "ssmmessages:CreateControlChannel",
            "ssmmessages:CreateDataChannel",
            "ssmmessages:OpenControlChannel",
            "ssmmessages:OpenDataChannel"
        ],
                             resources=["*"]),
        _iam.PolicyStatement(actions=[
            "ec2messages:AcknowledgeMessage", "ec2messages:DeleteMessage",
            "ec2messages:FailMessage", "ec2messages:GetEndpoint",
            "ec2messages:GetMessages", "ec2messages:SendReply"
        ],
                             resources=["*"]),
        _iam.PolicyStatement(actions=["ec2:DescribeInstanceStatus"],
                             resources=["*"]),
        # Only the domain-password secret may be read.
        _iam.PolicyStatement(actions=["secretsmanager:GetSecretValue"],
                             resources=["{}".format(_sm_password)])
    ])
    # Create role "EC2JoinDomain" to apply on Windows EC2JoinDomain (EC2)
    ssmrole = _iam.Role(
        self,
        "SSMRoleforEC2",
        assumed_by=_iam.ServicePrincipal('ec2.amazonaws.com'),
        inline_policies={"EC2SSMPolicy": ec2ssmpolicy},
        role_name="EC2JoinDomain")
    # Create Policy for the workspaces_DefaultRole role
    wsdefaultpolicy = _iam.PolicyDocument(statements=[
        _iam.PolicyStatement(actions=[
            "ec2:CreateNetworkInterface", "ec2:DeleteNetworkInterface",
            "ec2:DescribeNetworkInterfaces"
        ],
                             resources=["*"]),
        _iam.PolicyStatement(actions=[
            "workspaces:RebootWorkspaces", "workspaces:RebuildWorkspaces",
            "workspaces:ModifyWorkspaceProperties"
        ],
                             resources=["*"])
    ])
    # Create role workspaces_DefaultRole for later WorkSpaces API usage
    wsrole = _iam.Role(
        self,
        "WorkSpacesDefaultRole",
        assumed_by=_iam.ServicePrincipal('workspaces.amazonaws.com'),
        inline_policies={"WorkSpacesDefaultPolicy": wsdefaultpolicy},
        role_name="workspaces_DefaultRole")
    # Create a security group for RDP access on Windows EC2JoinDomain (EC2)
    rdpsg = _ec2.SecurityGroup(
        self,
        "SGForRDP",
        vpc=Vpc,
        description=
        "The Secrurity Group from local environment to Windows EC2 Instance"
    )
    # NOTE(review): hard-coded single source address for RDP — confirm
    # this is the intended admin host.
    rdpsg.add_ingress_rule(peer=_ec2.Peer.ipv4("192.168.1.1/32"),
                           connection=_ec2.Port.tcp(3389))
    # Create Windows EC2JoinDomain (EC2) as AD Admin server
    adadminEC2 = _ec2.Instance(
        self,
        "WindowsEC2",
        instance_type=_ec2.InstanceType(
            instance_type_identifier=_ec2instance),
        machine_image=_ec2.MachineImage.latest_windows(
            version=_ec2.WindowsVersion.
            WINDOWS_SERVER_2016_ENGLISH_FULL_BASE),
        vpc=Vpc,
        # Key pair name is read from the "Key" field of the secret.
        key_name=secret_ec2keypair.secret_value_from_json(
            "Key").to_string(),
        role=ssmrole,
        security_group=rdpsg,
        vpc_subnets=_ec2.SubnetSelection(subnets=[Subnet1]))
    # The directory must exist before the admin instance boots.
    adadminEC2.instance.add_depends_on(ad)
    # Create a SSM Parameter Store entry for the Domain Name
    domain = _ssm.StringParameter(self,
                                  "ADDomainName",
                                  parameter_name="ad_join_domain_name",
                                  string_value=ad.name)
    # Create a SSM Parameter Store entry for the Domain User
    aduser = _ssm.StringParameter(self,
                                  "ADDomainUser",
                                  parameter_name="ad_join_domain_user",
                                  string_value="Admin")
    # Parameters are only meaningful once the directory exists.
    domain.node.add_dependency(ad)
    aduser.node.add_dependency(ad)
    # Create SSM Document to join the Windows EC2 instance into AD.
    # The document pulls the domain name/user from Parameter Store and
    # the password from Secrets Manager at run time, so no credential is
    # baked into the document itself.
    ssmdocument = _ssm.CfnDocument(
        self,
        "SSMDocumentJoinAD",
        document_type="Command",
        name="SSMDocumentJoinAD",
        content={
            "description":
            "Run a PowerShell script to domain join a Windows instance securely",
            "schemaVersion": "2.0",
            "mainSteps": [{
                "action": "aws:runPowerShellScript",
                "name": "runPowerShellWithSecureString",
                "inputs": {
                    "runCommand": [
                        "# Example PowerShell script to domain join a Windows instance securely",
                        "# Adopt the document from AWS Blog Join a Microsoft Active Directory Domain with Parameter Store and Amazon EC2 Systems Manager Documents",
                        "",
                        "$ErrorActionPreference = 'Stop'",
                        "",
                        "try{",
                        " # Parameter names",
                        " # $dnsParameterStore = ([System.Net.Dns]::GetHostAddresses({}).IPAddressToString[0])"
                        .format(domain.parameter_name),
                        " $domainNameParameterStore = \"{}\"".format(
                            domain.parameter_name),
                        " $domainJoinUserNameParameterStore = \"{}\"".
                        format(aduser.parameter_name),
                        " $domainJoinPasswordParameterStore = \"{}\"".
                        format(secret_adpassword.secret_arn),
                        "",
                        " # Retrieve configuration values from parameters",
                        " $ipdns = ([System.Net.Dns]::GetHostAddresses(\"{}\").IPAddressToString[0])"
                        .format(_dname),
                        " $domain = (Get-SSMParameterValue -Name $domainNameParameterStore).Parameters[0].Value",
                        " $username = $domain + \"\\\" + (Get-SSMParameterValue -Name $domainJoinUserNameParameterStore).Parameters[0].Value",
                        " $password = ((Get-SECSecretValue -SecretId $domainJoinPasswordParameterStore ).SecretString | ConvertFrom-Json ).Key | ConvertTo-SecureString -asPlainText -Force ",
                        "",
                        " # Create a System.Management.Automation.PSCredential object",
                        " $credential = New-Object System.Management.Automation.PSCredential($username, $password)",
                        "",
                        " # Determine the name of the Network Adapter of this machine",
                        " $networkAdapter = Get-WmiObject Win32_NetworkAdapter -Filter \"AdapterType = 'Ethernet 802.3'\"",
                        " $networkAdapterName = ($networkAdapter | Select-Object -First 1).NetConnectionID",
                        "",
                        " # Set up the IPv4 address of the AD DNS server as the first DNS server on this machine",
                        " netsh.exe interface ipv4 add dnsservers name=$networkAdapterName address=$ipdns index=1",
                        "",
                        " # Join the domain and reboot",
                        " Add-Computer -DomainName $domain -Credential $credential",
                        " Restart-Computer -Force",
                        "}",
                        "catch [Exception]{",
                        " Write-Host $_.Exception.ToString()",
                        " Write-Host 'Command execution failed.'",
                        " $host.SetShouldExit(1)",
                        "}"
                    ]
                }
            }]
        })
    # Create SSM Association to trigger the SSM document so that the
    # Windows EC2JoinDomain (EC2) instance joins the domain.
    ssmjoinad = _ssm.CfnAssociation(self,
                                    "WindowJoinAD",
                                    name=ssmdocument.name,
                                    targets=[{
                                        "key": "InstanceIds",
                                        "values": [adadminEC2.instance_id]
                                    }])
    # The document must exist before the association referencing it.
    ssmjoinad.add_depends_on(ssmdocument)
    # Create a Policy for the Lambda Role
    lambdapolicy = _iam.PolicyDocument(statements=[
        _iam.PolicyStatement(actions=["logs:CreateLogGroup"],
                             resources=[
                                 "arn:aws:logs:{}:{}:*".format(
                                     self.region, self.account)
                             ]),
        _iam.PolicyStatement(
            actions=["logs:CreateLogStream", "logs:PutLogEvents"],
            resources=[
                "arn:aws:logs:{}:{}:log-group:/aws/lambda/*".format(
                    self.region, self.account)
            ]),
        _iam.PolicyStatement(actions=[
            "workspaces:RegisterWorkspaceDirectory",
            "workspaces:DeregisterWorkspaceDirectory",
            "ds:DescribeDirectories", "ds:AuthorizeApplication",
            "ds:UnauthorizeApplication", "iam:GetRole",
            "ec2:DescribeInternetGateways", "ec2:DescribeVpcs",
            "ec2:DescribeRouteTables", "ec2:DescribeSubnets",
            "ec2:DescribeNetworkInterfaces",
            "ec2:DescribeAvailabilityZones", "ec2:CreateSecurityGroup",
            "ec2:CreateTags"
        ],
                             resources=["*"])
    ])
    # Create an IAM Role for the Lambda function
    lambdarole = _iam.Role(
        self,
        "LambdaRoleForRegisterDS",
        assumed_by=_iam.ServicePrincipal('lambda.amazonaws.com'),
        inline_policies={"LambdaActicateDS": lambdapolicy},
        role_name="LambdaActivateDirectoryService")
    # Create a Lambda function to register the Directory Service with
    # WorkSpaces; the handler lives in ./lambda/workspaceds.py.
    dslambda = _lambda.Function(self,
                                "LambdaStackForDSFunction",
                                runtime=_lambda.Runtime.PYTHON_3_7,
                                handler="workspaceds.handler",
                                role=lambdarole,
                                code=_lambda.Code.asset('lambda'),
                                environment={"DIRECTORY_ID": ad.ref},
                                timeout=core.Duration.seconds(120))
    # Create a custom resource that invokes the Lambda function once it
    # has been created.
    _cf.CfnCustomResource(self,
                          "InvokeLambdaFunction",
                          service_token=dslambda.function_arn)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Config rule + SSM remediation enforcing scheme-based ALB tags.

    Deploys a Lambda-backed custom AWS Config rule that checks ALB
    tagging, an SSM Automation document that applies the correct tag,
    and a remediation configuration that is automatic when the
    ``enforce_compliance`` context value is "true" and manual otherwise.
    """
    super().__init__(scope, id, **kwargs)

    # Contextual variables
    external_tag_key = self.node.try_get_context("external_tag_key")
    external_tag_value = self.node.try_get_context("external_tag_value")
    internal_tag_key = self.node.try_get_context("internal_tag_key")
    internal_tag_value = self.node.try_get_context("internal_tag_value")
    enforce_compliance = self.node.try_get_context("enforce_compliance")

    # Config Rule Lambda function
    lambda_function = lambda_python.PythonFunction(
        self,
        "ConfigRuleLambdaFunction",
        entry="./lambda",
        handler="lambda_handler",
        index="index.py",
        description="Config Rule to create tags on untagged ALB resources",
        timeout=core.Duration.seconds(10),
        tracing=lambda_.Tracing.ACTIVE,
        environment=dict(
            EXTERNAL_TAG_KEY=external_tag_key,
            EXTERNAL_TAG_VALUE=external_tag_value,
            INTERNAL_TAG_KEY=internal_tag_key,
            INTERNAL_TAG_VALUE=internal_tag_value))

    # Allow Lambda function to describe ELBs and read their tags
    lambda_function.add_to_role_policy(
        statement=iam.PolicyStatement(
            actions=[
                "elasticloadbalancing:DescribeLoadBalancers",
                "elasticloadbalancing:DescribeTags"
            ],
            effect=iam.Effect.ALLOW,
            resources=["*"],
            sid="AllowELBTagRead"))

    # The Config Rule
    config_rule = config.CustomRule(
        self,
        "ConfigRule",
        lambda_function=lambda_function,
        configuration_changes=True,
        config_rule_name="alb-tag-enforcement",
        description="Checks if ALBs have the appropriate tag associated to them based on their ALB scheme")
    # Scope the rule to only look at ELBv2
    config_rule.scope_to_resource(
        type="AWS::ElasticLoadBalancingV2::LoadBalancer")

    # For readability, define the SSM remediation document externally
    # and read it in here
    with open("./ssm/remediation_document.json", "r") as f:
        ssm_document = json.load(f)

    remediation_document = ssm.CfnDocument(
        self,
        "SSMRemediationDocument",
        document_type="Automation",
        content=ssm_document)

    # Give SSM permission to add the tag when remediation is needed
    remediation_role = iam.Role(
        self,
        "RemediationRole",
        assumed_by=iam.ServicePrincipal(service="ssm.amazonaws.com"),
        description="Allow SSM to update tags on ALBs via a Config Rule remediation",
        inline_policies=dict(
            alb_read_write=iam.PolicyDocument(statements=[
                iam.PolicyStatement(
                    actions=[
                        "elasticloadbalancing:DescribeLoadBalancers",
                        "elasticloadbalancing:DescribeTags",
                        "elasticloadbalancing:AddTags"
                    ],
                    effect=iam.Effect.ALLOW,
                    resources=["*"],
                    sid="AllowELBTagReadWrite")
            ])))

    # REFACTOR: the original duplicated the full CfnRemediationConfiguration
    # call in both branches, differing only in the automation settings.
    # Build the shared parameters once.
    remediation_parameters = dict(
        ALBArn=dict(ResourceValue=dict(Value="RESOURCE_ID")),
        AutomationAssumeRole=dict(StaticValue=dict(
            Values=[remediation_role.role_arn])),
        ExternalTagKey=dict(StaticValue=dict(Values=[external_tag_key])),
        ExternalTagValue=dict(StaticValue=dict(Values=[external_tag_value])),
        InternalTagKey=dict(StaticValue=dict(Values=[internal_tag_key])),
        InternalTagValue=dict(StaticValue=dict(Values=[internal_tag_value])))

    # If enforce compliance is true, automatically remediate non-compliant
    # resources. Otherwise, just report compliant and non-compliant
    # resources, but still allow for manual remediation.
    if enforce_compliance.lower() == "true":
        automation_kwargs = dict(
            automatic=True,
            maximum_automatic_attempts=3,
            retry_attempt_seconds=15)
    else:
        automation_kwargs = dict(automatic=False)

    remediation_action = config.CfnRemediationConfiguration(
        self,
        "ConfigRemediationAction",
        config_rule_name=config_rule.config_rule_name,
        parameters=remediation_parameters,
        target_id=remediation_document.ref,
        target_type="SSM_DOCUMENT",
        **automation_kwargs)
def __init__(self, scope: core.Construct, id: str, ecr_repo: str, **kwargs) -> None:
    """Stand up the Windows Docker image build environment.

    :param ecr_repo: plain ECR repository *name* (not the registry URI).

    Creates an S3 bucket for build artifacts, an SSM command document
    rendered from a YAML template, an EC2 role with ECR/S3 access, and
    the Windows EC2 instance the SSM document runs on.
    """
    super().__init__(scope, id, **kwargs)

    # Fetch environment variables.
    s3_bucket_name = EnvUtil.get("S3_FOR_WIN_DOCKER_IMG_BUILD", "windows-docker-images")
    win_ec2_tag_key = EnvUtil.get("WIN_EC2_TAG_KEY", "aws-lc")
    win_ec2_tag_value = EnvUtil.get("WIN_EC2_TAG_VALUE", "aws-lc-windows")
    ssm_document_name = EnvUtil.get("WIN_DOCKER_BUILD_SSM_DOCUMENT", "aws-lc-windows-docker-ssm-doc")

    # Define a S3 bucket to store windows docker files and build scripts.
    s3.Bucket(scope=self,
              id="{}-s3".format(id),
              bucket_name=s3_bucket_name,
              block_public_access=s3.BlockPublicAccess.BLOCK_ALL)

    # Define SSM command document.
    env = kwargs["env"]
    aws_account_id = env["account"]
    aws_region = env["region"]
    # BUG FIX: the original reassigned ``ecr_repo`` to the full registry
    # URI and later used it to build the ECR repository ARN, producing a
    # malformed ARN like "...:repository/<acct>.dkr.ecr...". Keep the
    # repository name and the registry URI in separate variables.
    ecr_repo_uri = "{}.dkr.ecr.{}.amazonaws.com/{}".format(aws_account_id, aws_region, ecr_repo)
    with open('./cdk/windows_docker_build_ssm_document.yaml') as file:
        file_text = file.read().replace("ECR_PLACEHOLDER", ecr_repo_uri) \
            .replace("S3_BUCKET_PLACEHOLDER", s3_bucket_name) \
            .replace("REGION_PLACEHOLDER", aws_region)
    content = yaml.load(file_text, Loader=yaml.FullLoader)
    ssm.CfnDocument(scope=self,
                    id="{}-ssm-document".format(id),
                    name=ssm_document_name,
                    content=content,
                    document_type="Command")

    # Define a role for EC2.
    s3_read_write_policy = iam.PolicyDocument.from_json(
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": [
                        "s3:Put*",
                        "s3:Get*"
                    ],
                    "Resource": [
                        "arn:aws:s3:::{}/*".format(s3_bucket_name)
                    ]
                }
            ]
        }
    )
    ecr_power_user_policy = iam.PolicyDocument.from_json(
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": [
                        "ecr:GetAuthorizationToken"
                    ],
                    "Resource": "*"
                },
                {
                    "Effect": "Allow",
                    "Action": [
                        "ecr:BatchCheckLayerAvailability",
                        "ecr:GetDownloadUrlForLayer",
                        "ecr:GetRepositoryPolicy",
                        "ecr:DescribeRepositories",
                        "ecr:ListImages",
                        "ecr:DescribeImages",
                        "ecr:BatchGetImage",
                        "ecr:GetLifecyclePolicy",
                        "ecr:GetLifecyclePolicyPreview",
                        "ecr:ListTagsForResource",
                        "ecr:DescribeImageScanFindings",
                        "ecr:InitiateLayerUpload",
                        "ecr:UploadLayerPart",
                        "ecr:CompleteLayerUpload",
                        "ecr:PutImage"
                    ],
                    # Uses the repository *name* (the bug fix above keeps
                    # it from being clobbered by the registry URI).
                    "Resource": "arn:aws:ecr:{}:{}:repository/{}".format(env['region'], env['account'], ecr_repo)
                }
            ]
        }
    )
    inline_policies = {"s3_read_write_policy": s3_read_write_policy,
                       "ecr_power_user_policy": ecr_power_user_policy}
    role = iam.Role(scope=self,
                    id="{}-role".format(id),
                    assumed_by=iam.ServicePrincipal("ec2.amazonaws.com"),
                    inline_policies=inline_policies,
                    managed_policies=[
                        iam.ManagedPolicy.from_aws_managed_policy_name("AmazonSSMManagedInstanceCore")
                    ])

    # Define Windows EC2 instance, where the SSM document will be executed.
    machine_image = ec2.MachineImage.latest_windows(
        ec2.WindowsVersion.WINDOWS_SERVER_2016_ENGLISH_FULL_CONTAINERS)
    vpc = ec2.Vpc(scope=self, id="{}-vpc".format(id))
    block_device_volume = ec2.BlockDeviceVolume.ebs(volume_size=200, delete_on_termination=True)
    block_device = ec2.BlockDevice(device_name="/dev/sda1", volume=block_device_volume)
    instance = ec2.Instance(scope=self,
                            id="{}-instance".format(id),
                            instance_type=ec2.InstanceType(instance_type_identifier="m5d.xlarge"),
                            vpc=vpc,
                            role=role,
                            block_devices=[block_device],
                            vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PUBLIC),
                            machine_image=machine_image)
    # ``core.Tag.add`` is deprecated; use ``Tags.of`` for consistency with
    # the other Windows image-build stack in this file.
    core.Tags.of(instance).add(win_ec2_tag_key, win_ec2_tag_value)
def __init__(self, scope: core.Construct, construct_id: str, run_document_name: str, ec2_inst_id: str, stack_log_level: str, **kwargs) -> None:
    """Wrap the fluent-bit bootstrap script in an SSM document and run it on an EC2 instance.

    :param run_document_name: intended document name; currently unused —
        see the note on ``name`` below.
    :param ec2_inst_id: instance the SSM association targets.
    :param stack_log_level: accepted for interface compatibility; not
        used by the visible code.
    """
    super().__init__(scope, construct_id, **kwargs)

    # Read the bootstrap script the SSM document will execute.
    try:
        with open(
                "elastic_fluent_bit_kibana/stacks/back_end/bootstrap_scripts/configure_fluent_bit.sh",
                encoding="utf-8",
                mode="r") as f:
            bash_commands_to_run = f.read()
    except OSError:
        print("Unable to read bash commands file")
        # FIX: bare ``raise`` preserves the original traceback; the
        # previous ``raise e`` reset it.
        raise

    # SSM Run Command Document should be JSON Syntax
    # Ref: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-content
    # Ref: https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_ssm/CfnDocument.html
    _run_cmds = {
        "schemaVersion": "2.2",
        "description": "Run script on Linux instances.",
        "parameters": {
            "commands": {
                "type": "String",
                "description": "The commands to run or the path to an existing script on the instance.",
                # FIX: ``bash_commands_to_run`` is already a str; the
                # f-string wrapping was redundant.
                "default": bash_commands_to_run
            }
        },
        "mainSteps": [{
            "action": "aws:runShellScript",
            "name": "runCommands",
            "inputs": {
                "timeoutSeconds": "60",
                "runCommand": ["{{ commands }}"]
            }
        }]
    }

    # Create Linux Shell Script Document
    ssm_linux_document = _ssm.CfnDocument(
        self,
        "ssmLinuxDocument",
        document_type="Command",
        # Naming the document in CloudFormation makes updates a problem,
        # so the name is intentionally left off.
        # name=f"{run_document_name}",
        content=_run_cmds)

    # Create SSM Association to trigger the SSM document on the target EC2 instance.
    _run_commands_on_ec2 = _ssm.CfnAssociation(
        self,
        "runCommandsOnEc2",
        name=ssm_linux_document.name,
        targets=[{
            "key": "InstanceIds",
            "values": [ec2_inst_id]
        }])
    # As we are dealing with CloudFormation resources, add a hard dependency:
    # the document must exist before the association referencing it.
    _run_commands_on_ec2.add_depends_on(ssm_linux_document)

    ###########################################
    ################# OUTPUTS #################
    ###########################################
    output_0 = core.CfnOutput(
        self,
        "AutomationFrom",
        value=f"{GlobalArgs.SOURCE_INFO}",
        description="To know more about this automation stack, check out our github page.")