def buildCfWithDockerAction(buildRef, inputName: str) -> Action:
    """Create the CodeBuild pipeline action that tests/builds the CF stack.

    :param buildRef: logical name of the CodeBuild project resource
    :param inputName: name of the pipeline artifact fed into the build
    :return: a troposphere ``Actions`` property, run as the second action
             (RunOrder "2") of its stage
    """
    build_type = ActionTypeID(
        Category="Build", Owner="AWS", Version="1", Provider="CodeBuild")
    return Actions(
        Name=Sub("${AWS::StackName}-TestCfBuilderAction"),
        ActionTypeId=build_type,
        InputArtifacts=[InputArtifacts(Name=inputName)],
        RunOrder="2",
        Configuration={"ProjectName": Ref(buildRef)},
    )
def output_as_input(self, inputs):
    """
    Take a list of OutputArtifacts and transform it into the corresponding
    list of InputArtifacts (one InputArtifacts per artifact, same Name).

    :param inputs: list of OutputArtifacts produced by an earlier stage
    :return: list of InputArtifacts with matching names
    """
    # Validate first, so a bad list fails before any conversion happens.
    self.check_output_artifacts(inputs)
    return [InputArtifacts(Name=artifact.Name) for artifact in inputs]
def getDeploy(
        t: Template,
        inName: str,
        stage: str,
        interimArt: str,  # artifact containing func code incl. libs
        sourceartifact: str = None,
        getTest: Callable[[Template, str, str], Action] = None) -> Stages:
    """Build the CloudFormation deploy stage for *stage*.

    :param t: the template the deploy IAM role / action type are added to
    :param inName: artifact holding the generated stack<stage>.json template
    :param stage: stage name (also used in the stack/stage resource names)
    :param interimArt: artifact whose S3 location is passed as parameters
    :param sourceartifact: optional artifact handed to ``getTest``
    :param getTest: optional factory producing an extra test action; it is
        appended only when *both* sourceartifact and getTest are given
    :return: a Stages object named after *stage*
    """
    [actionId, role] = getDeployResources(t)
    # ParameterOverrides is a JSON string embedded inside the action config;
    # it points the deployed stack at the interim artifact's S3 location.
    params = {
        "S3Key": {"Fn::GetArtifactAtt": [interimArt, "ObjectKey"]},
        "S3Storage": {"Fn::GetArtifactAtt": [interimArt, "BucketName"]}
    }
    params = json.dumps(params)
    # The original code also called params.replace('"', '\"'); in Python
    # '\"' is the same one-character string as '"', so that call was a
    # provable no-op and has been removed. The newline escaping is kept
    # (compact json.dumps output contains no newlines, so it is defensive).
    params = params.replace('\n', '\\n')
    config = {
        "ActionMode": "CREATE_UPDATE",
        "RoleArn": GetAtt(role, "Arn"),
        "StackName": Sub("".join(["${AWS::StackName}Functions", stage])),
        "Capabilities": "CAPABILITY_NAMED_IAM",
        "TemplatePath": inName + "::stack" + stage + ".json",
        "ParameterOverrides": params
    }
    arts = map(lambda x: InputArtifacts(Name=x), [inName, interimArt])
    actions = [
        Actions(Name="Deploy" + stage,
                ActionTypeId=actionId,
                InputArtifacts=list(arts),
                RunOrder="1",
                Configuration=config)
    ]
    if sourceartifact is not None and getTest is not None:
        actions.append(getTest(t, sourceartifact, stage))
    return Stages(stage + "Deploy", Name=stage, Actions=actions)
def getDockerBuildAction(buildRef, inputs: List[str], outputs: List[str],
                         number=1) -> Actions:
    '''
    Takes a build reference which points to the build configuration,
    input/output lists with the names of the artifacts and (optional)
    a number, if multiple build actions must be added to the same pipeline.
    '''
    # The number doubles as RunOrder and as a suffix on the action name,
    # keeping multiple build actions in one pipeline unique.
    number = str(number)
    inputArts = [InputArtifacts(Name=name) for name in inputs]
    outputArts = [OutputArtifacts(Name=name) for name in outputs]
    actionId = ActionTypeId(Category="Build", Owner="AWS", Version="1",
                            Provider="CodeBuild")
    return Actions(Name=Sub("${AWS::StackName}-BuildAction" + number),
                   ActionTypeId=actionId,
                   InputArtifacts=inputArts,
                   OutputArtifacts=outputArts,
                   RunOrder=number,
                   Configuration={"ProjectName": Ref(buildRef)})
"ChangeSetName": "Deploy", "ActionMode": "CREATE_UPDATE", "StackName": "helloworld-ecs-staging-service", "Capabilities": "CAPABILITY_NAMED_IAM", "TemplatePath": "App::templates/nodeserver-cf.template", "TemplateConfiguration": "App::templates/nodeserver-cf-configuration.json", "RoleArn": GetAtt("CloudFormationHelloworldRole", "Arn") }, InputArtifacts=[InputArtifacts(Name="App", )], ) ]), Stages(Name="Approval", Actions=[ Actions( Name="Approval", ActionTypeId=ActionTypeId(Category="Approval", Owner="AWS", Version="1", Provider="Manual"), Configuration={}, InputArtifacts=[], ) ]) ],
ActionTypeId=source_action_id, ### Configuration={ "PollForSourceChanges": "false", "BranchName": "master", "RepositoryName": Ref('reponameparameter') }, InputArtifacts=[], Name='Source', RunOrder=1, OutputArtifacts=[OutputArtifacts(Name='source_action_output')]) build_action = Actions( ActionTypeId=build_action_id, Configuration={"ProjectName": Ref('build')}, InputArtifacts=[InputArtifacts(Name='source_action_output')], Name='Build', RunOrder=1, OutputArtifacts=[OutputArtifacts(Name='build_action_output')]) invoke_action = Actions( ActionTypeId=invoke_action_id, Configuration={ "FunctionName": Join("", [Ref("projectnameparameter"), 'sageDispatch']) }, InputArtifacts=[InputArtifacts(Name='source_action_output')], Name='Train', RunOrder=1, OutputArtifacts=[]) source_stage = Stages(Actions=[source_action], Name='Source')
# NOTE(review): fragment — the enclosing Stages(...) / Pipeline(...) calls
# are outside this chunk.
Actions=[
    Actions(
        Name='Build',
        ActionTypeId=ActionTypeId(
            Category='Build',
            Owner='AWS',
            Provider='CodeBuild',
            Version='1'
        ),
        Configuration={
            "ProjectName": PROJECT_NAME
        },
        Region=Region,
        InputArtifacts=[
            InputArtifacts(
                Name="SourceArtifact"
            )
        ],
        OutputArtifacts=[
            OutputArtifacts(
                Name="BuildArtifact"
            )
        ]
    )
]
)
]
))

# Create Event Rule role
EventRole = t.add_resource(Role(
# NOTE(review): fragment — the enclosing Stages(...) call opens outside this
# chunk, and the CodeDeploy action at the end is cut off mid-call.
Actions=[
    Actions(
        Name="Test",
        Region="us-east-2",
        # Custom action type backed by a Jenkins job registered as
        # "Jenkins-Portfolio".
        ActionTypeId=ActionTypeId(
            Category="Test",
            Owner="Custom",
            Version="1",
            Provider="Jenkins-Portfolio"
        ),
        Configuration={
            "ProjectName": "Portfolio"
        },
        InputArtifacts=[
            InputArtifacts(
                Name="Portfolio",
            ),
        ]
    )
]
),
Stages(
    Name="Staging",
    Actions=[
        Actions(
            Name="Deploy",
            ActionTypeId=ActionTypeId(
                Category="Deploy",
                Owner="AWS",
                Version="1",
                Provider="CodeDeploy"
def create_action(self, name, runorder, configuration, type, role=""):
    """Create a single CodePipeline action of the requested kind.

    :param name: display name of the action; its alphanumeric characters
        also form the troposphere resource title and (for Source) the
        default output-artifact name
    :param runorder: RunOrder within the stage
    :param configuration: action configuration dict; InputArtifacts /
        OutputArtifacts / RoleArn entries are popped out of a copy and
        the remainder is passed through as the action Configuration
    :param type: one of 'CodeBuild', 'Source', 'Approval', 'InvokeLambda'
        (NOTE: parameter shadows the builtin ``type``; kept for
        backward-compatible keyword calls)
    :param role: optional RoleArn for the Source action
    :return: a troposphere Actions object, or None for an unknown type
    """
    project_name = ''.join(e for e in name if e.isalnum())
    # Work on a copy so pops don't mutate the caller's configuration.
    config = configuration.copy()
    ListInputArtifacts = []
    action = None
    if type == 'CodeBuild':
        provider = 'CodeBuild'
        category = 'Build'
        typeId = ActionTypeId(Category=category, Owner="AWS", Version="1",
                              Provider=provider)
        # InputArtifacts may be a single name or a list of names.
        inputartifact = config.pop('InputArtifacts')
        if isinstance(inputartifact, list):
            for i_artifact in inputartifact:
                ListInputArtifacts.append(InputArtifacts(Name=i_artifact))
        else:
            ListInputArtifacts.append(InputArtifacts(Name=inputartifact))
        action = Actions(project_name,
                         Name=name,
                         ActionTypeId=typeId,
                         InputArtifacts=ListInputArtifacts,
                         OutputArtifacts=[OutputArtifacts(Name=name)],
                         Configuration=config,
                         RunOrder=runorder)
    elif type == 'Source':
        provider = 'CodeCommit'
        category = 'Source'
        # Output artifact defaults to the sanitized action name when the
        # caller did not specify one.
        if 'OutputArtifacts' in configuration:
            outputartifacts = config.pop('OutputArtifacts')
        else:
            outputartifacts = project_name
        typeId = ActionTypeId(Category=category, Owner="AWS", Version="1",
                              Provider=provider)
        if role:
            action = Actions(
                project_name,
                Name=name,
                ActionTypeId=typeId,
                OutputArtifacts=[OutputArtifacts(Name=outputartifacts)],
                Configuration=config,
                RoleArn=role,
                RunOrder=runorder)
        else:
            action = Actions(
                project_name,
                Name=name,
                ActionTypeId=typeId,
                OutputArtifacts=[OutputArtifacts(Name=outputartifacts)],
                Configuration=config,
                RunOrder=runorder)
    elif type == 'Approval':
        typeId = ActionTypeId(Category='Approval', Owner="AWS", Version="1",
                              Provider='Manual')
        action = Actions(project_name,
                         Name=name,
                         ActionTypeId=typeId,
                         Configuration=config,
                         RunOrder=runorder)
    elif type == 'InvokeLambda':
        # NOTE(review): unlike the CodeBuild branch, InputArtifacts is
        # passed through as-is here (presumably already a list of
        # InputArtifacts objects) — confirm against callers.
        inputartifact = config.pop('InputArtifacts')
        rolearn = config.pop('RoleArn')
        typeId = ActionTypeId(
            Category='Invoke',
            Owner='AWS',
            Provider='Lambda',
            Version='1',
        )
        action = Actions(project_name,
                         Name=name,
                         ActionTypeId=typeId,
                         Configuration=config,
                         InputArtifacts=inputartifact,
                         RoleArn=rolearn,
                         RunOrder=runorder)
    return action
Provider="CloudFormation"), Configuration={ "ActionMode": "CREATE_UPDATE", "StackName": "helloworld-staging-service", "Capabilities": "CAPABILITY_NAMED_IAM", "TemplatePath": "helloworldApp::templates/ansiblebase-cf.template", "TemplateConfiguration": "helloworldApp::templates/ansiblebase-cf-configuration.json", "RoleArn": GetAtt("CloudFormationHelloworldRole", "Arn") }, InputArtifacts=[InputArtifacts(Name="helloworldApp")]) ]), Stages(Name="Approval", Actions=[ Actions(Name="Approval", ActionTypeId=ActionTypeId(Category="Approval", Owner="AWS", Version="1", Provider="Manual"), Configuration={}, InputArtifacts=[]) ]) ])) print(t.to_json())
def generate_template(d):
    """Build the CloudFormation template for one ECS Fargate service.

    Creates a task definition, ECR repository, ALB target group + listener
    rule, ECS service, CodeBuild project and a 3-stage (S3 source → build →
    ECS deploy) CodePipeline, all named "<env>-<project_name>-<service_name>".

    :param d: configuration dict (env, project_name, service_name, container
        settings, artifact_store, stack-name prefixes for ImportValue, tags)
    :return: the populated troposphere Template
    """
    # Set template metadata
    t = Template()
    t.add_version("2010-09-09")
    t.set_description(d["cf_template_description"])
    aws_account_id = Ref("AWS::AccountId")
    aws_region = Ref("AWS::Region")

    # Task definition
    task_definition = t.add_resource(
        TaskDefinition(
            "TaskDefinition",
            Family=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            RequiresCompatibilities=["FARGATE"],
            Cpu=d["container_cpu"],
            Memory=d["container_memory"],
            NetworkMode="awsvpc",
            ExecutionRoleArn=ImportValue(d["ecs_stack_name"] +
                                         "-ECSClusterRole"),
            ContainerDefinitions=[
                ContainerDefinition(
                    Name=Join("", [
                        d["env"], "-", d["project_name"], "-",
                        d["service_name"]
                    ]),
                    # FIX: the repository part of the image URI must match
                    # the ECR RepositoryName ("<env>-<project>-<service>",
                    # see below) and the IMAGE_URI the CodeBuild project
                    # pushes to; the original Join omitted the "-"
                    # separators, producing "<env><project><service>".
                    Image=Join(
                        "",
                        [
                            aws_account_id, ".dkr.ecr.", aws_region,
                            ".amazonaws.com/", d["env"], "-",
                            d["project_name"], "-", d["service_name"],
                            ":latest"
                        ],
                    ),
                    Essential=True,
                    PortMappings=[
                        PortMapping(
                            ContainerPort=d["container_port"],
                            HostPort=d["container_port"],
                        )
                    ],
                    EntryPoint=["sh", "-c"],
                    Command=[d["container_command"]],
                    LogConfiguration=LogConfiguration(
                        LogDriver="awslogs",
                        Options={
                            "awslogs-region": aws_region,
                            "awslogs-group": Join("", [
                                d["env"], "-", d["project_name"], "-",
                                d["service_name"]
                            ]),
                            "awslogs-stream-prefix": "ecs",
                            "awslogs-create-group": "true"
                        }))
            ],
            Tags=Tags(d["tags"],
                      {"Name": d["project_name"] + "-task-definition"}),
        ))

    # ECR
    ecr = t.add_resource(
        Repository(
            "ECR",
            DependsOn="ListenerRule",
            RepositoryName=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecr"}),
        ))

    # Target group
    target_group = t.add_resource(
        elb.TargetGroup(
            "TargetGroup",
            Name=Join("", [d["env"], "-", d["service_name"]]),
            HealthCheckIntervalSeconds="30",
            HealthCheckProtocol="HTTP",
            HealthCheckPort=d["container_port"],
            HealthCheckTimeoutSeconds="10",
            HealthyThresholdCount="4",
            HealthCheckPath=d["tg_health_check_path"],
            Matcher=elb.Matcher(HttpCode="200-299"),
            Port=d["container_port"],
            Protocol="HTTP",
            TargetType="ip",
            UnhealthyThresholdCount="3",
            VpcId=ImportValue(d["network_stack_name"] + "-VPCId"),
            # NOTE(review): tag says "-ecr" — looks copy-pasted from the
            # ECR resource; confirm intended tag value.
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecr"}),
        ))

    # Listener rule
    t.add_resource(
        elb.ListenerRule(
            "ListenerRule",
            DependsOn="TargetGroup",
            ListenerArn=ImportValue(d["ecs_stack_name"] + "-ListenerArnHTTP"),
            Conditions=[
                elb.Condition(Field="path-pattern",
                              Values=[d["application_path_api"]])
            ],
            Actions=[
                elb.Action(Type="forward", TargetGroupArn=Ref(target_group))
            ],
            Priority="1",
        ))

    # ECS service
    ecs_service = t.add_resource(
        Service(
            "ECSService",
            ServiceName=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            # Wait for the pipeline so the first deployment has an image.
            DependsOn="pipeline",
            DesiredCount=d["container_desired_tasks_count"],
            TaskDefinition=Ref(task_definition),
            LaunchType="FARGATE",
            NetworkConfiguration=NetworkConfiguration(
                AwsvpcConfiguration=AwsvpcConfiguration(
                    Subnets=[
                        ImportValue(d["network_stack_name"] +
                                    "-PrivateSubnetId1"),
                        ImportValue(d["network_stack_name"] +
                                    "-PrivateSubnetId2"),
                    ],
                    SecurityGroups=[
                        ImportValue(d["ecs_stack_name"] + "-ECSClusterSG")
                    ],
                )),
            LoadBalancers=([
                LoadBalancer(
                    ContainerName=Join(
                        "",
                        [
                            d["env"], "-", d["project_name"], "-",
                            d["service_name"]
                        ],
                    ),
                    ContainerPort=d["container_port"],
                    TargetGroupArn=Ref(target_group),
                )
            ]),
            Cluster=ImportValue(d["ecs_stack_name"] + "-ECSClusterName"),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecs-service"}),
        ))

    # App Autoscaling target
    # App Autoscaling policy

    # Codebuild project
    codebuild = t.add_resource(
        Project(
            "codebuild",
            Name=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            DependsOn="ECR",
            ServiceRole=ImportValue(d["ecs_stack_name"] +
                                    "-CodebuildDeveloperRole"),
            Artifacts=Artifacts(
                Name="Build",
                Location=d["artifact_store"],
                Type="S3",
            ),
            Description="Build a docker image and send it to ecr",
            Source=Source(
                BuildSpec="buildspec.yml",
                Type="S3",
                Location=d["artifact_store"] + "/" + d["artifact_name"],
            ),
            Environment=Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                # Privileged mode is required to run docker builds.
                PrivilegedMode=True,
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    EnvironmentVariable(
                        Name="AWS_DEFAULT_REGION",
                        Type="PLAINTEXT",
                        Value=aws_region,
                    ),
                    EnvironmentVariable(
                        Name="SERVICE_NAME",
                        Type="PLAINTEXT",
                        Value=Join(
                            "",
                            [
                                d["env"], "-", d["project_name"], "-",
                                d["service_name"]
                            ],
                        ),
                    ),
                    EnvironmentVariable(
                        Name="IMAGE_URI",
                        Type="PLAINTEXT",
                        Value=Join(
                            "",
                            [
                                aws_account_id, ".dkr.ecr.", aws_region,
                                ".amazonaws.com/", d["env"], "-",
                                d["project_name"], "-", d["service_name"],
                            ],
                        ),
                    ),
                ],
            ),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-codebuild"}),
        ))

    # Codepipeline
    pipeline = t.add_resource(
        Pipeline(
            "pipeline",
            Name=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            RoleArn=ImportValue(d["ecs_stack_name"] + "-CodePipelineRole"),
            Stages=[
                Stages(
                    Name="Source",
                    Actions=[
                        Actions(
                            Name="Source",
                            ActionTypeId=ActionTypeId(
                                Category="Source",
                                Owner="AWS",
                                Version="1",
                                Provider="S3",
                            ),
                            OutputArtifacts=[
                                OutputArtifacts(Name="source_artifact")
                            ],
                            Configuration={
                                "S3Bucket": d["artifact_store"],
                                "S3ObjectKey": d["artifact_name"],
                            },
                            RunOrder="1",
                        )
                    ],
                ),
                Stages(
                    Name="Build",
                    Actions=[
                        Actions(
                            Name="Build",
                            InputArtifacts=[
                                InputArtifacts(Name="source_artifact")
                            ],
                            OutputArtifacts=[
                                OutputArtifacts(Name="build_artifact")
                            ],
                            ActionTypeId=ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={"ProjectName": Ref(codebuild)},
                            RunOrder="1",
                        )
                    ],
                ),
                Stages(
                    Name="Deploy",
                    Actions=[
                        Actions(
                            Name="Deploy",
                            InputArtifacts=[
                                InputArtifacts(Name="build_artifact")
                            ],
                            ActionTypeId=ActionTypeId(
                                Category="Deploy",
                                Owner="AWS",
                                Version="1",
                                Provider="ECS",
                            ),
                            Configuration={
                                "ClusterName":
                                    ImportValue(d["ecs_stack_name"] +
                                                "-ECSClusterName"),
                                "ServiceName": Join(
                                    "",
                                    [
                                        d["env"], "-", d["project_name"],
                                        "-", d["service_name"],
                                    ],
                                ),
                                "FileName": "definitions.json",
                            },
                        )
                    ],
                ),
            ],
            ArtifactStore=ArtifactStore(Type="S3",
                                        Location=d["artifact_store"]),
        ))

    # Route53
    # Outputs
    return t
Name="CFNBuild", Actions=[ Actions( Name="Container", ActionTypeId=ActionTypeID(Category="Build", Owner="AWS", Version="1", Provider="CodeBuild"), Configuration={ "ProjectName": Join("-", [ Select(0, Split("-", Ref("AWS::StackName"))), "cfn", "codebuild" ]), }, InputArtifacts=[InputArtifacts(Name="App")], OutputArtifacts=[ OutputArtifacts(Name="CFNBuildOutput") ], ) ]), Stages( Name="DockerBuild", Actions=[ Actions( Name="Container", ActionTypeId=ActionTypeID(Category="Build", Owner="AWS", Version="1", Provider="CodeBuild"), Configuration={
def __create_pipeline(template: Template, build_project: Project):
    """Add a GitHub → CodeBuild → CloudFormation change-set pipeline.

    Adds the GitHub parameters and a three-stage pipeline to *template*:
    Source (GitHub), Build (the given CodeBuild project) and Deploy
    (CHANGE_SET_REPLACE followed by CHANGE_SET_EXECUTE on the
    'pipeline-sample-function' stack).

    :param template: template the parameters and pipeline are added to
    :param build_project: CodeBuild project used by the Build stage
    """
    github_owner = template.add_parameter(
        parameter=Parameter(title='GitHubOwner', Type='String'))
    github_repo = template.add_parameter(
        parameter=Parameter(title='GitHubRepo', Type='String'))
    github_branch = template.add_parameter(parameter=Parameter(
        title='GitHubBranch', Type='String', Default='master'))
    github_token = template.add_parameter(
        parameter=Parameter(title='GitHubToken', Type='String'))
    source_artifact = 'MyApp'
    build_artifact = 'MyBuild'
    stack_name = 'pipeline-sample-function'
    change_set_name = 'pipeline-sample-function-change-set'
    # Artifact bucket and service roles are exported by the common stack.
    bucket = ImportValue(CommonResource.ExportName.BUCKET_NAME.value)
    template.add_resource(resource=Pipeline(
        title='SamplePipeline',
        ArtifactStore=ArtifactStore(
            Type='S3',
            Location=bucket,
        ),
        RoleArn=ImportValue(
            CommonResource.ExportName.CODE_PIPELINE_SERVICE_ROLE_ARN.value),
        Stages=[
            Stages(Name="Source",
                   Actions=[
                       Actions(Name="App",
                               ActionTypeId=ActionTypeId(Category="Source",
                                                         Owner="ThirdParty",
                                                         Provider="GitHub",
                                                         Version="1"),
                               Configuration={
                                   "Owner": Ref(github_owner),
                                   "Repo": Ref(github_repo),
                                   "Branch": Ref(github_branch),
                                   "OAuthToken": Ref(github_token),
                               },
                               OutputArtifacts=[
                                   OutputArtifacts(Name=source_artifact)
                               ])
                   ]),
            Stages(
                Name="Build",
                Actions=[
                    Actions(
                        Name="Build",
                        ActionTypeId=ActionTypeId(Category="Build",
                                                  Owner="AWS",
                                                  Provider="CodeBuild",
                                                  Version="1"),
                        Configuration={'ProjectName': Ref(build_project)},
                        InputArtifacts=[InputArtifacts(Name=source_artifact)],
                        OutputArtifacts=[OutputArtifacts(Name=build_artifact)],
                        RunOrder=1)
                ]),
            Stages(
                Name='Deploy',
                Actions=[
                    # Step 1: create/replace the change set from the built
                    # template.
                    Actions(
                        Name="ChangeSetReplace",
                        ActionTypeId=ActionTypeId(Category="Deploy",
                                                  Owner="AWS",
                                                  Provider="CloudFormation",
                                                  Version="1"),
                        Configuration={
                            "ActionMode": "CHANGE_SET_REPLACE",
                            "Capabilities":
                                "CAPABILITY_NAMED_IAM,CAPABILITY_AUTO_EXPAND",
                            "ChangeSetName": change_set_name,
                            "StackName": stack_name,
                            "RoleArn": ImportValue(
                                CommonResource.ExportName.
                                CLOUD_FORMATION_SERVICE_ROLE_ARN.value),
                            "TemplatePath": build_artifact + '::function.yml'
                        },
                        InputArtifacts=[InputArtifacts(Name=build_artifact)],
                        RunOrder=1),
                    # Step 2: execute the change set created above.
                    Actions(Name="ChangeSetExecute",
                            ActionTypeId=ActionTypeId(
                                Category="Deploy",
                                Owner="AWS",
                                Provider="CloudFormation",
                                Version="1"),
                            Configuration={
                                "ActionMode": "CHANGE_SET_EXECUTE",
                                "ChangeSetName": change_set_name,
                                "StackName": stack_name,
                            },
                            RunOrder=2)
                ])
        ],
    ))
def create_pipeline_template(config) -> Template:
    """Build a CD pipeline template: GitHub source → CodeBuild, plus webhook.

    Creates the GitHub parameters, an artifact bucket, admin-scoped
    CloudFormation and CodePipeline roles, the CodeBuild project (via
    create_codebuild_project), a two-stage pipeline and a GitHub webhook
    that triggers the Source action on pushes.

    :param config: currently unused (kept for interface compatibility)
    :return: the populated troposphere Template
    """
    t = Template()
    github_token = t.add_parameter(Parameter("GithubToken", Type="String"))
    github_owner = t.add_parameter(
        Parameter("GitHubOwner",
                  Type='String',
                  Default='aiengines',
                  AllowedPattern="[A-Za-z0-9-_]+"))
    github_repo = t.add_parameter(
        Parameter("GitHubRepo",
                  Type='String',
                  Default='codebuild_pipeline_skeleton',
                  AllowedPattern="[A-Za-z0-9-_]+"))
    github_branch = t.add_parameter(
        Parameter("GitHubBranch",
                  Type='String',
                  Default='master',
                  AllowedPattern="[A-Za-z0-9-_]+"))
    artifact_store_s3_bucket = t.add_resource(Bucket("S3Bucket", ))
    cloudformationrole = t.add_resource(
        Role("CloudformationRole",
             AssumeRolePolicyDocument=PolicyDocument(
                 Version="2012-10-17",
                 Statement=[
                     Statement(
                         Effect=Allow,
                         Action=[AssumeRole],
                         Principal=Principal(
                             "Service", ["cloudformation.amazonaws.com"]))
                 ]),
             ManagedPolicyArns=['arn:aws:iam::aws:policy/AdministratorAccess'
                                ]))
    codepipelinerole = t.add_resource(
        Role("CodePipelineRole",
             AssumeRolePolicyDocument=PolicyDocument(Statement=[
                 Statement(Effect=Allow,
                           Action=[AssumeRole],
                           Principal=Principal(
                               "Service", ["codepipeline.amazonaws.com"]))
             ]),
             ManagedPolicyArns=['arn:aws:iam::aws:policy/AdministratorAccess'
                                ]))
    codebuild_project = t.add_resource(create_codebuild_project(t))
    pipeline = t.add_resource(
        Pipeline(
            "CDPipeline",
            ArtifactStore=ArtifactStore(
                Type="S3", Location=Ref(artifact_store_s3_bucket)),
            # DisableInboundStageTransitions = [
            #     DisableInboundStageTransitions(
            #         StageName = "Release",
            #         Reason = "Disabling the transition until "
            #                  "integration tests are completed"
            #     )
            # ],
            RestartExecutionOnUpdate=True,
            RoleArn=codepipelinerole.GetAtt('Arn'),
            Stages=[
                Stages(Name="Source",
                       Actions=[
                           Actions(Name="SourceAction",
                                   ActionTypeId=ActionTypeId(
                                       Category="Source",
                                       Owner="ThirdParty",
                                       Provider="GitHub",
                                       Version="1",
                                   ),
                                   OutputArtifacts=[
                                       OutputArtifacts(Name="GitHubSourceCode")
                                   ],
                                   Configuration={
                                       'Owner': Ref(github_owner),
                                       'Repo': Ref(github_repo),
                                       'Branch': Ref(github_branch),
                                       # Webhook-driven, so no polling.
                                       'PollForSourceChanges': False,
                                       'OAuthToken': Ref(github_token)
                                   },
                                   RunOrder="1")
                       ]),
                Stages(Name="Build",
                       Actions=[
                           Actions(Name="BuildAction",
                                   ActionTypeId=ActionTypeId(
                                       Category="Build",
                                       Owner="AWS",
                                       Provider="CodeBuild",
                                       Version="1"),
                                   InputArtifacts=[
                                       InputArtifacts(Name="GitHubSourceCode")
                                   ],
                                   OutputArtifacts=[
                                       OutputArtifacts(Name="BuildArtifacts")
                                   ],
                                   Configuration={
                                       'ProjectName': Ref(codebuild_project),
                                   },
                                   RunOrder="1")
                       ]),
            ],
        ))
    t.add_resource(
        Webhook("GitHubWebHook",
                Authentication='GITHUB_HMAC',
                AuthenticationConfiguration=WebhookAuthConfiguration(
                    SecretToken=Ref(github_token)),
                Filters=[
                    WebhookFilterRule(JsonPath='$.ref',
                                      MatchEquals='refs/heads/{Branch}')
                ],
                TargetPipeline=Ref(pipeline),
                TargetAction='Source',
                TargetPipelineVersion=pipeline.GetAtt('Version')))
    return t
def build_template(sierrafile):
    """Build the full stack template described by *sierrafile*.

    Produces: a VPC with two public subnets and a network load balancer,
    an EC2-backed ECS cluster (launch configuration + autoscaling group),
    IAM roles, a shared CodeBuild project, and — per service — a task
    definition, target group, NLB listener, ECS service and (optionally)
    a GitHub-triggered CodePipeline with webhook.

    :param sierrafile: configuration object providing extra_params,
        env_vars and a services mapping (container/pipeline settings)
    :return: the populated troposphere Template
    """
    template = Template()
    template.add_version('2010-09-09')
    template.add_metadata(build_interface(sierrafile.extra_params))

    parameters = AttrDict(
        # Network Parameters
        vpc_cidr=template.add_parameter(Parameter(
            'VpcCidr',
            Type='String',
            Default='192.172.0.0/16',
        )),
        subnet1_cidr=template.add_parameter(Parameter(
            'Subnet1Cidr',
            Type='String',
            Default='192.172.1.0/24',
        )),
        subnet2_cidr=template.add_parameter(Parameter(
            'Subnet2Cidr',
            Type='String',
            Default='192.172.2.0/24',
        )),

        # ECS Parameters
        cluster_size=template.add_parameter(Parameter(
            'ClusterSize',
            Type='Number',
            Default=2,
        )),
        instance_type=template.add_parameter(Parameter(
            'InstanceType',
            Type='String',
            Default='t2.medium'
        )),
        key_name=template.add_parameter(Parameter(
            'KeyName',
            Type='AWS::EC2::KeyPair::KeyName',
        )),
        image_id=template.add_parameter(Parameter(
            'ImageId',
            Type='AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>',
            Default=(
                '/aws/service/ecs/optimized-ami'
                '/amazon-linux/recommended/image_id'
            ),
            Description=(
                'An SSM parameter that resolves to a valid AMI ID.'
                ' This is the AMI that will be used to create ECS hosts.'
                ' The default is the current recommended ECS-optimized AMI.'
            )
        )),

        # Other Parameters
        github_token=template.add_parameter(Parameter(
            'GitHubToken',
            Type='String',
            NoEcho=True,
        )),
    )

    # Environment Variable Parameters
    for env_var_param, env_var_name in sierrafile.extra_params:
        template.add_parameter(Parameter(
            env_var_param,
            Type='String',
            NoEcho=True,
        ))

    # Resource Declarations

    # # Network

    network_vpc = template.add_resource(VPC(
        'NetworkVpc',
        CidrBlock=Ref(parameters.vpc_cidr),
        Tags=Tags(Name=Ref('AWS::StackName')),
    ))

    network_ig = template.add_resource(InternetGateway(
        'NetworkInternetGateway',
        Tags=Tags(Name=Ref('AWS::StackName')),
    ))

    vpc_attach = template.add_resource(VPCGatewayAttachment(
        'NetworkInternetGatewayAttachment',
        InternetGatewayId=Ref(network_ig),
        VpcId=Ref(network_vpc),
    ))

    route_table = template.add_resource(RouteTable(
        'NetworkRouteTable',
        VpcId=Ref(network_vpc),
        Tags=Tags(Name=Ref('AWS::StackName')),
    ))

    template.add_resource(Route(
        'NetworkDefaultRoute',
        # The default route is only valid once the IGW is attached.
        DependsOn=[vpc_attach.title],
        RouteTableId=Ref(route_table),
        DestinationCidrBlock='0.0.0.0/0',
        GatewayId=Ref(network_ig),
    ))

    subnet1 = template.add_resource(Subnet(
        'NetworkSubnet1',
        VpcId=Ref(network_vpc),
        AvailabilityZone=Select(0, GetAZs()),
        MapPublicIpOnLaunch=True,
        CidrBlock=Ref(parameters.subnet1_cidr),
        Tags=Tags(Name=Sub('${AWS::StackName} (Public)')),
    ))

    subnet2 = template.add_resource(Subnet(
        'NetworkSubnet2',
        VpcId=Ref(network_vpc),
        AvailabilityZone=Select(1, GetAZs()),
        MapPublicIpOnLaunch=True,
        CidrBlock=Ref(parameters.subnet2_cidr),
        Tags=Tags(Name=Sub('${AWS::StackName} (Public)')),
    ))

    template.add_resource(SubnetRouteTableAssociation(
        'NetworkSubnet1RouteTableAssociation',
        RouteTableId=Ref(route_table),
        SubnetId=Ref(subnet1),
    ))

    template.add_resource(SubnetRouteTableAssociation(
        'NetworkSubnet2RouteTableAssociation',
        RouteTableId=Ref(route_table),
        SubnetId=Ref(subnet2),
    ))

    elb = template.add_resource(LoadBalancer(
        ELB_NAME,
        Name=Sub('${AWS::StackName}-elb'),
        Type='network',
        Subnets=[Ref(subnet1), Ref(subnet2)],
    ))

    # # Cluster

    ecs_host_role = template.add_resource(Role(
        'EcsHostRole',
        AssumeRolePolicyDocument=PolicyDocument(
            Statement=[Statement(
                Effect=Allow,
                Principal=Principal('Service', 'ec2.amazonaws.com'),
                Action=[awacs.sts.AssumeRole]
            )],
        ),
        ManagedPolicyArns=[
            'arn:aws:iam::aws:policy/'
            'service-role/AmazonEC2ContainerServiceforEC2Role'
        ]
    ))

    ecs_host_profile = template.add_resource(InstanceProfile(
        'EcsHostInstanceProfile',
        Roles=[Ref(ecs_host_role)]
    ))

    ecs_host_sg = template.add_resource(SecurityGroup(
        'EcsHostSecurityGroup',
        GroupDescription=Sub('${AWS::StackName}-hosts'),
        VpcId=Ref(network_vpc),
        # NOTE(review): ingress is wide open (all protocols, 0.0.0.0/0) —
        # confirm this is intended for the ECS hosts.
        SecurityGroupIngress=[SecurityGroupRule(
            CidrIp='0.0.0.0/0',
            IpProtocol='-1'
        )]
    ))

    cluster = template.add_resource(Cluster(
        'EcsCluster',
        ClusterName=Ref('AWS::StackName')
    ))

    autoscaling_name = 'EcsHostAutoScalingGroup'

    launch_conf_name = 'EcsHostLaunchConfiguration'
    launch_conf = template.add_resource(LaunchConfiguration(
        launch_conf_name,
        ImageId=Ref(parameters.image_id),
        InstanceType=Ref(parameters.instance_type),
        IamInstanceProfile=Ref(ecs_host_profile),
        KeyName=Ref(parameters.key_name),
        SecurityGroups=[Ref(ecs_host_sg)],
        # cfn-init applies the Metadata below, then cfn-signal reports the
        # result to the autoscaling group's creation policy.
        UserData=Base64(Sub(
            '#!/bin/bash\n'
            'yum install -y aws-cfn-bootstrap\n'
            '/opt/aws/bin/cfn-init -v'
            ' --region ${AWS::Region}'
            ' --stack ${AWS::StackName}'
            f' --resource {launch_conf_name}\n'
            '/opt/aws/bin/cfn-signal -e $?'
            ' --region ${AWS::Region}'
            ' --stack ${AWS::StackName}'
            f' --resource {autoscaling_name}\n'
        )),
        Metadata={
            'AWS::CloudFormation::Init': {
                'config': {
                    'commands': {
                        '01_add_instance_to_cluster': {
                            'command': Sub(
                                f'echo ECS_CLUSTER=${{{cluster.title}}}'
                                f' > /etc/ecs/ecs.config'
                            ),
                        }
                    },
                    'files': {
                        '/etc/cfn/cfn-hup.conf': {
                            'mode': 0o400,
                            'owner': 'root',
                            'group': 'root',
                            'content': Sub(
                                '[main]\n'
                                'stack=${AWS::StackId}\n'
                                'region=${AWS::Region}\n'
                            ),
                        },
                        '/etc/cfn/hooks.d/cfn-auto-reloader.conf': {
                            'content': Sub(
                                '[cfn-auto-reloader-hook]\n'
                                'triggers=post.update\n'
                                'path=Resources.ContainerInstances.Metadata'
                                '.AWS::CloudFormation::Init\n'
                                'action=/opt/aws/bin/cfn-init -v'
                                ' --region ${AWS::Region}'
                                ' --stack ${AWS::StackName}'
                                f' --resource {launch_conf_name}\n'
                            ),
                        },
                    },
                    'services': {
                        'sysvinit': {
                            'cfn-hup': {
                                'enabled': True,
                                'ensureRunning': True,
                                'files': [
                                    '/etc/cfn/cfn-hup.conf',
                                    '/etc/cfn/hooks.d/cfn-auto-reloader.conf'
                                ]
                            }
                        }
                    }
                }
            }
        }
    ))

    autoscaling_group = template.add_resource(AutoScalingGroup(
        autoscaling_name,
        VPCZoneIdentifier=[Ref(subnet1), Ref(subnet2)],
        LaunchConfigurationName=Ref(launch_conf),
        DesiredCapacity=Ref(parameters.cluster_size),
        MinSize=Ref(parameters.cluster_size),
        MaxSize=Ref(parameters.cluster_size),
        Tags=[{
            'Key': 'Name',
            'Value': Sub('${AWS::StackName} - ECS Host'),
            'PropagateAtLaunch': True,
        }],
        CreationPolicy=CreationPolicy(
            ResourceSignal=ResourceSignal(Timeout='PT15M'),
        ),
        UpdatePolicy=UpdatePolicy(
            AutoScalingRollingUpdate=AutoScalingRollingUpdate(
                MinInstancesInService=1,
                MaxBatchSize=1,
                PauseTime='PT5M',
                WaitOnResourceSignals=True,
            ),
        ),
    ))

    # # Services

    task_role = template.add_resource(Role(
        'TaskExecutionRole',
        AssumeRolePolicyDocument=PolicyDocument(
            Statement=[Statement(
                Effect=Allow,
                Principal=Principal('Service', 'ecs-tasks.amazonaws.com'),
                Action=[awacs.sts.AssumeRole],
            )],
        ),
        ManagedPolicyArns=[
            'arn:aws:iam::aws:policy/'
            'service-role/AmazonECSTaskExecutionRolePolicy'
        ],
    ))

    artifact_bucket = template.add_resource(Bucket(
        'ArtifactBucket',
        DeletionPolicy='Retain',
    ))

    codebuild_role = template.add_resource(Role(
        'CodeBuildServiceRole',
        Path='/',
        AssumeRolePolicyDocument=PolicyDocument(
            Version='2012-10-17',
            Statement=[
                Statement(
                    Effect=Allow,
                    Principal=Principal(
                        'Service', 'codebuild.amazonaws.com'
                    ),
                    Action=[
                        awacs.sts.AssumeRole,
                    ],
                ),
            ],
        ),
        Policies=[Policy(
            PolicyName='root',
            PolicyDocument=PolicyDocument(
                Version='2012-10-17',
                Statement=[
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.ssm.GetParameters,
                        ],
                    ),
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.s3.GetObject,
                            awacs.s3.PutObject,
                            awacs.s3.GetObjectVersion,
                        ],
                    ),
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.logs.CreateLogGroup,
                            awacs.logs.CreateLogStream,
                            awacs.logs.PutLogEvents,
                        ],
                    ),
                ],
            ),
        )],
    ))

    codepipeline_role = template.add_resource(Role(
        'CodePipelineServiceRole',
        Path='/',
        AssumeRolePolicyDocument=PolicyDocument(
            Version='2012-10-17',
            Statement=[
                Statement(
                    Effect=Allow,
                    Principal=Principal(
                        'Service', 'codepipeline.amazonaws.com'
                    ),
                    Action=[
                        awacs.sts.AssumeRole,
                    ],
                ),
            ],
        ),
        Policies=[Policy(
            PolicyName='root',
            PolicyDocument=PolicyDocument(
                Version='2012-10-17',
                Statement=[
                    Statement(
                        Resource=[
                            Sub(f'${{{artifact_bucket.title}.Arn}}/*')
                        ],
                        Effect=Allow,
                        Action=[
                            awacs.s3.GetBucketVersioning,
                            awacs.s3.GetObject,
                            awacs.s3.GetObjectVersion,
                            awacs.s3.PutObject,
                        ],
                    ),
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.ecs.DescribeServices,
                            awacs.ecs.DescribeTaskDefinition,
                            awacs.ecs.DescribeTasks,
                            awacs.ecs.ListTasks,
                            awacs.ecs.RegisterTaskDefinition,
                            awacs.ecs.UpdateService,
                            awacs.codebuild.StartBuild,
                            awacs.codebuild.BatchGetBuilds,
                            awacs.iam.PassRole,
                        ],
                    ),
                ],
            ),
        )],
    ))

    log_group = template.add_resource(LogGroup(
        'LogGroup',
        LogGroupName=Sub('/ecs/${AWS::StackName}'),
    ))

    # One shared CodeBuild project, created only if at least one service
    # has its pipeline enabled.
    if any(conf.pipeline.enable for conf in sierrafile.services.values()):
        project = template.add_resource(Project(
            'CodeBuildProject',
            Name=Sub('${AWS::StackName}-build'),
            ServiceRole=Ref(codebuild_role),
            Artifacts=Artifacts(Type='CODEPIPELINE'),
            Source=Source(Type='CODEPIPELINE'),
            Environment=Environment(
                ComputeType='BUILD_GENERAL1_SMALL',
                Image='aws/codebuild/docker:17.09.0',
                Type='LINUX_CONTAINER',
            ),
        ))

    # Per-service resources.
    for name, settings in sierrafile.services.items():
        task_definition = template.add_resource(TaskDefinition(
            f'{name}TaskDefinition',
            RequiresCompatibilities=['EC2'],
            Cpu=str(settings.container.cpu),
            Memory=str(settings.container.memory),
            NetworkMode='bridge',
            ExecutionRoleArn=Ref(task_role.title),
            ContainerDefinitions=[
                ContainerDefinition(
                    Name=f'{name}',
                    Image=settings.container.image,
                    Memory=str(settings.container.memory),
                    Essential=True,
                    PortMappings=[
                        PortMapping(
                            ContainerPort=settings.container.port,
                            Protocol='tcp',
                        ),
                    ],
                    # Only env vars whitelisted by the service settings
                    # are injected into the container.
                    Environment=[
                        troposphere.ecs.Environment(Name=k, Value=v)
                        for k, v in sierrafile.env_vars.items()
                        if k in settings.get('environment', [])
                    ],
                    LogConfiguration=LogConfiguration(
                        LogDriver='awslogs',
                        Options={
                            'awslogs-region': Ref('AWS::Region'),
                            'awslogs-group': Ref(log_group.title),
                            'awslogs-stream-prefix': Ref('AWS::StackName'),
                        },
                    ),
                ),
            ],
        ))

        target_group = template.add_resource(TargetGroup(
            f'{name}TargetGroup',
            Port=settings.container.port,
            Protocol='TCP',
            VpcId=Ref(network_vpc),
            Tags=Tags(Name=Sub(f'${{AWS::StackName}}-{name}')),
        ))

        listener = template.add_resource(Listener(
            f'{name}ElbListener',
            LoadBalancerArn=Ref(elb),
            Port=settings.container.port,
            Protocol='TCP',
            DefaultActions=[
                Action(TargetGroupArn=Ref(target_group), Type='forward')
            ],
        ))

        service = template.add_resource(Service(
            f'{name}Service',
            Cluster=Ref(cluster),
            ServiceName=f'{name}-service',
            DependsOn=[autoscaling_group.title, listener.title],
            DesiredCount=settings.container.count,
            TaskDefinition=Ref(task_definition),
            LaunchType='EC2',
            LoadBalancers=[
                troposphere.ecs.LoadBalancer(
                    ContainerName=f'{name}',
                    ContainerPort=settings.container.port,
                    TargetGroupArn=Ref(target_group),
                ),
            ],
        ))

        if settings.pipeline.enable:
            pipeline = template.add_resource(Pipeline(
                f'{name}Pipeline',
                RoleArn=GetAtt(codepipeline_role, 'Arn'),
                ArtifactStore=ArtifactStore(
                    Type='S3',
                    Location=Ref(artifact_bucket),
                ),
                Stages=[
                    Stages(
                        Name='Source',
                        Actions=[Actions(
                            Name='Source',
                            ActionTypeId=ActionTypeId(
                                Category='Source',
                                Owner='ThirdParty',
                                Version='1',
                                Provider='GitHub',
                            ),
                            OutputArtifacts=[
                                OutputArtifacts(Name=f'{name}Source'),
                            ],
                            RunOrder='1',
                            Configuration={
                                'Owner': settings.pipeline.user,
                                'Repo': settings.pipeline.repo,
                                'Branch': settings.pipeline.branch,
                                'OAuthToken': Ref(parameters.github_token),
                            },
                        )],
                    ),
                    Stages(
                        Name='Build',
                        Actions=[Actions(
                            Name='Build',
                            ActionTypeId=ActionTypeId(
                                Category='Build',
                                Owner='AWS',
                                Version='1',
                                Provider='CodeBuild',
                            ),
                            InputArtifacts=[
                                InputArtifacts(Name=f'{name}Source'),
                            ],
                            OutputArtifacts=[
                                OutputArtifacts(Name=f'{name}Build'),
                            ],
                            RunOrder='1',
                            Configuration={
                                'ProjectName': Ref(project),
                            },
                        )],
                    ),
                    Stages(
                        Name='Deploy',
                        Actions=[Actions(
                            Name='Deploy',
                            ActionTypeId=ActionTypeId(
                                Category='Deploy',
                                Owner='AWS',
                                Version='1',
                                Provider='ECS',
                            ),
                            InputArtifacts=[
                                InputArtifacts(Name=f'{name}Build')
                            ],
                            RunOrder='1',
                            Configuration={
                                'ClusterName': Ref(cluster),
                                'ServiceName': Ref(service),
                                'FileName': 'image.json',
                            },
                        )],
                    ),
                ],
            ))

            template.add_resource(Webhook(
                f'{name}CodePipelineWebhook',
                Name=Sub(f'${{AWS::StackName}}-{name}-webhook'),
                Authentication='GITHUB_HMAC',
                AuthenticationConfiguration=AuthenticationConfiguration(
                    SecretToken=Ref(parameters.github_token),
                ),
                Filters=[FilterRule(
                    JsonPath='$.ref',
                    MatchEquals=f'refs/heads/{settings.pipeline.branch}'
                )],
                TargetAction='Source',
                TargetPipeline=Ref(pipeline),
                TargetPipelineVersion=1,
                RegisterWithThirdParty=True,
            ))

    return template
def create_pipeline_template(name) -> Template:
    """Build a CloudFormation template with a GitHub-driven CodePipeline.

    The pipeline has two stages (Source -> Build) plus a GitHub webhook that
    starts an execution on every push to the configured branch.

    :param name: base name for the CodeBuild project, pipeline and artifact
        bucket logical resources (must be CFN-logical-id safe).
    :returns: a troposphere ``Template`` ready to be serialized.
    """
    t = Template()

    # NoEcho keeps the OAuth token out of console/API parameter listings.
    github_token = t.add_parameter(
        Parameter("GithubToken", Type="String", NoEcho=True))
    github_owner = t.add_parameter(
        Parameter("GitHubOwner",
                  Type='String',
                  Default='larroy',
                  AllowedPattern="[A-Za-z0-9-_]+"))
    github_repo = t.add_parameter(
        Parameter("GitHubRepo",
                  Type='String',
                  Default='ci',
                  AllowedPattern="[A-Za-z0-9-_]+"))
    github_branch = t.add_parameter(
        Parameter("GitHubBranch",
                  Type='String',
                  Default='master',
                  AllowedPattern="[A-Za-z0-9-_]+"))

    # Bucket where CodePipeline stores stage input/output artifacts.
    artifact_store_s3_bucket = t.add_resource(Bucket(name + "bucket"))

    # Role assumed by CloudFormation itself for deploy actions.
    # NOTE(review): unused below but still emitted into the template;
    # kept for backward compatibility of the generated stack.
    cloudformationrole = t.add_resource(
        Role("CloudformationRole",
             AssumeRolePolicyDocument=PolicyDocument(
                 Version="2012-10-17",
                 Statement=[
                     Statement(Effect=Allow,
                               Action=[AssumeRole],
                               Principal=Principal(
                                   "Service",
                                   ["cloudformation.amazonaws.com"]))
                 ]),
             ManagedPolicyArns=[
                 'arn:aws:iam::aws:policy/AdministratorAccess'
             ]))

    codepipelinerole = t.add_resource(
        Role("CodePipelineRole",
             AssumeRolePolicyDocument=PolicyDocument(Statement=[
                 Statement(Effect=Allow,
                           Action=[AssumeRole],
                           Principal=Principal(
                               "Service", ["codepipeline.amazonaws.com"]))
             ]),
             ManagedPolicyArns=[
                 'arn:aws:iam::aws:policy/AdministratorAccess'
             ]))

    code_build_role = t.add_resource(
        Role("CodeBuildRole",
             AssumeRolePolicyDocument=PolicyDocument(Statement=[
                 Statement(Effect=Allow,
                           Action=[AssumeRole],
                           Principal=Principal(
                               "Service", ["codebuild.amazonaws.com"]))
             ]),
             ManagedPolicyArns=[
                 'arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess',
                 'arn:aws:iam::aws:policy/AWSCodeBuildAdminAccess',
                 'arn:aws:iam::aws:policy/CloudWatchFullAccess',
             ]))

    environment = Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/python:3.7.1',
        Type='LINUX_CONTAINER',
    )

    # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-source.html
    codebuild_project = t.add_resource(
        Project(name,
                Name=name,
                Description='continuous deployment of infrastructure',
                Artifacts=Artifacts(Type='CODEPIPELINE'),
                Environment=environment,
                Source=Source(Type='CODEPIPELINE'),
                ServiceRole=code_build_role.GetAtt('Arn')))

    pipeline = t.add_resource(
        Pipeline(
            name + "Pipeline",
            ArtifactStore=ArtifactStore(
                Type="S3", Location=Ref(artifact_store_s3_bucket)),
            # DisableInboundStageTransitions = [
            #     DisableInboundStageTransitions(
            #         StageName = "Release",
            #         Reason = "Disabling the transition until "
            #                  "integration tests are completed"
            #     )
            # ],
            RestartExecutionOnUpdate=True,
            RoleArn=codepipelinerole.GetAtt('Arn'),
            Stages=[
                Stages(Name="Source",
                       Actions=[
                           Actions(Name="SourceAction",
                                   ActionTypeId=ActionTypeId(
                                       Category="Source",
                                       Owner="ThirdParty",
                                       Provider="GitHub",
                                       Version="1",
                                   ),
                                   OutputArtifacts=[
                                       OutputArtifacts(Name="GitHubSourceCode")
                                   ],
                                   Configuration={
                                       'Owner': Ref(github_owner),
                                       'Repo': Ref(github_repo),
                                       'Branch': Ref(github_branch),
                                       # The webhook below triggers the
                                       # pipeline; polling is redundant.
                                       'PollForSourceChanges': False,
                                       'OAuthToken': Ref(github_token)
                                   },
                                   RunOrder="1")
                       ]),
                Stages(Name="Build",
                       Actions=[
                           Actions(Name="BuildAction",
                                   ActionTypeId=ActionTypeId(
                                       Category="Build",
                                       Owner="AWS",
                                       Provider="CodeBuild",
                                       Version="1"),
                                   InputArtifacts=[
                                       InputArtifacts(Name="GitHubSourceCode")
                                   ],
                                   OutputArtifacts=[
                                       OutputArtifacts(Name="BuildArtifacts")
                                   ],
                                   Configuration={
                                       'ProjectName': Ref(codebuild_project),
                                   },
                                   RunOrder="1")
                       ]),
            ],
        ))

    t.add_resource(
        Webhook("GitHubWebHook",
                Authentication='GITHUB_HMAC',
                AuthenticationConfiguration=WebhookAuthConfiguration(
                    SecretToken=Ref(github_token)),
                Filters=[
                    # {Branch} is resolved by CodePipeline from the target
                    # action's Branch configuration value.
                    WebhookFilterRule(JsonPath='$.ref',
                                      MatchEquals='refs/heads/{Branch}')
                ],
                TargetPipeline=Ref(pipeline),
                # BUGFIX: TargetAction must equal the source action's Name
                # ("SourceAction"), not the stage name; with the old value
                # 'Source' the webhook could not trigger the pipeline.
                TargetAction='SourceAction',
                TargetPipelineVersion=pipeline.GetAtt('Version')))
    return t
"Deploy", "ActionMode": "CREATE_UPDATE", "StackName": "staging-helloworld-service2", "Capabilities": "CAPABILITY_NAMED_IAM", "TemplatePath": "App::templates/helloworld-ecs-service-cf.template", "RoleArn": GetAtt("CloudFormationHelloworldRole", "Arn"), "ParameterOverrides": """{"Tag" : { "Fn::GetParam" : [ "BuildOutput", "build.json", "tag" ] } }""" }, InputArtifacts=[ InputArtifacts(Name="App", ), InputArtifacts(Name="BuildOutput") ], ) ]), Stages(Name="Approval", Actions=[ Actions( Name="Approval", ActionTypeId=ActionTypeID(Category="Approval", Owner="AWS", Version="1", Provider="Manual"), Configuration={}, InputArtifacts=[], )
"PollForSourceChanges": "false", "BranchName": "master", "RepositoryName": Ref(CodeCommitRepo) }, RunOrder="1" ) ] ), Stages( Name="Build", Actions=[ Actions( Name="Build", InputArtifacts=[ InputArtifacts( Name="SourceOutput" ) ], ActionTypeId=ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild" ), OutputArtifacts=[ OutputArtifacts( Name="BuildOutput" ) ], Configuration={ "ProjectName": Ref(CodeBuildProject),
Configuration={ "S3Bucket": { "Ref": "SourceS3Bucket" }, "S3ObjectKey": { "Ref": "SourceS3ObjectKey" } }, RunOrder="1") ]), Stages( Name="Beta", Actions=[ Actions( Name="BetaAction", InputArtifacts=[InputArtifacts(Name="SourceOutput")], ActionTypeId=ActionTypeID(Category="Deploy", Owner="AWS", Version="1", Provider="CodeDeploy"), Configuration={ "ApplicationName": { "Ref": "ApplicationName" }, "DeploymentGroupName": { "Ref": "DeploymentGroupName" } }, RunOrder="1") ]), Stages(
def add_pipeline(t, environments, name):
    """Attach the "AppPipeline" CodePipeline that deploys the ecs clusters.

    Stages: GitHub source -> Packer AMI build -> one CodeBuild deploy stage
    per entry in *environments*.

    :param t: troposphere Template to mutate.
    :param environments: iterable of environment names (e.g. "dev", "prod").
    :param name: application name; sanitized into CFN logical-id form.
    """
    name = sanitize_cfn_resource_name(name)
    branch = get_branch()
    owner, repository = get_repository_and_owner()

    source_stage = Stages(
        Name="Source",
        Actions=[
            Actions(
                Name="SourceAction",
                ActionTypeId=ActionTypeId(Category="Source",
                                          Owner="ThirdParty",
                                          Version="1",
                                          Provider="GitHub"),
                OutputArtifacts=[OutputArtifacts(Name="SourceOutput")],
                Configuration={
                    "Owner": owner,
                    "Repo": repository,
                    # Token is resolved at deploy time from Secrets Manager.
                    "OAuthToken":
                    "{{resolve:secretsmanager:GithubToken:SecretString}}",
                    "PollForSourceChanges": 'false',
                    "Branch": branch
                },
                RunOrder="1")
        ])

    ami_stage = Stages(
        Name="BuildAMI",
        Actions=[
            Actions(
                Name="AMIBuilder",
                InputArtifacts=[InputArtifacts(Name="SourceOutput")],
                ActionTypeId=ActionTypeId(Category="Build",
                                          Owner="AWS",
                                          Version="1",
                                          Provider="CodeBuild"),
                Configuration={"ProjectName": {
                    "Ref": "PackerAMIBuilder"
                }},
                OutputArtifacts=[OutputArtifacts(Name='AMI')],
                RunOrder="1")
        ])

    def _deploy_stage(environment):
        # Each environment consumes the baked AMI artifact and runs the
        # CodeBuild project named Deploy<Name><Environment>.
        label = f"{name}{environment.title()}"
        return Stages(
            Name=f"Deploy{environment.title()}",
            Actions=[
                Actions(
                    Name=f"Deploy{label}",
                    InputArtifacts=[InputArtifacts(Name="AMI")],
                    ActionTypeId=ActionTypeId(Category="Build",
                                              Owner="AWS",
                                              Version="1",
                                              Provider="CodeBuild"),
                    Configuration={
                        "ProjectName": {
                            "Ref": f"Deploy{label}"
                        }
                    },
                    RunOrder="1")
            ])

    app_stages = [source_stage, ami_stage]
    app_stages.extend(_deploy_stage(env) for env in environments)

    t.add_resource(
        Pipeline(
            "AppPipeline",
            DependsOn="PackerInstanceProfile",
            RoleArn=GetAtt("PipelineRole", "Arn"),
            Stages=app_stages,
            ArtifactStore=ArtifactStore(
                Type="S3",
                Location=Sub(
                    "ecs-cluster-deployer-${AWS::AccountId}-${AWS::Region}"))))