def ECR(self, name):
    """Create an ECR repository granting push/pull to CodeBuild and to
    the Dev/Homolog/Prod accounts.

    Args:
        name: base name; the CloudFormation logical id is its
            alphanumeric form prefixed with ``ECR`` and the repository
            name is its lowercase form.

    Returns:
        list: single-element list containing the Repository resource.
    """
    logger.info(f"Criando o ECR: {name}")
    project_name = f'ECR{name}'
    # CloudFormation logical ids must be alphanumeric only.
    resource_name = ''.join(e for e in project_name if e.isalnum())
    p_service = Principal("Service", "codebuild.amazonaws.com")
    p_aws = Principal("AWS", [
        Sub("arn:aws:iam::${DevAccount}:root"),
        Sub("arn:aws:iam::${HomologAccount}:root"),
        Sub("arn:aws:iam::${ProdAccount}:root"),
    ])
    # BUG FIX: both statements previously used the same Sid
    # ('AllowPushPull'); IAM requires Sid values to be unique within a
    # policy, so the second statement now has its own Sid.
    policydocument = PolicyDocument(
        Version='2008-10-17',
        Statement=[
            Statement(
                Sid='AllowPushPull',
                Effect=Allow,
                Principal=p_service,
                Action=[Action("ecr", "*")],
            ),
            Statement(
                Sid='AllowCrossAccountPushPull',
                Effect=Allow,
                Principal=p_aws,
                Action=[Action("ecr", "*")],
            ),
        ])
    resource_ecr = Repository(resource_name,
                              RepositoryName=name.lower(),
                              RepositoryPolicyText=policydocument)
    return [resource_ecr]
def add_ecr(self, container_name):
    """Create an ECR repository for *container_name* and register it
    on the template.

    Returns:
        The value returned by ``self.add_resource`` for the Repository.
    """
    # BUG FIX: Python 2 `print` statement is a SyntaxError on Python 3;
    # use the print() function (also valid on Python 2 for one arg).
    print('Creating ECR for %s' % container_name)
    return (self.add_resource(
        Repository(
            container_name + 'ECR',
            RepositoryName='AWSFrederick/' + container_name,
            RepositoryPolicyText=self.add_ecr_policy_text_allow_prod())))
def add_ecr_repository(self, title, repo_name):
    '''
    Add ECR repository to template
    Args:
        title: logical resources name
        repo_name: target repository name
    '''
    # Single policy statement: any AWS principal may push/pull.
    push_pull = Statement(Sid='AllowPushPull',
                          Effect=Allow,
                          Action=[Action('ecr', '*')],
                          Principal=AWSPrincipal('*'))
    repo_policy = PolicyDocument(Version='2012-10-17',
                                 Statement=[push_pull])
    repo = Repository(title=title,
                      RepositoryName=Ref(repo_name),
                      RepositoryPolicyText=repo_policy)
    self.cfn_template.add_resource(repo)
    return self.cfn_template
def add_ecr_resources(self):
    """Register the frontend-payments-registration ECR repository,
    expiring images more than 10 pushes old via a lifecycle policy."""
    expire_rule = {
        "rulePriority": 10,
        "description": "Expire images more than 10 pushes old",
        "selection": {
            "tagStatus": "any",
            "countType": "imageCountMoreThan",
            "countNumber": 10
        },
        "action": {
            "type": "expire"
        }
    }
    lifecycle = LifecyclePolicy(
        LifecyclePolicyText=json.dumps({"rules": [expire_rule]}))
    repo = Repository(
        'FrontendPaymentsRegistrationECRRepo',
        LifecyclePolicy=lifecycle,
        RepositoryName=Sub('${Environment}/frontend-payments-registration'))
    self.template.add_resource(repo)
# Create an `ECR` docker repository.
# The account root principal, i.e. every IAM user in the account, is
# granted the standard push/pull action set.
_account_root = AWSPrincipal([
    Join("", [
        arn_prefix,
        ":iam::",
        Ref(AWS_ACCOUNT_ID),
        ":root",
    ]),
])
_push_pull_actions = [
    ecr.GetDownloadUrlForLayer,
    ecr.BatchGetImage,
    ecr.BatchCheckLayerAvailability,
    ecr.PutImage,
    ecr.InitiateLayerUpload,
    ecr.UploadLayerPart,
    ecr.CompleteLayerUpload,
]
repository = Repository(
    "ApplicationRepository",
    template=template,
    RepositoryName=Ref(AWS_STACK_NAME),
    # Allow all account users to manage images.
    RepositoryPolicyText=Policy(
        Version="2008-10-17",
        Statement=[
            Statement(
                Sid="AllowPushPull",
                Effect=Allow,
                Principal=_account_root,
                Action=_push_pull_actions,
            ),
        ],
    ),
)
# Template with one ECR repository whose policy grants push/pull to two
# named IAM users.
t = Template()

_named_users = AWSPrincipal([
    iam.ARN(account='123456789012', resource='user/Bob'),
    iam.ARN(account='123456789012', resource='user/Alice'),
])
_push_pull = Statement(
    Sid='AllowPushPull',
    Effect=Allow,
    Principal=_named_users,
    Action=[
        ecr.GetDownloadUrlForLayer,
        ecr.BatchGetImage,
        ecr.BatchCheckLayerAvailability,
        ecr.PutImage,
        ecr.InitiateLayerUpload,
        ecr.UploadLayerPart,
        ecr.CompleteLayerUpload,
    ],
)
t.add_resource(
    Repository(
        'MyRepository',
        RepositoryName='test-repository',
        RepositoryPolicyText=Policy(Version='2008-10-17',
                                    Statement=[_push_pull]),
    ))

print(t.to_json())
"""Generating CloudFormation template."""

from troposphere import (Export, Join, Output, Parameter, Ref, Template)
from troposphere.ecr import Repository

t = Template()

# FIX: Template.add_description was deprecated and later removed from
# troposphere in favor of set_description.
t.set_description("Effective DevOps in AWS: ECR Repository")

# Repository name supplied at stack-creation time.
t.add_parameter(
    Parameter("RepoName",
              Type="String",
              Description="Name of the ECR repository to create"))

t.add_resource(Repository("Repository", RepositoryName=Ref("RepoName")))

# Export "<RepoName>-repo" so other stacks can import the name.
t.add_output(
    Output(
        "Repository",
        Description="ECR repository",
        Value=Ref("RepoName"),
        Export=Export(Join("-", [Ref("RepoName"), "repo"])),
    ))

print(t.to_json())
# NOTE(review): chunk truncated — this fragment opens inside a Parameter
# definition and ends inside an IAM policy document; the enclosing code
# lies outside this view.
        Type="String",
        Default="core-infrastructure",
        Description="Core infrastructure stack name"))

# Networking values imported from the core-infrastructure stack exports.
vpc_id = ImportValue(Sub("${CoreInfraStackName}-VpcId"))
private_subnets = [
    ImportValue(Sub("${CoreInfraStackName}-PrivateSubnet0")),
    ImportValue(Sub("${CoreInfraStackName}-PrivateSubnet1")),
    ImportValue(Sub("${CoreInfraStackName}-PrivateSubnet2")),
]

# ECR Repo
ecr_repo = t.add_resource(
    Repository(
        'ECRRepo',
        RepositoryName=Ref(image_name),
    ))

# Batch
BatchServiceRole = t.add_resource(
    Role(
        'BatchServiceRole',
        RoleName=Sub('${Environment}-${Pipeline}-ServiceRole'),
        Path='/',
        Policies=[
            Policy(
                PolicyName='access_s3_buckets',
                PolicyDocument={
                    "Version": "2012-10-17",
                    "Statement": [{
# NOTE(review): chunk truncated — these keyword arguments close a
# Parameter definition whose opening lies outside this view.
        MinLength="1",
        MaxLength="16",
        AllowedPattern="[a-zA-Z][a-zA-Z0-9]*",
        ConstraintDescription=("Letra e numero")))

# Repository whose policy lets any principal ('*') push and pull.
t.add_resource(
    Repository(
        'MyRepository',
        RepositoryName='test-repository',
        RepositoryPolicyText=Policy(
            Version='2008-10-17',
            Statement=[
                Statement(
                    Sid='AllowPushPull',
                    Effect=Allow,
                    Principal='*',
                    Action=[
                        ecr.GetDownloadUrlForLayer,
                        ecr.BatchGetImage,
                        ecr.BatchCheckLayerAvailability,
                        ecr.PutImage,
                        ecr.InitiateLayerUpload,
                        ecr.UploadLayerPart,
                        ecr.CompleteLayerUpload,
                    ],
                ),
            ]),
    ))

# Emit the generated template as YAML.
with open('exemplo_ecr.yaml', 'w') as f:
    f.write(t.to_yaml())
# ECR repository named by the ECRRepoName parameter; push/pull is granted
# to a single IAM user built from the account id and IAMAllowedUser.
_allowed_user_arn = Join("", [
    "arn:aws:iam::",
    Ref(AWS_ACCOUNT_ID),
    ":user/",
    Ref("IAMAllowedUser"),
])
_push_pull_statement = Statement(
    Sid='AllowPushPull',
    Effect=Allow,
    Principal=AWSPrincipal([_allowed_user_arn]),
    Action=[
        ecr.GetDownloadUrlForLayer,
        ecr.BatchGetImage,
        ecr.BatchCheckLayerAvailability,
        ecr.PutImage,
        ecr.InitiateLayerUpload,
        ecr.UploadLayerPart,
        ecr.CompleteLayerUpload,
    ],
)
ECRRepo = t.add_resource(
    Repository(
        'ECRRepo',
        RepositoryName=Ref("ECRRepoName"),
        RepositoryPolicyText=Policy(Version='2008-10-17',
                                    Statement=[_push_pull_statement]),
    ))
# Template exposing a parameterized ECR repository and exporting its name.
t = Template()
t.add_description("Effective DevOps in AWS: ECR Repository")

repo_name_param = Parameter(
    "RepoName",
    Type="String",
    Description="Name of the ECR repository to create",
)
t.add_parameter(repo_name_param)

t.add_resource(Repository("Repository", RepositoryName=Ref("RepoName")))

# Export "<RepoName>-repo" for cross-stack imports.
repo_output = Output(
    "Repository",
    Description="ECR repository",
    Value=Ref("RepoName"),
    Export=Export(Join("-", [Ref("RepoName"), "repo"])),
)
t.add_output(repo_output)

print(t.to_json())
# ECR repository whose policy grants the account root full push/pull.
repository = template.add_resource(
    Repository(
        "ApplicationRepository",
        RepositoryName=repository_name,
        RepositoryPolicyText=Policy(
            Version="2008-10-17",
            Statement=[
                Statement(
                    Sid="AllowPushPull",
                    Effect=Allow,
                    Principal=AWSPrincipal([
                        Join("", [
                            "arn:aws:iam::",
                            Ref(AWS_ACCOUNT_ID),
                            ":root",
                        ]),
                    ]),
                    Action=[
                        ecr.GetDownloadUrlForLayer,
                        ecr.BatchGetImage,
                        ecr.BatchCheckLayerAvailability,
                        ecr.PutImage,
                        ecr.InitiateLayerUpload,
                        ecr.UploadLayerPart,
                        ecr.CompleteLayerUpload,
                    ],
                ),
            ]),
    ))
# NOTE(review): chunk truncated — the add_output call below is cut off
# mid-expression; its arguments lie outside this view.
template.add_output(
from troposphere.iam import Role

t = Template()
t.add_version("2010-09-09")
t.add_description("Stack that creates Elastic Container Registry")

appName = t.add_parameter(
    Parameter(
        "AppName",
        Type="String",
        Description="Name of the application",
    ))

# Repository named after the AppName parameter.
ECRRepository = t.add_resource(
    Repository("ECRRepository", RepositoryName=Ref(appName)))

# Registry hostname: <account>.dkr.ecr.<region>.amazonaws.com
t.add_output(
    Output("Registry",
           Value=Join(".", [
               Ref("AWS::AccountId"),
               "dkr.ecr",
               Ref("AWS::Region"),
               "amazonaws.com",
           ]),
           Description="Hostname of the registry"))

# NOTE(review): chunk truncated — this second output is cut off
# mid-expression; the remainder lies outside this view.
t.add_output(
    Output("Repository",
           Value=Join("/", [
               Join(".", [
                   Ref("AWS::AccountId"),
# NOTE(review): chunk truncated — the leading `))` closes a definition
# that begins outside this view, and the trailing Role(...) call is cut
# off mid-expression.
    ))

# Generate a ECR Repository
DockerStaticWebsiteRepo = t.add_resource(
    Repository(
        'DockerStaticWebsiteRepo',
        RepositoryName=Ref(RepoName),
        RepositoryPolicyText=awacs.aws.Policy(
            Version=VERSION,
            Statement=[
                awacs.aws.Statement(
                    Sid='AllowPushPull',
                    Effect=Allow,
                    # Wildcard principal: any AWS identity may push/pull.
                    Principal=AWSPrincipal('*'),
                    Action=[
                        ecr.GetDownloadUrlForLayer,
                        ecr.BatchGetImage,
                        ecr.BatchCheckLayerAvailability,
                        ecr.PutImage,
                        ecr.InitiateLayerUpload,
                        ecr.UploadLayerPart,
                        ecr.CompleteLayerUpload,
                    ],
                ),
            ]
        ),
    )
)

# Generate the service policy and the service role for CodeBuild
CodeBuildServiceRole = t.add_resource(Role(
def add_ecr_repository(self, title: str, repo_name: str):
    """Add an ECR repository with the given logical title and name."""
    self.template.add_resource(
        Repository(title, RepositoryName=repo_name))
def create_primary_template():
    """Build the root CloudFormation stack for the Lambda deployment.

    The root stack owns an artifact ECR repository and S3 bucket and —
    once an ImageDigest is supplied — a chain of nested stacks
    (deployment id, availability zones, VPC, EIP allocator, EFS, the
    Lambda function, and an image tagger).

    Returns:
        The assembled troposphere Template.
    """
    template = Template(
        Description="Root stack for VERY STRONG Lambda function")

    # Empty default lets the repository/bucket be created before any
    # container image has been pushed.
    image_digest = template.add_parameter(
        Parameter("ImageDigest", Type="String", Default=""))

    # Every nested stack is conditional on an image digest being given.
    is_image_digest_defined = "IsImageDigestDefined"
    template.add_condition(is_image_digest_defined,
                           Not(Equals(Ref(image_digest), "")))

    # ECR repository for the function image; untagged images beyond the
    # 3 most recent are expired by the lifecycle policy.
    artifact_repository = template.add_resource(
        Repository(
            "ArtifactRepository",
            ImageTagMutability="MUTABLE",
            LifecyclePolicy=LifecyclePolicy(LifecyclePolicyText=json.dumps(
                {
                    "rules": [{
                        "rulePriority": 1,
                        "selection": {
                            "tagStatus": "untagged",
                            "countType": "imageCountMoreThan",
                            "countNumber": 3,
                        },
                        "action": {
                            "type": "expire",
                        },
                    }]
                },
                # Canonical, compact JSON keeps template diffs stable.
                indent=None,
                sort_keys=True,
                separators=(",", ":"),
            )),
        ))

    # <account>.dkr.ecr.<region>.<url-suffix>/<repository>
    artifact_repository_url = Join(
        "/",
        [
            Join(
                ".",
                [
                    AccountId,
                    "dkr",
                    "ecr",
                    Region,
                    URLSuffix,
                ],
            ),
            Ref(artifact_repository),
        ],
    )
    # Digest-pinned image URI handed to the nested stacks.
    image_uri = Join("@", [artifact_repository_url, Ref(image_digest)])

    # Encrypted, fully-private S3 bucket for nested stack templates;
    # abandoned multipart uploads are cleaned up after 3 days.
    artifact_bucket = template.add_resource(
        Bucket(
            "ArtifactBucket",
            BucketEncryption=BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    ServerSideEncryptionRule(
                        BucketKeyEnabled=True,
                        ServerSideEncryptionByDefault=ServerSideEncryptionByDefault(
                            SSEAlgorithm="aws:kms",
                            KMSMasterKeyID=Join(":", [
                                "arn", Partition, "kms", Region, AccountId,
                                "alias/aws/s3"
                            ]),
                        ),
                    )
                ],
            ),
            LifecycleConfiguration=LifecycleConfiguration(Rules=[
                LifecycleRule(
                    AbortIncompleteMultipartUpload=AbortIncompleteMultipartUpload(
                        DaysAfterInitiation=3,
                    ),
                    Status="Enabled",
                ),
            ],
            ),
            PublicAccessBlockConfiguration=PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
        ))

    # Nested stacks: each sibling module builds its own template, which
    # common.get_template_s3_url stores in the artifact bucket.
    deployment_id_stack = template.add_resource(
        Stack(
            "DeploymentId",
            TemplateURL=common.get_template_s3_url(
                Ref(artifact_bucket), deployment_id.create_template()),
            Parameters={
                "ArtifactBucket": Ref(artifact_bucket),
            },
            Condition=is_image_digest_defined,
        ))

    availability_zones_stack = template.add_resource(
        Stack(
            "AvailabilityZones",
            TemplateURL=common.get_template_s3_url(
                Ref(artifact_bucket), availability_zones.create_template()),
            Parameters={
                "DeploymentId": GetAtt(deployment_id_stack, "Outputs.Value"),
                "ImageUri": image_uri,
            },
            Condition=is_image_digest_defined,
        ))

    vpc_stack = template.add_resource(
        Stack(
            "Vpc",
            TemplateURL=common.get_template_s3_url(Ref(artifact_bucket),
                                                   vpc.create_template()),
            Parameters={
                "AvailabilityZones":
                GetAtt(availability_zones_stack,
                       "Outputs.AvailabilityZones"),
            },
            Condition=is_image_digest_defined,
        ))

    lambda_eip_allocator_stack = template.add_resource(
        Stack(
            "LambdaEipAllocator",
            TemplateURL=common.get_template_s3_url(
                Ref(artifact_bucket), lambda_eip_allocator.create_template()),
            Parameters={
                "DeploymentId": GetAtt(deployment_id_stack, "Outputs.Value"),
                "VpcId": GetAtt(vpc_stack, "Outputs.VpcId"),
                "ImageUri": image_uri,
            },
            Condition=is_image_digest_defined,
        ))

    elastic_file_system_stack = template.add_resource(
        Stack(
            "ElasticFileSystem",
            TemplateURL=common.get_template_s3_url(
                Ref(artifact_bucket), elastic_file_system.create_template()),
            Parameters={
                "VpcId": GetAtt(vpc_stack, "Outputs.VpcId"),
                "SubnetIds": GetAtt(vpc_stack, "Outputs.SubnetIds"),
                "AvailabilityZones":
                GetAtt(availability_zones_stack,
                       "Outputs.AvailabilityZones"),
            },
            Condition=is_image_digest_defined,
        ))

    lambda_function_stack = template.add_resource(
        Stack(
            "LambdaFunction",
            TemplateURL=common.get_template_s3_url(
                Ref(artifact_bucket), lambda_function.create_template()),
            Parameters={
                "DeploymentId": GetAtt(deployment_id_stack, "Outputs.Value"),
                "VpcId": GetAtt(vpc_stack, "Outputs.VpcId"),
                "SubnetIds": GetAtt(vpc_stack, "Outputs.SubnetIds"),
                "FileSystemAccessPointArn":
                GetAtt(elastic_file_system_stack, "Outputs.AccessPointArn"),
                "ImageUri": image_uri,
            },
            # Explicit ordering: EIPs are allocated before the function
            # stack attaches to the VPC.
            DependsOn=[lambda_eip_allocator_stack],
            Condition=is_image_digest_defined,
        ))

    # Runs last: DependsOn snapshots every resource added so far, so
    # this stack must remain the final add_resource call.
    image_tagger_stack = template.add_resource(
        Stack(
            "ImageTagger",
            TemplateURL=common.get_template_s3_url(
                Ref(artifact_bucket), image_tagger.create_template()),
            Parameters={
                "DeploymentId": GetAtt(deployment_id_stack, "Outputs.Value"),
                "ArtifactRepository": Ref(artifact_repository),
                "DesiredImageTag": "current-cloudformation",
                "ImageDigest": Ref(image_digest),
                "ImageUri": image_uri,
            },
            DependsOn=list(template.resources),
            Condition=is_image_digest_defined,
        ))

    template.add_output(Output(
        "ArtifactBucket",
        Value=Ref(artifact_bucket),
    ))
    template.add_output(
        Output(
            "ArtifactRepositoryUrl",
            Value=artifact_repository_url,
        ))

    return template
# Template with one ECR repository; push/pull is limited to two named
# IAM users in account 123456789012.
t = Template()

_push_pull_actions = [
    ecr.GetDownloadUrlForLayer,
    ecr.BatchGetImage,
    ecr.BatchCheckLayerAvailability,
    ecr.PutImage,
    ecr.InitiateLayerUpload,
    ecr.UploadLayerPart,
    ecr.CompleteLayerUpload,
]
_users = AWSPrincipal([
    iam.ARN(account="123456789012", resource="user/Bob"),
    iam.ARN(account="123456789012", resource="user/Alice"),
])
_policy = PolicyDocument(
    Version="2008-10-17",
    Statement=[
        Statement(
            Sid="AllowPushPull",
            Effect=Allow,
            Principal=_users,
            Action=_push_pull_actions,
        ),
    ],
)
t.add_resource(
    Repository(
        "MyRepository",
        RepositoryName="test-repository",
        RepositoryPolicyText=_policy,
    ))

print(t.to_json())
def generate_template(d):
    """Build the CloudFormation template for one Fargate service.

    Args:
        d: configuration dict supplying env/project/service names,
           container settings, names of stacks to import values from,
           the artifact S3 bucket/object, and resource tags.

    Returns:
        The assembled troposphere Template (task definition, ECR repo,
        target group, listener rule, ECS service, CodeBuild project and
        CodePipeline).
    """
    # Set template metadata
    t = Template()
    t.add_version("2010-09-09")
    t.set_description(d["cf_template_description"])

    aws_account_id = Ref("AWS::AccountId")
    aws_region = Ref("AWS::Region")

    # Task definition
    task_definition = t.add_resource(
        TaskDefinition(
            "TaskDefinition",
            Family=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            RequiresCompatibilities=["FARGATE"],
            Cpu=d["container_cpu"],
            Memory=d["container_memory"],
            NetworkMode="awsvpc",
            ExecutionRoleArn=ImportValue(d["ecs_stack_name"] +
                                         "-ECSClusterRole"),
            ContainerDefinitions=[
                ContainerDefinition(
                    Name=Join("", [
                        d["env"], "-", d["project_name"], "-",
                        d["service_name"]
                    ]),
                    # NOTE(review): unlike the other names in this
                    # template, the image path joins env/project/service
                    # with no "-" separators — confirm this matches the
                    # actual ECR repository naming.
                    Image=Join(
                        "",
                        [
                            aws_account_id, ".dkr.ecr.", aws_region,
                            ".amazonaws.com/", d["env"], d["project_name"],
                            d["service_name"], ":latest"
                        ],
                    ),
                    Essential=True,
                    PortMappings=[
                        PortMapping(
                            ContainerPort=d["container_port"],
                            HostPort=d["container_port"],
                        )
                    ],
                    EntryPoint=["sh", "-c"],
                    Command=[d["container_command"]],
                    LogConfiguration=LogConfiguration(
                        LogDriver="awslogs",
                        Options={
                            "awslogs-region": aws_region,
                            "awslogs-group": Join("", [
                                d["env"], "-", d["project_name"], "-",
                                d["service_name"]
                            ]),
                            "awslogs-stream-prefix": "ecs",
                            "awslogs-create-group": "true"
                        }))
            ],
            Tags=Tags(d["tags"],
                      {"Name": d["project_name"] + "-task-definition"}),
        ))

    # ECR
    # NOTE(review): local name `ecr` would shadow a module named `ecr`
    # if one is imported in this file — verify at module level.
    ecr = t.add_resource(
        Repository(
            "ECR",
            DependsOn="ListenerRule",
            RepositoryName=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecr"}),
        ))

    # Target group
    target_group = t.add_resource(
        elb.TargetGroup(
            "TargetGroup",
            Name=Join("", [d["env"], "-", d["service_name"]]),
            HealthCheckIntervalSeconds="30",
            HealthCheckProtocol="HTTP",
            HealthCheckPort=d["container_port"],
            HealthCheckTimeoutSeconds="10",
            HealthyThresholdCount="4",
            HealthCheckPath=d["tg_health_check_path"],
            Matcher=elb.Matcher(HttpCode="200-299"),
            Port=d["container_port"],
            Protocol="HTTP",
            TargetType="ip",
            UnhealthyThresholdCount="3",
            VpcId=ImportValue(d["network_stack_name"] + "-VPCId"),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecr"}),
        ))

    # Listener rule
    t.add_resource(
        elb.ListenerRule(
            "ListenerRule",
            DependsOn="TargetGroup",
            ListenerArn=ImportValue(d["ecs_stack_name"] + "-ListenerArnHTTP"),
            Conditions=[
                elb.Condition(Field="path-pattern",
                              Values=[d["application_path_api"]])
            ],
            Actions=[
                elb.Action(Type="forward", TargetGroupArn=Ref(target_group))
            ],
            Priority="1",
        ))

    # ECS service — created only after the pipeline exists, so the first
    # deployment can find an image in ECR.
    ecs_service = t.add_resource(
        Service(
            "ECSService",
            ServiceName=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            DependsOn="pipeline",
            DesiredCount=d["container_desired_tasks_count"],
            TaskDefinition=Ref(task_definition),
            LaunchType="FARGATE",
            NetworkConfiguration=NetworkConfiguration(
                AwsvpcConfiguration=AwsvpcConfiguration(
                    Subnets=[
                        ImportValue(d["network_stack_name"] +
                                    "-PrivateSubnetId1"),
                        ImportValue(d["network_stack_name"] +
                                    "-PrivateSubnetId2"),
                    ],
                    SecurityGroups=[
                        ImportValue(d["ecs_stack_name"] + "-ECSClusterSG")
                    ],
                )),
            LoadBalancers=([
                LoadBalancer(
                    ContainerName=Join(
                        "",
                        [
                            d["env"], "-", d["project_name"], "-",
                            d["service_name"]
                        ],
                    ),
                    ContainerPort=d["container_port"],
                    TargetGroupArn=Ref(target_group),
                )
            ]),
            Cluster=ImportValue(d["ecs_stack_name"] + "-ECSClusterName"),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecs-service"}),
        ))

    # App Autoscaling target
    # App Autoscaling policy

    # Codebuild project — builds the docker image and pushes it to ECR.
    codebuild = t.add_resource(
        Project(
            "codebuild",
            Name=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            DependsOn="ECR",
            ServiceRole=ImportValue(d["ecs_stack_name"] +
                                    "-CodebuildDeveloperRole"),
            Artifacts=Artifacts(
                Name="Build",
                Location=d["artifact_store"],
                Type="S3",
            ),
            Description="Build a docker image and send it to ecr",
            Source=Source(
                BuildSpec="buildspec.yml",
                Type="S3",
                Location=d["artifact_store"] + "/" + d["artifact_name"],
            ),
            Environment=Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                # Privileged mode is required to run docker-in-docker.
                PrivilegedMode=True,
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    EnvironmentVariable(
                        Name="AWS_DEFAULT_REGION",
                        Type="PLAINTEXT",
                        Value=aws_region,
                    ),
                    EnvironmentVariable(
                        Name="SERVICE_NAME",
                        Type="PLAINTEXT",
                        Value=Join(
                            "",
                            [
                                d["env"], "-", d["project_name"], "-",
                                d["service_name"]
                            ],
                        ),
                    ),
                    EnvironmentVariable(
                        Name="IMAGE_URI",
                        Type="PLAINTEXT",
                        Value=Join(
                            "",
                            [
                                aws_account_id,
                                ".dkr.ecr.",
                                aws_region,
                                ".amazonaws.com/",
                                d["env"],
                                "-",
                                d["project_name"],
                                "-",
                                d["service_name"],
                            ],
                        ),
                    ),
                ],
            ),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-codebuild"}),
        ))

    # Codepipeline: S3 source -> CodeBuild build -> ECS deploy.
    pipeline = t.add_resource(
        Pipeline(
            "pipeline",
            Name=Join(
                "", [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            RoleArn=ImportValue(d["ecs_stack_name"] + "-CodePipelineRole"),
            Stages=[
                Stages(
                    Name="Source",
                    Actions=[
                        Actions(
                            Name="Source",
                            ActionTypeId=ActionTypeId(
                                Category="Source",
                                Owner="AWS",
                                Version="1",
                                Provider="S3",
                            ),
                            OutputArtifacts=[
                                OutputArtifacts(Name="source_artifact")
                            ],
                            Configuration={
                                "S3Bucket": d["artifact_store"],
                                "S3ObjectKey": d["artifact_name"],
                            },
                            RunOrder="1",
                        )
                    ],
                ),
                Stages(
                    Name="Build",
                    Actions=[
                        Actions(
                            Name="Build",
                            InputArtifacts=[
                                InputArtifacts(Name="source_artifact")
                            ],
                            OutputArtifacts=[
                                OutputArtifacts(Name="build_artifact")
                            ],
                            ActionTypeId=ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={"ProjectName": Ref(codebuild)},
                            RunOrder="1",
                        )
                    ],
                ),
                Stages(
                    Name="Deploy",
                    Actions=[
                        Actions(
                            Name="Deploy",
                            InputArtifacts=[
                                InputArtifacts(Name="build_artifact")
                            ],
                            ActionTypeId=ActionTypeId(
                                Category="Deploy",
                                Owner="AWS",
                                Version="1",
                                Provider="ECS",
                            ),
                            Configuration={
                                "ClusterName":
                                ImportValue(d["ecs_stack_name"] +
                                            "-ECSClusterName"),
                                "ServiceName":
                                Join(
                                    "",
                                    [
                                        d["env"],
                                        "-",
                                        d["project_name"],
                                        "-",
                                        d["service_name"],
                                    ],
                                ),
                                "FileName": "definitions.json",
                            },
                        )
                    ],
                ),
            ],
            ArtifactStore=ArtifactStore(Type="S3",
                                        Location=d["artifact_store"]),
        ))

    # Route53

    # Outputs
    return t
# Parameters # ############## t.add_parameter( Parameter("RepoName", Type="String", Description="Name of the CodeCommit repository to source")) ############# # Resources # ############# ### ECR #### # Create the resource t.add_resource( Repository("Repository", RepositoryName=Select(0, Split("-", Ref("AWS::StackName"))))) # Define the stack output t.add_output( Output( "Repository", Description="ECR repository", Value=Select(0, Split("-", Ref("AWS::StackName"))), Export=Export(Join("-", [Ref("RepoName"), "repo"])), )) #### CodeBuild #### t.add_resource( Role("ServiceRole", AssumeRolePolicyDocument=Policy(Statement=[