# -*- coding: utf-8 -*-

"""
Deploy the nested-stack example template (master IAM instance-profile tier).
"""

import boto3
from troposphere_mate import StackManager
from troposphere_mate.examples.nested_stack import tier_master_iam_inst_profile as cft

# Hard-coded deployment settings for this example.
aws_profile = "eq_sanhe"
aws_region = "us-east-1"
cft_bucket = "eq-sanhe-for-everything"
env_name = "tropo-mate-examples-nested-stack-dev"

boto_ses = boto3.session.Session(
    profile_name=aws_profile,
    region_name=aws_region,
)
stack_manager = StackManager(boto_ses=boto_ses, cft_bucket=cft_bucket)

# include_iam=True: the template declares IAM resources, so the IAM
# capability has to be acknowledged at deploy time.
stack_manager.deploy(
    template=cft.template,
    stack_name=env_name,
    stack_parameters={cft.param_env_name.title: env_name},
    include_iam=True,
)
# -*- coding: utf-8 -*-

"""
Render the master CloudFormation template and deploy the seedinvest-monitor stack.
"""

from seedinvest_monitor.cf import template, config
from seedinvest_monitor.boto_ses import boto_ses
from troposphere_mate import StackManager

# Persist the rendered template locally for inspection.
template.to_file("master.json")

deployer = StackManager(
    boto_ses=boto_ses,
    cft_bucket=config.S3_BUCKET_FOR_DEPLOY.get_value(),
)

# Supply a value for every parameter the template declares, taken from
# the application config.
config_data = config.to_cloudformation_config_data()
stack_parameters = {
    name: config_data[name]
    for name in template.parameters
}

deployer.deploy(
    template,
    stack_name=config.ENVIRONMENT_NAME.get_value(),
    stack_parameters=stack_parameters,
    include_iam=True,
)
template = webapp.template

# --- Add
# Register every resource of the web-app tier on the template, in
# dependency order (execution role -> task def -> networking -> service).
_webapp_resources = (
    webapp.ecs_task_definition_execution_role,
    webapp.ecs_task_definition,
    webapp.sg_for_elb,
    webapp.elb_lb,
    webapp.sg_for_ecs,
    webapp.elb_default_target_group,
    webapp.elb_listener,
    webapp.ecs_service,
)
for _resource in _webapp_resources:
    template.add_resource(_resource)
# --- Remove
# NOTE(review): the Add/Remove markers presumably delimit resources to
# (un)register between deploys — confirm with the original author.

stack_mgr = StackManager(
    boto_ses=boto_ses,
    cft_bucket=config.S3_BUCKET_FOR_DEPLOY.get_value(),
)
stack_mgr.deploy(
    template=template,
    stack_name=config.ENVIRONMENT_NAME.get_value(),
    stack_tags={
        "ProjectName": config.PROJECT_NAME.get_value(),
    },
    include_iam=True,
)
"""
One-click deploy of the rabbit-docker-cicd-pipeline CloudFormation stack,
suitable for your DevOps automation scripts.

The CloudFormation template is generated from a Python script that takes
all configuration values and smartly decides which AWS resources (and
naming conventions) should be created. You can extend it by changing the
``./rabbit_docker_cicd_pipeline/cf/__init__.py`` file. ``troposphere_mate``
allows you to deploy your CloudFormation stack to AWS from Python.
"""

import boto3

from rabbit_docker_cicd_pipeline.cf import config, template, param_env_name
from troposphere_mate import StackManager

boto_ses = boto3.session.Session(
    profile_name=config.AWS_PROFILE_FOR_BOTO3.get_value(),
    region_name=config.AWS_REGION.get_value(),
)
sm = StackManager(
    boto_ses=boto_ses,
    cft_bucket=config.S3_BUCKET_FOR_DEPLOY.get_value(),
)

# The stack name and the environment-name parameter must stay in sync;
# compute the shared value once instead of formatting it twice.
environment_name = "pygitrepo-{}".format(config.ENVIRONMENT_NAME.get_value())

sm.deploy(
    template=template,
    stack_name=environment_name,
    stack_parameters={
        param_env_name.title: environment_name,
    },
    include_iam=True,
)
# -*- coding: utf-8 -*-

"""
This script implements one-click creation of all AWS ECR repositories.
"""

from pathlib_mate import Path  # noqa: F401 -- kept; other parts of the file may rely on it
from troposphere_mate import StackManager

from create_app import app
from rabbit_docker_cicd_pipeline.cf.ecr_tier import create_template

# NOTE(review): the unused ``HERE = Path(__file__).parent`` constant was
# removed as dead code; the import above is retained deliberately.

# Build the app's plan first, then render the ECR-tier template from it.
app.plan()
template = create_template(app)

sm = StackManager(
    boto_ses=app.app_boto_ses,
    cft_bucket=app.app_cft_bucket,
)
# Keyword argument used for consistency with the sibling deploy scripts.
sm.deploy(template=template, stack_name=app.app_name)
"""
One-click deploy of the ECS example stack.

The generated template decides which AWS resources should be created; you
can extend it by changing the ``./bgs_deploy/cf/__init__.py`` file.
``troposphere_mate`` allows you to deploy your CloudFormation stack to AWS
from Python.
"""

import boto3

from bgs_deploy.cf import ecs_example
from troposphere_mate import StackManager

config = ecs_example.config

# The web-app ECR repository is added on top of the base example template.
ecs_example.template.add_resource(ecs_example.ecr_repo_webapp)

boto_ses = boto3.session.Session(
    profile_name=config.AWS_PROFILE_FOR_BOTO3.get_value(),
    region_name=config.AWS_REGION.get_value(),
)
stack_mgr = StackManager(
    boto_ses=boto_ses,
    cft_bucket=ecs_example.config.S3_BUCKET_FOR_DEPLOY.get_value(),
)
stack_mgr.deploy(
    template=ecs_example.template,
    stack_name=config.ECS_EXAMPLE_ENVIRONMENT_NAME.get_value(),
    stack_parameters={
        ecs_example.param_env_name.title: config.ECS_EXAMPLE_ENVIRONMENT_NAME.get_value(),
    },
    include_iam=True,
)