def main():
    """Generate and deploy one CloudFormation stack per deployment-map pipeline.

    Reads the deployment map, assumes the cross-account org-access role to
    resolve target accounts for each pipeline, then generates the pipeline
    template, uploads it to S3 and creates/updates its stack.
    """
    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(
        parameter_store, os.environ["ADF_PIPELINE_PREFIX"])
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, boto3, S3_BUCKET_NAME)
    sts = STS(boto3)
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-org-access-adf'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')
    organizations = Organizations(role)
    clean(parameter_store, deployment_map)
    for p in deployment_map.map_contents.get('pipelines'):
        pipeline = Pipeline(p)
        # Fix: bind per pipeline so the membership check below cannot raise
        # NameError (no targets/paths) or reuse the previous pipeline's value.
        regions = []
        for target in p['targets']:
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                for path in step.get('path'):
                    try:
                        regions = step.get(
                            'regions',
                            p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                        pipeline.stage_regions.append(regions)
                        pipeline_target = Target(
                            path, regions, target_structure, organizations)
                        pipeline_target.fetch_accounts_for_target()
                    # Fix: was `except BaseException`, which also swallows
                    # KeyboardInterrupt/SystemExit; chain the cause so the
                    # original traceback is preserved.
                    except Exception as error:
                        raise Exception(
                            "Failed to return accounts for {0}".format(path)
                        ) from error
                pipeline.template_dictionary["targets"].append(
                    target_structure.account_list)
        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_if_required(s3, pipeline)
        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(
                os.environ["ADF_PIPELINE_PREFIX"], pipeline.name),
            s3=None,
            s3_key_path=None)
        cloudformation.create_stack()
def test_fetch_account_error():
    """A path that is neither an account id, OU id, OU path nor 'approval'
    must raise InvalidDeploymentMapError."""
    target = Target(
        path='some_string',
        regions=['region1', 'region2'],
        target_structure=MockTargetStructure(),
        organizations=Mock(),
        step_name=None,
    )
    with raises(InvalidDeploymentMapError):
        target.fetch_accounts_for_target()
def test_fetch_accounts_for_target_account_id():
    """An all-digit path dispatches to the account-id handler."""
    target = Target(
        path='12345678910',
        regions=['region1', 'region2'],
        target_structure=MockTargetStructure(),
        organizations=None,
    )
    with patch.object(target, '_target_is_account_id') as handler:
        target.fetch_accounts_for_target()
        handler.assert_called_once_with()
def worker_thread(p, organizations, auto_create_repositories, s3,
                  deployment_map, parameter_store):
    """Process one deployment-map pipeline definition ``p``.

    Optionally auto-creates the source CodeCommit repository, resolves the
    pipeline's target accounts, then generates the template and
    creates/updates its CloudFormation stack.
    """
    pipeline = Pipeline(p)
    if auto_create_repositories == 'enabled':
        try:
            code_account_id = next(
                param['SourceAccountId']
                for param in p['params']
                if 'SourceAccountId' in param)
            has_custom_repo = bool(
                [item for item in p['params'] if 'RepositoryName' in item])
            # Dropped the redundant re-check of `auto_create_repositories`:
            # this branch already requires it to equal 'enabled' (truthy).
            if (code_account_id
                    and str(code_account_id).isdigit()
                    and not has_custom_repo):
                repo = Repo(
                    code_account_id, p.get('name'), p.get('description'))
                repo.create_update()
        except StopIteration:
            LOGGER.debug(
                "No need to create repository as SourceAccountId is not found in params"
            )
    # Fix: bind before the loop so the membership check below cannot raise
    # NameError when the pipeline defines no targets/paths.
    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            for path in step.get('path'):
                regions = step.get(
                    'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                step_name = step.get('name')
                params = step.get('params', {})
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(
                    path, regions, target_structure, organizations,
                    step_name, params)
                pipeline_target.fetch_accounts_for_target()
            pipeline.template_dictionary["targets"].append(
                target_structure.account_list)
    if DEPLOYMENT_ACCOUNT_REGION not in regions:
        pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
    parameters = pipeline.generate_parameters()
    pipeline.generate()
    deployment_map.update_deployment_parameters(pipeline)
    s3_object_path = upload_pipeline(s3, pipeline)
    store_regional_parameter_config(pipeline, parameter_store)
    cloudformation = CloudFormation(
        region=DEPLOYMENT_ACCOUNT_REGION,
        deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
        role=boto3,
        template_url=s3_object_path,
        parameters=parameters,
        wait=True,
        stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
        s3=None,
        s3_key_path=None,
        account_id=DEPLOYMENT_ACCOUNT_ID)
    cloudformation.create_stack()
def test_fetch_account_error_invalid_account_id():
    """An 11-digit path (valid account ids have 12 digits) must raise
    InvalidDeploymentMapError."""
    target = Target(
        path='12345678910',  # only 11 digits, so not a valid account id
        regions=['region1', 'region2'],
        target_structure=MockTargetStructure(),
        organizations=Mock(),
        step_name=None,
    )
    with raises(InvalidDeploymentMapError):
        target.fetch_accounts_for_target()
def test_fetch_accounts_for_approval():
    """The literal path 'approval' dispatches to the approval handler."""
    target = Target(
        path='approval',
        regions=['region1', 'region2'],
        target_structure=MockTargetStructure(),
        organizations=None,
        step_name=None,
    )
    with patch.object(target, '_target_is_approval') as handler:
        target.fetch_accounts_for_target()
        handler.assert_called_once_with()
def main():  # pylint: disable=R0915
    """Generate and deploy one CloudFormation stack per deployment-map pipeline.

    Reads the deployment map, assumes the cross-account readonly role to
    resolve target accounts for each pipeline, then generates the pipeline
    template, uploads it to S3 and creates/updates its stack.
    """
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)
    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store, ADF_PIPELINE_PREFIX)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, S3_BUCKET_NAME)
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')
    organizations = Organizations(role)
    clean(parameter_store, deployment_map)
    for p in deployment_map.map_contents.get('pipelines'):
        pipeline = Pipeline(p)
        # Fix: bind per pipeline; previously `regions` leaked across
        # iterations, so a pipeline with zero targets either reused the
        # previous pipeline's regions or raised NameError on the first one.
        regions = []
        for target in p.get('targets', []):
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                for path in step.get('path'):
                    regions = step.get(
                        'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                    step_name = step.get('name')
                    params = step.get('params', {})
                    pipeline.stage_regions.append(regions)
                    pipeline_target = Target(
                        path, regions, target_structure, organizations,
                        step_name, params)
                    pipeline_target.fetch_accounts_for_target()
                pipeline.template_dictionary["targets"].append(
                    target_structure.account_list)
        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_pipeline(s3, pipeline)
        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
            s3=None,
            s3_key_path=None,
            account_id=DEPLOYMENT_ACCOUNT_ID)
        cloudformation.create_stack()
def test_fetch_accounts_for_target_ou_id():
    """A path starting with 'ou-' dispatches to the OU-id handler."""
    target = Target(
        path='ou-123fake',
        regions=['region1', 'region2'],
        target_structure=MockTargetStructure(),
        organizations=None,
    )
    with patch.object(target, '_target_is_ou_id') as handler:
        target.fetch_accounts_for_target()
        handler.assert_called_once_with()
def worker_thread(p, organizations, auto_create_repositories,
                  deployment_map, parameter_store):
    """Process one deployment-map pipeline definition ``p``.

    Optionally auto-creates the source CodeCommit repository, resolves the
    pipeline's target accounts, then writes the generated pipeline input
    (plus required SSM parameters) to ``cdk_inputs/<name>.json`` for the
    CDK stage to consume.
    """
    LOGGER.debug("Worker Thread started for %s", p.get('name'))
    pipeline = Pipeline(p)
    if auto_create_repositories == 'enabled':
        code_account_id = (
            p.get('default_providers', {})
            .get('source', {})
            .get('properties', {})
            .get('account_id', {}))
        has_custom_repo = (
            p.get('default_providers', {})
            .get('source', {})
            .get('properties', {})
            .get('repository', {}))
        if auto_create_repositories and code_account_id and str(
                code_account_id).isdigit() and not has_custom_repo:
            repo = Repo(
                code_account_id, p.get('name'), p.get('description'))
            repo.create_update()
    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            regions = step.get(
                'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
            paths_tags = []
            for path in step.get('path', []):
                paths_tags.append(path)
            if step.get('tags') is not None:
                paths_tags.append(step.get('tags', {}))
            for path_or_tag in paths_tags:
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(
                    path_or_tag, target_structure, organizations, step,
                    regions)
                pipeline_target.fetch_accounts_for_target()
            # Fix: the waves come from the local TargetStructure instance;
            # `target` is the raw deployment-map dict and has no
            # `target_structure` attribute (would raise AttributeError).
            pipeline.template_dictionary["targets"].append(
                target_structure.generate_waves())
    if DEPLOYMENT_ACCOUNT_REGION not in regions:
        pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
    pipeline.generate_input()
    ssm_params = fetch_required_ssm_params(
        pipeline.input["regions"] or [DEPLOYMENT_ACCOUNT_REGION])
    deployment_map.update_deployment_parameters(pipeline)
    store_regional_parameter_config(pipeline, parameter_store)
    with open(f'cdk_inputs/{pipeline.input["name"]}.json',
              mode='w', encoding='utf-8') as outfile:
        data = {}
        data['input'] = pipeline.input
        data['input']['default_scm_branch'] = ssm_params.get(
            'default_scm_branch')
        data['ssm_params'] = ssm_params
        json.dump(data, outfile)
def test_fetch_accounts_for_target_ou_path():
    """A slash-delimited path dispatches to the OU-path handler."""
    target = Target(
        path='/thing/path',
        regions=['region1', 'region2'],
        target_structure=MockTargetStructure(),
        organizations=None,
        step_name=None,
        params={},
    )
    with patch.object(target, '_target_is_ou_path') as handler:
        target.fetch_accounts_for_target()
        handler.assert_called_once_with()