Code example #1
def worker_thread(p, organizations, auto_create_repositories, s3,
                  deployment_map, parameter_store):
    """Process a single pipeline definition on a worker thread.

    Optionally auto-creates the source repository, resolves the target
    accounts for every step, uploads the generated pipeline template to
    S3, and creates/updates the pipeline's CloudFormation stack.

    :param p: Pipeline definition (dict) taken from the deployment map.
    :param organizations: Organizations wrapper used to resolve targets.
    :param auto_create_repositories: 'enabled' to auto-create repositories.
    :param s3: S3 wrapper used to upload the generated template.
    :param deployment_map: DeploymentMap to update with pipeline parameters.
    :param parameter_store: ParameterStore for regional parameter config.
    """
    pipeline = Pipeline(p)

    if auto_create_repositories == 'enabled':
        try:
            code_account_id = next(param['SourceAccountId']
                                   for param in p['params']
                                   if 'SourceAccountId' in param)
            # Skip creation when the definition names its own repository.
            # (any() instead of materializing a throwaway list.)
            has_custom_repo = any(
                'RepositoryName' in item for item in p['params'])
            # Only create a repo for a valid, numeric source account id.
            # (Dropped the redundant re-check of auto_create_repositories;
            # we are already inside the 'enabled' branch.)
            if code_account_id and str(
                    code_account_id).isdigit() and not has_custom_repo:
                repo = Repo(code_account_id, p.get('name'),
                            p.get('description'))
                repo.create_update()
        except StopIteration:
            LOGGER.debug(
                "No need to create repository as SourceAccountId is not found in params"
            )

    # Fix: initialize regions so the membership test below cannot raise
    # NameError when a target has no steps/paths.
    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            # Fix: default to [] so a step without 'path' does not raise
            # TypeError when iterating None.
            for path in step.get('path', []):
                regions = step.get('regions',
                                   p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                step_name = step.get('name')
                params = step.get('params', {})
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(path, regions, target_structure,
                                         organizations, step_name, params)
                pipeline_target.fetch_accounts_for_target()

        pipeline.template_dictionary["targets"].append(
            target_structure.account_list)

        # The deployment account region must always be staged.
        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

    parameters = pipeline.generate_parameters()
    pipeline.generate()
    deployment_map.update_deployment_parameters(pipeline)
    s3_object_path = upload_pipeline(s3, pipeline)

    store_regional_parameter_config(pipeline, parameter_store)
    cloudformation = CloudFormation(
        region=DEPLOYMENT_ACCOUNT_REGION,
        deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
        role=boto3,
        template_url=s3_object_path,
        parameters=parameters,
        wait=True,
        stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
        s3=None,
        s3_key_path=None,
        account_id=DEPLOYMENT_ACCOUNT_ID)
    cloudformation.create_stack()
Code example #2
def worker_thread(p, organizations, auto_create_repositories, deployment_map,
                  parameter_store):
    """Generate the CDK input file for a single pipeline definition.

    Optionally auto-creates the source repository, resolves the target
    accounts for every step, stores regional parameter configuration,
    and writes the pipeline input plus required SSM parameters to
    ``cdk_inputs/<name>.json``.

    :param p: Pipeline definition (dict) taken from the deployment map.
    :param organizations: Organizations wrapper used to resolve targets.
    :param auto_create_repositories: 'enabled' to auto-create repositories.
    :param deployment_map: DeploymentMap to update with pipeline parameters.
    :param parameter_store: ParameterStore for regional parameter config.
    """
    LOGGER.debug("Worker Thread started for %s", p.get('name'))
    pipeline = Pipeline(p)
    if auto_create_repositories == 'enabled':
        # Hoist the shared .get chain instead of walking it twice.
        source_props = (
            p.get('default_providers', {})
            .get('source', {})
            .get('properties', {})
        )
        code_account_id = source_props.get('account_id', {})
        # A user-specified repository means we must not create one.
        has_custom_repo = source_props.get('repository', {})
        # Only create a repo for a valid, numeric source account id.
        # (Dropped the redundant re-check of auto_create_repositories;
        # we are already inside the 'enabled' branch.)
        if code_account_id and str(
                code_account_id).isdigit() and not has_custom_repo:
            repo = Repo(code_account_id, p.get('name'), p.get('description'))
            repo.create_update()

    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            regions = step.get('regions',
                               p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
            # A step may target explicit paths, tags, or both.
            paths_tags = list(step.get('path', []))
            if step.get('tags') is not None:
                paths_tags.append(step.get('tags', {}))
            for path_or_tag in paths_tags:
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(path_or_tag, target_structure,
                                         organizations, step, regions)
                pipeline_target.fetch_accounts_for_target()

        # Fix: `target` is a plain dict from the deployment map and has no
        # `target_structure` attribute (this raised AttributeError); use
        # the local TargetStructure instead. Also append once per target
        # rather than once per step, so waves are not duplicated.
        pipeline.template_dictionary["targets"].append(
            target_structure.generate_waves())

    if DEPLOYMENT_ACCOUNT_REGION not in regions:
        pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
    pipeline.generate_input()
    ssm_params = fetch_required_ssm_params(pipeline.input["regions"]
                                           or [DEPLOYMENT_ACCOUNT_REGION])
    deployment_map.update_deployment_parameters(pipeline)
    store_regional_parameter_config(pipeline, parameter_store)
    with open(f'cdk_inputs/{pipeline.input["name"]}.json',
              mode='w',
              encoding='utf-8') as outfile:
        data = {}
        data['input'] = pipeline.input
        data['input']['default_scm_branch'] = ssm_params.get(
            'default_scm_branch')
        data['ssm_params'] = ssm_params
        json.dump(data, outfile)
Code example #3
def main():  #pylint: disable=R0915
    """Generate and deploy every pipeline declared in the deployment map.

    For each pipeline definition: optionally auto-creates the source
    repository, resolves target accounts, uploads the generated template
    to S3, and creates/updates the pipeline's CloudFormation stack.
    """
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store, ADF_PIPELINE_PREFIX)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, S3_BUCKET_NAME)
    sts = STS()
    # Read-only role on the management account, used to query Organizations.
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    try:
        auto_create_repositories = parameter_store.fetch_parameter(
            'auto_create_repositories')
    except ParameterNotFoundError:
        # Default to enabled when the parameter has never been set.
        auto_create_repositories = 'enabled'

    # Fix: default to [] so a map without a 'pipelines' key does not raise.
    for p in deployment_map.map_contents.get('pipelines', []):
        pipeline = Pipeline(p)

        if auto_create_repositories == 'enabled':
            # Fix: bare next() raised StopIteration when no param carried
            # a SourceAccountId; handle it like the worker-thread variant.
            try:
                code_account_id = next(param['SourceAccountId']
                                       for param in p['params']
                                       if 'SourceAccountId' in param)
                # Fix: skip creation when the definition names its own
                # repository (guard was missing here but present in the
                # worker-thread variant).
                has_custom_repo = any(
                    'RepositoryName' in item for item in p['params'])
                if code_account_id and str(
                        code_account_id).isdigit() and not has_custom_repo:
                    repo = Repo(code_account_id, p.get('name'),
                                p.get('description'))
                    repo.create_update()
            except StopIteration:
                LOGGER.debug(
                    "No need to create repository as SourceAccountId is not found in params"
                )

        # Fix: initialize regions so the membership test below cannot
        # raise NameError when a pipeline has no targets/paths.
        regions = []
        for target in p.get('targets', []):
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                # Fix: default to [] so a step without 'path' does not raise.
                for path in step.get('path', []):
                    regions = step.get(
                        'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                    step_name = step.get('name')
                    params = step.get('params', {})
                    pipeline.stage_regions.append(regions)
                    pipeline_target = Target(path, regions, target_structure,
                                             organizations, step_name, params)
                    pipeline_target.fetch_accounts_for_target()

            pipeline.template_dictionary["targets"].append(
                target_structure.account_list)

        # The deployment account region must always be staged.
        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_pipeline(s3, pipeline)

        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
            s3=None,
            s3_key_path=None,
            account_id=DEPLOYMENT_ACCOUNT_ID)
        cloudformation.create_stack()