Example 1
def lambda_handler(event, _):
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}'.format(
            event["account_id"],
            event["cross_account_access_role"]
        ), 'master_lambda'
    )

    if event['is_deployment_account']:
        configure_master_account_parameters(event)
        configure_deployment_account_parameters(event, role)

    s3 = S3(
        region=REGION_DEFAULT,
        bucket=S3_BUCKET
    )

    for region in list(set([event["deployment_account_region"]] + event["regions"])):
        if not event["is_deployment_account"]:
            configure_generic_account(sts, event, region, role)
        cloudformation = CloudFormation(
            region=region,
            deployment_account_region=event["deployment_account_region"],
            role=role,
            wait=False,
            stack_name=None, # Stack name will be automatically defined based on event
            s3=s3,
            s3_key_path=event["full_path"],
            account_id=event["account_id"]
        )
        cloudformation.create_stack()

    return event
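
The keys read above imply an input shape roughly like the following; a minimal sketch of a hypothetical invocation payload, with placeholder account ID, role name, and path:

# Hypothetical payload sketch; values are placeholders inferred from the
# keys the handler reads, not taken from the source.
sample_event = {
    "account_id": "111111111111",
    "cross_account_access_role": "OrganizationAccountAccessRole",
    "is_deployment_account": 0,
    "deployment_account_region": "eu-west-1",
    "regions": ["eu-west-1", "us-east-1"],
    "full_path": "deployment"
}
lambda_handler(sample_event, None)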
Example 2
    def create_update(self):
        s3_object_path = s3.put_object(
            "adf-build/repo_templates/codecommit.yml",
            "{0}/adf-build/repo_templates/codecommit.yml".format(TARGET_DIR))
        cloudformation = CloudFormation(
            region=CODE_ACCOUNT_REGION,
            deployment_account_region=CODE_ACCOUNT_REGION,
            role=self.session,
            template_url=s3_object_path,
            parameters=self.define_repo_parameters(),
            wait=True,
            stack_name=self.stack_name,
            s3=None,
            s3_key_path=None,
            account_id=DEPLOYMENT_ACCOUNT_ID,
        )

        # Update the stack if the repo and the ADF controlled stack exist
        update_stack = (self.repo_exists()
                        and cloudformation.get_stack_status())
        if not self.repo_exists() or update_stack:
            LOGGER.info(
                'Creating Stack for Codecommit Repository %s on Account %s',
                self.name, self.account_id)
            cloudformation.create_stack()
def main():
    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store,
                                   os.environ["ADF_PIPELINE_PREFIX"])
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, boto3, S3_BUCKET_NAME)
    sts = STS(boto3)
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-org-access-adf'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    for p in deployment_map.map_contents.get('pipelines'):
        pipeline = Pipeline(p)

        for target in p['targets']:
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                for path in step.get('path'):
                    try:
                        regions = step.get(
                            'regions',
                            p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                        pipeline.stage_regions.append(regions)
                        pipeline_target = Target(path, regions,
                                                 target_structure,
                                                 organizations)
                        pipeline_target.fetch_accounts_for_target()
                    except Exception as error:
                        raise Exception(
                            "Failed to return accounts for {0}".format(path)
                        ) from error

            pipeline.template_dictionary["targets"].append(
                target_structure.account_list)

        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_if_required(s3, pipeline)

        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(os.environ["ADF_PIPELINE_PREFIX"],
                                        pipeline.name),
            s3=None,
            s3_key_path=None)

        cloudformation.create_stack()
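
main() iterates deployment_map.map_contents.get('pipelines'), so the backing deployment map presumably resembles the sketch below; the key names are inferred from the p['targets'], step.get('path'), and step.get('regions') accesses, while the values are illustrative only.

# Illustrative deployment map structure; names and OU paths are made up.
map_contents = {
    "pipelines": [
        {
            "name": "sample-pipeline",
            "regions": ["eu-west-1"],
            "targets": [
                {"path": ["/banking/testing"], "regions": ["eu-west-1"]}
            ]
        }
    ]
}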
Example 4
    def fetch_stack_output(self, value, param, key=None):
        try:
            [_, account_id, region, stack_name, export] = str(value).split(':')
        except ValueError:
            raise ValueError(
                "{0} is not a valid import string."
                "syntax should be import:account_id:region:stack_name:export_key"
                .format(str(value)))

        LOGGER.info(
            "Assuming the role %s", 'arn:aws:iam::{0}:role/{1}'.format(
                account_id, 'adf-cloudformation-deployment-role'))
        role = self.sts.assume_cross_account_role(
            'arn:aws:iam::{0}:role/{1}'.format(
                account_id, 'adf-cloudformation-deployment-role'), 'importer')
        cloudformation = CloudFormation(
            region=region,
            deployment_account_region=os.environ["AWS_REGION"],
            role=role,
            stack_name=stack_name,
            account_id=account_id)
        LOGGER.info("Retrieving value of key %s from %s on %s in %s", export,
                    stack_name, account_id, region)
        stack_output = cloudformation.get_stack_output(export)
        if not stack_output:
            raise Exception("No Key was found on {0} with the name {1}".format(
                stack_name, export))

        LOGGER.info("Stack output value is %s", stack_output)
        if key:
            self.stage_parameters[key][param] = stack_output
            return
        self.stage_parameters[param] = stack_output
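
The five-way split implies import strings of the form import:account_id:region:stack_name:export_key. A quick illustration with placeholder values:

value = "import:111111111111:eu-west-1:sample-stack:SampleExportKey"
[_, account_id, region, stack_name, export] = str(value).split(':')
# account_id == "111111111111", region == "eu-west-1",
# stack_name == "sample-stack", export == "SampleExportKey"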
Example 5
 def create_update(self):
     s3_object_path = s3.put_object(
         "adf-build/templates/codecommit.yml",
         f"{TARGET_DIR}/adf-build/templates/codecommit.yml",
     )
     cloudformation = CloudFormation(
         region=DEPLOYMENT_ACCOUNT_REGION,
         deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
         role=self.session,
         template_url=s3_object_path,
         parameters=self.define_repo_parameters(),
         wait=True,
         stack_name=self.stack_name,
         s3=None,
         s3_key_path=None,
         account_id=DEPLOYMENT_ACCOUNT_ID,
     )
     # Update the stack if the repo and the ADF controlled stack exist,
     # return if the repo exists but no stack (previously made)
     _repo_exists = self.repo_exists()
     _stack_exists = cloudformation.get_stack_status()
     if _repo_exists and not _stack_exists:
         return
     LOGGER.info(
         'Ensuring State for CodeCommit Repository Stack %s on Account %s',
         self.name,
         self.account_id,
     )
     cloudformation.create_stack()
 def fetch_stack_output(self, value, key, optional=False):  # pylint: disable=too-many-statements
     try:
         [_, account_id, region, stack_name,
          output_key] = str(value).split(':')
     except ValueError:
         raise ValueError(
             "{0} is not a valid import string."
             "syntax should be import:account_id:region:stack_name:output_key"
             .format(str(value)))
     if Resolver._is_optional(output_key):
         LOGGER.info("Parameter %s is considered optional", output_key)
         optional = True
     output_key = output_key[:-1] if optional else output_key
     try:
         role = self.sts.assume_cross_account_role(
             'arn:aws:iam::{0}:role/{1}'.format(
                 account_id, 'adf-readonly-automation-role'), 'importer')
         cloudformation = CloudFormation(
             region=region,
             deployment_account_region=os.environ["AWS_REGION"],
             role=role,
             stack_name=stack_name,
             account_id=account_id)
         stack_output = self.cache.check(
             value) or cloudformation.get_stack_output(output_key)
         if stack_output:
             LOGGER.info("Stack output value is %s", stack_output)
             self.cache.add(value, stack_output)
     except ClientError:
         if not optional:
             raise
         stack_output = ""
     try:
         parent_key = list(
             Resolver.determine_parent_key(self.comparison_parameters,
                                           key))[0]
         if optional:
             self.stage_parameters[parent_key][key] = stack_output
         else:
             if not stack_output:
                 raise Exception(
                     "No Stack Output found on {account_id} in {region} "
                     "with stack name {stack} and output key "
                     "{output_key}".format(
                         account_id=account_id,
                         region=region,
                         stack=stack_name,
                         output_key=output_key,
                     ))
             self.stage_parameters[parent_key][key] = stack_output
     except IndexError:
         if stack_output:
             if self.stage_parameters.get(key):
                 self.stage_parameters[key] = stack_output
         else:
             raise Exception(
                 "Could not determine the structure of the file in order to import from CloudFormation"
             )
     return True
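
Resolver._is_optional is not included in this listing; judging by the trailing-character strip above and the export.endswith('?') check in Example 14, a plausible sketch is:

class Resolver:
    @staticmethod
    def _is_optional(value):
        # Assumed behaviour: a trailing '?' marks an import as optional.
        return value.endswith('?')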
Example 7
def worker_thread(sts, region, account_id, role, event):
    partition = get_partition(REGION_DEFAULT)

    role = sts.assume_cross_account_role(
        f'arn:{partition}:iam::{account_id}:role/{role}', 'remove_base')

    parameter_store = ParameterStore(region, role)
    paginator = parameter_store.client.get_paginator('describe_parameters')
    page_iterator = paginator.paginate()
    for page in page_iterator:
        for parameter in page['Parameters']:
            if 'Used by The AWS Deployment Framework' in parameter.get(
                    'Description', ''):
                parameter_store.delete_parameter(parameter.get('Name'))

    cloudformation = CloudFormation(
        region=region,
        deployment_account_region=event.get('deployment_account_region'),
        role=role,
        wait=True,
        stack_name=None,
        s3=None,
        s3_key_path=None,
        account_id=account_id)
    return cloudformation.delete_all_base_stacks()
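
get_partition is likewise not shown; a minimal sketch, assuming it only distinguishes the GovCloud partition from the default aws partition (a fuller version would also handle aws-cn for China regions):

def get_partition(region_name):
    # Sketch only: map a region name to its ARN partition prefix.
    if region_name.startswith('us-gov'):
        return 'aws-us-gov'
    return 'aws'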
Example 8
def clean(parameter_store, deployment_map):
    """
    Function used to remove stale entries in Parameter Store and
    Deployment Pipelines that are no longer in the Deployment Map
    """
    current_pipeline_parameters = parameter_store.fetch_parameters_by_path(
        '/deployment/')

    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    cloudformation = CloudFormation(
        region=DEPLOYMENT_ACCOUNT_REGION,
        deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
        role=boto3)
    stacks_to_remove = []
    for parameter in current_pipeline_parameters:
        name = parameter.get('Name').split('/')[-2]
        if name not in [
                p.get('name') for p in deployment_map.map_contents['pipelines']
        ]:
            parameter_store.delete_parameter(name)
            stacks_to_remove.append(name)

    for stack in list(set(stacks_to_remove)):
        cloudformation.delete_stack("{0}-{1}".format(ADF_PIPELINE_PREFIX,
                                                     stack))
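
The split('/')[-2] lookup extracts the pipeline name from a parameter path; for instance, with a hypothetical parameter stored under /deployment/<pipeline>/<leaf>:

name = '/deployment/sample-pipeline/regions'.split('/')[-2]
# name == 'sample-pipeline'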
Example 9
def worker_thread(p, organizations, auto_create_repositories, s3,
                  deployment_map, parameter_store):
    pipeline = Pipeline(p)

    if auto_create_repositories == 'enabled':
        try:
            code_account_id = next(param['SourceAccountId']
                                   for param in p['params']
                                   if 'SourceAccountId' in param)
            has_custom_repo = bool(
                [item for item in p['params'] if 'RepositoryName' in item])
            if auto_create_repositories and code_account_id and str(
                    code_account_id).isdigit() and not has_custom_repo:
                repo = Repo(code_account_id, p.get('name'),
                            p.get('description'))
                repo.create_update()
        except StopIteration:
            LOGGER.debug(
                "No need to create repository as SourceAccountId is not found in params"
            )

    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            for path in step.get('path'):
                regions = step.get('regions',
                                   p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                step_name = step.get('name')
                params = step.get('params', {})
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(path, regions, target_structure,
                                         organizations, step_name, params)
                pipeline_target.fetch_accounts_for_target()

        pipeline.template_dictionary["targets"].append(
            target_structure.account_list)

        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

    parameters = pipeline.generate_parameters()
    pipeline.generate()
    deployment_map.update_deployment_parameters(pipeline)
    s3_object_path = upload_pipeline(s3, pipeline)

    store_regional_parameter_config(pipeline, parameter_store)
    cloudformation = CloudFormation(
        region=DEPLOYMENT_ACCOUNT_REGION,
        deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
        role=boto3,
        template_url=s3_object_path,
        parameters=parameters,
        wait=True,
        stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
        s3=None,
        s3_key_path=None,
        account_id=DEPLOYMENT_ACCOUNT_ID)
    cloudformation.create_stack()
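
The next(...) call raises StopIteration when no entry in p['params'] carries a SourceAccountId, which is what routes execution into the LOGGER.debug branch; a self-contained illustration with placeholder params:

params = [{"RepositoryName": "sample-repo"}]  # no SourceAccountId present
try:
    code_account_id = next(param['SourceAccountId']
                           for param in params
                           if 'SourceAccountId' in param)
except StopIteration:
    code_account_id = None  # the worker logs and skips repository creation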
Example 10
def main():  #pylint: disable=R0915
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store, ADF_PIPELINE_PREFIX)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, S3_BUCKET_NAME)
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    for p in deployment_map.map_contents.get('pipelines'):
        pipeline = Pipeline(p)

        for target in p.get('targets', []):
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                for path in step.get('path'):
                    regions = step.get(
                        'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                    step_name = step.get('name')
                    params = step.get('params', {})
                    pipeline.stage_regions.append(regions)
                    pipeline_target = Target(path, regions, target_structure,
                                             organizations, step_name, params)
                    pipeline_target.fetch_accounts_for_target()

            pipeline.template_dictionary["targets"].append(
                target_structure.account_list)

        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_pipeline(s3, pipeline)

        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
            s3=None,
            s3_key_path=None,
            account_id=DEPLOYMENT_ACCOUNT_ID)
        cloudformation.create_stack()
Example 11
    def _clean_stale_stacks(self, name):
        cloudformation = CloudFormation(
            region=os.environ['AWS_REGION'],
            deployment_account_region=os.environ['AWS_REGION'],
            role=boto3,
        )

        LOGGER.warning('Removing Deployment Pipeline for %s', name)
        cloudformation.delete_stack("{0}-{1}".format(self.pipeline_name_prefix,
                                                     name))
Example 12
def worker_thread(account_id, sts, config, s3, cache, kms_dict):
    """
    The Worker thread function that is created for each account
    in which CloudFormation create_stack is called
    """
    LOGGER.debug("%s - Starting new worker thread", account_id)

    organizations = Organizations(role=boto3, account_id=account_id)
    ou_id = organizations.get_parent_info().get("ou_parent_id")

    account_state = is_account_in_invalid_state(ou_id, config.config)
    if account_state:
        LOGGER.info("%s %s", account_id, account_state)
        return

    account_path = organizations.build_account_path(
        ou_id,
        [],  # Initial empty array to hold OU Path,
        cache)
    try:
        role = ensure_generic_account_can_be_setup(sts, config, account_id)

        # Regional base stacks can be updated after global
        for region in list(
                set([config.deployment_account_region] +
                    config.target_regions)):
            # Ensuring the kms_arn on the target account is up-to-date
            parameter_store = ParameterStore(region, role)
            parameter_store.put_parameter('kms_arn', kms_dict[region])

            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path=account_path,
                account_id=account_id)
            try:
                cloudformation.create_stack()
            except GenericAccountConfigureError as error:
                if 'Unable to fetch parameters' in str(error):
                    LOGGER.error(
                        '%s - Failed to update its base stack due to missing parameters (deployment_account_id or kms_arn), '
                        'ensure this account has been bootstrapped correctly by being moved from the root '
                        'into an Organizational Unit within AWS Organizations.',
                        account_id)
                raise Exception from error

    except GenericAccountConfigureError as generic_account_error:
        LOGGER.info(generic_account_error)
        return
def worker_thread(sts, region, account_id, role, event):
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}'.format(account_id, role), 'remove_base')

    cloudformation = CloudFormation(
        region=region,
        deployment_account_region=event.get('deployment_account_region'),
        role=role,
        wait=True,
        stack_name=None,
        s3=None,
        s3_key_path=None)
    return cloudformation.delete_all_base_stacks()
Example 14
 def fetch_stack_output(self, value, key, param=None, optional=False):  #pylint: disable=R0912, R0915
     try:
         [_, account_id, region, stack_name, export] = str(value).split(':')
         if export.endswith('?'):
             export = export[:-1]
             LOGGER.info("Import %s is considered optional", export)
             optional = True
     except ValueError:
         raise ValueError(
             "{0} is not a valid import string."
             "syntax should be import:account_id:region:stack_name:export_key"
             .format(str(value)))
     LOGGER.info(
         "Assuming the role %s", 'arn:aws:iam::{0}:role/{1}'.format(
             account_id, 'adf-cloudformation-deployment-role'))
     try:
         role = self.sts.assume_cross_account_role(
             'arn:aws:iam::{0}:role/{1}'.format(
                 account_id, 'adf-cloudformation-deployment-role'),
             'importer')
         cloudformation = CloudFormation(
             region=region,
             deployment_account_region=os.environ["AWS_REGION"],
             role=role,
             stack_name=stack_name,
             account_id=account_id)
         LOGGER.info("Retrieving value of key %s from %s on %s in %s",
                     export, stack_name, account_id, region)
         stack_output = cloudformation.get_stack_output(export)
         LOGGER.info("Stack output value is %s", stack_output)
     except ClientError:
         if not optional:
             raise
         stack_output = ""
     if optional:
         if param:
             self.stage_parameters[param][key] = stack_output
         else:
             self.stage_parameters[key] = stack_output
         return
     else:
         if not stack_output:
             raise Exception(
                 "No Stack Output found on {0} in {1} with stack name {2} "
                 "and output key {3}".format(account_id, region, stack_name,
                                             export))
         if param:
             self.stage_parameters[param][key] = stack_output
         else:
             self.stage_parameters[key] = stack_output
         return
def lambda_handler(event, _):
    parameters = ParameterStore(REGION_DEFAULT, boto3)
    account_id = event.get(
        'detail').get(
            'requestParameters').get('accountId')
    organizations = Organizations(boto3, account_id)
    parsed_event = Event(event, parameters, organizations, account_id)
    cache = Cache()

    if parsed_event.moved_to_root or parsed_event.moved_to_protected:
        return parsed_event.create_output_object(cache)

    parsed_event.set_destination_ou_name()

    sts = STS(boto3)
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}'.format(
            parsed_event.account_id,
            parsed_event.cross_account_access_role
        ), 'master_lambda'
    )

    if parsed_event.is_deployment_account:
        update_master_account_parameters(parsed_event, parameters)
        configure_deployment_account(parsed_event, role)

    s3 = S3(REGION_DEFAULT, boto3, S3_BUCKET)

    account_path = parsed_event.organizations.build_account_path(
        parsed_event.destination_ou_id,
        [],  # Initial empty array to hold OU Path,
        cache,
    )

    for region in list(set([parsed_event.deployment_account_region] + parsed_event.regions)):
        if not parsed_event.is_deployment_account:
            configure_generic_account(sts, parsed_event, region, role)
        cloudformation = CloudFormation(
            region=region,
            deployment_account_region=parsed_event.deployment_account_region,
            role=role,
            wait=False,
            stack_name=None,
            s3=s3,
            s3_key_path=account_path,
            file_path=None,
        )
        cloudformation.create_stack()

    return parsed_event.create_output_object(cache)
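
The chained event.get('detail').get('requestParameters').get('accountId') lookups match the shape of a CloudTrail event delivered through CloudWatch Events; a trimmed, hypothetical MoveAccount payload could look like this (IDs are placeholders):

sample_event = {
    "detail": {
        "eventName": "MoveAccount",
        "requestParameters": {
            "accountId": "111111111111",
            "sourceParentId": "r-xxxx",
            "destinationParentId": "ou-xxxx-xxxxxxxx"
        }
    }
}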
Example 16
def lambda_handler(event, _):
    s3 = S3(region=REGION_DEFAULT, bucket=S3_BUCKET)

    cloudformation = CloudFormation(
        region=event['deployment_account_region'],
        deployment_account_region=event['deployment_account_region'],
        role=boto3,
        wait=True,
        stack_name=None,
        s3=s3,
        s3_key_path='adf-build')
    cloudformation.create_stack()

    return event
def worker_thread(template_path, name, s3):
    s3_object_path = upload_pipeline(template_path, name, s3)
    cloudformation = CloudFormation(
        region=DEPLOYMENT_ACCOUNT_REGION,
        deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
        role=boto3,
        template_url=s3_object_path,
        parameters=[],
        wait=True,
        stack_name=f"{ADF_PIPELINE_PREFIX}{name}",
        s3=None,
        s3_key_path=None,
        account_id=DEPLOYMENT_ACCOUNT_ID)
    cloudformation.create_stack()
Example 18
def setup(args):
    """
    Similar to config but creates AWS resources using cfn template
    and based on the cfn stack output, creates the specs file for the user,
    then writes petctl config.
    """
    region = args.region
    s3_bucket_name = args.s3_bucket
    efs_id = args.efs_id
    os.makedirs(PETCTL_DIR, exist_ok=True)
    session = auth.get_session(region)
    cfn = CloudFormation(session)
    cfn.create_specs_file(SPECS_FILE, s3_bucket_name, efs_id)
    write_config_file(region, SPECS_FILE)
    log.info(f"Setup complete. petctl config file: {PETCTL_CONFIG_FILE}")
Example 19
def lambda_handler(event, _):
    """Main Lambda Entry point
    """
    sts = STS()
    account_id = event.get('account_id')
    partition = get_partition(REGION_DEFAULT)
    cross_account_access_role = event.get('cross_account_access_role')

    role = sts.assume_cross_account_role(
        f'arn:{partition}:iam::{account_id}:role/{cross_account_access_role}',
        'master')

    s3 = S3(REGION_DEFAULT, S3_BUCKET)

    for region in list(
            set([event['deployment_account_region']] + event['regions'])):

        cloudformation = CloudFormation(
            region=region,
            deployment_account_region=event['deployment_account_region'],
            role=role,
            wait=False,
            stack_name=None,
            s3=s3,
            s3_key_path=event['ou_name'],
            account_id=account_id)

        status = cloudformation.get_stack_status()

        if status in ('CREATE_IN_PROGRESS', 'UPDATE_IN_PROGRESS'):
            raise RetryError(f"CloudFormation Stack status: {status}")

        if status in ('CREATE_FAILED', 'ROLLBACK_FAILED', 'DELETE_FAILED',
                      'UPDATE_ROLLBACK_FAILED', 'ROLLBACK_IN_PROGRESS',
                      'ROLLBACK_COMPLETE'):
            raise Exception(
                f"Account Bootstrap Failed - Account: {account_id} "
                f"Region: {region} Status: {status}")

        if event.get('is_deployment_account'):
            update_deployment_account_output_parameters(
                deployment_account_region=event['deployment_account_region'],
                region=region,
                deployment_account_role=role,
                cloudformation=cloudformation)

    return event
def global_cls():
    return CloudFormation(region='us-east-1',
                          deployment_account_region='us-east-1',
                          role=boto3,
                          wait=False,
                          stack_name=None,
                          template_url='https://some/path/global.yml',
                          s3=None,
                          s3_key_path='/some/location')
def regional_cls():
    return CloudFormation(region='eu-central-1',
                          deployment_account_region='us-east-1',
                          role=boto3,
                          wait=False,
                          stack_name='some_stack',
                          template_url='https://some/path/regional.yml',
                          s3=None,
                          s3_key_path=None)
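
global_cls and regional_cls above read like pytest fixtures whose decorators were stripped during extraction; if so, a test would receive them by parameter name, roughly as sketched here (the assertion is hypothetical, not from the real suite):

import boto3
import pytest

@pytest.fixture
def global_cls():
    return CloudFormation(region='us-east-1',
                          deployment_account_region='us-east-1',
                          role=boto3,
                          wait=False,
                          stack_name=None,
                          template_url='https://some/path/global.yml',
                          s3=None,
                          s3_key_path='/some/location')

def test_global_cls(global_cls):
    # Hypothetical check; the actual tests are not part of this listing.
    assert global_cls is not None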
def lambda_handler(event, _):
    """Main Lambda Entry point
    """
    sts = STS(boto3)

    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}'.format(
            event.get('account_id'),
            event.get('cross_account_iam_role'),
        ), 'master')

    s3 = S3(REGION_DEFAULT, boto3, S3_BUCKET)

    for region in list(
            set([event.get('deployment_account_region')] +
                event.get("regions"))):

        cloudformation = CloudFormation(
            region=region,
            deployment_account_region=event.get('deployment_account_region'),
            role=role,
            wait=False,
            stack_name=None,
            s3=s3,
            s3_key_path=event.get('ou_name'),
            file_path=None,
        )

        status = cloudformation.get_stack_status()

        if status in ("CREATE_IN_PROGRESS", "UPDATE_IN_PROGRESS"):
            raise RetryError("Cloudformation Stack not yet complete")

        # TODO Better waiting validation to ensure stack is not failed
        if event.get('is_deployment_account'):
            update_deployment_account_output_parameters(
                deployment_account_region=event.get(
                    'deployment_account_region'),
                region=region,
                deployment_account_role=role,
                cloudformation=cloudformation)

    return event
Example 23
 def create_update(self):
     s3_object_path = s3.put_object(
         "adf-build/templates/events.yml",
         "{0}/templates/events.yml".format(TARGET_DIR)
     )
     cloudformation = CloudFormation(
         region=SOURCE_ACCOUNT_REGION,
         deployment_account_region=SOURCE_ACCOUNT_REGION,
         role=self.role,
         template_url=s3_object_path,
         parameters=[],
         wait=True,
         stack_name=self.stack_name,
         s3=None,
         s3_key_path=None,
         account_id=DEPLOYMENT_ACCOUNT_ID,
     )
     LOGGER.info(
         'Ensuring Stack State for Event Rule forwarding from %s to %s',
         self.source_account_id,
         DEPLOYMENT_ACCOUNT_ID,
     )
     cloudformation.create_stack()
def regional_cls():
    return CloudFormation(
        region='eu-central-1',
        deployment_account_region='us-east-1',
        role=boto3,
        wait=False,
        stack_name='some_stack',
        s3=None,
        s3_key_path=None,
        file_path=None,
    )
def global_cls():
    return CloudFormation(
        region='us-east-1',
        deployment_account_region='us-east-1',
        role=boto3,
        wait=False,
        stack_name=None,
        s3=None,
        s3_key_path='/some/location',
        file_path=None,
    )
Example 26
def worker_thread(account_id, sts, config, s3, cache):
    """
    The Worker thread function that is created for each account
    in which CloudFormation create_stack is called
    """
    LOGGER.info("Starting new worker thread for %s", account_id)

    organizations = Organizations(boto3, account_id)
    ou_id = organizations.get_parent_info().get("ou_parent_id")

    if is_account_invalid_state(ou_id, config.config):
        LOGGER.info("%s is in an invalid state", account_id)
        return

    account_path = organizations.build_account_path(
        ou_id,
        [],  # Initial empty array to hold OU Path,
        cache)
    LOGGER.info("The Account path for %s is %s", account_id, account_path)

    try:
        role = ensure_generic_account_can_be_setup(sts, config, account_id)

        # Regional base stacks can be updated after global
        for region in list(
                set([config.deployment_account_region] +
                    config.target_regions)):
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path=account_path)

            cloudformation.create_stack()

    except GenericAccountConfigureError as generic_account_error:
        LOGGER.info(generic_account_error)
        return
def worker_thread(sts, region, account_id, role, event):
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}'.format(account_id, role),
        'remove_base')

    parameter_store = ParameterStore(region, role)
    parameters = [
        param['Name']
        for param in parameter_store.client.describe_parameters()['Parameters']
        if 'Used by The AWS Deployment Framework' in param.get('Description', '')
    ]
    for parameter in parameters:
        parameter_store.delete_parameter(parameter)

    cloudformation = CloudFormation(
        region=region,
        deployment_account_region=event.get('deployment_account_region'),
        role=role,
        wait=True,
        stack_name=None,
        s3=None,
        s3_key_path=None,
        account_id=account_id
    )
    return cloudformation.delete_all_base_stacks()
Example 28
def lambda_handler(event, _):
    sts = STS()

    account_id = event["account_id"]
    cross_account_access_role = event["cross_account_access_role"]
    role_arn = f'arn:{PARTITION}:iam::{account_id}:role/{cross_account_access_role}'

    role = sts.assume_cross_account_role(role_arn=role_arn,
                                         role_session_name='management_lambda')

    if event['is_deployment_account']:
        configure_master_account_parameters(event)
        configure_deployment_account_parameters(event, role)

    s3 = S3(region=REGION_DEFAULT, bucket=S3_BUCKET)

    for region in list(
            set([event["deployment_account_region"]] + event["regions"])):
        if not event["is_deployment_account"]:
            configure_generic_account(sts, event, region, role)
        cloudformation = CloudFormation(
            region=region,
            deployment_account_region=event["deployment_account_region"],
            role=role,
            wait=True,
            # Stack name will be automatically defined based on event
            stack_name=None,
            s3=s3,
            s3_key_path=event["full_path"],
            account_id=account_id)
        if is_inter_ou_account_move(event):
            cloudformation.delete_all_base_stacks(True)  # override Wait
        cloudformation.create_stack()
        if region == event["deployment_account_region"]:
            cloudformation.create_iam_stack()

    return event
Example 29
def main():
    config = Config()
    config.store_config()

    try:
        parameter_store = ParameterStore(REGION_DEFAULT, boto3)
        deployment_account_id = parameter_store.fetch_parameter(
            'deployment_account_id')

        organizations = Organizations(boto3, deployment_account_id)

        sts = STS(boto3)
        deployment_account_role = prepare_deployment_account(
            sts=sts,
            deployment_account_id=deployment_account_id,
            config=config)

        cache = Cache()
        ou_id = organizations.get_parent_info().get("ou_parent_id")
        account_path = organizations.build_account_path(ou_id=ou_id,
                                                        account_path=[],
                                                        cache=cache)
        s3 = S3(REGION_DEFAULT, boto3, S3_BUCKET_NAME)

        # First Setup the Deployment Account in all regions (KMS Key and S3 Bucket + Parameter Store values)
        for region in list(
                set([config.deployment_account_region] +
                    config.target_regions)):
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=deployment_account_role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path=account_path)

            cloudformation.create_stack()

            update_deployment_account_output_parameters(
                deployment_account_region=config.deployment_account_region,
                region=region,
                deployment_account_role=deployment_account_role,
                cloudformation=cloudformation)

        threads = []
        account_ids = organizations.get_account_ids()
        for account_id in account_ids:
            t = PropagatingThread(target=worker_thread,
                                  args=(account_id, sts, config, s3, cache))
            t.start()
            threads.append(t)

        for thread in threads:
            thread.join()

        step_functions = StepFunctions(
            role=deployment_account_role,
            deployment_account_id=deployment_account_id,
            deployment_account_region=config.deployment_account_region,
            regions=config.target_regions,
            account_ids=[
                i for i in account_ids if i != deployment_account_id
            ],
            update_pipelines_only=1)

        step_functions.execute_statemachine()
    except ParameterNotFoundError:
        LOGGER.info("Deployment Account has not yet been Bootstrapped.")
        return
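
PropagatingThread is not defined in this listing; the name suggests a threading.Thread subclass that re-raises a worker's exception from join(), along the lines of the common recipe below (an assumption, not necessarily the project's actual implementation):

import threading

class PropagatingThread(threading.Thread):
    # Capture any exception from the target and re-raise it on join().
    def run(self):
        self.exc = None
        self.ret = None
        try:
            self.ret = self._target(*self._args, **self._kwargs)
        except BaseException as exc:
            self.exc = exc

    def join(self, timeout=None):
        super().join(timeout)
        if self.exc:
            raise self.exc
        return self.ret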
Example 30
def main():  # pylint: disable=R0915
    LOGGER.info("ADF Version %s", ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    policies = OrganizationPolicy()
    config = Config()
    config.store_config()

    try:
        parameter_store = ParameterStore(REGION_DEFAULT, boto3)
        deployment_account_id = parameter_store.fetch_parameter(
            'deployment_account_id')
        organizations = Organizations(role=boto3,
                                      account_id=deployment_account_id)
        policies.apply(organizations, parameter_store, config.config)
        sts = STS()
        deployment_account_role = prepare_deployment_account(
            sts=sts,
            deployment_account_id=deployment_account_id,
            config=config)

        cache = Cache()
        ou_id = organizations.get_parent_info().get("ou_parent_id")
        account_path = organizations.build_account_path(ou_id=ou_id,
                                                        account_path=[],
                                                        cache=cache)
        s3 = S3(region=REGION_DEFAULT, bucket=S3_BUCKET_NAME)

        kms_and_bucket_dict = {}
        # First Setup/Update the Deployment Account in all regions (KMS Key and
        # S3 Bucket + Parameter Store values)
        for region in list(
                set([config.deployment_account_region] +
                    config.target_regions)):
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=deployment_account_role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path="adf-bootstrap/" + account_path,
                account_id=deployment_account_id)
            cloudformation.create_stack()
            update_deployment_account_output_parameters(
                deployment_account_region=config.deployment_account_region,
                region=region,
                kms_and_bucket_dict=kms_and_bucket_dict,
                deployment_account_role=deployment_account_role,
                cloudformation=cloudformation)
            if region == config.deployment_account_region:
                cloudformation.create_iam_stack()

        # Updating the stack on the master account in deployment region
        cloudformation = CloudFormation(
            region=config.deployment_account_region,
            deployment_account_region=config.deployment_account_region,
            role=boto3,
            wait=True,
            stack_name=None,
            s3=s3,
            s3_key_path='adf-build',
            account_id=ACCOUNT_ID)
        cloudformation.create_stack()
        threads = []
        account_ids = [
            account_id["Id"] for account_id in organizations.get_accounts()
        ]
        for account_id in [
                account for account in account_ids
                if account != deployment_account_id
        ]:
            thread = PropagatingThread(target=worker_thread,
                                       args=(account_id, sts, config, s3,
                                             cache, kms_and_bucket_dict))
            thread.start()
            threads.append(thread)

        for thread in threads:
            thread.join()

        LOGGER.info("Executing Step Function on Deployment Account")
        step_functions = StepFunctions(
            role=deployment_account_role,
            deployment_account_id=deployment_account_id,
            deployment_account_region=config.deployment_account_region,
            regions=config.target_regions,
            account_ids=account_ids,
            update_pipelines_only=0)

        step_functions.execute_statemachine()
    except ParameterNotFoundError:
        LOGGER.info(
            'A Deployment Account is ready to be bootstrapped! '
            'The Account provisioner will now kick into action, '
            'be sure to check out its progress in AWS Step Functions in this account.'
        )
        return