Example #1
def main():
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store, ADF_PIPELINE_PREFIX)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, S3_BUCKET_NAME)
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    try:
        auto_create_repositories = parameter_store.fetch_parameter(
            'auto_create_repositories')
    except ParameterNotFoundError:
        auto_create_repositories = 'enabled'

    threads = []
    for p in deployment_map.map_contents.get('pipelines'):
        thread = PropagatingThread(target=worker_thread,
                                   args=(p, organizations,
                                         auto_create_repositories, s3,
                                         deployment_map, parameter_store))
        thread.start()
        threads.append(thread)

    for thread in threads:
        thread.join()
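
The worker fan-out above relies on PropagatingThread rather than threading.Thread, so an exception in any worker fails the whole run instead of dying silently inside the thread. The class itself is not shown on this page; a minimal sketch of the usual pattern (capture the exception in run(), re-raise it from join()) could look like this — the exc/ret attribute names are assumptions:

import threading

class PropagatingThread(threading.Thread):
    """Thread that re-raises any exception from its target when joined."""

    def run(self):
        self.exc = None
        self.ret = None
        try:
            self.ret = self._target(*self._args, **self._kwargs)
        except BaseException as exc:  # stored so join() can re-raise it
            self.exc = exc

    def join(self, timeout=None):
        super().join(timeout)
        if self.exc:
            raise self.exc
        return self.ret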

def configure_generic_account(sts, event, region, role):
    """
    Fetches the kms_arn from the deployment account's main region
    and adds it, along with the deployment_account_id parameter, to the
    target account so they can be consumed in CloudFormation. These
    are required for the global.yml in all target accounts.
    """
    try:
        deployment_account_role = sts.assume_cross_account_role(
            'arn:aws:iam::{0}:role/{1}'.format(
                event['deployment_account_id'],
                event['cross_account_access_role']), 'configure_generic')
        parameter_store_deployment_account = ParameterStore(
            event['deployment_account_region'], deployment_account_role)
        parameter_store_target_account = ParameterStore(region, role)
        kms_arn = parameter_store_deployment_account.fetch_parameter(
            '/cross_region/kms_arn/{0}'.format(region))
        bucket_name = parameter_store_deployment_account.fetch_parameter(
            '/cross_region/s3_regional_bucket/{0}'.format(region))
    except (ClientError, ParameterNotFoundError):
        raise GenericAccountConfigureError(
            'Account {0} cannot yet be bootstrapped '
            'as the Deployment Account has not yet been bootstrapped. '
            'Have you moved your Deployment account into the deployment OU?'.
            format(event['account_id'])) from None
    parameter_store_target_account.put_parameter('kms_arn', kms_arn)
    parameter_store_target_account.put_parameter('bucket_name', bucket_name)
    parameter_store_target_account.put_parameter(
        'deployment_account_id', event['deployment_account_id'])

def lambda_handler(event, _):
    # Return if we need to update the pipelines only
    if event.get('update_only'):
        LOGGER.info('Will only update pipelines for this execution.')
        return event
    try:
        sts = STS(boto3)
        parameter_store = ParameterStore(
            event.get('deployment_account_region'), boto3)
        for region in list(
                set([event.get('deployment_account_region')] +
                    event.get("regions"))):
            kms_key_arn = parameter_store.fetch_parameter(
                "/cross_region/kms_arn/{0}".format(region))
            s3_bucket = parameter_store.fetch_parameter(
                "/cross_region/s3_regional_bucket/{0}".format(region))
            for account_id in event.get('account_ids'):
                try:
                    role = sts.assume_cross_account_role(
                        'arn:aws:iam::{0}:role/{1}'.format(
                            account_id, 'adf-cloudformation-deployment-role'),
                        'base_cfn_role')
                    IAMUpdater(kms_key_arn, s3_bucket, role)
                    kms = KMS(region, boto3, kms_key_arn, account_id)
                    kms.enable_cross_account_access()
                except ClientError:
                    continue
        return event
    except BaseException as error:
        LOGGER.exception(error)
Example #4
    def _cfn_param_updater(self, param, comparison_parameters,
                           stage_parameters):
        """
        Generic CFN Updater method
        """
        for key, value in comparison_parameters[param].items():
            if str(value).startswith('resolve:'):
                if str(value).count(':') > 1:
                    regional_client = ParameterStore(value.split(':')[1], boto3)
                    stage_parameters[param][key] = regional_client.fetch_parameter(
                        value.split(':')[2])
                    continue
                stage_parameters[param][key] = self.parameter_store.fetch_parameter(
                    value.split('resolve:')[1])
            if key not in stage_parameters[param]:
                stage_parameters[param][key] = comparison_parameters[param][key]

        for key, value in stage_parameters[param].items():
            if str(value).startswith('resolve:'):
                if str(value).count(':') > 1:
                    regional_client = ParameterStore(value.split(':')[1], boto3)
                    stage_parameters[param][key] = regional_client.fetch_parameter(
                        value.split(':')[2])
                    continue
                stage_parameters[param][key] = self.parameter_store.fetch_parameter(
                    value.split('resolve:')[1])

        return stage_parameters
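
Both loops above implement the same intrinsic: a value of the form resolve:/path is fetched through the default ParameterStore client, while resolve:region:/path routes the lookup through a regional client. A standalone sketch of that dispatch, reusing the ParameterStore and boto3 names from the surrounding examples (the function name is hypothetical):

def resolve_intrinsic(value, default_store):
    """Resolve a 'resolve:[region:]/path' value against Parameter Store."""
    if not str(value).startswith('resolve:'):
        return value  # plain value, nothing to resolve
    parts = str(value).split(':')
    if len(parts) > 2:
        # resolve:eu-west-1:/my/param -> fetch from that region
        region, name = parts[1], ':'.join(parts[2:])
        return ParameterStore(region, boto3).fetch_parameter(name)
    # resolve:/my/param -> fetch through the default client
    return default_store.fetch_parameter(parts[1])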
Example #5
    def _sc_param_updater(self, comparison_parameters, stage_parameters):
        """
        Compares parameter files used for the Service Catalog deployment type
        """
        for key, value in comparison_parameters.items():
            if str(value).startswith('resolve:'):
                if str(value).count(':') > 1:
                    regional_client = ParameterStore(
                        value.split(':')[1], boto3)
                    stage_parameters[key] = regional_client.fetch_parameter(
                        value.split(':')[2])
                    continue
                stage_parameters[key] = self.parameter_store.fetch_parameter(
                    value.split('resolve:')[1])

            if key not in stage_parameters:
                stage_parameters[key] = comparison_parameters[key]

        for key, value in stage_parameters.items():
            if str(value).startswith('resolve:'):
                if str(value).count(':') > 1:
                    regional_client = ParameterStore(
                        value.split(':')[1], boto3)
                    stage_parameters[key] = regional_client.fetch_parameter(
                        value.split(':')[2])
                    continue
                stage_parameters[key] = self.parameter_store.fetch_parameter(
                    value.split('resolve:')[1])

        return stage_parameters
Example #6
def fetch_required_ssm_params(regions):
    output = {}
    for region in regions:
        parameter_store = ParameterStore(region, boto3)
        output[region] = {
            "s3": parameter_store.fetch_parameter('/cross_region/s3_regional_bucket/{0}'.format(region)),
            "kms": parameter_store.fetch_parameter('/cross_region/kms_arn/{0}'.format(region))
        }
        if region == DEPLOYMENT_ACCOUNT_REGION:
            output[region]["modules"] = parameter_store.fetch_parameter('deployment_account_bucket')
    return output
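
For two illustrative regions, with eu-central-1 assumed to be DEPLOYMENT_ACCOUNT_REGION, fetch_required_ssm_params would return a structure shaped like this (all values are made up):

fetch_required_ssm_params(['eu-central-1', 'us-east-1'])
# {
#     'eu-central-1': {
#         's3': 'adf-regional-bucket-eu-central-1',
#         'kms': 'arn:aws:kms:eu-central-1:111111111111:key/abc123',
#         'modules': 'adf-shared-modules-bucket'  # deployment region only
#     },
#     'us-east-1': {
#         's3': 'adf-regional-bucket-us-east-1',
#         'kms': 'arn:aws:kms:us-east-1:111111111111:key/def456'
#     }
# }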

def main():
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    parameter_store = ParameterStore(
        DEPLOYMENT_ACCOUNT_REGION,
        boto3
    )
    deployment_map = DeploymentMap(
        parameter_store,
        ADF_PIPELINE_PREFIX
    )
    s3 = S3(
        DEPLOYMENT_ACCOUNT_REGION,
        S3_BUCKET_NAME
    )
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')
        ), 'pipeline'
    )

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    try:
        auto_create_repositories = parameter_store.fetch_parameter('auto_create_repositories')
    except ParameterNotFoundError:
        auto_create_repositories = 'enabled'

    threads = []
    for counter, p in enumerate(deployment_map.map_contents.get('pipelines')):
        thread = PropagatingThread(target=worker_thread, args=(
            p,
            organizations,
            auto_create_repositories,
            s3,
            deployment_map,
            parameter_store
        ))
        thread.start()
        threads.append(thread)
        _batcher = counter % 10
        if _batcher == 9:  # a full batch of 10 threads has been started
            _interval = random.randint(5, 11)
            LOGGER.debug('Waiting for %s seconds before starting next batch of 10 threads.', _interval)
            time.sleep(_interval)

    for thread in threads:
        thread.join()
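
The counter check above throttles thread creation: after every tenth thread it sleeps for a random 5-11 second interval before starting the next batch. The same idea as a reusable helper (the name and interface are hypothetical, not part of ADF):

import random
import time

def start_in_batches(thread_factory, items, batch_size=10):
    """Start one thread per item, pausing briefly after every full batch."""
    threads = []
    for counter, item in enumerate(items):
        thread = thread_factory(item)
        thread.start()
        threads.append(thread)
        if counter % batch_size == batch_size - 1:
            # back off before launching the next batch
            time.sleep(random.randint(5, 11))
    return threads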
Example #8
def main():
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    _create_inputs_folder()
    parameter_store = ParameterStore(
        DEPLOYMENT_ACCOUNT_REGION,
        boto3
    )
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, SHARED_MODULES_BUCKET)
    deployment_map = DeploymentMap(
        parameter_store,
        s3,
        ADF_PIPELINE_PREFIX
    )
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')
        ), 'pipeline'
    )
    organizations = Organizations(role)
    clean(parameter_store, deployment_map)
    ensure_event_bus_status(ORGANIZATION_ID)
    try:
        auto_create_repositories = parameter_store.fetch_parameter('auto_create_repositories')
    except ParameterNotFoundError:
        auto_create_repositories = 'enabled'
    threads = []
    _cache = Cache()
    for p in deployment_map.map_contents.get('pipelines', []):
        _source_account_id = (
            p.get('default_providers', {})
            .get('source', {})
            .get('properties', {})
            .get('account_id', {})
        )
        if (_source_account_id
                and int(_source_account_id) != int(DEPLOYMENT_ACCOUNT_ID)
                and not _cache.check(_source_account_id)):
            rule = Rule(p['default_providers']['source']['properties']['account_id'])
            rule.create_update()
            _cache.add(p['default_providers']['source']['properties']['account_id'], True)
        thread = PropagatingThread(target=worker_thread, args=(
            p,
            organizations,
            auto_create_repositories,
            deployment_map,
            parameter_store
        ))
        thread.start()
        threads.append(thread)

    for thread in threads:
        thread.join()
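
The Cache used above only needs two operations: check returns a previously stored value (or a falsy default), and add stores one. A dictionary-backed sketch that satisfies exactly the calls made in this example (the real implementation may differ):

class Cache:
    """Minimal in-memory cache keyed by arbitrary hashable values."""

    def __init__(self):
        self._stash = {}

    def add(self, key, value):
        self._stash[key] = value

    def check(self, key):
        return self._stash.get(key)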
Example #9
def lambda_handler(event, _):
    message = extract_message(event)
    pipeline = extract_pipeline(message)
    parameter_store = ParameterStore(os.environ["AWS_REGION"], boto3)
    secrets_manager = boto3.client(
        "secretsmanager", region_name=os.environ["AWS_REGION"]
    )
    channel = parameter_store.fetch_parameter(
        name="/notification_endpoint/{0}".format(pipeline["name"]),
        with_decryption=False,
    )
    # All Slack URLs must be stored in /adf/slack/channel_name since ADF only
    # has access to the /adf/ prefix by default
    url = json.loads(
        secrets_manager.get_secret_value(
            SecretId="/adf/slack/{0}".format(channel)
        )["SecretString"]
    )

    if is_approval(message):
        send_message(url[channel], create_approval(channel, message))
        return

    if is_bootstrap(event):
        send_message(url[channel],
                     create_bootstrap_message_text(channel, message))
        return

    if is_text(message):
        send_message(url[channel],
                     create_bootstrap_message_text(channel, message))
        return

    send_message(url[channel], create_pipeline_message_text(channel, pipeline))
    return
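
Indexing url[channel] only works because the secret's SecretString is a JSON object keyed by channel name. An illustrative payload (the channel and URL are made up):

import json

secret_string = '{"my-channel": "https://hooks.slack.com/services/T000/B000/XXXX"}'
url = json.loads(secret_string)
print(url['my-channel'])  # the webhook URL handed to send_message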

def main():
    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store,
                                   os.environ["ADF_PIPELINE_PREFIX"])
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, boto3, S3_BUCKET_NAME)
    sts = STS(boto3)
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-org-access-adf'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    for p in deployment_map.map_contents.get('pipelines'):
        pipeline = Pipeline(p)

        for target in p['targets']:
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                for path in step.get('path'):
                    try:
                        regions = step.get(
                            'regions',
                            p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                        pipeline.stage_regions.append(regions)
                        pipeline_target = Target(path, regions,
                                                 target_structure,
                                                 organizations)
                        pipeline_target.fetch_accounts_for_target()
                    except BaseException as error:
                        raise Exception(
                            "Failed to return accounts for {0}".format(path)
                        ) from error

            pipeline.template_dictionary["targets"].append(
                target_structure.account_list)

        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_if_required(s3, pipeline)

        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(os.environ["ADF_PIPELINE_PREFIX"],
                                        pipeline.name),
            s3=None,
            s3_key_path=None)

        cloudformation.create_stack()
Example #11
def lambda_handler(event, _):
    target_role_policies = {
        'adf-cloudformation-deployment-role': 'adf-cloudformation-deployment-role-policy-kms',
        'adf-cloudformation-role': 'adf-cloudformation-role-policy'
    }

    role_policies = {
        'adf-codepipeline-role': 'adf-codepipeline-role-policy',
        'adf-cloudformation-deployment-role': 'adf-cloudformation-deployment-role-policy',
        'adf-cloudformation-role': 'adf-cloudformation-role-policy'
    }

    sts = STS()
    partition = get_partition(REGION_DEFAULT)

    parameter_store = ParameterStore(
        region=event.get('deployment_account_region'),
        role=boto3
    )
    account_id = event.get("account_id")
    kms_key_arns = []
    s3_buckets = []
    for region in list(set([event.get('deployment_account_region')] + event.get("regions", []))):
        kms_key_arn = parameter_store.fetch_parameter(
            f"/cross_region/kms_arn/{region}"
        )
        kms_key_arns.append(kms_key_arn)
        s3_bucket = parameter_store.fetch_parameter(
            f"/cross_region/s3_regional_bucket/{region}"
        )
        s3_buckets.append(s3_bucket)
        try:
            role = sts.assume_cross_account_role(
                f'arn:{partition}:iam::{account_id}:role/adf-cloudformation-deployment-role',
                'base_cfn_role'
            )
            LOGGER.debug("Role has been assumed for %s", account_id)
            update_iam(role, s3_bucket, kms_key_arn, target_role_policies)
        except ClientError as err:
            LOGGER.debug("%s could not be assumed (%s), continuing", account_id, err, exc_info=True)
            continue

    update_iam(boto3, s3_buckets, kms_key_arns, role_policies)

    return event
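
This variant is partition-aware: get_partition maps the default region to the ARN partition so the assumed-role ARN stays valid outside the commercial partition. A minimal sketch, assuming only the commercial and US GovCloud partitions need to be distinguished:

def get_partition(region_name):
    """Return the AWS partition for a region name."""
    return 'aws-us-gov' if region_name.startswith('us-gov') else 'aws'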
Example #12
def main():  #pylint: disable=R0915
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store, ADF_PIPELINE_PREFIX)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, S3_BUCKET_NAME)
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    for p in deployment_map.map_contents.get('pipelines'):
        pipeline = Pipeline(p)

        for target in p.get('targets', []):
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                for path in step.get('path'):
                    regions = step.get(
                        'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                    step_name = step.get('name')
                    params = step.get('params', {})
                    pipeline.stage_regions.append(regions)
                    pipeline_target = Target(path, regions, target_structure,
                                             organizations, step_name, params)
                    pipeline_target.fetch_accounts_for_target()

            pipeline.template_dictionary["targets"].append(
                target_structure.account_list)

        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_pipeline(s3, pipeline)

        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
            s3=None,
            s3_key_path=None,
            account_id=DEPLOYMENT_ACCOUNT_ID)
        cloudformation.create_stack()
Example #13
def lambda_handler(event, _):
    target_role_policies = {
        'adf-cloudformation-deployment-role':
        'adf-cloudformation-deployment-role-policy-kms',
        'adf-cloudformation-role': 'adf-cloudformation-role-policy'
    }

    role_policies = {
        'adf-codepipeline-role': 'adf-codepipeline-role-policy',
        'adf-cloudformation-deployment-role':
        'adf-cloudformation-deployment-role-policy',
        'adf-cloudformation-role': 'adf-cloudformation-role-policy'
    }

    sts = STS()
    parameter_store = ParameterStore(
        region=event.get('deployment_account_region'), role=boto3)
    for region in list(
            set([event.get('deployment_account_region')] +
                event.get("regions", []))):
        kms_key_arn = parameter_store.fetch_parameter(
            "/cross_region/kms_arn/{0}".format(region))
        s3_bucket = parameter_store.fetch_parameter(
            "/cross_region/s3_regional_bucket/{0}".format(region))
        update_iam(boto3, s3_bucket, kms_key_arn, role_policies)
        for account_id in event.get('account_ids'):
            try:
                role = sts.assume_cross_account_role(
                    'arn:aws:iam::{0}:role/{1}'.format(
                        account_id, 'adf-cloudformation-deployment-role'),
                    'base_cfn_role')
                LOGGER.debug("Role has been assumed for %s", account_id)
                update_iam(role, s3_bucket, kms_key_arn, target_role_policies)
            except ClientError as err:
                LOGGER.debug("%s could not be assumed (%s), continuing",
                             account_id,
                             err,
                             exc_info=True)
                continue

    return event
Example #14
 def fetch_parameter_store_value(self, value, key, param=None):
     if str(value).count(':') > 1:
         [_, region, value] = value.split(':')
         regional_client = ParameterStore(region, boto3)
         LOGGER.info("Fetching Parameter from %s", value)
         if param:
             self.stage_parameters[param][
                 key] = regional_client.fetch_parameter(value)
         else:
             self.stage_parameters[key] = regional_client.fetch_parameter(
                 value)
         return True
     [_, value] = value.split(':')
     LOGGER.info("Fetching Parameter from %s", value)
     if param:
         self.stage_parameters[param][
             key] = self.parameter_store.fetch_parameter(value)
     else:
         self.stage_parameters[key] = self.parameter_store.fetch_parameter(
             value)
     return False
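
The boolean return value distinguishes the two intrinsic forms: True for a region-qualified lookup, False for a lookup against the default parameter store. Illustrative calls on an object exposing this method (the parameter path and key are made up):

# resolve:<region>:<path> -> regional lookup, returns True
resolver.fetch_parameter_store_value('resolve:eu-west-1:/adf/my_param', 'MyKey')

# resolve:<path> -> default-store lookup, returns False
resolver.fetch_parameter_store_value('resolve:/adf/my_param', 'MyKey')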
Example #15
def lambda_handler(event, _):
    message = extract_message(event)
    pipeline = extract_pipeline(message)
    parameter_store = ParameterStore(os.environ["AWS_REGION"], boto3)
    channel = parameter_store.fetch_parameter(
        name='/notification_endpoint/{0}'.format(pipeline["name"]),
        with_decryption=False)
    url = parameter_store.fetch_parameter(
        name='/notification_endpoint/hooks/slack/{0}'.format(channel),
        with_decryption=True)

    if is_approval(message):
        send_message(url, create_approval(channel, message))
        return

    if is_bootstrap(event):
        send_message(url, create_bootstrap_message_text(channel, message))
        return

    send_message(url, create_pipeline_message_text(channel, pipeline))
    return

def lambda_handler(event, _):
    parameter_store = ParameterStore(REGION_DEFAULT, boto3)
    configuration_options = ast.literal_eval(
        parameter_store.fetch_parameter('config'))

    to_root_option = list(
        filter(lambda option: option.get("name", []) == "to-root",
               configuration_options.get('moves')))

    action = to_root_option.pop().get('action')
    account_id = event.get('account_id')
    execute_move_action(action, account_id, parameter_store, event)

    return event
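
For the filter over configuration_options.get('moves') to find a to-root entry, the 'config' parameter must deserialize to something shaped like this (structure inferred from the code, the action value is illustrative):

configuration_options = {
    'moves': [
        {'name': 'to-root', 'action': 'remove-base'},
        # ... other move definitions ...
    ]
}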
Example #17
def lambda_handler(event, _):
    sts = STS(boto3)
    parameter_store = ParameterStore(event.get('deployment_account_region'),
                                     boto3)
    for region in list(
            set([event.get('deployment_account_region')] +
                event.get("regions"))):
        kms_key_arn = parameter_store.fetch_parameter(
            "/cross_region/kms_arn/{0}".format(region))
        s3_bucket = parameter_store.fetch_parameter(
            "/cross_region/s3_regional_bucket/{0}".format(region))
        for account_id in event.get('account_ids'):
            try:
                role = sts.assume_cross_account_role(
                    'arn:aws:iam::{0}:role/{1}'.format(
                        account_id, 'adf-cloudformation-deployment-role'),
                    'base_cfn_role')
                IAMUpdater(kms_key_arn, s3_bucket, role)
                kms = KMS(region, boto3, kms_key_arn, account_id)
                kms.enable_cross_account_access()
            except ClientError:
                continue

    return event
Example #18
def main():
    accounts = read_config_files(ACCOUNTS_FOLDER)
    if not accounts:
        LOGGER.info(
            f"Found {len(accounts)} account(s) in configuration file(s). "
            "Account provisioning will not continue."
        )
        return
    LOGGER.info(f"Found {len(accounts)} account(s) in configuration file(s).")
    organizations = Organizations(boto3)
    all_accounts = organizations.get_accounts()
    parameter_store = ParameterStore(os.environ.get('AWS_REGION', 'us-east-1'), boto3)
    adf_role_name = parameter_store.fetch_parameter('cross_account_access_role')
    for account in accounts:
        try:
            account_id = next(
                acc["Id"] for acc in all_accounts
                if acc["Name"] == account.full_name
            )
        except StopIteration:  # the account does not exist yet
            account_id = None
        create_or_update_account(organizations, account, adf_role_name, account_id)

def configure_generic_account(sts, event, region, role):
    """
    Fetches the kms_arn from the deployment account's main region
    and adds it, along with the deployment_account_id parameter, to the
    target account so they can be consumed in CloudFormation. These
    are required for the global.yml in all target accounts.
    """
    deployment_account_role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}'.format(event['deployment_account_id'],
                                           event['cross_account_access_role']),
        'configure_generic')
    parameter_store_deployment_account = ParameterStore(
        event['deployment_account_region'], deployment_account_role)
    parameter_store_target_account = ParameterStore(region, role)
    kms_arn = parameter_store_deployment_account.fetch_parameter(
        '/cross_region/kms_arn/{0}'.format(region))
    parameter_store_target_account.put_parameter('kms_arn', kms_arn)
    parameter_store_target_account.put_parameter(
        'deployment_account_id', event['deployment_account_id'])
Example #20
 def fetch_parameter_store_value(self, value, key, optional=False):  # pylint: disable=too-many-statements
     if self._is_optional(value):
         LOGGER.info("Parameter %s is considered optional", value)
         optional = True
     if str(value).count(':') > 1:
         [_, region, value] = value.split(':')
     else:
         [_, value] = value.split(':')
         region = DEFAULT_REGION
     value = value[:-1] if optional else value
     client = ParameterStore(region, boto3)
     try:
         parameter = self.cache.check('{0}/{1}'.format(
             region, value)) or client.fetch_parameter(value)
     except ParameterNotFoundError:
         if optional:
             LOGGER.info("Parameter %s not found, returning empty string",
                         value)
             parameter = ""
         else:
             raise
     try:
         parent_key = list(
             Resolver.determine_parent_key(self.comparison_parameters,
                                           key))[0]
         if parameter:
             self.cache.add('{0}/{1}'.format(region, value), parameter)
             self.stage_parameters[parent_key][key] = parameter
     except IndexError as error:
         if parameter:
             if self.stage_parameters.get(key):
                 self.stage_parameters[key] = parameter
         else:
             LOGGER.error(
                 "Parameter was not found, unable to fetch it from parameter store"
             )
             raise Exception(
                 "Parameter was not found, unable to fetch it from parameter store"
             ) from error
     return True
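
Here a trailing '?' marks a lookup as optional: value[:-1] strips the marker and a ParameterNotFoundError then yields an empty string instead of propagating. The _is_optional helper is not shown on this page; given the stripping logic, it presumably reduces to a suffix check — a sketch:

@staticmethod
def _is_optional(value):
    """Assumed check: optional parameters end with a '?' marker."""
    return str(value).endswith('?')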
Example #21
def lambda_handler(event, _):
    message = extract_message(event)
    pipeline = extract_pipeline(message)
    parameter_store = ParameterStore(os.environ["AWS_REGION"], boto3)
    secrets_manager = boto3.client('secretsmanager',
                                   region_name=os.environ["AWS_REGION"])
    channel = parameter_store.fetch_parameter(
        name=f'/notification_endpoint/{pipeline["name"]}',
        with_decryption=False,
    )
    # All slack url's must be stored in /adf/slack/channel_name since ADF only
    # has access to the /adf/ prefix by default
    url = json.loads(
        secrets_manager.get_secret_value(
            SecretId=f'/adf/slack/{channel}')['SecretString'])
    if is_approval(message):
        send_message(url[channel], create_approval(channel, message))
        return
    if is_bootstrap(event):
        send_message(url[channel],
                     create_bootstrap_message_text(channel, message))
        return
    send_message(url[channel], create_pipeline_message_text(channel, pipeline))
    return
Example #22
 def fetch_parameter_store_value(self,
                                 value,
                                 key,
                                 param=None,
                                 optional=False):  #pylint: disable=R0912, R0915
     if str(value).count(':') > 1:
         [_, region, value] = value.split(':')
         if value.endswith('?'):
             value = value[:-1]
             LOGGER.info("Parameter %s is considered optional", value)
             optional = True
         regional_client = ParameterStore(region, boto3)
         LOGGER.info("Fetching Parameter from %s", value)
         if param:
             try:
                 self.stage_parameters[param][
                     key] = regional_client.fetch_parameter(value)
             except ParameterNotFoundError:
                 if optional:
                     LOGGER.info(
                         "Parameter %s not found, returning empty string",
                         value)
                     self.stage_parameters[param][key] = ""
                 else:
                     raise
         else:
             try:
                 self.stage_parameters[
                     key] = regional_client.fetch_parameter(value)
             except ParameterNotFoundError:
                 if optional:
                     LOGGER.info(
                         "Parameter %s not found, returning empty string",
                         value)
                     self.stage_parameters[key] = ""
                 else:
                     raise
         return True
     [_, value] = value.split(':')
     if value.endswith('?'):
         value = value[:-1]
         LOGGER.info("Parameter %s is considered optional", value)
         optional = True
     LOGGER.info("Fetching Parameter from %s", value)
     regional_client = ParameterStore(DEFAULT_REGION, boto3)
     if param:
         try:
             self.stage_parameters[param][
                 key] = regional_client.fetch_parameter(value)
         except ParameterNotFoundError:
             if optional:
                 LOGGER.info(
                     "Parameter %s not found, returning empty string",
                     value)
                 self.stage_parameters[param][key] = ""
             else:
                 raise
     else:
         try:
             self.stage_parameters[key] = regional_client.fetch_parameter(
                 value)
         except ParameterNotFoundError:
             if optional:
                 LOGGER.info(
                     "Parameter %s not found, returning empty string",
                     value)
                 self.stage_parameters[key] = ""
             else:
                 raise
     return False
Example #23
def main():
    config = Config()
    config.store_config()

    try:
        parameter_store = ParameterStore(REGION_DEFAULT, boto3)
        deployment_account_id = parameter_store.fetch_parameter(
            'deployment_account_id')

        organizations = Organizations(boto3, deployment_account_id)

        sts = STS(boto3)
        deployment_account_role = prepare_deployment_account(
            sts=sts,
            deployment_account_id=deployment_account_id,
            config=config)

        cache = Cache()
        ou_id = organizations.get_parent_info().get("ou_parent_id")
        account_path = organizations.build_account_path(ou_id=ou_id,
                                                        account_path=[],
                                                        cache=cache)
        s3 = S3(REGION_DEFAULT, boto3, S3_BUCKET_NAME)

        # First Setup the Deployment Account in all regions (KMS Key and S3 Bucket + Parameter Store values)
        for region in list(
                set([config.deployment_account_region] +
                    config.target_regions)):
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=deployment_account_role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path=account_path)

            cloudformation.create_stack()

            update_deployment_account_output_parameters(
                deployment_account_region=config.deployment_account_region,
                region=region,
                deployment_account_role=deployment_account_role,
                cloudformation=cloudformation)

        threads = []
        account_ids = organizations.get_account_ids()
        for account_id in account_ids:
            t = PropagatingThread(target=worker_thread,
                                  args=(account_id, sts, config, s3, cache))
            t.start()
            threads.append(t)

        for thread in threads:
            thread.join()

        step_functions = StepFunctions(
            role=deployment_account_role,
            deployment_account_id=deployment_account_id,
            deployment_account_region=config.deployment_account_region,
            regions=config.target_regions,
            account_ids=[
                i for i in account_ids if i != deployment_account_id
            ],
            update_pipelines_only=1)

        step_functions.execute_statemachine()
    except ParameterNotFoundError:
        LOGGER.info("Deployment Account has not yet been Bootstrapped.")
        return
Example #24
def main():  # pylint: disable=R0915
    LOGGER.info("ADF Version %s", ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    policies = OrganizationPolicy()
    config = Config()
    config.store_config()

    try:
        parameter_store = ParameterStore(REGION_DEFAULT, boto3)
        deployment_account_id = parameter_store.fetch_parameter(
            'deployment_account_id')
        organizations = Organizations(role=boto3,
                                      account_id=deployment_account_id)
        policies.apply(organizations, parameter_store, config.config)
        sts = STS()
        deployment_account_role = prepare_deployment_account(
            sts=sts,
            deployment_account_id=deployment_account_id,
            config=config)

        cache = Cache()
        ou_id = organizations.get_parent_info().get("ou_parent_id")
        account_path = organizations.build_account_path(ou_id=ou_id,
                                                        account_path=[],
                                                        cache=cache)
        s3 = S3(region=REGION_DEFAULT, bucket=S3_BUCKET_NAME)

        kms_and_bucket_dict = {}
        # First Setup/Update the Deployment Account in all regions (KMS Key and
        # S3 Bucket + Parameter Store values)
        for region in list(
                set([config.deployment_account_region] +
                    config.target_regions)):
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=deployment_account_role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path="adf-bootstrap/" + account_path,
                account_id=deployment_account_id)
            cloudformation.create_stack()
            update_deployment_account_output_parameters(
                deployment_account_region=config.deployment_account_region,
                region=region,
                kms_and_bucket_dict=kms_and_bucket_dict,
                deployment_account_role=deployment_account_role,
                cloudformation=cloudformation)
            if region == config.deployment_account_region:
                cloudformation.create_iam_stack()

        # Updating the stack on the master account in deployment region
        cloudformation = CloudFormation(
            region=config.deployment_account_region,
            deployment_account_region=config.deployment_account_region,
            role=boto3,
            wait=True,
            stack_name=None,
            s3=s3,
            s3_key_path='adf-build',
            account_id=ACCOUNT_ID)
        cloudformation.create_stack()
        threads = []
        account_ids = [
            account_id["Id"] for account_id in organizations.get_accounts()
        ]
        for account_id in [
                account for account in account_ids
                if account != deployment_account_id
        ]:
            thread = PropagatingThread(target=worker_thread,
                                       args=(account_id, sts, config, s3,
                                             cache, kms_and_bucket_dict))
            thread.start()
            threads.append(thread)

        for thread in threads:
            thread.join()

        LOGGER.info("Executing Step Function on Deployment Account")
        step_functions = StepFunctions(
            role=deployment_account_role,
            deployment_account_id=deployment_account_id,
            deployment_account_region=config.deployment_account_region,
            regions=config.target_regions,
            account_ids=account_ids,
            update_pipelines_only=0)

        step_functions.execute_statemachine()
    except ParameterNotFoundError:
        LOGGER.info(
            'A Deployment Account is ready to be bootstrapped! '
            'The Account provisioner will now kick into action, '
            'be sure to check out its progress in AWS Step Functions in this account.'
        )
        return
Example #25
def main(): #pylint: disable=R0915
    LOGGER.info("ADF Version %s", ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    scp = SCP()
    config = Config()
    config.store_config()

    try:
        parameter_store = ParameterStore(REGION_DEFAULT, boto3)
        deployment_account_id = parameter_store.fetch_parameter(
            'deployment_account_id'
        )
        organizations = Organizations(
            role=boto3,
            account_id=deployment_account_id
        )
        scp.apply(organizations, parameter_store, config.config)

        sts = STS()
        deployment_account_role = prepare_deployment_account(
            sts=sts,
            deployment_account_id=deployment_account_id,
            config=config
        )

        cache = Cache()
        ou_id = organizations.get_parent_info().get("ou_parent_id")
        account_path = organizations.build_account_path(
            ou_id=ou_id,
            account_path=[],
            cache=cache
        )
        s3 = S3(
            region=REGION_DEFAULT,
            bucket=S3_BUCKET_NAME
        )

        # Updating the stack on the master account in deployment region
        cloudformation = CloudFormation(
            region=config.deployment_account_region,
            deployment_account_region=config.deployment_account_region, # pylint: disable=R0801
            role=boto3,
            wait=True,
            stack_name=None,
            s3=s3,
            s3_key_path='adf-build',
            account_id=ACCOUNT_ID
        )
        cloudformation.create_stack()

        # First Setup/Update the Deployment Account in all regions (KMS Key and S3 Bucket + Parameter Store values)
        for region in list(set([config.deployment_account_region] + config.target_regions)):
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=deployment_account_role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path=account_path,
                account_id=deployment_account_id
            )
            cloudformation.create_stack()

            update_deployment_account_output_parameters(
                deployment_account_region=config.deployment_account_region,
                region=region,
                deployment_account_role=deployment_account_role,
                cloudformation=cloudformation
            )

        threads = []
        account_ids = organizations.get_account_ids()
        for account_id in [account for account in account_ids if account != deployment_account_id]:
            thread = PropagatingThread(target=worker_thread, args=(
                account_id,
                sts,
                config,
                s3,
                cache
            ))
            thread.start()
            threads.append(thread)

        for thread in threads:
            thread.join()

        step_functions = StepFunctions(
            role=deployment_account_role,
            deployment_account_id=deployment_account_id,
            deployment_account_region=config.deployment_account_region,
            regions=config.target_regions,
            account_ids=account_ids,
            update_pipelines_only=0
        )

        step_functions.execute_statemachine()
    except ParameterNotFoundError:
        LOGGER.info(
            'You are now ready to bootstrap a deployment account '
            'by moving it into your deployment OU. '
            'Once you have moved it into the deployment OU, '
            'be sure to check out its progress in AWS Step Functions'
        )
        return
Example #26
 def load_ssm(self):
     parameter_store = ParameterStore()
     resp = parameter_store.fetch_parameter(self.path)
     return resp
Example #27
def main():
    try:
        parameter_store = ParameterStore(REGION_DEFAULT, boto3)
        deployment_account_id = parameter_store.fetch_parameter(
            'deployment_account_id')

        config = Config()
        config.store_config()

        if deployment_account_id is None:
            raise NotConfiguredError(
                "Deployment Account has not yet been configured")

        organizations = Organizations(boto3, deployment_account_id)

        sts = STS(boto3)
        ou_id = organizations.get_parent_info().get("ou_parent_id")

        deployment_account_role = ensure_deployment_account_configured(
            sts, config, ou_id, deployment_account_id)

        cache = Cache()
        account_path = organizations.build_account_path(
            ou_id,
            [],  # Initial empty array to hold OU Path
            cache)

        s3 = S3(REGION_DEFAULT, boto3, S3_BUCKET)

        # (First) Setup the Deployment Account in all regions (KMS Key and S3 Bucket + Parameter Store values)
        for region in list(
                set([config.deployment_account_region] +
                    config.target_regions)):
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=config.deployment_account_region,
                role=deployment_account_role,
                wait=True,
                stack_name=None,
                s3=s3,
                s3_key_path=account_path,
                file_path=None,
            )

            cloudformation.create_stack()

            update_deployment_account_output_parameters(
                deployment_account_region=config.deployment_account_region,
                region=region,
                deployment_account_role=deployment_account_role,
                cloudformation=cloudformation)

        threads = []
        account_ids = organizations.get_account_ids()
        for account_id in account_ids:
            t = PropagatingThread(target=worker_thread,
                                  args=(account_id, sts, config, s3, cache))
            t.start()
            threads.append(t)

        for thread in threads:
            thread.join()

        step_functions = StepFunctions(
            role=deployment_account_role,
            deployment_account_id=deployment_account_id,
            deployment_account_region=config.deployment_account_region,
            regions=config.target_regions,
            account_ids=[
                i for i in account_ids if i != deployment_account_id
            ],
            update_pipelines_only=False)

        step_functions.execute_statemachine()

    except NotConfiguredError as not_configured_error:
        LOGGER.info(not_configured_error)