def delete_vpc_peering_connection(self):
    try:
        ssm = SSM(self.logger)

        # read values from SSM Parameter Store
        requester_account_name = self.params.get('AccountName')
        requester_account_id = self.params.get('PeeringAccountID')
        requester_region = self.params.get('Region')
        self.logger.debug(requester_account_id)
        self.logger.info("Peering Account ID: {}".format(requester_account_id))

        prefix = self.params.get('PeeringConnectionKeyPrefix')
        suffix = 'peering_connections'
        peer_connection_id_parameter_key = '{}/{}/{}'.format(
            prefix, suffix, requester_account_name)
        peer_connection_id = ssm.get_parameter(peer_connection_id_parameter_key)
        self.logger.debug(peer_connection_id)
        self.logger.info("Peering Connection ID: {}".format(peer_connection_id))

        # instantiate EC2 session in the requester account
        ec2_peer_requester = EC2(self.logger, requester_region,
                                 credentials=self.assume_role(requester_account_id))

        # delete the VPC peering connection
        response = ec2_peer_requester.delete_vpc_peering_connection(peer_connection_id)
        self.logger.info("Peering Connection ID: {} deleted".format(peer_connection_id))
        self.logger.debug(response)
    except Exception as e:
        message = {'FILE': __file__.split('/')[-1],
                   'CLASS': self.__class__.__name__,
                   'METHOD': inspect.stack()[0][3],
                   'EXCEPTION': str(e)}
        self.logger.exception(message)
        raise
class Metrics(object):
    def __init__(self, logger):
        self.logger = logger
        self.ssm = SSM(logger)

    def _get_parameter_value(self, key):
        response = self.ssm.describe_parameters(key)
        self.logger.info(response)
        # get the parameter value only if the key exists
        if response:
            value = self.ssm.get_parameter(key)
            self.logger.info(value)
        else:
            value = 'ssm-param-key-not-found'
        return value

    # Send anonymous metrics
    def metrics(self, data, solution_id='SO0058',
                url='https://metrics.awssolutionsbuilder.com/generic'):
        try:
            send_metrics = self._get_parameter_value('/solutions/stno/metrics_flag')
            if send_metrics.lower() == 'yes':
                uuid = self._get_parameter_value('/solutions/stno/customer_uuid')
                time_stamp = {'TimeStamp': str(datetime.utcnow().isoformat())}
                params = {'Solution': solution_id,
                          'UUID': uuid,
                          'Data': data}
                metrics = dict(time_stamp, **params)
                json_data = json.dumps(metrics, cls=DecimalEncoder)
                headers = {'content-type': 'application/json'}
                r = requests.post(url, data=json_data, headers=headers)
                code = r.status_code
                return code
        except Exception:
            # anonymous metrics are best-effort; never fail the caller
            pass
def create_secure_ssm_parameter(self):
    try:
        self.logger.info("Executing: " + self.__class__.__name__ + "/"
                         + inspect.stack()[0][3])
        ssm = SSM(self.logger)

        # put the value in the SSM Parameter Store as a SecureString;
        # if the key already exists the value will be overwritten
        self.logger.info("Create/Update Secure SSM Parameter Key: {}".format(
            self.params.get('PSKey')))
        response = ssm.put_parameter(self.params.get('PSKey'),
                                     self.params.get('PSValue'),
                                     self.params.get('PSDescription'),
                                     'SecureString')
        self.logger.info(response)
        return response  # returns the parameter version number
    except Exception as e:
        message = {
            'FILE': __file__.split('/')[-1],
            'CLASS': self.__class__.__name__,
            'METHOD': inspect.stack()[0][3],
            'EXCEPTION': str(e)
        }
        self.logger.exception(message)
        raise
def get_parameter_values(self):
    ssm = SSM(self.logger)
    parameters = {}
    # read values from SSM Parameter Store
    for key_name in self.params.get('SSMParameterKeys'):
        value = ssm.get_parameter(key_name)
        parameters.update({key_name: value})
    self.logger.info(parameters)
    return parameters
def put_ssm_parameter(self, key, value):
    try:
        ssm = SSM(self.logger)
        response = ssm.describe_parameters(key)
        self.logger.info(response)
        # put parameter only if the key does not already exist
        if not response:
            ssm.put_parameter(key, value)
    except Exception as e:
        self.logger.info(e)
def create_vpc_peering_connection(self):
    try:
        ssm = SSM(self.logger)
        requester_account_id = self.params.get('PeeringAccountID')
        requester_vpc_id = self.params.get('PeeringVPCID')
        requester_region = self.params.get('PeeringRegion')
        accepter_region = self.params.get('Region')
        accepter_account_id = self.params.get('AccountID')
        accepter_vpc_id = self.params.get('VPCID')

        # instantiate EC2 sessions
        ec2_peer_requester = EC2(self.logger, requester_region,
                                 credentials=self.assume_role(requester_account_id))
        ec2_peer_accepter = EC2(self.logger, accepter_region,
                                credentials=self.assume_role(accepter_account_id))

        # request vpc peering connection
        response = ec2_peer_requester.create_vpc_peering_connection(accepter_vpc_id,
                                                                    requester_vpc_id,
                                                                    accepter_account_id,
                                                                    accepter_region)
        # get the peering connection id from the response
        peer_connection_id = response.get('VpcPeeringConnection', {}).get('VpcPeeringConnectionId')
        self.check_peering_status(ec2_peer_requester, peer_connection_id,
                                  ['pending-acceptance', 'active'],
                                  ['failed', 'rejected'])

        # accept vpc peering connection
        resp_peer_connection_accept = ec2_peer_accepter.accept_vpc_peering_connection(peer_connection_id)
        accepter_peer_connection_info = resp_peer_connection_accept.get('VpcPeeringConnection')
        if accepter_peer_connection_info.get('Status').get('Code') != 'active':
            self.check_peering_status(ec2_peer_requester, peer_connection_id,
                                      ['active'], ['failed', 'rejected'])

        # get vpc details
        requester_vpc_cidr = response.get('VpcPeeringConnection', {}).get('RequesterVpcInfo').get('CidrBlock')
        accepter_vpc_cidr = accepter_peer_connection_info.get('AccepterVpcInfo').get('CidrBlock')

        # write peering connection id to SSM Parameter Store
        requester_account_name = self.params.get('AccountName')
        prefix = self.params.get('PeeringConnectionKeyPrefix')
        suffix = 'peering_connections'
        peering_connection_id_parameter_key = '{}/{}/{}'.format(
            prefix, suffix, requester_account_name)
        ssm.put_parameter(peering_connection_id_parameter_key, peer_connection_id)
        self.logger.info("SSM Parameter key: {} has been written".format(
            peering_connection_id_parameter_key))

        return {
            'PeerConnectionID': peer_connection_id,
            'RequesterAccountID': requester_account_id,
            'AccepterAccountID': accepter_account_id,
            'RequesterVPCCIDR': requester_vpc_cidr,
            'AccepterVPCCIDR': accepter_vpc_cidr
        }
    except Exception as e:
        message = {'FILE': __file__.split('/')[-1],
                   'CLASS': self.__class__.__name__,
                   'METHOD': inspect.stack()[0][3],
                   'EXCEPTION': str(e)}
        self.logger.exception(message)
        raise
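# create_vpc_peering_connection() above calls self.check_peering_status(),
# which is not included in this snippet. The standalone sketch below shows one
# plausible shape for that helper; it assumes the EC2 wrapper exposes a
# describe_vpc_peering_connections(<ids>) method returning the usual boto3
# response shape, which is an assumption, not the solution's actual API.
import time


def check_peering_status(ec2_client, peer_connection_id, expected_states,
                         failed_states, wait_seconds=10, max_retries=30):
    """Poll a VPC peering connection until it reaches an expected state."""
    for _ in range(max_retries):
        response = ec2_client.describe_vpc_peering_connections(
            [peer_connection_id])
        connections = response.get('VpcPeeringConnections', [])
        status = connections[0].get('Status', {}).get('Code') \
            if connections else None
        if status in expected_states:
            return status
        if status in failed_states:
            raise Exception("Peering connection {} entered state: {}".format(
                peer_connection_id, status))
        time.sleep(wait_seconds)
    raise Exception("Timed out waiting for peering connection {}".format(
        peer_connection_id))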
class Metrics(object):
    def __init__(self, logger):
        self.logger = logger
        self.ssm = SSM(logger)

    def _get_parameter_value(self, key):
        response = self.ssm.describe_parameters(key)
        self.logger.info(response)
        # get the parameter value only if the key exists
        if response:
            value = self.ssm.get_parameter(key)
            self.logger.info(value)
        else:
            value = 'ssm-param-key-not-found'
        return value

    # Send anonymous metrics
    def metrics(self, data, solution_id='SO0044',
                url='https://metrics.awssolutionsbuilder.com/generic'):
        try:
            send_metrics = self._get_parameter_value('/org/primary/metrics_flag')
            if send_metrics.lower() == 'yes':
                uuid = self._get_parameter_value('/org/primary/customer_uuid')
                time_stamp = {'TimeStamp': str(datetime.utcnow().isoformat())}
                params = {'Solution': solution_id,
                          'UUID': uuid,
                          'Data': data}
                metrics = dict(time_stamp, **params)
                json_data = json.dumps(metrics, indent=4, cls=DecimalEncoder,
                                       sort_keys=True)
                headers = {'content-type': 'application/json'}
                r = requests.post(url, data=json_data, headers=headers)
                code = r.status_code
                return code
        except Exception as e:
            message = {
                'FILE': __file__.split('/')[-1],
                'CLASS': self.__class__.__name__,
                'METHOD': inspect.stack()[0][3],
                'EXCEPTION': str(e)
            }
            self.logger.exception(message)
            raise
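# A minimal usage sketch for the Metrics class above (illustrative only;
# Logger and DecimalEncoder come from the solution's supporting modules and
# are not defined in this snippet):
#
#   logger = Logger(loglevel='info')
#   metrics = Metrics(logger)
#   status_code = metrics.metrics({'PipelineRunCount': '1'})
#   # posts the payload only when the '/org/primary/metrics_flag' SSM
#   # parameter is set to 'yes'; otherwise metrics() returns None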
def delete_secure_ssm_parameter(self):
    try:
        self.logger.info("Executing: " + self.__class__.__name__ + "/"
                         + inspect.stack()[0][3])
        ssm = SSM(self.logger)
        self.logger.info("Delete Secure SSM Parameter Key: {}".format(
            self.params.get('PSKey')))
        response = ssm.delete_parameter(self.params.get('PSKey'))
        return response  # should be an empty dict {}
    except Exception as e:
        message = {
            'FILE': __file__.split('/')[-1],
            'CLASS': self.__class__.__name__,
            'METHOD': inspect.stack()[0][3],
            'EXCEPTION': str(e)
        }
        self.logger.exception(message)
        raise
def ssm_put_parameters(self):
    try:
        self.logger.info("Executing: " + self.__class__.__name__ + "/"
                         + inspect.stack()[0][3])
        self.logger.info(self.params)
        ssm = SSM(self.logger)
        ssm_params = self.params.get('SSMParameters')
        self.logger.info(ssm_params)
        ssm_value = 'NotFound'
        if ssm_params is not None and type(ssm_params) is dict:
            # iterate through the keys to save them in SSM Parameter Store
            for key, value in ssm_params.items():
                if value.startswith('$[') and value.endswith(']'):
                    value = value[2:-1]
                # Iterate through all the keys in the event
                # (includes the nested keys)
                for k, v in self.nested_dictionary_iteration(self.event):
                    if value.lower() == k.lower():
                        ssm_value = v
                        break
                    else:
                        ssm_value = 'NotFound'
                if ssm_value == 'NotFound':
                    # Raise an exception if the key is not found in the
                    # State Machine output
                    raise Exception(
                        "Unable to find the key: {} in the State Machine"
                        " Output".format(value))
                else:
                    self.logger.info("Adding {}: {} into SSM PS.".format(
                        key, ssm_value))
                    ssm.put_parameter(key, ssm_value)
        else:
            self.logger.info("Nothing to add in SSM Parameter Store")
        return self.event
    except Exception as e:
        message = {
            'FILE': __file__.split('/')[-1],
            'CLASS': self.__class__.__name__,
            'METHOD': inspect.stack()[0][3],
            'EXCEPTION': str(e)
        }
        self.logger.exception(message)
        gf = GeneralFunctions(self.event, self.logger)
        gf.send_failure_to_cfn()
        raise
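# ssm_put_parameters() above depends on a nested_dictionary_iteration()
# helper that is not shown in this snippet. The sketch below illustrates one
# way such a generator could be written (assumed behaviour: yield every
# key/value pair in the event, descending into nested dictionaries); the
# solution's own helper is a method on the handler class and may differ.
def nested_dictionary_iteration(dictionary):
    """Yield (key, value) pairs, descending into nested dictionaries."""
    for key, value in dictionary.items():
        if isinstance(value, dict):
            yield key, value
            # recurse into the nested dictionary
            for nested_key, nested_value in nested_dictionary_iteration(value):
                yield nested_key, nested_value
        else:
            yield key, value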
def __init__(self, logger, wait_time, manifest_file_path, sm_arn_launch_avm):
    self.state_machine = StateMachine(logger)
    self.ssm = SSM(logger)
    self.param_handler = ParamsHandler(logger)
    self.logger = logger
    self.manifest_file_path = manifest_file_path
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    self.wait_time = wait_time
    self.sm_arn_launch_avm = sm_arn_launch_avm
    self.manifest = None
    self.list_sm_exec_arns = []
def __init__(self, logger, sm_arns_map, staging_bucket, manifest_file_path,
             pipeline_stage, token, execution_mode, primary_account_id):
    self.state_machine = StateMachine(logger)
    self.ssm = SSM(logger)
    self.s3 = S3(logger)
    self.send = Metrics(logger)
    self.param_handler = ParamsHandler(logger)
    self.logger = logger
    self.sm_arns_map = sm_arns_map
    self.manifest = None
    self.staging_bucket = staging_bucket
    self.manifest_file_path = manifest_file_path
    self.token = token
    self.pipeline_stage = pipeline_stage
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    if execution_mode.lower() == 'sequential':
        self.isSequential = True
    else:
        self.isSequential = False
    self.index = 100
    self.primary_account_id = primary_account_id
def __init__(self, logger, wait_time, manifest_file_path, sm_arn_launch_avm,
             batch_size):
    self.state_machine = StateMachine(logger)
    self.ssm = SSM(logger)
    self.param_handler = ParamsHandler(logger)
    self.logger = logger
    self.manifest_file_path = manifest_file_path
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    self.wait_time = wait_time
    self.sm_arn_launch_avm = sm_arn_launch_avm
    self.manifest = None
    self.list_sm_exec_arns = []
    self.batch_size = batch_size
    self.avm_product_name = None
    self.avm_portfolio_name = None
    self.avm_params = None
    self.root_id = None
def __init__(self, logger, wait_time, manifest_file_path, sm_arn_stackset,
             staging_bucket, execution_mode):
    self.state_machine = StateMachine(logger)
    self.ssm = SSM(logger)
    self.s3 = S3(logger)
    self.send = Metrics(logger)
    self.param_handler = ParamsHandler(logger)
    self.logger = logger
    self.manifest_file_path = manifest_file_path
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    self.wait_time = wait_time
    self.sm_arn_stackset = sm_arn_stackset
    self.manifest = None
    self.list_sm_exec_arns = []
    self.staging_bucket = staging_bucket
    self.root_id = None
    self.uuid = uuid4()
    self.state_machine_event = {}
    self.logger.info("Running {} mode".format(execution_mode))
    if execution_mode.lower() == 'sequential':
        self.sequential_flag = True
    else:
        self.sequential_flag = False
def __init__(self, logger, wait_time, manifest_file_path, sm_arn_launch_avm,
             batch_size):
    self.state_machine = StateMachine(logger)
    self.ssm = SSM(logger)
    self.sc = SC(logger)
    self.param_handler = CFNParamsHandler(logger)
    self.logger = logger
    self.manifest_file_path = manifest_file_path
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    self.wait_time = wait_time
    self.sm_arn_launch_avm = sm_arn_launch_avm
    self.manifest = None
    self.list_sm_exec_arns = []
    self.batch_size = batch_size
    self.avm_product_name = None
    self.avm_product_id = None
    self.avm_artifact_id = None
    self.avm_params = None
    self.root_id = None
    self.sc_portfolios = {}
    self.sc_products = {}
    self.provisioned_products = {}  # [product_id] = []
    self.provisioned_products_by_account = {}  # [account] = [] list of ppids
def __init__(self, logger):
    self.logger = logger
    self.ssm = SSM(self.logger)
    self.kms = KMS(self.logger)
    self.assume_role = AssumeRole()
    logger.error("master_avm_template does not have a valid value. "
                 "Allowed values: 'yes' or 'no'")
    sys.exit(1)


if __name__ == '__main__':
    if len(sys.argv) > 4:
        log_level = sys.argv[1]
        lambda_arn_param = sys.argv[2]
        manifest_file_path = sys.argv[3]
        master_avm_template = sys.argv[4]

        logger = Logger(loglevel=log_level)
        TEMPLATE_KEY_PREFIX = '_aws_landing_zone_templates_staging'
        ssm = SSM(logger)
        uuid = uuid.uuid4()
        manifest = Manifest(manifest_file_path)
        manifest_file_name = ('manifest.yaml'
                              if master_avm_template.lower() == 'yes'
                              else 'add_on_manifest.yaml')
        logger.info("Manifest File Name: {}".format(manifest_file_name))
        manifest_folder = manifest_file_path[:-len(manifest_file_name)]
        lambda_arn = ssm.get_parameter(lambda_arn_param)
        master_avm_files = 'master_avm_files.json'
        add_on_avm_files = 'add_on_avm_files.json'
        file_mode = 'w'

        for portfolio in manifest.portfolios:
            for product in portfolio.products:
                if product.product_type.lower() == 'baseline':
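# Usage sketch for the command-line entry point above (script name and
# argument values are placeholders, not taken from the solution):
#
#   python <this_script>.py <log_level> <lambda_arn_ssm_param_key> \
#       <manifest_file_path> <master_avm_template: yes|no>
#
# sys.argv[1] sets the log level, sys.argv[2] names the SSM parameter that
# stores the Lambda ARN, sys.argv[3] points at the manifest file, and
# sys.argv[4] selects between manifest.yaml ('yes') and add_on_manifest.yaml.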
class ParamsHandler(object): def __init__(self, logger): self.logger = logger self.ssm = SSM(self.logger) self.kms = KMS(self.logger) self.assume_role = AssumeRole() def _session(self, region, account_id): # instantiate EC2 sessions return EC2(self.logger, region, credentials=self.assume_role(self.logger, account_id)) def _extract_string(self, str, search_str): return str[len(search_str):] def _get_ssm_params(self, ssm_parm_name): try: return self.ssm.get_parameter(ssm_parm_name) except Exception as e: raise Exception( "Missing SSM parameter value for: {} in the SSM Parameter Store." .format(ssm_parm_name)) def _get_kms_key_id(self): alias_name = environ.get('kms_key_alias_name') response = self.kms.describe_key(alias_name) self.logger.debug(response) key_id = response.get('KeyMetadata', {}).get('KeyId') return key_id def get_azs_from_member_account(self, region, qty, account, key_az=None): """gets a predefined quantity of (random) az's from a specified region Args: region (str): region name qty: quantity of az's to return account: account id of the member account Returns: list: availability zone names """ try: if key_az: self.logger.info( "Looking up values in SSM parameter:{}".format(key_az)) existing_param = self.ssm.describe_parameters(key_az) if existing_param: self.logger.info( 'Found existing SSM parameter, returning exising AZ list.' ) return self.ssm.get_parameter(key_az) if account is not None: ec2 = self._session(region, account) self.logger.info( "Getting list of AZs in region: {} from account: {}". format(region, account)) return self._get_az(ec2, key_az, qty) else: self.logger.info( "Creating EC2 Session in {} region".format(region)) ec2 = EC2(self.logger, region) return self._get_az(ec2, key_az, qty) except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'CLASS': self.__class__.__name__, 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def _get_az(self, ec2, key_az, qty): # Get AZs az_list = ec2.describe_availability_zones() self.logger.info("_get_azs output: %s" % az_list) random_az_list = ','.join(random.sample(az_list, qty)) description = "Contains random AZs selected by Landing Zone Solution" if key_az: self.ssm.put_parameter(key_az, random_az_list, description) return random_az_list def create_key_pair(self, account, region, param_key_material=None, param_key_fingerprint=None, param_key_name=None): if param_key_name: self.logger.info( "Looking up values in SSM parameter:{}".format(param_key_name)) existing_param = self.ssm.describe_parameters(param_key_name) if existing_param: return self.ssm.get_parameter(param_key_name) key_name = sanitize( "%s_%s_%s_%s" % ('lz', account, region, time.strftime("%Y-%m-%dT%H-%M-%S"))) try: ec2 = self._session(region, account) # create EC2 key pair in member account self.logger.info( "Create key pair in the member account {} in region: {}". format(account, region)) response = ec2.create_key_pair(key_name) # add key material and fingerprint in the SSM Parameter Store self.logger.info("Adding Key Material and Fingerprint to SSM PS") description = "Contains EC2 key pair asset created by Landing Zone Solution: " \ "EC2 Key Pair Custom Resource." 
# Get Landing Zone KMS Key ID key_id = self._get_kms_key_id() if param_key_fingerprint: self.ssm.put_parameter_use_cmk(param_key_fingerprint, response.get('KeyFingerprint'), key_id, description) if param_key_material: self.ssm.put_parameter_use_cmk(param_key_material, response.get('KeyMaterial'), key_id, description) if param_key_name: self.ssm.put_parameter(param_key_name, key_name, description) return key_name except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'CLASS': self.__class__.__name__, 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def random_password(self, length, key_password=None, alphanum=True): """Generates a random string, by default only including letters and numbers Args: length (int): length of string to generate alphanum (bool): [optional] if False it will also include ';:=+!@#%^&*()[]{}' in the character set """ try: response = '_get_ssm_secure_string_' + key_password if key_password: self.logger.info( "Looking up values in SSM parameter:{}".format( key_password)) existing_param = self.ssm.describe_parameters(key_password) if existing_param: return response additional = '' if not alphanum: additional = ';:=+!@#%^&*()[]{}' chars = string.ascii_uppercase + string.ascii_lowercase + string.digits + additional # Making sure the password has two numbers and symbols at the very least password = ''.join(random.SystemRandom().choice(chars) for _ in range(length-4)) + \ ''.join(random.SystemRandom().choice(string.digits) for _ in range(2)) + \ ''.join(random.SystemRandom().choice(additional) for _ in range(2)) self.logger.info("Adding Random password to SSM PS") description = "Contains random password created by Landing Zone Solution" if key_password: key_id = self._get_kms_key_id() self.ssm.put_parameter_use_cmk(key_password, password, key_id, description) return response except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'CLASS': self.__class__.__name__, 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def update_params(self, params_in, account=None, region=None, substitute_ssm_values=True): """ Args: params_in (list): Python List of dict of input params e.g. [{ "ParameterKey": "LoggingAccountId", "ParameterValue": "$[alfred_ssm_/org/member/logging/account_id]" },{ "ParameterKey": "foo", "ParameterValue": "bar" }] Return: params_out (dict): Python dict of output params e.g. { "LoggingAccountId": "${AWS::AccountId}", "foo": "bar" } """ try: self.logger.info("params in : {}".format(params_in)) params_out = {} for param in params_in: key = param.get("ParameterKey") value = param.get("ParameterValue") if not isinstance(value, list): if value.startswith('$[') and value.endswith(']'): # Apply transformations keyword = value[2:-1] # Check if supported keyword e.g. alfred_ssm_, alfred_genaz_, alfred_getaz_, alfred_genuuid, etcself. if keyword.startswith('alfred_ssm_'): ssm_param_name = self._extract_string( keyword, 'alfred_ssm_') if ssm_param_name: # If this flag is True, it will replace the SSM parameter name i.e. /org/member/ss/directory-name with its # value i.e. example, whereas if its False, it will leave the parameter name as-is if substitute_ssm_values: value = self._get_ssm_params( ssm_param_name) else: raise Exception( "Missing SSM parameter name for: {} in the parameters JSON file." 
.format(key)) elif keyword.startswith('alfred_genkeypair'): keymaterial_param_name = None keyfingerprint_param_name = None keyname_param_name = None ssm_parameters = param.get('ssm_parameters', []) if type(ssm_parameters) is list: for ssm_parameter in ssm_parameters: val = ssm_parameter.get('value')[2:-1] if val.lower() == 'keymaterial': keymaterial_param_name = ssm_parameter.get( 'name') elif val.lower() == 'keyfingerprint': keyfingerprint_param_name = ssm_parameter.get( 'name') elif val.lower() == 'keyname': keyname_param_name = ssm_parameter.get( 'name') value = self.create_key_pair( account, region, keymaterial_param_name, keyfingerprint_param_name, keyname_param_name) elif keyword.startswith('alfred_genpass_'): sub_string = self._extract_string( keyword, 'alfred_genpass_') if sub_string: pw_length = int(sub_string) else: pw_length = 8 password_param_name = None ssm_parameters = param.get('ssm_parameters', []) if type(ssm_parameters) is list: for ssm_parameter in ssm_parameters: val = ssm_parameter.get('value')[2:-1] if val.lower() == 'password': password_param_name = ssm_parameter.get( 'name') value = self.random_password( pw_length, password_param_name, False) elif keyword.startswith('alfred_genaz_'): sub_string = self._extract_string( keyword, 'alfred_genaz_') if sub_string: no_of_az = int(sub_string) else: no_of_az = 2 az_param_name = None ssm_parameters = param.get('ssm_parameters', []) if type(ssm_parameters) is list: for ssm_parameter in ssm_parameters: val = ssm_parameter.get('value')[2:-1] if val.lower() == 'az': az_param_name = ssm_parameter.get( 'name') value = self.get_azs_from_member_account( region, no_of_az, account, az_param_name) else: value = keyword params_out.update({key: value}) self.logger.info("params out : {}".format(params_out)) return params_out except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'CLASS': self.__class__.__name__, 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise
class CFNParamsHandler(object):
    """Walks the CFN parameters passed by users to the state machines and the
    related SSM parameters to resolve the correct parameter, create the
    parameter value, and update SSM parameters as applicable. For example, if
    a CFN parameter is passed, it is saved in the SSM Parameter Store.
    """

    def __init__(self, logger):
        self.logger = logger
        self.ssm = SSM(self.logger)
        self.kms = KMS(self.logger)
        self.assume_role = AssumeRole()

    def _session(self, region, account_id=None):
        # instantiate EC2 session
        if account_id is None:
            return EC2(self.logger, region)
        else:
            return EC2(self.logger, region,
                       credentials=self.assume_role(self.logger, account_id))

    def _get_ssm_params(self, ssm_parm_name):
        return self.ssm.get_parameter(ssm_parm_name)

    def _get_kms_key_id(self):
        alias_name = environ.get('KMS_KEY_ALIAS_NAME')
        response = self.kms.describe_key(alias_name)
        self.logger.debug(response)
        key_id = response.get('KeyMetadata', {}).get('KeyId')
        return key_id

    def get_azs_from_member_account(self, region, qty, account, key_az=None):
        """Gets a predefined quantity of (random) AZs from a specified region.

        Args:
            region (str): region name
            qty: quantity of AZs to return
            account: account id of the member account
            key_az (str): SSM Parameter Store key where an existing AZ list
                is stored

        Returns:
            list: availability zone names
        """
        if key_az:
            self.logger.info(
                "Looking up values in SSM parameter:{}".format(key_az))
            existing_param = self.ssm.describe_parameters(key_az)
            if existing_param:
                self.logger.info('Found existing SSM parameter, returning'
                                 ' existing AZ list.')
                return self.ssm.get_parameter(key_az)
        if account is not None:
            acct = account[0] if isinstance(account, list) else account
            ec2 = self._session(region, acct)
            self.logger.info("Getting list of AZs in region: {} from"
                             " account: {}".format(region, acct))
            return self._get_az(ec2, key_az, qty)
        else:
            self.logger.info(
                "Creating EC2 Session in {} region".format(region))
            ec2 = EC2(self.logger, region)
            return self._get_az(ec2, key_az, qty)

    def _get_az(self, ec2, key_az, qty):
        # Get AZs
        az_list = ec2.describe_availability_zones()
        self.logger.info("_get_azs output: %s" % az_list)
        random_az_list = ','.join(random.sample(az_list, qty))
        description = "Contains random AZs selected by Landing Zone Solution"
        if key_az:
            self.ssm.put_parameter(key_az, random_az_list, description)
        return random_az_list

    def _create_key_pair(self, account, region, param_key_material=None,
                         param_key_fingerprint=None, param_key_name=None):
        """Creates an EC2 key pair if it does not exist already.

        Args:
            account: member account id
            region: region name
            param_key_material: SSM key for the key material used to encrypt
                and decrypt data. Defaults to None
            param_key_fingerprint: SSM key for the key fingerprint.
                Defaults to None
            param_key_name: SSM key for the key name. A key name will be
                generated automatically if there is none. Defaults to None

        Returns:
            key name
        """
        if param_key_name:
            self.logger.info(
                "Looking up values in SSM parameter:{}".format(param_key_name))
            existing_param = self.ssm.describe_parameters(param_key_name)
            if existing_param:
                return self.ssm.get_parameter(param_key_name)

        key_name = sanitize("%s_%s_%s_%s" % (
            'lz', account, region, time.strftime("%Y-%m-%dT%H-%M-%S")))

        ec2 = self._session(region, account)
        # create EC2 key pair in the member account
        self.logger.info("Create key pair in the member account {} in"
                         " region: {}".format(account, region))
        response = ec2.create_key_pair(key_name)

        # add key material and fingerprint to the SSM Parameter Store
        self.logger.info("Adding Key Material and Fingerprint to SSM PS")
        description = "Contains EC2 key pair asset created by " \
                      "Landing Zone Solution: " \
                      "EC2 Key Pair Custom Resource."

        # Get AWS Landing Zone KMS Key ID
        key_id = self._get_kms_key_id()
        if param_key_fingerprint:
            self.ssm.put_parameter_use_cmk(param_key_fingerprint,
                                           response.get('KeyFingerprint'),
                                           key_id, description)
        if param_key_material:
            self.ssm.put_parameter_use_cmk(param_key_material,
                                           response.get('KeyMaterial'),
                                           key_id, description)
        if param_key_name:
            self.ssm.put_parameter(param_key_name, key_name, description)

        return key_name

    def random_password(self, length, key_password=None, alphanum=True):
        """Generates a random string, by default only including letters
        and numbers.

        Args:
            length (int): length of string to generate
            key_password (str): SSM Parameter Store key where an existing
                password is stored
            alphanum (bool): [optional] if False it will also include
                ';:=+!@#%^&*()[]{}' in the character set
        """
        response = '_get_ssm_secure_string_' + key_password
        param_exists = False
        if key_password:
            self.logger.info(
                "Looking up values in SSM parameter:{}".format(key_password))
            existing_param = self.ssm.describe_parameters(key_password)
            if existing_param:
                param_exists = True

        if not param_exists:
            additional = ''
            if not alphanum:
                additional = ';:=+!@#%^&*()[]{}'
            password = random_pwd_generator(length, additional)

            self.logger.info("Adding Random password to SSM Parameter Store")
            description = "Contains random password created by Landing Zone" \
                          " Solution"
            if key_password:
                key_id = self._get_kms_key_id()
                self.ssm.put_parameter_use_cmk(key_password, password, key_id,
                                               description)
        return response

    def update_params(self, params_in, account=None, region=None,
                      substitute_ssm_values=True):
        """Updates SSM parameters.

        Args:
            params_in (list): Python list of dicts of input params e.g.
                [{
                    "ParameterKey": "LoggingAccountId",
                    "ParameterValue": "$[alfred_ssm_/org/member/logging/account_id]"
                },{
                    "ParameterKey": "foo",
                    "ParameterValue": "bar"
                }]

        Return:
            params_out (dict): Python dict of output params e.g.
                {
                    "LoggingAccountId": "${AWS::AccountId}",
                    "foo": "bar"
                }
        """
        self.logger.info("params in : {}".format(params_in))

        params_out = {}
        for param in params_in:
            key = param.get("ParameterKey")
            value = param.get("ParameterValue")

            if not isinstance(value, list):
                if value.startswith('$[') and value.endswith(']'):
                    # Apply transformations
                    keyword = value[2:-1]
                    # Check if supported keyword e.g. alfred_ssm_,
                    # alfred_genaz_, alfred_getaz_, alfred_genuuid, etc.
                    if keyword.startswith('alfred_ssm_'):
                        value, param_flag = self._update_alfred_ssm(
                            keyword, key, value, substitute_ssm_values)
                        if param_flag is False:
                            raise Exception(
                                "Missing SSM parameter name for:"
                                " {} in the parameters JSON file.".format(key))
                    elif keyword.startswith('alfred_genkeypair'):
                        value = self._update_alfred_genkeypair(
                            param, account, region)
                    elif keyword.startswith('alfred_genpass_'):
                        value = self._update_alfred_genpass(keyword, param)
                    elif keyword.startswith('alfred_genaz_'):
                        value = self._update_alfred_genaz(
                            keyword, param, account, region)
                    else:
                        value = keyword
            params_out.update({key: value})

        self.logger.info("params out : {}".format(params_out))
        return params_out

    def _update_alfred_ssm(self, keyword, key, value, substitute_ssm_values):
        """Gets the value of the SSM parameter whose name starts with
        'alfred_ssm_'.

        Args:
            keyword: string. trimmed parameter value without unwanted
                leading and trailing characters
            key: parameter key
            value: parameter value
            substitute_ssm_values: boolean. defaults to True

        Return:
            value of the SSM parameter
        """
        ssm_param_name = trim_string_from_front(keyword, 'alfred_ssm_')
        param_flag = True
        if ssm_param_name:
            # If this flag is True, it will replace the SSM parameter name
            # i.e. /org/member/ss/directory-name with its value i.e. example,
            # whereas if it is False, it will leave the parameter name as-is.
            if substitute_ssm_values:
                value = self._get_ssm_params(ssm_param_name)
        else:
            param_flag = False
        return value, param_flag

    def _update_alfred_genkeypair(self, param, account, region):
        """Gets the EC2 key pair name if the parameter keyword starts with
        'alfred_genkeypair'.

        Args:
            param: one parameter in the list
            account: string
            region: string

        Return:
            EC2 key pair name
        """
        keymaterial_param_name = None
        keyfingerprint_param_name = None
        keyname_param_name = None
        ssm_parameters = param.get('ssm_parameters', [])
        if type(ssm_parameters) is list:
            for ssm_parameter in ssm_parameters:
                val = ssm_parameter.get('value')[2:-1]
                if val.lower() == 'keymaterial':
                    keymaterial_param_name = ssm_parameter.get('name')
                elif val.lower() == 'keyfingerprint':
                    keyfingerprint_param_name = ssm_parameter.get('name')
                elif val.lower() == 'keyname':
                    keyname_param_name = ssm_parameter.get('name')
        value = self._create_key_pair(account, region,
                                      keymaterial_param_name,
                                      keyfingerprint_param_name,
                                      keyname_param_name)
        return value

    def _update_alfred_genpass(self, keyword, param):
        """Creates a random password if the parameter keyword starts with
        'alfred_genpass_'.

        Args:
            keyword: string. trimmed parameter value without unwanted
                leading and trailing characters
            param: one parameter in the list

        Return:
            generated random password
        """
        sub_string = trim_string_from_front(keyword, 'alfred_genpass_')
        if sub_string:
            pw_length = int(sub_string)
        else:
            pw_length = 8

        password_param_name = None
        ssm_parameters = param.get('ssm_parameters', [])
        if type(ssm_parameters) is list:
            for ssm_parameter in ssm_parameters:
                val = ssm_parameter.get('value')[2:-1]
                if val.lower() == 'password':
                    password_param_name = ssm_parameter.get('name')
        value = self.random_password(pw_length, password_param_name, False)
        return value

    def _update_alfred_genaz(self, keyword, param, account, region):
        """Gets a predefined quantity of (random) AZs from a specified region
        if the parameter keyword starts with 'alfred_genaz_'.

        Args:
            keyword: string. trimmed parameter value without unwanted
                leading and trailing characters
            param: one parameter in the list
            account: string
            region: string

        Return:
            list of random AZs
        """
        sub_string = trim_string_from_front(keyword, 'alfred_genaz_')
        if sub_string:
            no_of_az = int(sub_string)
        else:
            no_of_az = 2

        az_param_name = None
        ssm_parameters = param.get('ssm_parameters', [])
        if type(ssm_parameters) is list:
            for ssm_parameter in ssm_parameters:
                val = ssm_parameter.get('value')[2:-1]
                if val.lower() == 'az':
                    az_param_name = ssm_parameter.get('name')
        value = self.get_azs_from_member_account(region, no_of_az, account,
                                                 az_param_name)
        return value
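# Illustrative example (not from the solution) of how
# CFNParamsHandler.update_params() resolves the '$[...]' keywords documented
# above; the returned account id is made up for the sketch, and the SSM
# parameter name is the one used in the docstring example.
#
#   handler = CFNParamsHandler(logger)
#   params_in = [
#       {"ParameterKey": "LoggingAccountId",
#        "ParameterValue": "$[alfred_ssm_/org/member/logging/account_id]"},
#       {"ParameterKey": "Environment",
#        "ParameterValue": "prod"}
#   ]
#   params_out = handler.update_params(params_in)
#   # With substitute_ssm_values=True (the default), the alfred_ssm_ keyword
#   # is replaced by the value stored at /org/member/logging/account_id,
#   # e.g. {"LoggingAccountId": "111122223333", "Environment": "prod"};
#   # plain values such as "prod" pass through unchanged.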
import os
import json
import inspect
import zipfile
from hashlib import md5
from uuid import uuid4

from lib.crhelper import cfn_handler
from lib.helper import get_available_regions
# the module paths below are assumed to follow the same lib.* layout as the
# imports above
from lib.logger import Logger
from lib.kms import KMS
from lib.ssm import SSM

# initialise logger
log_level = os.environ.get('log_level')
logger = Logger(loglevel=log_level)
init_failed = False

# instantiate classes from lib
kms = KMS(logger)
ssm = SSM(logger)


def unique_email_validator(email_list):
    result = set([x for x in email_list if email_list.count(x) > 1])
    duplicate_list = list(result)
    logger.info("Duplicate Emails: {}".format(duplicate_list))
    if not duplicate_list:
        logger.info("Duplicate emails not found")
    else:
        raise Exception(
            "Found duplicate email(s) {} in the parameters.".format(
                duplicate_list))


def unzip_function(zip_file_name, function_path, output_path):
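# A quick illustration (not part of the solution) of what
# unique_email_validator() accepts and when it raises:
#
#   unique_email_validator(['ops@example.com', 'sec@example.com'])
#   # logs "Duplicate emails not found" and returns None
#
#   unique_email_validator(['ops@example.com', 'ops@example.com'])
#   # raises Exception: Found duplicate email(s) ['ops@example.com'] ...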
def __init__(self, logger):
    self.logger = logger
    self.ssm = SSM(logger)
def __init__(self, logger):
    self.logger = logger
    self.ssm = SSM(self.logger)
    self.kms = KMS(self.logger)
class StateMachineTriggerLambda(object): def __init__(self, logger, sm_arns_map, staging_bucket, manifest_file_path, pipeline_stage, token, execution_mode, primary_account_id): self.state_machine = StateMachine(logger) self.ssm = SSM(logger) self.s3 = S3(logger) self.send = Metrics(logger) self.param_handler = ParamsHandler(logger) self.logger = logger self.sm_arns_map = sm_arns_map self.manifest = None self.staging_bucket = staging_bucket self.manifest_file_path = manifest_file_path self.token = token self.pipeline_stage = pipeline_stage self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)] if execution_mode.lower() == 'sequential': self.isSequential = True else: self.isSequential = False self.index = 100 self.primary_account_id = primary_account_id def _save_sm_exec_arn(self, list_sm_exec_arns): if list_sm_exec_arns is not None and type(list_sm_exec_arns) is list: self.logger.debug( "Saving the token:{} with list of sm_exec_arns:{}".format( self.token, list_sm_exec_arns)) if len(list_sm_exec_arns) > 0: sm_exec_arns = ",".join( list_sm_exec_arns ) # Create comma seperated string from list e.g. 'a','b','c' self.ssm.put_parameter( self.token, sm_exec_arns) # Store the list of SM execution ARNs in SSM else: self.ssm.put_parameter(self.token, 'PASS') else: raise Exception( "Expecting a list of state machine execution ARNs to store in SSM for token:{}, but found nothing to store." .format(self.token)) def _stage_template(self, relative_template_path): if relative_template_path.lower().startswith('s3'): # Convert the remote template URL s3://bucket-name/object # to Virtual-hosted style URL https://bucket-name.s3.amazonaws.com/object t = relative_template_path.split("/", 3) s3_url = "https://{}.s3.amazonaws.com/{}".format(t[2], t[3]) else: local_file = os.path.join(self.manifest_folder, relative_template_path) remote_file = "{}/{}_{}".format( TEMPLATE_KEY_PREFIX, self.token, relative_template_path[relative_template_path.rfind('/') + 1:]) logger.info( "Uploading the template file: {} to S3 bucket: {} and key: {}". 
format(local_file, self.staging_bucket, remote_file)) self.s3.upload_file(self.staging_bucket, local_file, remote_file) s3_url = "{}{}{}{}".format('https://s3.amazonaws.com/', self.staging_bucket, '/', remote_file) return s3_url def _download_remote_file(self, remote_s3_path): _file = tempfile.mkstemp()[1] t = remote_s3_path.split("/", 3) # s3://bucket-name/key remote_bucket = t[2] # Bucket name remote_key = t[3] # Key logger.info("Downloading {}/{} from S3 to {}".format( remote_bucket, remote_key, _file)) self.s3.download_file(remote_bucket, remote_key, _file) return _file def _load_policy(self, relative_policy_path): if relative_policy_path.lower().startswith('s3'): policy_file = self._download_remote_file(relative_policy_path) else: policy_file = os.path.join(self.manifest_folder, relative_policy_path) logger.info("Parsing the policy file: {}".format(policy_file)) with open(policy_file, 'r') as content_file: policy_file_content = content_file.read() #Check if valid json json.loads(policy_file_content) #Return the Escaped JSON text return policy_file_content.replace('"', '\"').replace('\n', '\r\n') def _load_params(self, relative_parameter_path, account=None, region=None): if relative_parameter_path.lower().startswith('s3'): parameter_file = self._download_remote_file( relative_parameter_path) else: parameter_file = os.path.join(self.manifest_folder, relative_parameter_path) logger.info("Parsing the parameter file: {}".format(parameter_file)) with open(parameter_file, 'r') as content_file: parameter_file_content = content_file.read() params = json.loads(parameter_file_content) if account is not None: #Deploying Core resource Stack Set # The last parameter is set to False, because we do not want to replace the SSM parameter values yet. sm_params = self.param_handler.update_params( params, account, region, False) else: # Deploying Baseline resource Stack Set sm_params = self.param_handler.update_params(params) logger.info("Input Parameters for State Machine: {}".format(sm_params)) return sm_params def _load_template_rules(self, relative_rules_path): rules_file = os.path.join(self.manifest_folder, relative_rules_path) logger.info("Parsing the template rules file: {}".format(rules_file)) with open(rules_file, 'r') as content_file: rules_file_content = content_file.read() rules = json.loads(rules_file_content) logger.info( "Template Constraint Rules for State Machine: {}".format(rules)) return rules def _populate_ssm_params(self, sm_input): # The scenario is if you have one core resource that exports output from CFN stack to SSM parameter # and then the next core resource reads the SSM parameter as input, then it has to wait for the first core resource to # finish; read the SSM parameters and use its value as input for second core resource's input for SM # Get the parameters for CFN template from sm_input logger.debug("Populating SSM parameter values for SM input: {}".format( sm_input)) params = sm_input.get('ResourceProperties').get('Parameters', {}) # First transform it from {name: value} to [{'ParameterKey': name}, {'ParameterValue': value}] # then replace the SSM parameter names with its values sm_params = self.param_handler.update_params(transform_params(params)) # Put it back into the sm_input sm_input.get('ResourceProperties').update({'Parameters': sm_params}) logger.debug( "Done populating SSM parameter values for SM input: {}".format( sm_input)) return sm_input def _create_ssm_input_map(self, ssm_parameters): ssm_input_map = {} for ssm_parameter in ssm_parameters: key = 
ssm_parameter.name value = ssm_parameter.value ssm_value = self.param_handler.update_params( transform_params({key: value})) ssm_input_map.update(ssm_value) return ssm_input_map def _create_state_machine_input_map(self, input_params, request_type='Create'): request = {} request.update({'RequestType': request_type}) request.update({'ResourceProperties': input_params}) return request def _create_account_state_machine_input_map(self, ou_name, account_name='', account_email='', ssm_map=None): input_params = {} input_params.update({'OUName': ou_name}) input_params.update({'AccountName': account_name}) input_params.update({'AccountEmail': account_email}) if ssm_map is not None: input_params.update({'SSMParameters': ssm_map}) return self._create_state_machine_input_map(input_params) def _create_stack_set_state_machine_input_map( self, stack_set_name, template_url, parameters, account_list=[], regions_list=[], ssm_map=None, capabilities='CAPABILITY_NAMED_IAM'): input_params = {} input_params.update({'StackSetName': sanitize(stack_set_name)}) input_params.update({'TemplateURL': template_url}) input_params.update({'Parameters': parameters}) input_params.update({'Capabilities': capabilities}) if len(account_list) > 0: input_params.update({'AccountList': account_list}) if len(regions_list) > 0: input_params.update({'RegionList': regions_list}) else: input_params.update({'RegionList': [self.manifest.region]}) else: input_params.update({'AccountList': ''}) input_params.update({'RegionList': ''}) if ssm_map is not None: input_params.update({'SSMParameters': ssm_map}) return self._create_state_machine_input_map(input_params) def _create_service_control_policy_state_machine_input_map( self, policy_name, policy_content, policy_desc=''): input_params = {} policy_doc = {} policy_doc.update({'Name': sanitize(policy_name)}) policy_doc.update({'Description': policy_desc}) policy_doc.update({'Content': policy_content}) input_params.update({'PolicyDocument': policy_doc}) input_params.update({'AccountId': ''}) input_params.update({'PolicyList': []}) input_params.update({'Operation': ''}) return self._create_state_machine_input_map(input_params) def _create_service_catalog_state_machine_input_map( self, portfolio, product): input_params = {} sc_portfolio = {} sc_portfolio.update({'PortfolioName': sanitize(portfolio.name, True)}) sc_portfolio.update( {'PortfolioDescription': sanitize(portfolio.description, True)}) sc_portfolio.update( {'PortfolioProvider': sanitize(portfolio.owner, True)}) ssm_value = self.param_handler.update_params( transform_params({'principal_role': portfolio.principal_role})) sc_portfolio.update({'PrincipalArn': ssm_value.get('principal_role')}) sc_product = {} sc_product.update({'ProductName': sanitize(product.name, True)}) sc_product.update({'ProductDescription': product.description}) sc_product.update({'ProductOwner': sanitize(portfolio.owner, True)}) if product.hide_old_versions is True: sc_product.update({'HideOldVersions': 'Yes'}) else: sc_product.update({'HideOldVersions': 'No'}) ssm_value = self.param_handler.update_params( transform_params( {'launch_constraint_role': product.launch_constraint_role})) sc_product.update({'RoleArn': ssm_value.get('launch_constraint_role')}) ec2 = EC2(self.logger, environ.get('AWS_REGION')) region_list = [] for region in ec2.describe_regions(): region_list.append(region.get('RegionName')) if os.path.isfile( os.path.join(self.manifest_folder, product.skeleton_file)): lambda_arn_param = get_env_var('lambda_arn_param_name') lambda_arn = 
self.ssm.get_parameter(lambda_arn_param) portfolio_index = self.manifest.portfolios.index(portfolio) product_index = self.manifest.portfolios[ portfolio_index].products.index(product) product_name = self.manifest.portfolios[portfolio_index].products[ product_index].name logger.info( "Generating the product template for {} from {}".format( product_name, os.path.join(self.manifest_folder, product.skeleton_file))) j2loader = jinja2.FileSystemLoader(self.manifest_folder) j2env = jinja2.Environment(loader=j2loader) j2template = j2env.get_template(product.skeleton_file) template_url = None if product.product_type.lower() == 'baseline': # j2result = j2template.render(manifest=self.manifest, portfolio_index=portfolio_index, # product_index=product_index, lambda_arn=lambda_arn, uuid=uuid.uuid4(), # regions=region_list) template_url = self._stage_template(product.skeleton_file + ".template") elif product.product_type.lower() == 'optional': if len(product.template_file) > 0: template_url = self._stage_template(product.template_file) j2result = j2template.render( manifest=self.manifest, portfolio_index=portfolio_index, product_index=product_index, lambda_arn=lambda_arn, uuid=uuid.uuid4(), template_url=template_url) generated_avm_template = os.path.join( self.manifest_folder, product.skeleton_file + ".generated.template") logger.info( "Writing the generated product template to {}".format( generated_avm_template)) with open(generated_avm_template, "w") as fh: fh.write(j2result) template_url = self._stage_template(generated_avm_template) else: raise Exception( "Missing template_file location for portfolio:{} and product:{} in Manifest file" .format(portfolio.name, product.name)) else: raise Exception( "Missing skeleton_file for portfolio:{} and product:{} in Manifest file" .format(portfolio.name, product.name)) artifact_params = {} artifact_params.update({'Info': {'LoadTemplateFromURL': template_url}}) artifact_params.update({'Type': 'CLOUD_FORMATION_TEMPLATE'}) artifact_params.update({'Description': product.description}) sc_product.update({'ProvisioningArtifactParameters': artifact_params}) try: if product.rules_file: rules = self._load_template_rules(product.rules_file) sc_product.update({'Rules': rules}) except Exception as e: logger.error(e) input_params.update({'SCPortfolio': sc_portfolio}) input_params.update({'SCProduct': sc_product}) return self._create_state_machine_input_map(input_params) def _create_launch_avm_state_machine_input_map(self, portfolio, product, accounts): input_params = {} input_params.update({'PortfolioName': sanitize(portfolio, True)}) input_params.update({'ProductName': sanitize(product, True)}) input_params.update({'ProvisioningParametersList': accounts}) return self._create_state_machine_input_map(input_params) def _run_or_queue_state_machine(self, sm_input, sm_arn, list_sm_exec_arns, sm_name): logger.info("State machine Input: {}".format(sm_input)) exec_name = "%s-%s-%s" % (sm_input.get('RequestType'), sm_name.replace(" ", ""), time.strftime("%Y-%m-%dT%H-%M-%S")) # If Sequential, kick off the first SM, and save the state machine input JSON # for the rest in SSM parameter store under /job_id/0 tree if self.isSequential: if self.index == 100: sm_input = self._populate_ssm_params(sm_input) sm_exec_arn = self.state_machine.trigger_state_machine( sm_arn, sm_input, exec_name) list_sm_exec_arns.append(sm_exec_arn) else: param_name = "/%s/%s" % (self.token, self.index) self.ssm.put_parameter(param_name, json.dumps(sm_input)) # Else if Parallel, execute all SM at regular interval 
of wait_time else: sm_input = self._populate_ssm_params(sm_input) sm_exec_arn = self.state_machine.trigger_state_machine( sm_arn, sm_input, exec_name) time.sleep(int(wait_time)) # Sleeping for sometime list_sm_exec_arns.append(sm_exec_arn) self.index = self.index + 1 def _deploy_resource(self, resource, sm_arn, list_sm_exec_arns, account_id=None): template_full_path = self._stage_template(resource.template_file) params = {} deploy_resource_flag = True if resource.parameter_file: if len(resource.regions) > 0: params = self._load_params(resource.parameter_file, account_id, resource.regions[0]) else: params = self._load_params(resource.parameter_file, account_id, self.manifest.region) ssm_map = self._create_ssm_input_map(resource.ssm_parameters) if account_id is not None: #Deploying Core resource Stack Set stack_name = "AWS-Landing-Zone-{}".format(resource.name) sm_input = self._create_stack_set_state_machine_input_map( stack_name, template_full_path, params, [str(account_id)], resource.regions, ssm_map) else: #Deploying Baseline resource Stack Set stack_name = "AWS-Landing-Zone-Baseline-{}".format(resource.name) sm_input = self._create_stack_set_state_machine_input_map( stack_name, template_full_path, params, [], [], ssm_map) stack_set = StackSet(self.logger) response = stack_set.describe_stack_set(stack_name) if response is not None: self.logger.info("Found existing stack set.") self.logger.info( "Comparing the template of the StackSet: {} with local copy of template" .format(stack_name)) relative_template_path = resource.template_file if relative_template_path.lower().startswith('s3'): local_template_file = self._download_remote_file( relative_template_path) else: local_template_file = os.path.join(self.manifest_folder, relative_template_path) cfn_template_file = tempfile.mkstemp()[1] with open(cfn_template_file, "w") as f: f.write(response.get('StackSet').get('TemplateBody')) template_compare = filecmp.cmp(local_template_file, cfn_template_file) self.logger.info( "Comparing the parameters of the StackSet: {} with local copy of JSON parameters file" .format(stack_name)) params_compare = True if template_compare: cfn_params = reverse_transform_params( response.get('StackSet').get('Parameters')) for key, value in params.items(): if cfn_params.get(key, '') == value: pass else: params_compare = False break self.logger.info( "template_compare={}".format(template_compare)) self.logger.info("params_compare={}".format(params_compare)) if template_compare and params_compare: deploy_resource_flag = False self.logger.info( "Found no changes in template & parameters, so skipping Update StackSet for {}" .format(stack_name)) if deploy_resource_flag: self._run_or_queue_state_machine(sm_input, sm_arn, list_sm_exec_arns, stack_name) def start_core_account_sm(self, sm_arn_account): try: logger.info("Setting the lock_down_stack_sets_role={}".format( self.manifest.lock_down_stack_sets_role)) if self.manifest.lock_down_stack_sets_role is True: self.ssm.put_parameter('lock_down_stack_sets_role_flag', 'yes') else: self.ssm.put_parameter('lock_down_stack_sets_role_flag', 'no') # Send metric - pipeline run count data = {"PipelineRunCount": "1"} self.send.metrics(data) logger.info("Processing Core Accounts from {} file".format( self.manifest_file_path)) list_sm_exec_arns = [] for ou in self.manifest.organizational_units: ou_name = ou.name logger.info( "Generating the state machine input json for OU: {}". 
format(ou_name)) if len(ou.core_accounts) == 0: # Empty OU with no Accounts sm_input = self._create_account_state_machine_input_map( ou_name) self._run_or_queue_state_machine(sm_input, sm_arn_account, list_sm_exec_arns, ou_name) for account in ou.core_accounts: account_name = account.name if account_name.lower() == 'primary': account_email = '' else: account_email = account.email if not account_email: raise Exception( "Failed to retrieve the email address for the Account: {}" .format(account_name)) ssm_map = self._create_ssm_input_map( account.ssm_parameters) sm_input = self._create_account_state_machine_input_map( ou_name, account_name, account_email, ssm_map) self._run_or_queue_state_machine(sm_input, sm_arn_account, list_sm_exec_arns, account_name) self._save_sm_exec_arn(list_sm_exec_arns) return except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def start_core_resource_sm(self, sm_arn_stack_set): try: logger.info("Parsing Core Resources from {} file".format( self.manifest_file_path)) list_sm_exec_arns = [] count = 0 for ou in self.manifest.organizational_units: for account in ou.core_accounts: account_name = account.name account_id = '' for ssm_parameter in account.ssm_parameters: if ssm_parameter.value == '$[AccountId]': account_id = self.ssm.get_parameter( ssm_parameter.name) if account_id == '': raise Exception( "Missing required SSM parameter: {} to retrive the account Id of Account: {} defined in Manifest" .format(ssm_parameter.name, account_name)) for resource in account.core_resources: # Count number of stacksets count += 1 if resource.deploy_method.lower() == 'stack_set': self._deploy_resource(resource, sm_arn_stack_set, list_sm_exec_arns, account_id) else: raise Exception( "Unsupported deploy_method: {} found for resource {} and Account: {} in Manifest" .format(resource.deploy_method, resource.name, account_name)) data = {"CoreAccountStackSetCount": str(count)} self.send.metrics(data) self._save_sm_exec_arn(list_sm_exec_arns) return except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def start_service_control_policy_sm(self, sm_arn_scp): try: logger.info("Processing SCPs from {} file".format( self.manifest_file_path)) list_sm_exec_arns = [] count = 0 for policy in self.manifest.organization_policies: policy_content = self._load_policy(policy.policy_file) sm_input = self._create_service_control_policy_state_machine_input_map( policy.name, policy_content, policy.description) self._run_or_queue_state_machine(sm_input, sm_arn_scp, list_sm_exec_arns, policy.name) # Count number of stacksets count += 1 self._save_sm_exec_arn(list_sm_exec_arns) data = {"SCPPolicyCount": str(count)} self.send.metrics(data) return except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def start_service_catalog_sm(self, sm_arn_sc): try: logger.info( "Processing Service catalogs section from {} file".format( self.manifest_file_path)) list_sm_exec_arns = [] for portfolio in self.manifest.portfolios: for product in portfolio.products: sm_input = self._create_service_catalog_state_machine_input_map( portfolio, product) self._run_or_queue_state_machine(sm_input, sm_arn_sc, list_sm_exec_arns, product.name) self._save_sm_exec_arn(list_sm_exec_arns) return except Exception as e: message = { 
'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def start_baseline_resources_sm(self, sm_arn_stack_set): try: logger.info("Parsing Basline Resources from {} file".format( self.manifest_file_path)) list_sm_exec_arns = [] count = 0 for resource in self.manifest.baseline_resources: if resource.deploy_method.lower() == 'stack_set': self._deploy_resource(resource, sm_arn_stack_set, list_sm_exec_arns) # Count number of stacksets count += 1 else: raise Exception( "Unsupported deploy_method: {} found for resource {} in Manifest" .format(resource.deploy_method, resource.name)) data = {"BaselineStackSetCount": str(count)} self.send.metrics(data) self._save_sm_exec_arn(list_sm_exec_arns) return except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def start_launch_avm(self, sm_arn_launch_avm): try: logger.info("Starting the launch AVM trigger") list_sm_exec_arns = [] ou_id_map = {} org = Organizations(self.logger) response = org.list_roots() self.logger.info("List roots Response") self.logger.info(response) root_id = response['Roots'][0].get('Id') response = org.list_organizational_units_for_parent( ParentId=root_id) next_token = response.get('NextToken', None) for ou in response['OrganizationalUnits']: ou_id_map.update({ou.get('Name'): ou.get('Id')}) while next_token is not None: response = org.list_organizational_units_for_parent( ParentId=root_id, NextToken=next_token) next_token = response.get('NextToken', None) for ou in response['OrganizationalUnits']: ou_id_map.update({ou.get('Name'): ou.get('Id')}) self.logger.info("ou_id_map={}".format(ou_id_map)) for portfolio in self.manifest.portfolios: for product in portfolio.products: if product.product_type.lower() == 'baseline': _params = self._load_params(product.parameter_file) logger.info( "Input parameters format for AVM: {}".format( _params)) list_of_accounts = [] for ou in product.apply_baseline_to_accounts_in_ou: self.logger.debug( "Looking up ou={} in ou_id_map".format(ou)) ou_id = ou_id_map.get(ou) self.logger.debug( "ou_id={} for ou={} in ou_id_map".format( ou_id, ou)) response = org.list_accounts_for_parent(ou_id) self.logger.debug( "List Accounts for Parent Response") self.logger.debug(response) for account in response.get('Accounts'): params = _params.copy() for key, value in params.items(): if value.lower() == 'accountemail': params.update( {key: account.get('Email')}) elif value.lower() == 'accountname': params.update( {key: account.get('Name')}) elif value.lower() == 'orgunitname': params.update({key: ou}) logger.info( "Input parameters format for Account: {} are {}" .format(account.get('Name'), params)) list_of_accounts.append(params) if len(list_of_accounts) > 0: sm_input = self._create_launch_avm_state_machine_input_map( portfolio.name, product.name, list_of_accounts) logger.info( "Launch AVM state machine Input: {}".format( sm_input)) exec_name = "%s-%s-%s" % ( sm_input.get('RequestType'), "Launch-AVM", time.strftime("%Y-%m-%dT%H-%M-%S")) sm_exec_arn = self.state_machine.trigger_state_machine( sm_arn_launch_avm, sm_input, exec_name) list_sm_exec_arns.append(sm_exec_arn) time.sleep(int(wait_time)) # Sleeping for sometime self._save_sm_exec_arn(list_sm_exec_arns) return except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def 
trigger_state_machines(self): try: self.manifest = Manifest(self.manifest_file_path) if self.pipeline_stage == 'core_accounts': self.start_core_account_sm(self.sm_arns_map.get('account')) elif self.pipeline_stage == 'core_resources': self.start_core_resource_sm(self.sm_arns_map.get('stack_set')) elif self.pipeline_stage == 'service_control_policy': self.start_service_control_policy_sm( self.sm_arns_map.get('service_control_policy')) elif self.pipeline_stage == 'service_catalog': self.start_service_catalog_sm( self.sm_arns_map.get('service_catalog')) elif self.pipeline_stage == 'baseline_resources': self.start_baseline_resources_sm( self.sm_arns_map.get('stack_set')) elif self.pipeline_stage == 'launch_avm': self.start_launch_avm(self.sm_arns_map.get('launch_avm')) except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise def get_state_machines_execution_status(self): try: sm_exec_arns = self.ssm.get_parameter(self.token) if sm_exec_arns == 'PASS': self.ssm.delete_parameter(self.token) return 'SUCCEEDED', '' else: list_sm_exec_arns = sm_exec_arns.split( "," ) # Create a list from comma seperated string e.g. ['a','b','c'] for sm_exec_arn in list_sm_exec_arns: status = self.state_machine.check_state_machine_status( sm_exec_arn) if status == 'RUNNING': return 'RUNNING', '' elif status == 'SUCCEEDED': continue else: self.ssm.delete_parameter(self.token) self.ssm.delete_parameters_by_path(self.token) err_msg = "State Machine Execution Failed, please check the Step function console for State Machine Execution ARN: {}".format( sm_exec_arn) return 'FAILED', err_msg if self.isSequential: _params_list = self.ssm.get_parameters_by_path(self.token) if _params_list: params_list = sorted(_params_list, key=lambda i: i['Name']) sm_input = json.loads(params_list[0].get('Value')) if self.pipeline_stage == 'core_accounts': sm_arn = self.sm_arns_map.get('account') sm_name = sm_input.get('ResourceProperties').get( 'OUName') + "-" + sm_input.get( 'ResourceProperties').get('AccountName') account_name = sm_input.get( 'ResourceProperties').get('AccountName') if account_name.lower() == 'primary': org = Organizations(self.logger) response = org.describe_account( self.primary_account_id) account_email = response.get('Account').get( 'Email', '') sm_input.get('ResourceProperties').update( {'AccountEmail': account_email}) elif self.pipeline_stage == 'core_resources': sm_arn = self.sm_arns_map.get('stack_set') sm_name = sm_input.get('ResourceProperties').get( 'StackSetName') sm_input = self._populate_ssm_params(sm_input) elif self.pipeline_stage == 'service_control_policy': sm_arn = self.sm_arns_map.get( 'service_control_policy') sm_name = sm_input.get('ResourceProperties').get( 'PolicyDocument').get('Name') elif self.pipeline_stage == 'service_catalog': sm_arn = self.sm_arns_map.get('service_catalog') sm_name = sm_input.get('ResourceProperties').get( 'SCProduct').get('ProductName') elif self.pipeline_stage == 'baseline_resources': sm_arn = self.sm_arns_map.get('stack_set') sm_name = sm_input.get('ResourceProperties').get( 'StackSetName') sm_input = self._populate_ssm_params(sm_input) exec_name = "%s-%s-%s" % (sm_input.get( 'RequestType'), sm_name.replace( " ", ""), time.strftime("%Y-%m-%dT%H-%M-%S")) sm_exec_arn = self.state_machine.trigger_state_machine( sm_arn, sm_input, exec_name) self._save_sm_exec_arn([sm_exec_arn]) self.ssm.delete_parameter(params_list[0].get('Name')) return 'RUNNING', '' 
self.ssm.delete_parameter(self.token) return 'SUCCEEDED', '' except Exception as e: message = { 'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e) } self.logger.exception(message) raise