def build_bundles():
    """ Build bundles for the environment """
    bundle_types = config.get_bundles()

    if not bundle_types:
        logger.warning(
            'No bundles configured, will deploy without any bundles')
        return None

    for bundle_type in bundle_types:
        # Run pre-bundle-hook
        _pre_bundle_hook(bundle_type)

        if config.has_pre_built_bundle(bundle_type):
            bundle_path = config.get_pre_built_bundle_path(bundle_type)
            logger.info('Using pre-built bundle: {}'.format(bundle_path))

            try:
                _upload_bundle(bundle_path, bundle_type)
            except UnsupportedCompression:
                raise
        else:
            logger.info('Building bundle {}'.format(bundle_type))
            logger.info('Bundle paths: {}'.format(', '.join(
                config.get_bundle_paths(bundle_type))))

            tmpfile = tempfile.NamedTemporaryFile(
                suffix='.zip', delete=False)
            logger.debug(
                'Created temporary zip file {}'.format(tmpfile.name))

            try:
                _bundle_zip(
                    tmpfile,
                    bundle_type,
                    config.get_environment(),
                    config.get_bundle_paths(bundle_type))

                tmpfile.close()

                try:
                    _upload_bundle(tmpfile.name, bundle_type)
                except UnsupportedCompression:
                    raise
            finally:
                logger.debug('Removing temporary zip file {}'.format(
                    tmpfile.name))
                os.remove(tmpfile.name)

        # Run post-bundle-hook
        _post_bundle_hook(bundle_type)

        logger.info('Done bundling {}'.format(bundle_type))
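
# The _bundle_zip() helper called above is not defined in this section. The
# sketch below is an assumption about its contract: it writes every file
# found under the configured bundle paths into the already open temporary
# file as a zip archive. The parameter names and the behaviour are
# hypothetical; the real helper may additionally filter or rename paths.
import os
import zipfile


def _bundle_zip(tmpfile, bundle_type, environment, paths):
    """ Sketch: write the given bundle paths into tmpfile as a zip archive

    :type tmpfile: file
    :param tmpfile: Open file object to write the zip archive to
    :type bundle_type: str
    :param bundle_type: Bundle type, used for logging only in this sketch
    :type environment: str
    :param environment: Environment name, unused in this sketch
    :type paths: list
    :param paths: Directories or files to include in the bundle
    """
    logger.debug('Zipping bundle {}'.format(bundle_type))

    archive = zipfile.ZipFile(tmpfile, 'w', zipfile.ZIP_DEFLATED)
    try:
        for path in paths:
            if os.path.isfile(path):
                archive.write(path, os.path.basename(path))
                continue

            # Walk directories and store files relative to the bundle path
            for root, _dirs, filenames in os.walk(path):
                for filename in filenames:
                    absolute_path = os.path.join(root, filename)
                    archive_name = os.path.relpath(absolute_path, path)
                    archive.write(absolute_path, archive_name)
    finally:
        archive.close()
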
def ensure_stack(stack_name, parameters, template, tags=None,
                 disable_rollback=False, timeout_in_minutes=None,
                 capabilities=['CAPABILITY_IAM']):
    """ Ensure that a CloudFormation stack is running

    If the stack does not exist, it will be created. If the stack exists
    it will be updated.

    :type stack_name: str
    :param stack_name: Name of the stack to ensure
    :type parameters: list
    :param parameters: List of tuples with parameters and values
    :type template: str
    :param template: Template as a JSON string or an HTTP URL
    :type tags: dict
    :param tags: Dict with keys and values
    :type disable_rollback: bool
    :param disable_rollback: Disable rollbacks of failed creates/updates
    :type timeout_in_minutes: int
    :param timeout_in_minutes: Timeout the stack creation after x minutes
    :type capabilities: list
    :param capabilities: The list of capabilities you want to allow in the
        stack. Currently, the only valid capability is 'CAPABILITY_IAM'
    """
    LOGGER.info('Ensuring stack {} with template {}'.format(
        stack_name, template))

    cumulus_parameters = [
        ('CumulusBundleBucket', config.get_environment_option('bucket')),
        ('CumulusEnvironment', config.get_environment()),
        ('CumulusVersion', config.get_environment_option('version'))
    ]

    for parameter in cumulus_parameters + parameters:
        LOGGER.debug(
            'Adding parameter "{}" with value "{}" to CF template'.format(
                parameter[0], parameter[1]))

    if timeout_in_minutes:
        LOGGER.debug('Will time out stack creation after {:d} minutes'.format(
            timeout_in_minutes))

    try:
        if stack_exists(stack_name):
            LOGGER.debug('Updating existing stack to version {}'.format(
                config.get_environment_option('version')))

            if template[0:4] == 'http':
                CONNECTION.update_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_url=template,
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)
            else:
                CONNECTION.update_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_body=_get_json_from_template(template),
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)

            stack_status = _wait_for_stack_complete(
                stack_name, filter_type='UPDATE')
        else:
            LOGGER.debug('Creating new stack with version {}'.format(
                config.get_environment_option('version')))

            if template[0:4] == 'http':
                CONNECTION.create_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_url=template,
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)
            else:
                CONNECTION.create_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_body=_get_json_from_template(template),
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)

            stack_status = _wait_for_stack_complete(
                stack_name, filter_type='CREATE')
    except IOError as error:
        LOGGER.error('Error reading template file: {}'.format(error))
        return
    except ValueError as error:
        raise InvalidTemplateException(
            'Malformed template: {}'.format(error))
    except boto.exception.BotoServerError as error:
        if (error.error_code == 'ValidationError' and
                error.error_message == 'No updates are to be performed.'):
            # Do not raise this exception if it is due to lack of updates.
            # We do not want to fail any other stack updates after this stack.
            LOGGER.warning('No CloudFormation updates are to be '
                           'performed for {}'.format(stack_name))
            return

        LOGGER.error('Boto exception {}: {}'.format(
            error.error_code, error.error_message))
        return

    _print_stack_output(stack_name)

    return stack_status
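
# _get_json_from_template() is referenced above but not included in this
# section. A minimal sketch, assuming it only needs to read the template
# file from disk and validate that it contains proper JSON: IOError
# propagates if the file cannot be read and ValueError if the JSON is
# malformed, which matches the exception handling in ensure_stack().
import json


def _get_json_from_template(template):
    """ Sketch: read a CloudFormation template and return it as a JSON string

    :type template: str
    :param template: Path to the template file on disk
    :returns: str -- the validated template body
    """
    with open(template, 'r') as file_handle:
        template_body = file_handle.read()

    # json.loads() raises ValueError on malformed JSON, which ensure_stack()
    # converts into an InvalidTemplateException
    json.loads(template_body)

    return template_body
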
def _upload_bundle(bundle_path, bundle_type):
    """ Upload a bundle to S3

    :type bundle_path: str
    :param bundle_path: Local path to the bundle
    :type bundle_type: str
    :param bundle_type: Bundle type
    """
    try:
        connection = connection_handler.connect_s3()
    except Exception:
        raise

    bucket = connection.get_bucket(config.get_environment_option('bucket'))

    # Check that the bundle actually exists
    if not ospath.exists(bundle_path):
        logger.error('File not found: {}'.format(bundle_path))
        sys.exit(1)

    if bundle_path.endswith('.zip'):
        compression = 'zip'
    else:
        raise UnsupportedCompression(
            'Unknown compression format for {}. '
            'We are currently only supporting .zip'.format(bundle_path))

    # Generate an md5 checksum for the local bundle
    local_hash = _generate_local_md5hash(bundle_path)

    key_name = (
        '{environment}/{version}/'
        'bundle-{environment}-{version}-{bundle_type}.{compression}').format(
            environment=config.get_environment(),
            version=config.get_environment_option('version'),
            bundle_type=bundle_type,
            compression=compression)

    # Do not upload the bundle if the key already exists and has the same
    # md5 checksum
    if _key_exists(
            config.get_environment_option('bucket'),
            key_name,
            checksum=local_hash):
        logger.info(
            'This bundle is already uploaded to AWS S3. Skipping upload.')
        return

    # Get the key object
    key = bucket.new_key(key_name)

    logger.info('Starting upload of {} to s3://{}/{}'.format(
        bundle_type, bucket.name, key_name))
    key.set_contents_from_filename(bundle_path, replace=True)
    logger.info('Completed upload of {} to s3://{}/{}'.format(
        bundle_type, bucket.name, key_name))

    # Compare md5 checksums
    if local_hash == key.md5:
        logger.debug('Uploaded bundle checksum OK ({})'.format(key.md5))
    else:
        logger.error('Mismatching md5 checksum {} ({}) and {} ({})'.format(
            bundle_path, local_hash, key_name, key.md5))
        raise ChecksumMismatchException(
            'Mismatching md5 checksum {} ({}) and {} ({})'.format(
                bundle_path, local_hash, key_name, key.md5))
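
# _generate_local_md5hash() and _key_exists() are used above but defined
# elsewhere in the project. Below is a hedged sketch of
# _generate_local_md5hash() only, assuming it returns a plain hex digest so
# that it can be compared with boto's key.md5 attribute after the upload.
import hashlib


def _generate_local_md5hash(bundle_path):
    """ Sketch: generate an md5 hex digest for a local file

    :type bundle_path: str
    :param bundle_path: Path to the local bundle
    :returns: str -- md5 hex digest of the file contents
    """
    md5 = hashlib.md5()

    with open(bundle_path, 'rb') as file_handle:
        # Read the file in 8 KiB chunks so large bundles do not have to be
        # loaded into memory at once
        for chunk in iter(lambda: file_handle.read(8192), b''):
            md5.update(chunk)

    return md5.hexdigest()
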