def validate_templates_all_stacks():
    """ Validate the templates for all stacks """
    for stack in config.get_stacks():
        template = config.get_stack_template(stack)
        result = CONNECTION.validate_template(
            _get_json_from_template(template))

        if result:
            LOGGER.info('Template {} is valid!'.format(template))
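# _get_json_from_template is used above (and in ensure_stack below) but is
# not part of this listing. A minimal sketch, assuming the helper reads the
# template file from disk, parses it as a syntax check and re-serializes it
# as a compact JSON string; the real cumulus helper may do more:
import json


def _get_json_from_template(template):
    """ Read a CloudFormation template file and return it as a JSON string

    :type template: str
    :param template: Path to the template file
    :returns: str -- JSON document
    """
    with open(template) as file_handle:
        # json.loads() doubles as a basic syntax check before the API call
        return json.dumps(json.loads(file_handle.read()))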
def connect_s3():
    """ Connect to AWS S3

    :returns: boto.s3.connection
    """
    try:
        return boto.connect_s3(
            aws_access_key_id=config.get_environment_option('access-key-id'),
            aws_secret_access_key=config.get_environment_option(
                'secret-access-key'))
    except Exception as err:
        logger.error(
            'A problem occurred connecting to AWS S3: {}'.format(err))
        raise
def _bundle_zip(tmpfile, bundle_type, environment, paths):
    """ Create a zip archive

    :type tmpfile: tempfile instance
    :param tmpfile: Tempfile object
    :type bundle_type: str
    :param bundle_type: Bundle name
    :type environment: str
    :param environment: Environment name
    :type paths: list
    :param paths: List of paths to include
    """
    logger.info('Generating zip file for {}'.format(bundle_type))
    archive = zipfile.ZipFile(tmpfile, 'w')
    path_rewrites = config.get_bundle_path_rewrites(bundle_type)

    for path in paths:
        path = _convert_paths_to_local_format(path)

        if ospath.isdir(path):
            # Extract all file names from the directory
            filenames = _find_files(path)
        else:
            filenames = [path]

        for filename in filenames:
            arcname = filename

            # Exclude files with other target environments
            prefix = '__cumulus-{}__'.format(environment)
            basename = ospath.basename(filename)
            if basename.startswith('__cumulus-'):
                if len(basename.split(prefix)) != 2:
                    logger.debug('Excluding file {}'.format(filename))
                    continue
            elif prefix in filename.split(ospath.sep):
                logger.debug('Excluding file {}'.format(filename))
                continue

            # Do all rewrites
            for rewrite in path_rewrites:
                target = _convert_paths_to_local_format(
                    rewrite['target'].replace('\\\\', '\\'))
                destination = _convert_paths_to_local_format(
                    rewrite['destination'].replace('\\\\', '\\'))

                try:
                    if arcname[:len(target)] == target:
                        arcname = arcname.replace(target, destination)
                        logger.debug(
                            'Replaced "{}" with "{}" in bundle {}'.format(
                                target, destination, bundle_type))
                except IndexError:
                    pass

            logger.debug('Adding: {}'.format(filename))
            archive.write(filename, arcname, zipfile.ZIP_DEFLATED)

    archive.close()
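# Neither _find_files nor _convert_paths_to_local_format is shown in this
# listing. Minimal sketches, assuming _find_files walks a directory tree with
# os.walk and _convert_paths_to_local_format normalizes path separators for
# the current platform; the real helpers may behave differently:
import os
from os import path as ospath


def _find_files(directory):
    """ Recursively collect all file paths under a directory

    :type directory: str
    :param directory: Root directory to walk
    :returns: list -- All file paths found
    """
    filenames = []
    for root, _dirs, files in os.walk(directory):
        for filename in files:
            filenames.append(ospath.join(root, filename))
    return filenames


def _convert_paths_to_local_format(path):
    """ Rewrite a path to use the local OS path separator

    :type path: str
    :param path: Path using either / or \\ separators
    :returns: str -- Path using the local separator
    """
    return path.replace('/', ospath.sep).replace('\\', ospath.sep)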
def connect_cloudformation():
    """ Connect to AWS CloudFormation

    :returns: boto.cloudformation.connection
    """
    try:
        return cloudformation.connect_to_region(
            config.get_environment_option('region'),
            aws_access_key_id=config.get_environment_option('access-key-id'),
            aws_secret_access_key=config.get_environment_option(
                'secret-access-key'))
    except Exception as err:
        logger.error(
            'A problem occurred connecting to AWS CloudFormation: {}'.format(
                err))
        raise
def undeploy(force=False):
    """ Undeploy an environment

    :type force: bool
    :param force: Skip the safety question
    :returns: bool -- True if the deletion of all stacks was successful
    """
    message = (
        'This will DELETE all stacks in the environment. '
        'This action cannot be undone. '
        'Are you sure you want to continue? [N/y] ')

    choice = 'yes'
    if not force:
        choice = raw_input(message).lower()
        if choice not in ['yes', 'y']:
            print('Skipping undeployment.')
            return None

    stacks = config.get_stacks()
    stacks.reverse()

    if not stacks:
        LOGGER.warning('No stacks to undeploy.')
        return None

    delete_successful = True
    for stack in stacks:
        status = delete_stack(stack)

        if status != 'DELETE_COMPLETE':
            LOGGER.warning('The stack finished with status {}'.format(status))
            delete_successful = False

    return delete_successful
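# delete_stack is called above but not included in this listing. A minimal
# sketch, assuming it issues the delete through the module-level
# CloudFormation CONNECTION and reuses the _wait_for_stack_complete poller
# referenced in ensure_stack below; the real implementation may differ:
def delete_stack(stack_name):
    """ Delete a CloudFormation stack and wait for the result

    :type stack_name: str
    :param stack_name: Name of the stack to delete
    :returns: str -- Final stack status, e.g. DELETE_COMPLETE
    """
    LOGGER.info('Deleting stack {}'.format(stack_name))
    CONNECTION.delete_stack(stack_name)
    return _wait_for_stack_complete(stack_name, filter_type='DELETE')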
def _pre_deploy_hook():
    """ Execute a pre-deploy-hook """
    command = config.get_pre_deploy_hook()

    if not command:
        return None

    LOGGER.info('Running pre-deploy-hook command: "{}"'.format(command))
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError as error:
        raise HookExecutionException(
            'The pre-deploy-hook returned a non-zero exit code: {}'.format(
                error))
def list_all_stacks():
    """ List stacks and their statuses """
    cf_stacks = CONNECTION.list_stacks()

    for stack in config.get_stacks():
        # Keep a reference to the matching stack summary; the original code
        # read cf_stack after the loop, which pointed at the last summary
        # in the list rather than the matched one
        found_stack = None
        for cf_stack in cf_stacks:
            if stack == cf_stack.stack_name:
                found_stack = cf_stack
                break

        if found_stack:
            print('{:<30}{}'.format(stack, found_stack.stack_status))
        else:
            print('{:<30}{}'.format(stack, 'NOT_RUNNING'))
def _post_deploy_hook():
    """ Execute a post-deploy-hook """
    command = config.get_post_deploy_hook()

    if not command:
        return None

    LOGGER.info('Running post-deploy-hook command: "{}"'.format(command))
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError as error:
        raise HookExecutionException(
            'The post-deploy-hook returned a non-zero exit code: {}'.format(
                error))
def list_events_all_stacks():
    """ List events for all configured stacks """
    for stack_name in config.get_stacks():
        stack = get_stack_by_name(stack_name)
        if not stack:
            # Skip stacks that are not running rather than aborting
            # the whole listing
            continue

        _print_event_log_title()

        written_events = []
        for event in reversed(
                CONNECTION.describe_stack_events(stack.stack_id)):
            if event.event_id not in written_events:
                written_events.append(event.event_id)
                _print_event_log_event(event)
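# get_stack_by_name is used above but is not part of this listing. A minimal
# sketch, assuming it scans the stack summaries visible to the CloudFormation
# connection; the real helper may filter on stack status as well:
def get_stack_by_name(stack_name):
    """ Find a CloudFormation stack by name

    :type stack_name: str
    :param stack_name: Stack name to look for
    :returns: boto stack summary or None
    """
    for cf_stack in CONNECTION.list_stacks():
        if cf_stack.stack_name == stack_name:
            return cf_stack
    return None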
def _pre_bundle_hook(bundle_name):
    """ Execute a pre-bundle-hook

    :type bundle_name: str
    :param bundle_name: Bundle name
    """
    command = config.get_pre_bundle_hook(bundle_name)

    if not command:
        return None

    logger.info('Running pre-bundle-hook command: "{}"'.format(command))
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError as error:
        raise HookExecutionException(
            'The pre-bundle-hook returned a non-zero exit code: {}'.format(
                error))
def build_bundles():
    """ Build bundles for the environment """
    bundle_types = config.get_bundles()

    if not bundle_types:
        logger.warning(
            'No bundles configured, will deploy without any bundles')
        return None

    for bundle_type in bundle_types:
        # Run pre-bundle-hook
        _pre_bundle_hook(bundle_type)

        if config.has_pre_built_bundle(bundle_type):
            bundle_path = config.get_pre_built_bundle_path(bundle_type)
            logger.info('Using pre-built bundle: {}'.format(bundle_path))

            try:
                _upload_bundle(bundle_path, bundle_type)
            except UnsupportedCompression:
                raise
        else:
            logger.info('Building bundle {}'.format(bundle_type))
            logger.info('Bundle paths: {}'.format(', '.join(
                config.get_bundle_paths(bundle_type))))

            tmpfile = tempfile.NamedTemporaryFile(suffix='.zip', delete=False)
            logger.debug('Created temporary file {}'.format(tmpfile.name))

            try:
                _bundle_zip(
                    tmpfile,
                    bundle_type,
                    config.get_environment(),
                    config.get_bundle_paths(bundle_type))

                tmpfile.close()

                try:
                    _upload_bundle(tmpfile.name, bundle_type)
                except UnsupportedCompression:
                    raise
            finally:
                logger.debug(
                    'Removing temporary file {}'.format(tmpfile.name))
                os.remove(tmpfile.name)

        # Run post-bundle-hook
        _post_bundle_hook(bundle_type)
        logger.info('Done bundling {}'.format(bundle_type))
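# _post_bundle_hook is invoked above but not shown in this listing. A minimal
# sketch mirroring _pre_bundle_hook, assuming a matching
# config.get_post_bundle_hook() accessor exists (an assumption, not the
# confirmed cumulus implementation):
def _post_bundle_hook(bundle_name):
    """ Execute a post-bundle-hook

    :type bundle_name: str
    :param bundle_name: Bundle name
    """
    command = config.get_post_bundle_hook(bundle_name)

    if not command:
        return None

    logger.info('Running post-bundle-hook command: "{}"'.format(command))
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError as error:
        raise HookExecutionException(
            'The post-bundle-hook returned a non-zero exit code: {}'.format(
                error))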
def deploy():
    """ Ensure stacks are up and running (create or update them) """
    # Run pre-deploy-hook
    _pre_deploy_hook()

    stack_names = config.get_stacks()
    if not stack_names:
        LOGGER.warning('No stacks configured, nothing to deploy')
        return

    for stack_name in stack_names:
        ensure_stack(
            stack_name,
            template=config.get_stack_template(stack_name),
            disable_rollback=config.get_stack_disable_rollback(stack_name),
            parameters=config.get_stack_parameters(stack_name),
            timeout_in_minutes=config.get_stack_timeout_in_minutes(
                stack_name),
            tags=config.get_stack_tags(stack_name))

    # Run post-deploy-hook
    _post_deploy_hook()
def print_output_all_stacks():
    """ Print the output for all stacks """
    for stack in config.get_stacks():
        _print_stack_output(stack)
        },
        'cumulus_ds.deployment_manager': {
            'handlers': ['default'],
            'level': 'DEBUG',
            'propagate': False
        },
        'cumulus_ds.helpers.stack': {
            'handlers': ['default'],
            'level': 'DEBUG',
            'propagate': False
        }
    }
}

# Set log level
LOGGING_CONFIG['handlers']['default']['level'] = config.get_log_level()

logging.config.dictConfig(LOGGING_CONFIG)
LOGGER = logging.getLogger(__name__)


def main():
    """ Main function """
    try:
        if config.args.bundle:
            bundle_manager.build_bundles()

        if config.args.undeploy:
            deployment_manager.undeploy(force=config.args.force)

        if config.args.deploy:
def ensure_stack(stack_name, parameters, template, tags=None,
                 disable_rollback=False, timeout_in_minutes=None,
                 capabilities=['CAPABILITY_IAM']):
    """ Ensure that a CloudFormation stack is running

    If the stack does not exist, it will be created. If the stack exists
    it will be updated.

    :type stack_name: str
    :param stack_name: Name of the stack to ensure
    :type parameters: list
    :param parameters: List of tuples with parameters and values
    :type template: str
    :param template: Template as a JSON string or an HTTP URL
    :type tags: dict
    :param tags: Dict with keys and values
    :type disable_rollback: bool
    :param disable_rollback: Disable rollbacks of failed creates/updates
    :type timeout_in_minutes: int
    :param timeout_in_minutes: Timeout the stack creation after x minutes
    :type capabilities: list
    :param capabilities: The list of capabilities you want to allow in
        the stack. Currently, the only valid capability is 'CAPABILITY_IAM'
    """
    LOGGER.info('Ensuring stack {} with template {}'.format(
        stack_name, template))

    cumulus_parameters = [
        ('CumulusBundleBucket', config.get_environment_option('bucket')),
        ('CumulusEnvironment', config.get_environment()),
        ('CumulusVersion', config.get_environment_option('version'))
    ]

    for parameter in cumulus_parameters + parameters:
        LOGGER.debug(
            'Adding parameter "{}" with value "{}" to CF template'.format(
                parameter[0], parameter[1]))

    if timeout_in_minutes:
        LOGGER.debug('Will time out stack creation after {:d} minutes'.format(
            timeout_in_minutes))

    try:
        if stack_exists(stack_name):
            LOGGER.debug('Updating existing stack to version {}'.format(
                config.get_environment_option('version')))

            # Pass the capabilities argument through instead of hardcoding
            # ['CAPABILITY_IAM'], so the parameter is actually honored
            if template[0:4] == 'http':
                CONNECTION.update_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_url=template,
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)
            else:
                CONNECTION.update_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_body=_get_json_from_template(template),
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)

            stack_status = _wait_for_stack_complete(
                stack_name, filter_type='UPDATE')
        else:
            LOGGER.debug('Creating new stack with version {}'.format(
                config.get_environment_option('version')))

            if template[0:4] == 'http':
                CONNECTION.create_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_url=template,
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)
            else:
                CONNECTION.create_stack(
                    stack_name,
                    parameters=cumulus_parameters + parameters,
                    template_body=_get_json_from_template(template),
                    disable_rollback=disable_rollback,
                    capabilities=capabilities,
                    timeout_in_minutes=timeout_in_minutes,
                    tags=tags)

            stack_status = _wait_for_stack_complete(
                stack_name, filter_type='CREATE')
    except IOError as error:
        LOGGER.error('Error reading template file: {}'.format(error))
        return
    except ValueError as error:
        raise InvalidTemplateException(
            'Malformed template: {}'.format(error))
    except boto.exception.BotoServerError as error:
        if (error.error_code == 'ValidationError'
                and error.error_message == 'No updates are to be performed.'):
            # Do not raise this exception if it is due to lack of updates.
            # We do not want to fail any other stack updates after this stack
            LOGGER.warning(
                'No CloudFormation updates are to be performed '
                'for {}'.format(stack_name))
            return

        LOGGER.error('Boto exception {}: {}'.format(
            error.error_code, error.error_message))
        return

    _print_stack_output(stack_name)

    return stack_status
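# stack_exists is called by ensure_stack above but is not part of this
# listing. A minimal sketch, assuming a stack counts as existing unless it
# has been fully deleted; the real helper may use a different status filter:
def stack_exists(stack_name):
    """ Check if a CloudFormation stack exists

    :type stack_name: str
    :param stack_name: Name of the stack to look for
    :returns: bool -- True if the stack exists
    """
    for cf_stack in CONNECTION.list_stacks():
        if (cf_stack.stack_name == stack_name
                and cf_stack.stack_status != 'DELETE_COMPLETE'):
            return True
    return False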
def _upload_bundle(bundle_path, bundle_type):
    """ Upload a bundle to AWS S3

    :type bundle_path: str
    :param bundle_path: Local path to the bundle
    :type bundle_type: str
    :param bundle_type: Bundle type
    """
    try:
        connection = connection_handler.connect_s3()
    except Exception:
        raise

    bucket = connection.get_bucket(config.get_environment_option('bucket'))

    # Check that the bundle actually exists
    if not ospath.exists(bundle_path):
        logger.error('File not found: {}'.format(bundle_path))
        sys.exit(1)

    if bundle_path.endswith('.zip'):
        compression = 'zip'
    else:
        raise UnsupportedCompression(
            'Unknown compression format for {}. '
            'We are currently only supporting .zip'.format(bundle_path))

    # Generate an md5 checksum for the local bundle
    local_hash = _generate_local_md5hash(bundle_path)

    key_name = (
        '{environment}/{version}/'
        'bundle-{environment}-{version}-{bundle_type}.{compression}').format(
            environment=config.get_environment(),
            version=config.get_environment_option('version'),
            bundle_type=bundle_type,
            compression=compression)

    # Do not upload the bundle if the key already exists and has the same
    # md5 checksum
    if _key_exists(
            config.get_environment_option('bucket'),
            key_name,
            checksum=local_hash):
        logger.info(
            'This bundle is already uploaded to AWS S3. Skipping upload.')
        return

    # Get the key object
    key = bucket.new_key(key_name)

    logger.info('Starting upload of {} to s3://{}/{}'.format(
        bundle_type, bucket.name, key_name))
    key.set_contents_from_filename(bundle_path, replace=True)
    logger.info('Completed upload of {} to s3://{}/{}'.format(
        bundle_type, bucket.name, key_name))

    # Compare md5 checksums
    if local_hash == key.md5:
        logger.debug('Uploaded bundle checksum OK ({})'.format(key.md5))
    else:
        logger.error('Mismatching md5 checksum {} ({}) and {} ({})'.format(
            bundle_path, local_hash, key_name, key.md5))
        raise ChecksumMismatchException(
            'Mismatching md5 checksum {} ({}) and {} ({})'.format(
                bundle_path, local_hash, key_name, key.md5))
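# _generate_local_md5hash is used above but not included in this listing.
# A minimal sketch, assuming it returns the hex digest computed over the file
# in chunks so large bundles never have to fit in memory (boto's Key.md5 is
# also a hex digest, which is what the comparison above relies on):
import hashlib


def _generate_local_md5hash(file_path):
    """ Generate an md5 hex digest for a local file

    :type file_path: str
    :param file_path: Path to the file to checksum
    :returns: str -- md5 hex digest
    """
    md5 = hashlib.md5()
    with open(file_path, 'rb') as file_handle:
        # Read in 8 MB chunks to keep memory usage bounded
        for chunk in iter(lambda: file_handle.read(8 * 1024 * 1024), b''):
            md5.update(chunk)
    return md5.hexdigest()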