def handler(cls, options, config):
    """Use Terraform to destroy any existing infrastructure

    Args:
        options (argparse.Namespace): Parsed arguments from manage.py
        config (CLIConfig): Loaded StreamAlert config

    Returns:
        bool: False if errors occurred, True otherwise
    """
    # Check for valid credentials
    if not check_credentials():
        return False

    # Verify terraform is installed
    if not terraform_check():
        return False

    # Ask for approval here since multiple Terraform commands may be necessary
    if not continue_prompt(message='Are you sure you want to destroy?'):
        return False

    if options.target:
        target_modules, valid = _get_valid_tf_targets(config, options.target)
        if not valid:
            return False

        return tf_runner(
            action='destroy',
            auto_approve=True,
            targets=target_modules if target_modules else None
        )

    # Migrate back to local state so Terraform can successfully
    # destroy the S3 bucket used by the backend.
    # Do not check for terraform or aws creds again since these were checked above
    if not terraform_generate_handler(config=config, init=True, check_tf=False,
                                      check_creds=False):
        return False

    if not run_command(['terraform', 'init']):
        return False

    # Destroy all of the infrastructure
    if not tf_runner(action='destroy', auto_approve=True):
        return False

    # Remove old Terraform files
    return TerraformCleanCommand.handler(options, config)
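# NOTE: illustrative sketch only. continue_prompt is imported from the
# project's helpers and is not defined in this file; a minimal stand-in
# consistent with how it is called above (a keyword `message`, returning a
# bool) might look like the following. The body is an assumption, not the
# project's actual code.
def _example_continue_prompt(message):
    """Ask the user for yes/no confirmation before a destructive action

    Args:
        message (str): Question to present to the user

    Returns:
        bool: True if the user answered 'yes', False otherwise
    """
    response = ''
    while response not in {'yes', 'no'}:
        # Re-prompt until an unambiguous answer is given
        response = input('{} (yes/no): '.format(message)).strip().lower()

    return response == 'yes'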
def _terraform_init_backend(config):
    """Initialize the infrastructure backend (S3) using Terraform

    Args:
        config (CLIConfig): Loaded StreamAlert config

    Returns:
        bool: False if errors occurred, True otherwise
    """
    # Check for valid credentials
    if not check_credentials():
        return False

    # Verify terraform is installed
    if not terraform_check():
        return False

    # See generate_main() for how it uses the `init` kwarg for the local/remote backend
    if not terraform_generate_handler(config=config, init=False):
        return False

    LOGGER.info('Initializing StreamAlert backend')
    return run_command(['terraform', 'init'])
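# NOTE: illustrative sketch only. run_command is imported from the project's
# helpers; based on its usage above (a list of command arguments, returning a
# bool), a minimal equivalent could be written as below. This is an
# assumption about the helper's behavior, not the actual implementation.
import subprocess


def _example_run_command(args):
    """Run a shell command and report whether it succeeded

    Args:
        args (list): Command and arguments, e.g. ['terraform', 'init']

    Returns:
        bool: True if the command exited with status 0, False otherwise
    """
    try:
        # check_call raises CalledProcessError on a non-zero exit status
        subprocess.check_call(args)
    except (subprocess.CalledProcessError, OSError):
        return False

    return True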
def terraform_generate_handler(config, init=False, check_tf=True, check_creds=True):
    """Generate all Terraform plans for the configured clusters

    Keyword Args:
        config (dict): The loaded config from the 'conf/' directory
        init (bool): Indicates if main.tf.json is generated for `init`
        check_tf (bool): Whether to verify that Terraform is installed first
        check_creds (bool): Whether to check for valid AWS credentials first

    Returns:
        bool: Result of cluster generating
    """
    # Check for valid credentials
    if check_creds and not check_credentials():
        return False

    # Verify terraform is installed
    if check_tf and not terraform_check():
        return False

    _copy_terraform_files(config)

    # Set up the main.tf.json file
    LOGGER.debug('Generating cluster file: main.tf.json')
    _create_terraform_module_file(
        generate_main(config, init=init),
        os.path.join(config.build_directory, 'main.tf.json')
    )

    # Return early during the init process; clusters are not needed yet
    if init:
        return True

    # Set up cluster files
    for cluster in config.clusters():
        if cluster in RESTRICTED_CLUSTER_NAMES:
            raise InvalidClusterName(
                'Rename cluster "main" or "athena" to something else!')

        LOGGER.debug('Generating cluster file: %s.tf.json', cluster)
        cluster_dict = generate_cluster(config=config, cluster_name=cluster)
        if not cluster_dict:
            LOGGER.error('An error occurred while generating the %s cluster', cluster)
            return False

        file_name = '{}.tf.json'.format(cluster)
        _create_terraform_module_file(
            cluster_dict,
            os.path.join(config.build_directory, file_name),
        )

    metric_filters = generate_aggregate_cloudwatch_metric_filters(config)
    if metric_filters:
        _create_terraform_module_file(
            metric_filters,
            os.path.join(config.build_directory, 'metric_filters.tf.json')
        )

    metric_alarms = generate_aggregate_cloudwatch_metric_alarms(config)
    if metric_alarms:
        _create_terraform_module_file(
            metric_alarms,
            os.path.join(config.build_directory, 'metric_alarms.tf.json')
        )

    # Set up the Threat Intel Downloader Lambda function if it is enabled
    generate_global_lambda_settings(
        config,
        conf_name='threat_intel_downloader_config',
        generate_func=generate_threat_intel_downloader,
        tf_tmp_file_name='ti_downloader',
        required=False,
    )

    # Set up Rule Promotion if it is enabled
    generate_global_lambda_settings(
        config,
        conf_name='rule_promotion_config',
        generate_func=generate_rule_promotion,
        tf_tmp_file_name='rule_promotion',
        required=False,
    )

    # Set up the Athena Partitioner
    generate_global_lambda_settings(
        config,
        conf_name='athena_partitioner_config',
        generate_func=generate_athena,
        tf_tmp_file_name='athena',
    )

    # Set up the Rules Engine
    generate_global_lambda_settings(
        config,
        conf_name='rules_engine_config',
        generate_func=generate_rules_engine,
        tf_tmp_file_name='rules_engine',
    )

    # Set up the Alert Processor
    generate_global_lambda_settings(
        config,
        conf_name='alert_processor_config',
        generate_func=generate_alert_processor,
        tf_tmp_file_name='alert_processor',
    )

    # Set up the Alert Merger
    generate_global_lambda_settings(
        config,
        conf_name='alert_merger_config',
        generate_func=generate_alert_merger,
        tf_tmp_file_name='alert_merger',
    )

    # Set up Lookup Tables if applicable
    _generate_lookup_tables_settings(config)

    # Set up StreamQuery
    _generate_streamquery_module(config)

    # FIXME: make sure to test the 'python manage.py destroy' artifact_extractor case
    # Set up the Artifact Extractor
    _generate_artifact_extractor_module(config)

    return True
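# NOTE: illustrative sketch only. _create_terraform_module_file is defined
# elsewhere in this module; based on its usage above (a Terraform dict and a
# *.tf.json destination path), a minimal equivalent might look like the
# following. This is an assumption about the helper, not its actual body.
import json
import os


def _example_create_terraform_module_file(tf_dict, path):
    """Write a Terraform module definition out as a *.tf.json file

    Args:
        tf_dict (dict): Terraform configuration to serialize
        path (str): Destination path for the JSON file
    """
    directory = os.path.dirname(path)
    if directory:
        # Create the build directory if it does not already exist
        os.makedirs(directory, exist_ok=True)

    with open(path, 'w') as tf_file:
        # Sorted keys keep the generated files stable across runs, which
        # produces cleaner diffs of the build directory
        json.dump(tf_dict, tf_file, indent=2, sort_keys=True)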