def generate_alert_merger(config):
    """Generate Terraform for the Alert Merger

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Alert Merger Terraform definition to be marshaled to JSON
    """
    account = config['global']['account']
    prefix = account['prefix']

    terraform = infinitedict()

    # IAM permissions for the alert merger, tied to the Lambda's role below
    terraform['module']['alert_merger_iam'] = {
        'source': 'modules/tf_alert_merger_iam',
        'account_id': account['aws_account_id'],
        'region': account['region'],
        'prefix': prefix,
        'role_id': '${module.alert_merger_lambda.role_id}'
    }

    # Environment passed through to the alert merger Lambda function
    environment = {
        'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
        'ALERT_PROCESSOR': '{}_streamalert_alert_processor'.format(prefix),
        'ALERT_PROCESSOR_TIMEOUT_SEC': config['lambda']['alert_processor_config']['timeout'],
    }
    terraform['module']['alert_merger_lambda'] = generate_lambda(
        ALERT_MERGER_NAME, config, environment)

    return terraform
def generate_alert_processor(config):
    """Generate Terraform for the Alert Processor

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Alert Processor dict to be marshaled to JSON
    """
    account = config['global']['account']
    prefix = account['prefix']
    outputs = config['outputs']

    result = infinitedict()

    # Strip qualifiers: only the function name is needed for the IAM permissions
    lambda_function_names = [
        func.split(':')[0] for func in outputs.get('aws-lambda', {}).values()
    ]

    # IAM permissions for the alert processor and the outputs it dispatches to
    result['module']['alert_processor_iam'] = {
        'source': 'modules/tf_alert_processor_iam',
        'account_id': account['aws_account_id'],
        'region': account['region'],
        'prefix': prefix,
        'role_id': '${module.alert_processor_lambda.role_id}',
        'kms_key_arn': '${aws_kms_key.stream_alert_secrets.arn}',
        'sse_kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'output_lambda_functions': lambda_function_names,
        'output_s3_buckets': outputs.get('aws-s3', {}).values(),
        'output_sns_topics': outputs.get('aws-sns', {}).values(),
        'output_sqs_queues': outputs.get('aws-sqs', {}).values()
    }

    # Lambda function definition for the alert processor
    result['module']['alert_processor_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, ALERT_PROCESSOR_NAME),
        AlertProcessorPackage.package_name + '.zip',
        AlertProcessorPackage.lambda_handler,
        config['lambda']['alert_processor_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'AWS_ACCOUNT_ID': account['aws_account_id'],
            'STREAMALERT_PREFIX': prefix
        })

    return result
def generate_apps(cluster_name, cluster_dict, config):
    """Add the app integrations module to the Terraform cluster dict.

    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for
            a given cluster.
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        bool: Result of applying the app integration module
    """
    account = config['global']['account']
    prefix = account['prefix']
    apps_config = config['clusters'][cluster_name]['modules'].get('stream_alert_apps', {})

    for function_name, app_info in apps_config.iteritems():
        tf_module_prefix = 'app_{}_{}'.format(app_info['app_name'], cluster_name)

        # Each app forwards its collected data to this cluster's classifier
        destination_func = '{}_streamalert_classifier_{}'.format(prefix, cluster_name)

        app_config = {
            'app_type': app_info['type'],
            'destination_function_name': destination_func,
            'schedule_expression': app_info['schedule_expression']
        }

        # Format the iam module with 'app_<app_name_<cluster>_iam'
        cluster_dict['module']['{}_iam'.format(tf_module_prefix)] = {
            'account_id': account['aws_account_id'],
            'destination_function_name': destination_func,
            'function_name': function_name,
            'region': account['region'],
            'function_role_id': '${{module.{}_lambda.role_id}}'.format(tf_module_prefix),
            'source': 'modules/tf_stream_alert_app_iam'
        }

        # Format the lambda module with 'app_<app_name_<cluster>_lambda'
        cluster_dict['module']['{}_lambda'.format(tf_module_prefix)] = generate_lambda(
            function_name,
            AppPackage.package_name + '.zip',
            AppPackage.lambda_handler,
            apps_config[function_name],
            config,
            input_event=app_config)
def generate_app_integrations(cluster_name, cluster_dict, config):
    """Add the app integrations module to the Terraform cluster dict.

    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for
            a given cluster.
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        bool: Result of applying the app integration module
    """
    prefix = config['global']['account']['prefix']
    apps_config = config['clusters'][cluster_name]['modules'].get('stream_alert_apps', {})

    for function_name, app_info in apps_config.iteritems():
        # BUG FIX: the previous `function_name.rstrip('_app')` treated '_app'
        # as a SET of characters to strip, not a literal suffix, so any
        # function name whose stem ends in '_', 'a', or 'p' was mangled
        # (e.g. 'okta_app' -> 'okt'). Remove exactly the '_app' suffix instead.
        if function_name.endswith('_app'):
            func_prefix = function_name[:-len('_app')]
        else:
            func_prefix = function_name

        module_prefix = 'app_{}_{}'.format(app_info['app_name'], cluster_name)

        # Serialized app settings handed to the IAM module as a parameter value
        config_param = json.dumps({
            'type': app_info['type'],
            'app_name': app_info['app_name'],
            'prefix': prefix,
            'cluster': cluster_name,
            'schedule_expression': app_info['schedule_expression']
        })

        # Format the iam module with 'app_<app_name_<cluster>_iam'
        cluster_dict['module']['{}_iam'.format(module_prefix)] = {
            'account_id': config['global']['account']['aws_account_id'],
            'app_config_parameter': config_param,
            'cluster': cluster_name,
            'function_prefix': func_prefix,
            'prefix': prefix,
            'region': config['global']['account']['region'],
            'role_id': '${{module.{}_lambda.role_id}}'.format(module_prefix),
            'source': 'modules/tf_stream_alert_app_iam',
            'type': app_info['type']
        }

        # Format the lambda module with 'app_<app_name_<cluster>_lambda'
        cluster_dict['module']['{}_lambda'.format(module_prefix)] = generate_lambda(
            '{}_app'.format(func_prefix),
            AppIntegrationPackage.package_name + '.zip',
            AppIntegrationPackage.lambda_handler,
            apps_config[function_name],
            config)
def generate_rules_engine(config):
    """Generate Terraform for the Rules Engine

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Rules Engine Terraform definition to be marshaled to JSON
    """
    account = config['global']['account']
    prefix = account['prefix']
    threat_intel = config.get('threat_intel', {})

    result = infinitedict()

    # IAM permissions and supporting wiring for the rules engine
    result['module']['rules_engine_iam'] = {
        'source': 'modules/tf_rules_engine',
        'account_id': account['aws_account_id'],
        'region': account['region'],
        'prefix': prefix,
        'function_role_id': '${module.rules_engine_lambda.role_id}',
        'function_alias_arn': '${module.rules_engine_lambda.function_alias_arn}',
        'function_name': '${module.rules_engine_lambda.function_name}',
        'threat_intel_enabled': threat_intel.get('enabled'),
        'dynamodb_table_name': threat_intel.get('dynamodb_table_name'),
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
        'classifier_sqs_sse_kms_key_arn': '${module.globals.classifier_sqs_sse_kms_key_arn}',
        # Clamp the configurable batch size so it never exceeds 10
        'sqs_record_batch_size': min(config.get('sqs_record_batch_size', 10), 10)
    }

    # Lambda function definition for the rules engine
    result['module']['rules_engine_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, RULES_ENGINE_FUNCTION_NAME),
        RulesEnginePackage.package_name + '.zip',
        RulesEnginePackage.lambda_handler,
        config['lambda']['rules_engine_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'STREAMALERT_PREFIX': prefix
        })

    return result
def generate_rule_promotion(config):
    """Generate Terraform for the Rule Promotion function

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Rule Promotion dict to be marshaled to JSON
    """
    result = infinitedict()

    promo_config = config['lambda']['rule_promotion_config']

    # Initial stats publisher state, serialized deterministically (sort_keys)
    # so repeated generations produce identical Terraform
    state_param = json.dumps(
        {
            'send_digest_hour_utc': int(promo_config['send_digest_hour_utc']),
            'sent_daily_digest': False
        },
        sort_keys=True)

    # IAM permissions, publisher state and digest topic for rule promotion
    result['module']['rule_promotion_iam'] = {
        'source': 'modules/tf_rule_promotion_iam',
        'stats_publisher_state_name': StatsPublisher.SSM_STATE_NAME,
        'stats_publisher_state_value': state_param,
        'digest_sns_topic': StatsPublisher.formatted_sns_topic_arn(config).split(':')[-1],
        'role_id': '${module.rule_promotion_lambda.role_id}',
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'athena_results_bucket_arn': '${module.stream_alert_athena.results_bucket_arn}'
    }

    # Lambda function definition for rule promotion
    result['module']['rule_promotion_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'], RULE_PROMOTION_NAME),
        RulePromotionPackage.package_name + '.zip',
        RulePromotionPackage.lambda_handler,
        promo_config,
        config)

    return result
def generate_rule_promotion(config):
    """Generate Terraform for the Rule Promotion function

    NOTE(review): this shadows an earlier `generate_rule_promotion` definition
    in this file — confirm which version is intended to be kept.

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Rule Promotion dict to be marshaled to JSON, or False when the
            rule staging feature is disabled
    """
    # The Rule Promotion Lambda function is dependent on the rule staging
    # feature being enabled, so skip generation entirely when it is not
    if not config['global']['infrastructure']['rule_staging'].get('enabled', False):
        return False

    result = infinitedict()

    promo_config = config['lambda']['rule_promotion_config']
    athena_config = config['lambda']['athena_partition_refresh_config']
    data_buckets = athena_config['buckets'].keys()

    # IAM permissions, digest schedule and Athena access for rule promotion
    result['module']['rule_promotion_iam'] = {
        'source': 'modules/tf_rule_promotion_iam',
        'send_digest_schedule_expression':
            promo_config['send_digest_schedule_expression'],
        'digest_sns_topic': StatsPublisher.formatted_sns_topic_arn(config).split(':')[-1],
        'role_id': '${module.rule_promotion_lambda.role_id}',
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'function_alias_arn': '${module.rule_promotion_lambda.function_alias_arn}',
        'function_name': '${module.rule_promotion_lambda.function_name}',
        'athena_results_bucket_arn': '${module.stream_alert_athena.results_bucket_arn}',
        'athena_data_buckets': data_buckets,
        's3_kms_key_arn': '${aws_kms_key.server_side_encryption.arn}'
    }

    # Lambda function definition for rule promotion
    result['module']['rule_promotion_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'], RULE_PROMOTION_NAME),
        RulePromotionPackage.package_name + '.zip',
        RulePromotionPackage.lambda_handler,
        promo_config,
        config)

    return result
def generate_classifier(cluster_name, cluster_dict, config):
    """Add this cluster's classifier module to the Terraform cluster dict.

    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for
            a given cluster.
        config (dict): The loaded config from the 'conf/' directory

    JSON Input from the config:

        "stream_alert": {
          "classifier_config": {
            "log_level": "info",
            "log_retention_days": 14,
            "memory": 128,
            "metric_alarms": {
              "errors": {
                "enabled": true,
                "evaluation_periods": 1,
                "period_secs": 120,
                "threshold": 0
              },
              "throttles": {
                "enabled": true,
                "evaluation_periods": 1,
                "period_secs": 120,
                "threshold": 0
              }
            },
            "timeout": 60,
            "vpc_config": {
              "security_group_ids": [],
              "subnet_ids": []
            }
          }
        }
    """
    classifier_config = (
        config['clusters'][cluster_name]['modules']['stream_alert']['classifier_config']
    )

    tf_module_prefix = 'classifier_{}'.format(cluster_name)
    iam_module = '{}_iam'.format(tf_module_prefix)

    # IAM permissions and SQS wiring for this cluster's classifier
    iam_values = {
        'source': 'modules/tf_classifier',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'function_role_id': '${{module.{}_lambda.role_id}}'.format(tf_module_prefix),
        'function_alias_arn': '${{module.{}_lambda.function_alias_arn}}'.format(tf_module_prefix),
        'function_name': '${{module.{}_lambda.function_name}}'.format(tf_module_prefix),
        'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
        'classifier_sqs_queue_url': '${module.globals.classifier_sqs_queue_url}',
        'classifier_sqs_sse_kms_key_arn': '${module.globals.classifier_sqs_sse_kms_key_arn}',
    }
    cluster_dict['module'][iam_module] = iam_values

    # Map any classifier inputs from the loaded cluster file onto the
    # corresponding Terraform variables
    input_config = classifier_config.get('inputs')
    if input_config:
        input_mapping = {
            'input_sns_topics': 'aws-sns'
        }
        for tf_key, input_key in input_mapping.items():
            if input_key in input_config:
                iam_values[tf_key] = input_config[input_key]

    # Lambda function definition for this cluster's classifier
    cluster_dict['module']['{}_lambda'.format(tf_module_prefix)] = generate_lambda(
        '{}_streamalert_classifier_{}'.format(
            config['global']['account']['prefix'], cluster_name),
        ClassifierPackage.package_name + '.zip',
        ClassifierPackage.lambda_handler,
        classifier_config,
        config,
        environment={
            'CLUSTER': cluster_name,
            'SQS_QUEUE_URL': '${module.globals.classifier_sqs_queue_url}',
        },
        tags={
            'Cluster': cluster_name
        },
    )