Example #1
def generate_alert_merger(config):
    """Generate Terraform for the Alert Merger
    Args:
        config (dict): The loaded config from the 'conf/' directory
    Returns:
        dict: Alert Merger Terraform definition to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the alert merger's IAM permissions
    result['module']['alert_merger_iam'] = {
        'source': './modules/tf_alert_merger_iam',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'role_id': '${module.alert_merger_lambda.role_id}'
    }

    # Set variables for the Lambda module
    result['module']['alert_merger_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'], ALERT_MERGER_NAME),
        AlertMergerPackage.package_name + '.zip',
        AlertMergerPackage.lambda_handler,
        config['lambda']['alert_merger_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'ALERT_PROCESSOR': '{}_streamalert_alert_processor'.format(prefix),
            'ALERT_PROCESSOR_TIMEOUT_SEC': config['lambda']['alert_processor_config']['timeout'],
        }
    )

    return result
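
All of these generators start from infinitedict(), which lets nested keys such as result['module']['alert_merger_iam'] be assigned without creating the intermediate levels first. A minimal sketch of what such a helper could look like, assuming it is simply a recursively self-defaulting defaultdict (the real StreamAlert helper may accept extra arguments):

from collections import defaultdict


def infinitedict():
    """Return a dict whose missing keys default to another infinitedict.

    Sketch only: assumes the helper is a plain recursive defaultdict.
    """
    return defaultdict(infinitedict)


# Nested keys spring into existence on assignment
result = infinitedict()
result['module']['alert_merger_iam'] = {'source': './modules/tf_alert_merger_iam'}
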
Example #2
def generate_alert_processor(config):
    """Generate Terraform for the Alert Processor

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Alert Processor dict to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the IAM permissions module
    result['module']['alert_processor_iam'] = {
        'source': './modules/tf_alert_processor_iam',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': prefix,
        'role_id': '${module.alert_processor_lambda.role_id}',
        'kms_key_arn': '${aws_kms_key.streamalert_secrets.arn}',
        'sse_kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'output_lambda_functions': [
            # Strip qualifiers: only the function name is needed for the IAM permissions
            func.split(':')[0]
            for func in list(config['outputs'].get('aws-lambda', {}).values())
        ],
        'output_s3_buckets': list(config['outputs'].get('aws-s3', {}).values()),
        'output_sns_topics': list(config['outputs'].get('aws-sns', {}).values()),
        'output_sqs_queues': list(config['outputs'].get('aws-sqs', {}).values())
    }

    # Set variables for the Lambda module
    result['module']['alert_processor_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'],
                                   ALERT_PROCESSOR_NAME),
        AlertProcessorPackage.package_name + '.zip',
        AlertProcessorPackage.lambda_handler,
        config['lambda']['alert_processor_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'AWS_ACCOUNT_ID': config['global']['account']['aws_account_id'],
            'STREAMALERT_PREFIX': prefix
        })

    return result
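
The returned definition is meant to be marshaled to JSON so Terraform can read it. A minimal sketch of that final step, assuming a hypothetical output path (the actual StreamAlert CLI decides where generated files are written):

import json


def write_tf_json(definition, path='terraform/alert_processor.tf.json'):
    """Serialize a generated Terraform definition to a *.tf.json file.

    The path is illustrative only; defaultdict-based definitions serialize
    like ordinary dicts.
    """
    with open(path, 'w') as tf_file:
        json.dump(definition, tf_file, indent=2, sort_keys=True)


# Usage, assuming 'config' is the loaded 'conf/' dict:
# write_tf_json(generate_alert_processor(config))
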
Example #3
def generate_apps(cluster_name, cluster_dict, config):
    """Add the app integrations module to the Terraform cluster dict.

    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for
                                    a given cluster.
        config (dict): The loaded config from the 'conf/' directory

    Note:
        The cluster_dict is modified in place; this function does not return a value.
    """
    prefix = config['global']['account']['prefix']

    for function_name, app_info in config['clusters'][cluster_name][
            'modules'].get('streamalert_apps', {}).items():

        tf_module_prefix = 'app_{}_{}'.format(app_info['app_name'],
                                              cluster_name)

        destination_func = '{}_{}_streamalert_classifier'.format(
            prefix, cluster_name)

        app_config = {
            'app_type': app_info['type'],
            'destination_function_name': destination_func,
            'schedule_expression': app_info['schedule_expression']
        }

        # Format the iam module with 'app_<app_name>_<cluster>_iam'
        cluster_dict['module']['{}_iam'.format(tf_module_prefix)] = {
            'account_id': config['global']['account']['aws_account_id'],
            'destination_function_name': destination_func,
            'function_name': function_name,
            'region': config['global']['account']['region'],
            'function_role_id': '${{module.{}_lambda.role_id}}'.format(tf_module_prefix),
            'source': './modules/tf_app_iam'
        }

        # Format the lambda module with 'app_<app_name>_<cluster>_lambda'
        cluster_dict['module']['{}_lambda'.format(tf_module_prefix)] = generate_lambda(
            function_name,
            'streamalert.apps.main.handler',
            config['clusters'][cluster_name]['modules']['streamalert_apps'][function_name],
            config,
            input_event=app_config,
            include_layers=True,
        )
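
The keys read above imply the shape of the 'streamalert_apps' block in the cluster config. A hedged illustration of a single entry follows; the app name, type, and values are invented purely for illustration:

# Hypothetical cluster config fragment; only keys that generate_apps reads
# are shown, and every concrete value is made up for illustration.
example_cluster_config = {
    'clusters': {
        'prod': {
            'modules': {
                'streamalert_apps': {
                    'prod_example_collector_app': {
                        'app_name': 'example_collector',
                        'type': 'example_service',
                        'schedule_expression': 'rate(10 minutes)',
                        # Remaining keys are handed to generate_lambda as the
                        # function's own config (memory, timeout, etc.)
                        'memory': 128,
                        'timeout': 60,
                    }
                }
            }
        }
    }
}
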
Example #4
def generate_rule_promotion(config):
    """Generate Terraform for the Rule Promotion function

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Rule Promotion dict to be marshaled to JSON
    """
    # The Rule Promotion Lambda function depends on the rule staging feature being
    # enabled, so do not generate the code for this Lambda function if it is not enabled
    if not config['global']['infrastructure']['rule_staging'].get(
            'enabled', False):
        return False

    result = infinitedict()

    alerts_bucket = firehose_alerts_bucket(config)

    # Set variables for the IAM permissions, etc module
    result['module']['rule_promotion_iam'] = {
        'source': './modules/tf_rule_promotion_iam',
        'send_digest_schedule_expression':
            config['lambda']['rule_promotion_config']['send_digest_schedule_expression'],
        'digest_sns_topic': StatsPublisher.formatted_sns_topic_arn(config).split(':')[-1],
        'role_id': '${module.rule_promotion_lambda.role_id}',
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'function_alias_arn': '${module.rule_promotion_lambda.function_alias_arn}',
        'function_name': '${module.rule_promotion_lambda.function_name}',
        'athena_results_bucket_arn': '${module.streamalert_athena.results_bucket_arn}',
        'alerts_bucket': alerts_bucket,
        's3_kms_key_arn': '${aws_kms_key.server_side_encryption.arn}'
    }

    # Set variables for the Lambda module
    result['module']['rule_promotion_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'],
                                   RULE_PROMOTION_NAME),
        RulePromotionPackage.package_name + '.zip',
        RulePromotionPackage.lambda_handler,
        config['lambda']['rule_promotion_config'], config)

    return result
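
Because generate_rule_promotion returns False rather than a dict when rule staging is disabled, callers should check the result before marshaling it. A small sketch of that calling pattern; write_tf_json is an assumed serialization helper, not part of the function above:

# Hypothetical caller: only write a *.tf.json file when rule staging is enabled
tf_definition = generate_rule_promotion(config)
if tf_definition:
    write_tf_json(tf_definition)  # assumed helper that dumps the dict to JSON
else:
    print('Rule staging disabled; skipping Rule Promotion Terraform')
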
Example #5
def generate_threat_intel_downloader(config):
    """Generate Threat Intel Downloader Terrafrom

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Athena dict to be marshalled to JSON
    """
    # Use the monitoring topic as a dead letter queue
    dlq_topic, _ = monitoring_topic_name(config)

    prefix = config['global']['account']['prefix']

    # Threat Intel Downloader module
    tid_config = config['lambda']['threat_intel_downloader_config']

    # old format of config used interval, but tf_lambda expects 'schedule_expression'
    if 'schedule_expression' not in tid_config:
        tid_config['schedule_expression'] = tid_config.get(
            'interval', 'rate(1 day)')

    result = infinitedict()

    # Set variables for the threat intel downloader configuration
    result['module']['threat_intel_downloader_iam'] = {
        'source': './modules/tf_threat_intel_downloader',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': prefix,
        'function_role_id': '${module.threat_intel_downloader.role_id}',
        'function_alias_arn': '${module.threat_intel_downloader.function_alias_arn}',
        'function_cloudwatch_log_group_name': '${module.threat_intel_downloader.log_group_name}',
        'monitoring_sns_topic': dlq_topic,
        'table_rcu': tid_config.get('table_rcu', '10'),
        'table_wcu': tid_config.get('table_wcu', '10'),
        'max_read_capacity': tid_config.get('max_read_capacity', '5'),
        'min_read_capacity': tid_config.get('min_read_capacity', '5'),
        'target_utilization': tid_config.get('target_utilization', '70')
    }

    result['module']['threat_intel_downloader'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, THREAT_INTEL_DOWNLOADER_NAME),
        'streamalert.threat_intel_downloader.main.handler',
        tid_config,
        config,
    )
    return result
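
The backwards-compatibility shim for the legacy 'interval' key can be shown in isolation; the snippet below mirrors the conditional in the function above:

# Old-style config used 'interval'; tf_lambda expects 'schedule_expression'
tid_config = {'interval': 'rate(1 day)', 'timeout': 120}

if 'schedule_expression' not in tid_config:
    # Fall back to the legacy value, defaulting to once per day
    tid_config['schedule_expression'] = tid_config.get('interval', 'rate(1 day)')

assert tid_config['schedule_expression'] == 'rate(1 day)'
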
Example #6
def generate_rules_engine(config):
    """Generate Terraform for the Rules Engine
    Args:
        config (dict): The loaded config from the 'conf/' directory
    Returns:
        dict: Rules Engine Terraform definition to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the rules engine IAM permissions
    result['module']['rules_engine_iam'] = {
        'source': './modules/tf_rules_engine',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': prefix,
        'function_role_id': '${module.rules_engine_lambda.role_id}',
        'function_alias_arn': '${module.rules_engine_lambda.function_alias_arn}',
        'function_name': '${module.rules_engine_lambda.function_name}',
        'threat_intel_enabled': config.get('threat_intel', {}).get('enabled'),
        'dynamodb_table_name': config.get('threat_intel', {}).get('dynamodb_table_name'),
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'enable_rule_staging': config['global']['infrastructure']['rule_staging'].get(
            'enabled', False
        ),
        'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
        'classifier_sqs_sse_kms_key_arn': '${module.globals.classifier_sqs_sse_kms_key_arn}',
        'sqs_record_batch_size': min(config.get('sqs_record_batch_size', 10), 10)
    }

    environment = {
        'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
        'STREAMALERT_PREFIX': prefix,
    }

    if config['lambda']['rules_engine_config'].get('log_rule_statistics'):
        environment['STREAMALERT_TRACK_RULE_STATS'] = '1'

    # Set variables for the Lambda module
    result['module']['rules_engine_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, RULES_ENGINE_FUNCTION_NAME),
        'streamalert.rules_engine.main.handler',
        config['lambda']['rules_engine_config'],
        config,
        environment=environment,
    )

    return result
Example #7
def generate_classifier(cluster_name, cluster_dict, config):
    """Add this cluster's classifier module to the Terraform cluster dict.

    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for a given cluster.
        config (dict): The loaded config from the 'conf/' directory

    JSON Input from the config:

        {
          "classifier_config": {
            "log_level": "info",
            "log_retention_days": 14,
            "memory": 128,
            "metric_alarms": {
              "errors": {
                "enabled": true,
                "evaluation_periods": 1,
                "period_secs": 120,
                "threshold": 0
              },
              "throttles": {
                "enabled": true,
                "evaluation_periods": 1,
                "period_secs": 120,
                "threshold": 0
              }
            },
            "timeout": 60,
            "vpc_config": {
              "security_group_ids": [],
              "subnet_ids": []
            }
          }
        }
    """
    classifier_config = config['clusters'][cluster_name]['classifier_config']

    firehose_config = config['global']['infrastructure'].get('firehose', {})
    # The default value here must be consistent with the firehose client default
    use_firehose_prefix = firehose_config.get('use_prefix', True)

    tf_module_prefix = 'classifier_{}'.format(cluster_name)
    iam_module = '{}_iam'.format(tf_module_prefix)

    # Set variables for the classifier's IAM permissions
    cluster_dict['module'][iam_module] = {
        'source': './modules/tf_classifier',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'firehose_use_prefix': use_firehose_prefix,
        'function_role_id': '${{module.{}_lambda.role_id}}'.format(tf_module_prefix),
        'function_alias_arn': '${{module.{}_lambda.function_alias_arn}}'.format(tf_module_prefix),
        'function_name': '${{module.{}_lambda.function_name}}'.format(tf_module_prefix),
        'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
        'classifier_sqs_sse_kms_key_arn': '${module.globals.classifier_sqs_sse_kms_key_arn}',
    }

    # Add Classifier input config from the loaded cluster file
    input_config = classifier_config.get('inputs')
    if input_config:
        input_mapping = {
            'input_sns_topics': 'aws-sns'
        }
        for tf_key, input_key in input_mapping.items():
            if input_key in input_config:
                cluster_dict['module'][iam_module][tf_key] = input_config[input_key]

    # Set variables for the Lambda module
    cluster_dict['module']['{}_lambda'.format(tf_module_prefix)] = generate_lambda(
        '{}_{}_streamalert_classifier'.format(config['global']['account']['prefix'], cluster_name),
        'streamalert.classifier.main.handler',
        classifier_config,
        config,
        environment={
            'CLUSTER': cluster_name,
            'SQS_QUEUE_URL': '${module.globals.classifier_sqs_queue_url}',
        },
        tags={
            'Cluster': cluster_name
        },
    )
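
Unlike the account-level generators, the cluster-level generators mutate a passed-in dict instead of returning one. A hypothetical per-cluster driver, assuming infinitedict and the cluster generators are importable:

# Hypothetical driver: build one nested dict per cluster and let each
# cluster generator add its own modules to it.
for cluster_name in config['clusters']:
    cluster_dict = infinitedict()
    generate_classifier(cluster_name, cluster_dict, config)
    generate_apps(cluster_name, cluster_dict, config)
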
Example #8
def generate_athena(config):
    """Generate Athena Terraform.

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Athena dict to be marshalled to JSON
    """
    result = infinitedict()

    prefix = config['global']['account']['prefix']
    athena_config = config['lambda']['athena_partitioner_config']

    data_buckets = athena_partition_buckets_tf(config)
    database = athena_config.get('database_name',
                                 '{}_streamalert'.format(prefix))

    results_bucket_name = athena_query_results_bucket(config)

    queue_name = athena_config.get(
        'queue_name',
        '{}_streamalert_athena_s3_notifications'.format(prefix)).strip()

    logging_bucket, _ = s3_access_logging_bucket(config)

    # Set variables for the athena partitioner's IAM permissions
    result['module']['athena_partitioner_iam'] = {
        'source': './modules/tf_athena',
        'account_id': config['global']['account']['aws_account_id'],
        'prefix': prefix,
        's3_logging_bucket': logging_bucket,
        'database_name': database,
        'queue_name': queue_name,
        'athena_data_buckets': data_buckets,
        'results_bucket': results_bucket_name,
        'lambda_timeout': athena_config['timeout'],
        'kms_key_id': '${aws_kms_key.server_side_encryption.key_id}',
        'function_role_id': '${module.athena_partitioner_lambda.role_id}',
        'function_name': '${module.athena_partitioner_lambda.function_name}',
        'function_alias_arn': '${module.athena_partitioner_lambda.function_alias_arn}',
    }

    # Set variables for the Lambda module
    result['module']['athena_partitioner_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, ATHENA_PARTITIONER_NAME),
        'streamalert.athena_partitioner.main.handler',
        athena_config,
        config,
        tags={'Subcomponent': 'AthenaPartitioner'})

    return result
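
A quick, purely illustrative sanity check of the generated structure before handing it to Terraform; the assertions simply confirm the module keys set above:

import json

athena_tf = generate_athena(config)
assert 'athena_partitioner_iam' in athena_tf['module']
assert 'athena_partitioner_lambda' in athena_tf['module']

# Pretty-print the definition that would be marshaled to a *.tf.json file
print(json.dumps(athena_tf, indent=2, sort_keys=True))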