# Example 1
def test_generate_cloudwatch_monitoring_custom_sns():
    """CLI - Terraform Generate Cloudwatch Monitoring with Existing SNS Topic"""

    # Point the monitoring config at a user-supplied SNS topic name
    CONFIG['clusters']['test']['modules']['cloudwatch_monitoring'] = {'enabled': True}
    infrastructure = CONFIG['global']['infrastructure']
    infrastructure['monitoring']['sns_topic_name'] = 'unit_test_monitoring'

    generated = common.infinitedict()
    success = monitoring.generate_monitoring('test', generated, CONFIG)

    # The custom topic should be reflected in the generated SNS ARN
    expected = {
        'source': './modules/tf_monitoring',
        'sns_topic_arn': 'arn:aws:sns:us-west-1:12345678910:unit_test_monitoring',
        'lambda_functions': ['unit-test_test_streamalert_classifier'],
        'kinesis_stream': '${module.kinesis_test.stream_name}',
        'lambda_alarms_enabled': True,
        'kinesis_alarms_enabled': True
    }

    assert_true(success)
    assert_equal(generated['module']['cloudwatch_monitoring_test'], expected)
# Example 2
    def test_firehose_enabled_log(self):
        """CLI - Terraform Generate Kinesis Firehose, Enabled Log"""
        generated = common.infinitedict()

        # Enable a single log with an empty settings dict (alarms disabled)
        firehose_settings = self.config['global']['infrastructure']['firehose']
        firehose_settings['enabled_logs'] = {'json:embedded': {}}

        firehose.generate_firehose(self._logging_bucket_name, generated, self.config)

        expected = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
                'kinesis_firehose_json_embedded': {
                    'source': './modules/tf_kinesis_firehose_delivery_stream',
                    'buffer_size': 128,
                    'buffer_interval': 900,
                    'file_format': 'parquet',
                    'stream_name': 'unit_test_streamalert_json_embedded',
                    'role_arn': '${module.kinesis_firehose_setup.firehose_role_arn}',
                    's3_bucket_name': 'unit-test-streamalert-data',
                    'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
                    'glue_catalog_db_name': 'unit-test_streamalert',
                    'glue_catalog_table_name': 'json_embedded',
                    'schema': self._get_expected_schema()
                }
            }
        }
        assert_equal(generated, expected)
# Example 3
def generate_aggregate_cloudwatch_metric_alarms(config):
    """Return any CloudWatch Metric Alarms for aggregate metrics

    Args:
        config (dict): The loaded config from the 'conf/' directory
    """
    terraform = infinitedict()

    topic_arn = monitoring_topic_arn(config)

    for function_name, function_settings in config['lambda'].items():
        alarms = function_settings.get('custom_metric_alarms')
        if not alarms:
            continue

        # Module names use the bare function name, without the '_config' suffix
        function_name = function_name.replace('_config', '')

        # Sort alarm names so module indices are deterministic across runs
        for index, alarm_name in enumerate(sorted(alarms)):
            settings = alarms[alarm_name]
            settings['source'] = './modules/tf_metric_alarms'
            settings['sns_topic_arn'] = topic_arn
            settings['alarm_name'] = alarm_name
            module_key = 'metric_alarm_{}_{}'.format(function_name, index)
            terraform['module'][module_key] = settings

    return terraform
# Example 4
def generate_alert_merger(config):
    """Generate Terraform for the Alert Merger

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Alert Merger Terraform definition to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the alert merger's IAM permissions
    result['module']['alert_merger_iam'] = {
        'source': './modules/tf_alert_merger_iam',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        # Reuse the local instead of re-reading the value from the config
        'prefix': prefix,
        'role_id': '${module.alert_merger_lambda.role_id}'
    }

    # Set variables for the Lambda module
    result['module']['alert_merger_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, ALERT_MERGER_NAME),
        AlertMergerPackage.package_name + '.zip',
        AlertMergerPackage.lambda_handler,
        config['lambda']['alert_merger_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'ALERT_PROCESSOR': '{}_streamalert_alert_processor'.format(prefix),
            'ALERT_PROCESSOR_TIMEOUT_SEC': config['lambda']['alert_processor_config']['timeout'],
        }
    )

    return result
# Example 5
    def test_firehose_enabled_log_alarm_custom(self):
        """CLI - Terraform Generate Kinesis Firehose, Enabled Alarm - Custom Settings"""
        generated = common.infinitedict()

        # Enable a log with alarming on and fully customized alarm settings
        firehose_settings = self.config['global']['infrastructure']['firehose']
        firehose_settings['enabled_logs'] = {
            'json:embedded': {
                'enable_alarm': True,
                'evaluation_periods': 10,
                'period_seconds': 3600,
                'log_min_count_threshold': 100000
            }
        }

        firehose.generate_firehose(self._logging_bucket_name, generated, self.config)

        expected = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
                'kinesis_firehose_json_embedded': {
                    'source': './modules/tf_kinesis_firehose_delivery_stream',
                    'buffer_size': 128,
                    'buffer_interval': 900,
                    'file_format': 'parquet',
                    'stream_name': 'unit_test_streamalert_json_embedded',
                    'role_arn': '${module.kinesis_firehose_setup.firehose_role_arn}',
                    's3_bucket_name': 'unit-test-streamalert-data',
                    'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
                    'enable_alarm': True,
                    'evaluation_periods': 10,
                    'period_seconds': 3600,
                    'alarm_threshold': 100000,
                    'alarm_actions': [
                        'arn:aws:sns:us-west-1:12345678910:unit-test_streamalert_monitoring'
                    ],
                    'glue_catalog_db_name': 'unit-test_streamalert',
                    'glue_catalog_table_name': 'json_embedded',
                    'schema': self._get_expected_schema()
                }
            }
        }

        assert_equal(generated, expected)
# Example 6
def generate_cluster(config, cluster_name):
    """Generate a StreamAlert cluster file.

    Args:
        config (dict): The loaded config from the 'conf/' directory
        cluster_name (str): The name of the currently generating cluster

    Returns:
        dict: generated Terraform cluster dictionary
    """
    modules = config['clusters'][cluster_name]['modules']
    cluster_dict = infinitedict()

    # These generators always run, regardless of the module settings
    generate_classifier(cluster_name, cluster_dict, config)
    generate_cluster_cloudwatch_metric_filters(cluster_name, cluster_dict, config)
    generate_cluster_cloudwatch_metric_alarms(cluster_name, cluster_dict, config)

    # Optional generators, each gated on its own module flag. A falsy return
    # from any generator aborts cluster generation (the function returns None).
    optional_generators = [
        (modules.get('cloudwatch_monitoring', {}).get('enabled'), generate_monitoring),
        (modules.get('kinesis'), generate_kinesis_streams),
        (modules.get('kinesis_events'), generate_kinesis_events),
        (modules.get('cloudtrail'), generate_cloudtrail),
        # purposely not using .get, since no extra settings are required for this module
        ('cloudwatch_events' in modules, generate_cloudwatch_events),
        (modules.get('cloudwatch_logs_destination'), generate_cloudwatch_destinations),
        (modules.get('flow_logs'), generate_flow_logs),
        (modules.get('s3_events'), generate_s3_events),
    ]

    for enabled, generator in optional_generators:
        if enabled and not generator(cluster_name, cluster_dict, config):
            return

    generate_apps(cluster_name, cluster_dict, config)

    return cluster_dict
# Example 7
def test_generate_cloudwatch_monitoring_invalid_config(mock_logging):
    """CLI - Terraform Generate Cloudwatch Monitoring with Invalid Config"""
    # Wipe the infrastructure settings so generation cannot succeed
    CONFIG['global']['infrastructure'] = {}

    generated = common.infinitedict()
    success = monitoring.generate_monitoring('test', generated, CONFIG)

    # An error should have been logged and the call should report failure
    assert_true(mock_logging.error.called)
    assert_false(success)
# Example 8
def test_generate_cloudwatch_monitoring_disabled():
    """CLI - Terraform Generate Cloudwatch Monitoring Disabled"""
    generated = common.infinitedict()
    cluster_name = 'trusted'
    success = monitoring.generate_monitoring(cluster_name, generated, CONFIG)

    assert_true(success)
    # No monitoring module should be emitted when the feature is disabled
    module_key = 'cloudwatch_monitoring_{}'.format(cluster_name)
    assert_true(module_key not in generated['module'])
# Example 9
def generate_alert_processor(config):
    """Generate Terraform for the Alert Processor

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Alert Processor dict to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the IAM permissions module
    result['module']['alert_processor_iam'] = {
        'source': './modules/tf_alert_processor_iam',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': prefix,
        'role_id': '${module.alert_processor_lambda.role_id}',
        'kms_key_arn': '${aws_kms_key.streamalert_secrets.arn}',
        'sse_kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'output_lambda_functions': [
            # Strip qualifiers: only the function name is needed for the IAM permissions
            func.split(':')[0]
            for func in config['outputs'].get('aws-lambda', {}).values()
        ],
        'output_s3_buckets': list(config['outputs'].get('aws-s3', {}).values()),
        'output_sns_topics': list(config['outputs'].get('aws-sns', {}).values()),
        'output_sqs_queues': list(config['outputs'].get('aws-sqs', {}).values())
    }

    # Set variables for the Lambda module
    result['module']['alert_processor_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, ALERT_PROCESSOR_NAME),
        AlertProcessorPackage.package_name + '.zip',
        AlertProcessorPackage.lambda_handler,
        config['lambda']['alert_processor_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'AWS_ACCOUNT_ID': config['global']['account']['aws_account_id'],
            'STREAMALERT_PREFIX': prefix
        })

    return result
# Example 10
def _terraform_defaults(region):
    """Return the baseline Terraform and AWS provider settings for *region*."""
    defaults = {
        'terraform': {
            'required_version': TERRAFORM_VERSION,
        },
        'provider': {
            'aws': {
                'region': region,
                'version': TERRAFORM_PROVIDER_VERSION,
            },
        },
    }
    return infinitedict(defaults)
# Example 11
def generate_rule_promotion(config):
    """Generate Terraform for the Rule Promotion function

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Rule Promotion dict to be marshaled to JSON
    """
    # Rule promotion depends on the rule staging feature being enabled,
    # so skip generating this Lambda function otherwise
    staging_settings = config['global']['infrastructure']['rule_staging']
    if not staging_settings.get('enabled', False):
        return False

    result = infinitedict()

    alerts_bucket = firehose_alerts_bucket(config)
    promotion_config = config['lambda']['rule_promotion_config']

    # Set variables for the IAM permissions, etc module
    result['module']['rule_promotion_iam'] = {
        'source': './modules/tf_rule_promotion_iam',
        'send_digest_schedule_expression':
            promotion_config['send_digest_schedule_expression'],
        'digest_sns_topic':
            StatsPublisher.formatted_sns_topic_arn(config).split(':')[-1],
        'role_id': '${module.rule_promotion_lambda.role_id}',
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'function_alias_arn': '${module.rule_promotion_lambda.function_alias_arn}',
        'function_name': '${module.rule_promotion_lambda.function_name}',
        'athena_results_bucket_arn': '${module.streamalert_athena.results_bucket_arn}',
        'alerts_bucket': alerts_bucket,
        's3_kms_key_arn': '${aws_kms_key.server_side_encryption.arn}'
    }

    # Set variables for the Lambda module
    result['module']['rule_promotion_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'],
                                   RULE_PROMOTION_NAME),
        RulePromotionPackage.package_name + '.zip',
        RulePromotionPackage.lambda_handler,
        promotion_config, config)

    return result
# Example 12
    def test_firehose_defaults(self):
        """CLI - Terraform Generate Kinesis Firehose, Defaults"""
        generated = common.infinitedict()
        firehose.generate_firehose(self._logging_bucket_name, generated, self.config)

        # With no logs enabled, only the setup module should be emitted
        expected = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
            }
        }

        assert_equal(generated, expected)
# Example 13
def generate_threat_intel_downloader(config):
    """Generate Terraform for the Threat Intel Downloader

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Threat Intel Downloader dict to be marshalled to JSON
    """
    # Use the monitoring topic as a dead letter queue
    dlq_topic, _ = monitoring_topic_name(config)

    prefix = config['global']['account']['prefix']

    # Threat Intel Downloader module
    downloader_config = config['lambda']['threat_intel_downloader_config']

    # Older configs specified 'interval', but tf_lambda expects 'schedule_expression'
    downloader_config.setdefault(
        'schedule_expression', downloader_config.get('interval', 'rate(1 day)'))

    result = infinitedict()

    # Set variables for the threat intel downloader configuration
    result['module']['threat_intel_downloader_iam'] = {
        'source': './modules/tf_threat_intel_downloader',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': prefix,
        'function_role_id': '${module.threat_intel_downloader.role_id}',
        'function_alias_arn': '${module.threat_intel_downloader.function_alias_arn}',
        'function_cloudwatch_log_group_name':
            '${module.threat_intel_downloader.log_group_name}',
        'monitoring_sns_topic': dlq_topic,
        'table_rcu': downloader_config.get('table_rcu', '10'),
        'table_wcu': downloader_config.get('table_wcu', '10'),
        'max_read_capacity': downloader_config.get('max_read_capacity', '5'),
        'min_read_capacity': downloader_config.get('min_read_capacity', '5'),
        'target_utilization': downloader_config.get('target_utilization', '70')
    }

    result['module']['threat_intel_downloader'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, THREAT_INTEL_DOWNLOADER_NAME),
        'streamalert.threat_intel_downloader.main.handler',
        downloader_config,
        config,
    )
    return result
# Example 14
def generate_rules_engine(config):
    """Generate Terraform for the Rules Engine
    Args:
        config (dict): The loaded config from the 'conf/' directory
    Returns:
        dict: Rules Engine Terraform definition to be marshaled to JSON
    """
    account = config['global']['account']
    prefix = account['prefix']
    threat_intel = config.get('threat_intel', {})
    rules_engine_config = config['lambda']['rules_engine_config']

    result = infinitedict()

    # Set variables for the rules engine IAM permissions
    result['module']['rules_engine_iam'] = {
        'source': './modules/tf_rules_engine',
        'account_id': account['aws_account_id'],
        'region': account['region'],
        'prefix': prefix,
        'function_role_id': '${module.rules_engine_lambda.role_id}',
        'function_alias_arn': '${module.rules_engine_lambda.function_alias_arn}',
        'function_name': '${module.rules_engine_lambda.function_name}',
        'threat_intel_enabled': threat_intel.get('enabled'),
        'dynamodb_table_name': threat_intel.get('dynamodb_table_name'),
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'enable_rule_staging': config['global']['infrastructure']['rule_staging'].get(
            'enabled', False
        ),
        'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
        'classifier_sqs_sse_kms_key_arn': '${module.globals.classifier_sqs_sse_kms_key_arn}',
        # Batch size is capped at 10, the maximum SQS allows for Lambda triggers
        'sqs_record_batch_size': min(config.get('sqs_record_batch_size', 10), 10)
    }

    environment = {
        'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
        'STREAMALERT_PREFIX': prefix,
    }

    if rules_engine_config.get('log_rule_statistics'):
        environment['STREAMALERT_TRACK_RULE_STATS'] = '1'

    # Set variables for the Lambda module
    result['module']['rules_engine_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, RULES_ENGINE_FUNCTION_NAME),
        'streamalert.rules_engine.main.handler',
        rules_engine_config,
        config,
        environment=environment,
    )

    return result
# Example 15
    def test_firehose_enabled_log_alarm_defaults(self):
        """CLI - Terraform Generate Kinesis Firehose, Enabled Alarm - Default Settings"""
        generated = common.infinitedict()

        # Enable a log with alarms on (terraform default alarm settings apply)
        firehose_settings = self.config['global']['infrastructure']['firehose']
        firehose_settings['enabled_logs'] = {
            'json:embedded': {
                'enable_alarm': True
            }
        }

        firehose.generate_firehose(self._logging_bucket_name, generated, self.config)

        expected = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
                'kinesis_firehose_json_embedded': {
                    'source': './modules/tf_kinesis_firehose_delivery_stream',
                    'buffer_size': 128,
                    'buffer_interval': 900,
                    'compression_format': 'GZIP',
                    'use_prefix': True,
                    'prefix': 'unit-test',
                    'log_name': 'json_embedded',
                    'role_arn': '${module.kinesis_firehose_setup.firehose_role_arn}',
                    's3_bucket_name': 'unit-test-streamalert-data',
                    'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
                    'enable_alarm': True,
                    'alarm_actions': [
                        'arn:aws:sns:us-west-1:12345678910:unit-test_streamalert_monitoring'
                    ]
                }
            }
        }

        assert_equal(generated, expected)
# Example 16
def generate_artifact_extractor(config):
    """Generate Terraform for the Artifact Extractor Lambda function
    Args:
        config (dict): The loaded config from the 'conf/' directory
    Returns:
        dict: Artifact Extractor Terraform definition to be marshaled to JSON
    """
    # Bail out before doing any work when the feature is disabled; the
    # original allocated the result dict first, which was dead work
    if not artifact_extractor_enabled(config):
        return

    result = infinitedict()

    ae_config = config['global']['infrastructure']['artifact_extractor']
    stream_name = FirehoseClient.artifacts_firehose_stream_name(config)

    # Set variables for the artifact extractor module
    result['module']['artifact_extractor'] = {
        'source': './modules/tf_artifact_extractor',
        'account_id': config['global']['account']['aws_account_id'],
        'prefix': config['global']['account']['prefix'],
        'region': config['global']['account']['region'],
        'glue_catalog_db_name': get_database_name(config),
        'glue_catalog_table_name': ae_config.get('table_name', DEFAULT_ARTIFACTS_TABLE_NAME),
        's3_bucket_name': firehose_data_bucket(config),
        'stream_name': stream_name,
        'buffer_size': ae_config.get('firehose_buffer_size', 128),
        'buffer_interval': ae_config.get('firehose_buffer_interval', 900),
        'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'schema': generate_artifacts_table_schema()
    }

    return result
# Example 17
def test_generate_cloudwatch_monitoring():
    """CLI - Terraform Generate Cloudwatch Monitoring"""
    generated = common.infinitedict()
    success = monitoring.generate_monitoring('test', generated, CONFIG)

    # With no custom topic configured, the default SNS topic should be used
    expected = {
        'source': './modules/tf_monitoring',
        'sns_topic_arn':
            'arn:aws:sns:us-west-1:12345678910:unit-test_streamalert_monitoring',
        'lambda_functions': ['unit-test_test_streamalert_classifier'],
        'kinesis_stream': '${module.kinesis_test.stream_name}',
        'lambda_alarms_enabled': True,
        'kinesis_alarms_enabled': True
    }

    assert_true(success)
    assert_equal(generated['module']['cloudwatch_monitoring_test'], expected)
# Example 18
def test_kinesis_streams_with_custom_name():
    """CLI - Terraform Generate Kinesis Streams with Custom Name"""
    generated = common.infinitedict()
    custom_stream = 'test-stream-name'
    cluster_name = 'advanced'

    # Override the default stream name with a user-supplied one
    kinesis_settings = CONFIG['clusters'][cluster_name]['modules']['kinesis']
    kinesis_settings['streams']['stream_name'] = custom_stream

    success = kinesis_streams.generate_kinesis_streams(cluster_name, generated, CONFIG)

    expected = {
        'module': {
            'kinesis_advanced': {
                'source': './modules/tf_kinesis_streams',
                'account_id': '12345678910',
                'shard_level_metrics': ["IncomingBytes"],
                'region': 'us-west-1',
                'prefix': 'unit-test',
                'cluster': cluster_name,
                'stream_name': custom_stream,
                'shards': 1,
                'retention': 24,
                'create_user': True,
                'trusted_accounts': []
            }
        },
        'output': {
            'kinesis_advanced_access_key_id': {
                'value': '${module.kinesis_advanced.access_key_id}'
            },
            'kinesis_advanced_secret_key': {
                'value': '${module.kinesis_advanced.secret_key}'
            },
            'kinesis_advanced_user_arn': {
                'value': '${module.kinesis_advanced.user_arn}'
            }
        }
    }

    assert_true(success)
    assert_equal(generated, expected)
# Example 19
def test_generate_s3_events():
    """CLI - Terraform - S3 Events, No Module Prefix"""
    cluster_dict = common.infinitedict()
    result = s3_events.generate_s3_events('advanced', cluster_dict, CONFIG)

    expected_config = {
        'module': {
            's3_events_unit-test_advanced_unit-test-bucket_data': {
                'source': './modules/tf_s3_events',
                'lambda_function_alias': '${module.classifier_advanced_lambda.function_alias}',
                'lambda_function_alias_arn': (
                    '${module.classifier_advanced_lambda.function_alias_arn}'
                ),
                'lambda_function_name': '${module.classifier_advanced_lambda.function_name}',
                'bucket_name': 'unit-test-bucket.data',
                'lambda_role_id': '${module.classifier_advanced_lambda.role_id}',
                'filters': [
                    {
                        'filter_prefix': 'AWSLogs/123456789/CloudTrail/us-east-1/',
                        'filter_suffix': '.log'
                    }
                ]
            },
            's3_events_unit-test_advanced_unit-test_cloudtrail_data': {
                'source': './modules/tf_s3_events',
                'lambda_function_alias': '${module.classifier_advanced_lambda.function_alias}',
                'lambda_function_alias_arn': (
                    '${module.classifier_advanced_lambda.function_alias_arn}'
                ),
                'lambda_function_name': '${module.classifier_advanced_lambda.function_name}',
                'bucket_name': 'unit-test.cloudtrail.data',
                'lambda_role_id': '${module.classifier_advanced_lambda.role_id}',
                'filters': []
            }
        }
    }

    # Use assert_true for the success flag, consistent with the sibling
    # generator tests in this file (was assert_equal(result, True))
    assert_true(result)
    assert_equal(cluster_dict, expected_config)
# Example 20
def test_generate_s3_events_with_prefix():
    """CLI - Terraform - S3 Events, With Module Prefix"""
    generated = common.infinitedict()
    buckets = {
        'unit-test-bucket': [
            {
                'filter_prefix': 'AWSLogs/123456789/CloudTrail/us-east-1/',
            }
        ]
    }
    s3_events.generate_s3_events_by_bucket(
        'advanced', generated, CONFIG, buckets, module_prefix='cloudtrail')

    # The module name should carry the supplied 'cloudtrail' prefix
    expected = {
        'module': {
            'cloudtrail_s3_events_unit-test_advanced_unit-test-bucket': {
                'source': './modules/tf_s3_events',
                'lambda_function_alias': '${module.classifier_advanced_lambda.function_alias}',
                'lambda_function_alias_arn':
                    '${module.classifier_advanced_lambda.function_alias_arn}',
                'lambda_function_name': '${module.classifier_advanced_lambda.function_name}',
                'bucket_name': 'unit-test-bucket',
                'lambda_role_id': '${module.classifier_advanced_lambda.role_id}',
                'filters': [
                    {
                        'filter_prefix': 'AWSLogs/123456789/CloudTrail/us-east-1/'
                    }
                ]
            },
        }
    }

    assert_equal(generated, expected)
# Example 21
def test_kinesis_events():
    """CLI - Terraform Generate Kinesis Events"""
    generated = common.infinitedict()
    success = kinesis_events.generate_kinesis_events('advanced', generated, CONFIG)

    expected = {
        'module': {
            'kinesis_events_advanced': {
                'source': './modules/tf_kinesis_events',
                'batch_size': 100,
                'lambda_production_enabled': True,
                'lambda_role_id': '${module.classifier_advanced_lambda.role_id}',
                'lambda_function_alias_arn':
                    '${module.classifier_advanced_lambda.function_alias_arn}',
                'kinesis_stream_arn': '${module.kinesis_advanced.arn}',
            }
        }
    }

    assert_true(success)
    assert_equal(generated, expected)
# Example 22
def test_kinesis_streams_with_trusted_account():
    """CLI - Terraform Generate Kinesis Streams with trusted account"""
    generated = common.infinitedict()
    success = kinesis_streams.generate_kinesis_streams('trusted', generated, CONFIG)

    # The trusted account ID should be passed through to the module
    expected = {
        'module': {
            'kinesis_trusted': {
                'source': './modules/tf_kinesis_streams',
                'account_id': '12345678910',
                'shard_level_metrics': [],
                'region': 'us-west-1',
                'prefix': 'unit-test',
                'cluster': 'trusted',
                'stream_name': 'unit-test_trusted_streamalert',
                'shards': 1,
                'retention': 24,
                'create_user': True,
                'trusted_accounts': ['98765432100']
            }
        },
        'output': {
            'kinesis_trusted_access_key_id': {
                'value': '${module.kinesis_trusted.access_key_id}'
            },
            'kinesis_trusted_secret_key': {
                'value': '${module.kinesis_trusted.secret_key}'
            },
            'kinesis_trusted_username': {
                'value': '${module.kinesis_trusted.username}'
            }
        }
    }

    assert_true(success)
    assert_equal(generated, expected)
def generate_threat_intel_downloader(config):
    """Generate Terraform for the Threat Intel Downloader

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Threat Intel Downloader dict to be marshalled to JSON
    """
    # The monitoring SNS topic doubles as a dead letter queue
    dlq_topic, _ = monitoring_topic_name(config)

    # Threat Intel Downloader module
    downloader_config = config['lambda']['threat_intel_downloader_config']
    result = infinitedict()
    result['module']['threat_intel_downloader'] = {
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'source': './modules/tf_threat_intel_downloader',
        'lambda_handler': ThreatIntelDownloaderPackage.lambda_handler,
        'lambda_memory': downloader_config.get('memory', '128'),
        'lambda_timeout': downloader_config.get('timeout', '60'),
        'lambda_log_level': downloader_config.get('log_level', 'info'),
        'interval': downloader_config.get('interval', 'rate(1 day)'),
        'prefix': config['global']['account']['prefix'],
        'monitoring_sns_topic': dlq_topic,
        'table_rcu': downloader_config.get('table_rcu', '10'),
        'table_wcu': downloader_config.get('table_wcu', '10'),
        'max_read_capacity': downloader_config.get('max_read_capacity', '5'),
        'min_read_capacity': downloader_config.get('min_read_capacity', '5'),
        'target_utilization': downloader_config.get('target_utilization', '70')
    }
    return result
# Example 24
def generate_main(config, init=False):
    """Generate the main.tf.json Terraform dict

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Terraform is running in the init phase or not (optional)

    Returns:
        dict: main.tf.json Terraform dict
    """
    write_vars(config, region=config['global']['account']['region'])

    main_dict = infinitedict()

    logging_bucket, create_logging_bucket = s3_access_logging_bucket(config)

    state_lock_table_name = '{}_streamalert_terraform_state_lock'.format(
        config['global']['account']['prefix']
    )

    # Setup the Backend depending on the deployment phase
    main_dict['terraform']['backend'] = _generate_state_backend(
        config, init, state_lock_table_name
    )

    # Configure initial S3 buckets
    main_dict['resource']['aws_s3_bucket'] = {
        'streamalerts': generate_s3_bucket(
            bucket=firehose_alerts_bucket(config),
            logging=logging_bucket
        )
    }

    # Configure remote state locking table
    main_dict['resource']['aws_dynamodb_table'] = {
        'terraform_remote_state_lock': {
            'name': state_lock_table_name,
            'billing_mode': 'PAY_PER_REQUEST',
            'hash_key': 'LockID',
            'attribute': {
                'name': 'LockID',
                'type': 'S'
            },
            'tags': {
                'Name': 'StreamAlert'
            }
        }
    }

    # Create bucket for S3 access logs (if applicable)
    if create_logging_bucket:
        main_dict['resource']['aws_s3_bucket']['logging_bucket'] = generate_s3_bucket(
            bucket=logging_bucket,
            logging=logging_bucket,
            acl='log-delivery-write',
            lifecycle_rule={
                'prefix': '/',
                'enabled': True,
                'transition': {
                    'days': 365,
                    'storage_class': 'GLACIER'
                }
            },
            sse_algorithm='AES256'  # SSE-KMS doesn't seem to work with access logs
        )

    terraform_bucket_name, create_state_bucket = terraform_state_bucket(config)
    # Create bucket for Terraform state (if applicable)
    if create_state_bucket:
        main_dict['resource']['aws_s3_bucket']['terraform_remote_state'] = generate_s3_bucket(
            bucket=terraform_bucket_name,
            logging=logging_bucket
        )

    # Setup Firehose Delivery Streams
    generate_firehose(logging_bucket, main_dict, config)

    # Configure global resources like Firehose alert delivery and alerts table
    main_dict['module']['globals'] = _generate_global_module(config)

    # KMS Key and Alias creation
    _generate_kms_resources(main_dict, config)

    # Global infrastructure settings
    topic_name, create_topic = monitoring_topic_name(config)
    if create_topic:
        main_dict['resource']['aws_sns_topic']['monitoring'] = {
            'name': topic_name
        }

    return main_dict


def _generate_state_backend(config, init, state_lock_table_name):
    """Return the Terraform backend configuration for storing state

    When first setting up StreamAlert (init=True), the Terraform statefile
    is stored locally. After the first dependencies are created, this
    moves to S3.

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Terraform is running in the init phase or not
        state_lock_table_name (str): DynamoDB table used for state locking

    Returns:
        dict: value for the 'terraform.backend' key of main.tf.json
    """
    if init:
        return {
            'local': {
                'path': 'terraform.tfstate',
            }
        }

    terraform_bucket_name, _ = terraform_state_bucket(config)
    return {
        's3': {
            'bucket': terraform_bucket_name,
            'key': config['global'].get('terraform', {}).get(
                'state_key_name',
                'streamalert_state/terraform.tfstate'
            ),
            'region': config['global']['account']['region'],
            'encrypt': True,
            'dynamodb_table': state_lock_table_name,
            'acl': 'private',
            'kms_key_id': 'alias/{}'.format(
                config['global']['account'].get(
                    'kms_key_alias',
                    '{}_streamalert_secrets'.format(config['global']['account']['prefix'])
                )
            ),
        }
    }


def _generate_kms_resources(main_dict, config):
    """Add KMS keys/aliases for S3 server-side encryption and secrets management

    Args:
        main_dict (dict): The main.tf.json Terraform dict being built (mutated)
        config (CLIConfig): The loaded CLI config
    """
    account_id = config['global']['account']['aws_account_id']
    prefix = config['global']['account']['prefix']

    main_dict['resource']['aws_kms_key']['server_side_encryption'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert S3 Server-Side Encryption',
        'policy': json.dumps({
            'Version': '2012-10-17',
            'Statement': [
                {
                    'Sid': 'Enable IAM User Permissions',
                    'Effect': 'Allow',
                    'Principal': {
                        'AWS': 'arn:aws:iam::{}:root'.format(account_id)
                    },
                    'Action': 'kms:*',
                    'Resource': '*'
                },
                {
                    'Sid': 'Allow principals in the account to use the key',
                    'Effect': 'Allow',
                    'Principal': '*',
                    'Action': ['kms:Decrypt', 'kms:GenerateDataKey*', 'kms:Encrypt'],
                    'Resource': '*',
                    'Condition': {
                        'StringEquals': {
                            'kms:CallerAccount': account_id
                        }
                    }
                }
            ]
        })
    }
    main_dict['resource']['aws_kms_alias']['server_side_encryption'] = {
        'name': 'alias/{}_server-side-encryption'.format(prefix),
        'target_key_id': '${aws_kms_key.server_side_encryption.key_id}'
    }

    main_dict['resource']['aws_kms_key']['streamalert_secrets'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert secret management'
    }
    main_dict['resource']['aws_kms_alias']['streamalert_secrets'] = {
        'name': 'alias/{}'.format(
            config['global']['account'].get(
                'kms_key_alias',
                '{}_streamalert_secrets'.format(prefix)
            )
        ),
        'target_key_id': '${aws_kms_key.streamalert_secrets.key_id}'
    }
예제 #25
0
 def setup(self):
     """Create a fresh config and cluster dict before each test method"""
     self.config = CLIConfig(config_path='tests/unit/conf')
     self.cluster_dict = common.infinitedict()
예제 #26
0
def generate_athena(config):
    """Generate Athena Terraform.

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Athena dict to be marshalled to JSON
    """
    athena_dict = infinitedict()
    athena_config = config['lambda']['athena_partition_refresh_config']

    prefix = config['global']['account']['prefix']
    database = athena_config.get('database_name',
                                 '{}_streamalert'.format(prefix))
    queue_name = athena_config.get(
        'queue_name',
        '{}_streamalert_athena_s3_notifications'.format(prefix)
    ).strip()

    logging_bucket, _ = s3_access_logging_bucket(config)

    # Main Athena module, wiring in the bucket/database/queue settings above
    athena_dict['module']['streamalert_athena'] = {
        's3_logging_bucket': logging_bucket,
        'source': './modules/tf_athena',
        'database_name': database,
        'queue_name': queue_name,
        'results_bucket': athena_query_results_bucket(config),
        'kms_key_id': '${aws_kms_key.server_side_encryption.key_id}',
        'lambda_handler': AthenaPackage.lambda_handler,
        'lambda_memory': athena_config.get('memory', '128'),
        'lambda_timeout': athena_config.get('timeout', '60'),
        'lambda_log_level': athena_config.get('log_level', 'info'),
        'athena_data_buckets': sorted(athena_partition_buckets(config)),
        'concurrency_limit': athena_config.get('concurrency_limit', 10),
        'account_id': config['global']['account']['aws_account_id'],
        'prefix': prefix
    }

    # Cloudwatch monitoring setup; the refresh function has no Kinesis alarms
    athena_dict['module']['athena_monitoring'] = {
        'source': './modules/tf_monitoring',
        'sns_topic_arn': monitoring_topic_arn(config),
        'lambda_functions': [
            '{}_streamalert_athena_partition_refresh'.format(prefix)
        ],
        'kinesis_alarms_enabled': False
    }

    # Metrics setup: bail out unless custom metrics are enabled
    if not athena_config.get('enable_custom_metrics', False):
        return athena_dict

    # Check to see if there are any metrics configured for the athena function
    current_metrics = metrics.MetricLogger.get_available_metrics()
    if metrics.ATHENA_PARTITION_REFRESH_NAME not in current_metrics:
        return athena_dict

    # Add filters for the cluster and aggregate.
    # Each filter is a string of the following comma separated values:
    #   <filter_name>,<filter_pattern>,<value>
    athena_metrics = current_metrics[metrics.ATHENA_PARTITION_REFRESH_NAME]
    athena_dict['module']['streamalert_athena']['athena_metric_filters'] = [
        '{},{},{}'.format(
            'AthenaRefresh-{}'.format(metric), settings[0], settings[1]
        )
        for metric, settings in athena_metrics.items()
    ]

    return athena_dict
예제 #27
0
def generate_aggregate_cloudwatch_metric_filters(config):
    """Return the CloudWatch Metric Filters information for aggregate metrics

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Terraform dict of tf_metric_filters modules, or None when no
            function has custom metrics enabled
    """
    # Map each cluster to its clustered functions with custom metrics enabled
    functions = {
        cluster: [
            func.replace('_config', '') for func in CLUSTERED_FUNCTIONS
            if cluster_config['{}_config'.format(func)].get(
                'enable_custom_metrics')
        ]
        for cluster, cluster_config in config['clusters'].items()
    }

    # Non-clustered (global) functions with custom metrics enabled
    functions['global'] = {
        func.replace('_config', '')
        for func, func_config in config['lambda'].items()
        if func_config.get('enable_custom_metrics')
    }

    if not any(functions.values()):
        return  # Nothing to add if no funcs have metrics enabled

    result = infinitedict()

    current_metrics = metrics.MetricLogger.get_available_metrics()

    # NOTE: use a distinct loop variable so the 'functions' mapping is not
    # shadowed by the per-cluster function list (original code reused the name)
    for cluster, cluster_functions in functions.items():
        is_global = cluster == 'global'

        for function in cluster_functions:
            # This function may not actually support any custom metrics
            if function not in current_metrics:
                continue

            metric_prefix = metrics.FUNC_PREFIXES.get(function)
            if not metric_prefix:
                continue

            # Global functions' log groups are not suffixed with a cluster name
            log_group_name = (
                '${{module.{}_{}_lambda.log_group_name}}'.format(
                    function, cluster) if not is_global else
                '${{module.{}_lambda.log_group_name}}'.format(function))

            # Module names use the upper-cased cluster; clustered functions
            # get an '_AGGREGATE' suffix to distinguish them from per-cluster
            # (non-aggregate) filters
            module_cluster = cluster.upper()
            if not is_global:
                module_cluster = '{}_AGGREGATE'.format(module_cluster)

            # Add filters for the cluster and aggregate
            for metric, filter_settings in current_metrics[function].items():
                module_name = 'metric_filters_{}_{}_{}'.format(
                    metric_prefix, metric, module_cluster)
                result['module'][module_name] = {
                    'source': './modules/tf_metric_filters',
                    'log_group_name': log_group_name,
                    'metric_name': '{}-{}'.format(metric_prefix, metric),
                    'metric_pattern': filter_settings[0],
                    'metric_value': filter_settings[1],
                }

    return result
    def test_generate_classifier(self):
        """CLI - Terraform Generate, Classifier"""
        # generate_classifier mutates cluster_dict in place for the 'test' cluster
        cluster_dict = common.infinitedict()
        classifier.generate_classifier('test', cluster_dict, self.config)

        # Expected output is split across two modules: the classifier IAM /
        # permissions module (tf_classifier) and its Lambda function (tf_lambda)
        expected_result = {
            'module': {
                'classifier_test_iam': {
                    'source':
                    './modules/tf_classifier',
                    'account_id':
                    '123456789012',
                    'region':
                    'us-east-1',
                    'prefix':
                    'unit-test',
                    'firehose_use_prefix':
                    True,
                    # Cross-module references into the paired Lambda module
                    'function_role_id':
                    '${module.classifier_test_lambda.role_id}',
                    'function_alias_arn':
                    '${module.classifier_test_lambda.function_alias_arn}',
                    'function_name':
                    '${module.classifier_test_lambda.function_name}',
                    'classifier_sqs_queue_arn':
                    '${module.globals.classifier_sqs_queue_arn}',
                    'classifier_sqs_sse_kms_key_arn':
                    ('${module.globals.classifier_sqs_sse_kms_key_arn}'),
                    'input_sns_topics':
                    ['arn:aws:sns:us-east-1:123456789012:foo_bar']
                },
                'classifier_test_lambda': {
                    'alarm_actions':
                    ['arn:aws:sns:us-east-1:123456789012:test_topic'],
                    'description':
                    'Unit-Test Test Streamalert Classifier',
                    'environment_variables': {
                        'CLUSTER': 'test',
                        'SQS_QUEUE_URL':
                        '${module.globals.classifier_sqs_queue_url}',
                        'LOGGER_LEVEL': 'info',
                        'ENABLE_METRICS': '0'
                    },
                    'tags': {
                        'Cluster': 'test'
                    },
                    # CloudWatch alarm defaults for errors and throttles
                    'errors_alarm_enabled':
                    True,
                    'errors_alarm_evaluation_periods':
                    1,
                    'errors_alarm_period_secs':
                    120,
                    'errors_alarm_threshold':
                    0,
                    'function_name':
                    'unit-test_test_streamalert_classifier',
                    'handler':
                    'streamalert.classifier.main.handler',
                    'log_retention_days':
                    14,
                    'memory_size_mb':
                    128,
                    'source':
                    './modules/tf_lambda',
                    'throttles_alarm_enabled':
                    True,
                    'throttles_alarm_evaluation_periods':
                    1,
                    'throttles_alarm_period_secs':
                    120,
                    'throttles_alarm_threshold':
                    0,
                    'timeout_sec':
                    60,
                    'vpc_security_group_ids': [],
                    'vpc_subnet_ids': [],
                    'input_sns_topics':
                    ['arn:aws:sns:us-east-1:123456789012:foo_bar']
                }
            }
        }

        assert_equal(cluster_dict, expected_result)
예제 #29
0
def generate_athena(config):
    """Generate Athena Terraform.

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Athena dict to be marshalled to JSON
    """
    result = infinitedict()

    account_prefix = config['global']['account']['prefix']
    partitioner_config = config['lambda']['athena_partitioner_config']

    access_logging_bucket, _ = s3_access_logging_bucket(config)

    # Set variables for the athena partitioner's IAM permissions
    result['module']['athena_partitioner_iam'] = {
        'source': './modules/tf_athena',
        'account_id': config['global']['account']['aws_account_id'],
        'prefix': account_prefix,
        's3_logging_bucket': access_logging_bucket,
        'database_name': partitioner_config.get(
            'database_name',
            '{}_streamalert'.format(account_prefix)
        ),
        'queue_name': partitioner_config.get(
            'queue_name',
            '{}_streamalert_athena_s3_notifications'.format(account_prefix)
        ).strip(),
        'athena_data_buckets': athena_partition_buckets_tf(config),
        'results_bucket': athena_query_results_bucket(config),
        'lambda_timeout': partitioner_config['timeout'],
        'kms_key_id': '${aws_kms_key.server_side_encryption.key_id}',
        'function_role_id': '${module.athena_partitioner_lambda.role_id}',
        'function_name': '${module.athena_partitioner_lambda.function_name}',
        'function_alias_arn': '${module.athena_partitioner_lambda.function_alias_arn}',
    }

    # Set variables for the Lambda module
    result['module']['athena_partitioner_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(account_prefix, ATHENA_PARTITIONER_NAME),
        'streamalert.athena_partitioner.main.handler',
        partitioner_config,
        config,
        tags={'Subcomponent': 'AthenaPartitioner'})

    return result