Example #1
def generate_alert_merger(config):
    """Generate Terraform for the Alert Merger
    Args:
        config (dict): The loaded config from the 'conf/' directory
    Returns:
        dict: Alert Merger Terraform definition to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the alert merger's IAM permissions
    result['module']['alert_merger_iam'] = {
        'source': 'modules/tf_alert_merger_iam',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'role_id': '${module.alert_merger_lambda.role_id}'
    }

    # Set variables for the Lambda module
    result['module']['alert_merger_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'], ALERT_MERGER_NAME),
        config['lambda']['alert_merger_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'ALERT_PROCESSOR': '{}_streamalert_alert_processor'.format(prefix),
            'ALERT_PROCESSOR_TIMEOUT_SEC': config['lambda']['alert_processor_config']['timeout'],
        }
    )

    return result
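Every example in this collection starts from infinitedict(), which returns a dictionary that can be assigned into at arbitrary depth (result['module']['alert_merger_iam'] = {...}) without first creating the intermediate keys. A minimal sketch of such a helper, assuming the common recursive-defaultdict approach rather than StreamAlert's exact implementation:

from collections import defaultdict

def infinitedict():
    """Return a dict whose missing keys autovivify into nested dicts."""
    return defaultdict(infinitedict)

result = infinitedict()
result['module']['alert_merger_iam']['prefix'] = 'acme'  # no KeyError raised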
Example #2
def generate_aggregate_cloudwatch_metric_alarms(config):
    """Return any CloudWatch Metric Alarms for aggregate metrics

    Args:
        config (dict): The loaded config from the 'conf/' directory
    """
    result = infinitedict()

    sns_topic_arn = monitoring_topic_arn(config)

    for func, func_config in config['lambda'].iteritems():
        metric_alarms = func_config.get('custom_metric_alarms')
        if not metric_alarms:
            continue

        func = func.replace('_config', '')

        for idx, name in enumerate(metric_alarms):
            alarm_settings = metric_alarms[name]
            alarm_settings['source'] = 'modules/tf_metric_alarms'
            alarm_settings['sns_topic_arn'] = sns_topic_arn
            alarm_settings['alarm_name'] = name
            result['module']['metric_alarm_{}_{}'.format(func, idx)] = alarm_settings

    return result
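The loop above expects each Lambda function's 'custom_metric_alarms' entry to map alarm names to their settings dict. A hypothetical conf snippet with the shape this code consumes (the alarm settings keys shown are illustrative, not taken from a real StreamAlert config):

config['lambda']['rules_engine_config']['custom_metric_alarms'] = {
    'Aggregate Failed Parses Alarm': {
        'comparison_operator': 'GreaterThanOrEqualToThreshold',
        'evaluation_periods': 1,
        'period': 300,
        'threshold': 1.0
    }
}

With this input, the generator would emit a module keyed 'metric_alarm_rules_engine_0' whose source is 'modules/tf_metric_alarms'.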
Example #3
    def test_firehose_enabled_log(self):
        """CLI - Terraform Generate Kinesis Firehose, Enabled Log"""
        cluster_dict = common.infinitedict()

        # Add an enabled log, with no alarm configuration (aka: alarms disabled)
        self.config['global']['infrastructure']['firehose']['enabled_logs'] = {
            'json:embedded': {}
        }

        firehose.generate_firehose(self._logging_bucket_name, cluster_dict, self.config)

        expected_result = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
                'kinesis_firehose_json_embedded': {
                    'source': 'modules/tf_stream_alert_kinesis_firehose_delivery_stream',
                    'buffer_size': 128,
                    'buffer_interval': 900,
                    'compression_format': 'GZIP',
                    'log_name': 'json_embedded',
                    'role_arn': '${module.kinesis_firehose_setup.firehose_role_arn}',
                    's3_bucket_name': 'unit-testing.streamalert.data',
                    'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}'
                }
            }
        }

        assert_equal(cluster_dict, expected_result)
Example #4
def test_kinesis_events():
    """CLI - Terraform Generate Kinesis Events"""
    cluster_dict = common.infinitedict()
    result = kinesis_events.generate_kinesis_events('advanced', cluster_dict,
                                                    CONFIG)

    expected_result = {
        'module': {
            'kinesis_events_advanced': {
                'source': 'modules/tf_stream_alert_kinesis_events',
                'batch_size': 100,
                'lambda_production_enabled': True,
                'lambda_role_id': '${module.classifier_advanced_lambda.role_id}',
                'lambda_function_alias_arn': '${module.classifier_advanced_lambda.function_alias_arn}',
                'kinesis_stream_arn': '${module.kinesis_advanced.arn}',
                'role_policy_prefix': 'advanced'
            }
        }
    }

    assert_true(result)
    assert_equal(cluster_dict, expected_result)
Example #5
def test_generate_cloudwatch_monitoring_custom_sns():
    """CLI - Terraform Generate Cloudwatch Monitoring with Existing SNS Topic"""

    # Test a custom SNS topic name
    CONFIG['clusters']['test']['modules']['cloudwatch_monitoring'] = {
        'enabled': True
    }
    CONFIG['global']['infrastructure']['monitoring']['create_sns_topic'] = False
    CONFIG['global']['infrastructure']['monitoring']['sns_topic_name'] = 'unit_test_monitoring'

    cluster_dict = common.infinitedict()
    result = monitoring.generate_monitoring('test', cluster_dict, CONFIG)

    expected_cloudwatch_tf_custom = {
        'source': 'modules/tf_stream_alert_monitoring',
        'sns_topic_arn': 'arn:aws:sns:us-west-1:12345678910:unit_test_monitoring',
        'lambda_functions': ['unit-testing_streamalert_classifier_test'],
        'kinesis_stream': 'unit-testing_test_stream_alert_kinesis',
        'lambda_alarms_enabled': True,
        'kinesis_alarms_enabled': True
    }

    assert_true(result)
    assert_equal(cluster_dict['module']['cloudwatch_monitoring_test'],
                 expected_cloudwatch_tf_custom)
Example #6
def generate_aggregate_cloudwatch_metric_filters(config):
    """Return the CloudWatch Metric Filters information for aggregate metrics

    Args:
        config (dict): The loaded config from the 'conf/' directory
    """
    functions = {
        cluster: [
            func.replace('_config', '')
            for func in CLUSTERED_FUNCTIONS
            if cluster_config['modules']['stream_alert']['{}_config'.format(func)].get(
                'enable_custom_metrics'
            )
        ] for cluster, cluster_config in config['clusters'].iteritems()
    }

    functions['global'] = {
        func.replace('_config', '') for func, func_config in config['lambda'].iteritems()
        if func_config.get('enable_custom_metrics')
    }

    if not any(funcs for funcs in functions.values()):
        return  # Nothing to add if no funcs have metrics enabled

    result = infinitedict()

    current_metrics = metrics.MetricLogger.get_available_metrics()

    for cluster, cluster_funcs in functions.iteritems():
        is_global = cluster == 'global'

        for function in cluster_funcs:
            # This function may not actually support any custom metrics
            if function not in current_metrics:
                continue

            metric_prefix = metrics.FUNC_PREFIXES.get(function)
            if not metric_prefix:
                continue

            log_group_name = (
                '${{module.{}_{}_lambda.log_group_name}}'.format(function, cluster)
                if not is_global else '${{module.{}_lambda.log_group_name}}'.format(function)
            )

            # Add filters for the cluster and aggregate
            for metric, filter_settings in current_metrics[function].iteritems():
                module_name = (
                    'metric_filters_{}_{}_{}'.format(metric_prefix, metric, cluster)
                    if is_global else 'metric_filters_{}_{}'.format(metric_prefix, metric)
                )
                result['module'][module_name] = {
                    'source': 'modules/tf_metric_filters',
                    'log_group_name': log_group_name,
                    'metric_name': '{}-{}'.format(metric_prefix, metric),
                    'metric_pattern': filter_settings[0],
                    'metric_value': filter_settings[1],
                }

    return result
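The doubled braces in the log-group templates are ordinary str.format escaping: '{{' and '}}' emit literal braces, so the Terraform interpolation syntax survives formatting. A quick demonstration:

template = '${{module.{}_{}_lambda.log_group_name}}'
print(template.format('classifier', 'prod'))
# prints: ${module.classifier_prod_lambda.log_group_name}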
Example #7
def test_kinesis_streams_with_trusted_account():
    """CLI - Terraform Generate Kinesis Streams with trusted account"""
    cluster_dict = common.infinitedict()
    result = kinesis_streams.generate_kinesis_streams('trusted', cluster_dict,
                                                      CONFIG)

    expected_result = {
        'module': {
            'kinesis_trusted': {
                'source': 'modules/tf_stream_alert_kinesis_streams',
                'account_id': '12345678910',
                'shard_level_metrics': [],
                'region': 'us-west-1',
                'prefix': 'unit-testing',
                'cluster_name': 'trusted',
                'stream_name': 'unit-testing_trusted_stream_alert_kinesis',
                'shards': 1,
                'retention': 24,
                'create_user': True,
                'trusted_accounts': ['98765432100']
            }
        }
    }

    assert_true(result)
    assert_equal(cluster_dict, expected_result)
Example #8
def generate_alert_processor(config):
    """Generate Terraform for the Alert Processor

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Alert Processor dict to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the IAM permissions module
    result['module']['alert_processor_iam'] = {
        'source': 'modules/tf_alert_processor_iam',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': prefix,
        'role_id': '${module.alert_processor_lambda.role_id}',
        'kms_key_arn': '${aws_kms_key.stream_alert_secrets.arn}',
        'output_lambda_functions': [
            # Strip qualifiers: only the function name is needed for the IAM permissions
            func.split(':')[0] for func in config['outputs'].get('aws-lambda', {}).values()
        ],
        'output_s3_buckets': config['outputs'].get('aws-s3', {}).values(),
        'output_sns_topics': config['outputs'].get('aws-sns', {}).values(),
        'output_sqs_queues': config['outputs'].get('aws-sqs', {}).values()
    }

    # Set variables for the Lambda module
    result['module']['alert_processor_lambda'] = generate_lambda(
        ALERT_PROCESSOR_NAME, config, {'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix)})

    return result
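These generators all return plain dicts; the docstrings say "marshaled to JSON" because the caller serializes each result into a *.tf.json file, a format Terraform reads natively. A minimal sketch of that final step, with a hypothetical output path:

import json

terraform_config = generate_alert_processor(config)
with open('terraform/alert_processor.tf.json', 'w') as tf_file:
    json.dump(terraform_config, tf_file, indent=2, sort_keys=True)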
Example #9
    def test_firehose_enabled_log_alarm_defaults(self):
        """CLI - Terraform Generate Kinesis Firehose, Enabled Alarm - Default Settings"""
        cluster_dict = common.infinitedict()

        # Add an enabled log, with alarms on (will use terraform default settings)
        self.config['global']['infrastructure']['firehose']['enabled_logs'] = {
            'json:embedded': {
                'enable_alarm': True
            }
        }

        firehose.generate_firehose(self._logging_bucket_name, cluster_dict, self.config)

        expected_result = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
                'kinesis_firehose_json_embedded': {
                    'source': 'modules/tf_stream_alert_kinesis_firehose_delivery_stream',
                    'buffer_size': 128,
                    'buffer_interval': 900,
                    'compression_format': 'GZIP',
                    'log_name': 'json_embedded',
                    'role_arn': '${module.kinesis_firehose_setup.firehose_role_arn}',
                    's3_bucket_name': 'unit-testing.streamalert.data',
                    'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
                    'enable_alarm': True,
                    'alarm_actions': ['arn:aws:sns:us-west-1:12345678910:stream_alert_monitoring']
                }
            }
        }

        assert_equal(cluster_dict, expected_result)
Example #10
def test_generate_s3_events():
    """CLI - Terraform - S3 Events with Valid Buckets"""
    cluster_dict = common.infinitedict()
    result = s3_events.generate_s3_events('advanced', cluster_dict, CONFIG)

    expected_config = {
        'module': {
            's3_events_unit-testing_advanced_0': {
                'source': 'modules/tf_stream_alert_s3_events',
                'lambda_function_arn': '${module.stream_alert_advanced.lambda_arn}',
                'bucket_id': 'unit-test-bucket.data',
                'notification_id': 'advanced_0',
                'enable_events': True,
                'lambda_role_id': '${module.stream_alert_advanced.lambda_role_id}',
                'filter_suffix': '.log',
                'filter_prefix': 'AWSLogs/123456789/CloudTrail/us-east-1/'
            },
            's3_events_unit-testing_advanced_1': {
                'source': 'modules/tf_stream_alert_s3_events',
                'lambda_function_arn': '${module.stream_alert_advanced.lambda_arn}',
                'bucket_id': 'unit-test.cloudtrail.data',
                'enable_events': False,
                'notification_id': 'advanced_1',
                'lambda_role_id': '${module.stream_alert_advanced.lambda_role_id}',
                'filter_suffix': '',
                'filter_prefix': ''
            }
        }
    }

    assert_true(result)
    assert_equal(cluster_dict, expected_config)
Example #11
def test_generate_s3_events_invalid_bucket(mock_logging):
    """CLI - Terraform - S3 Events with Missing Bucket Key"""
    cluster_dict = common.infinitedict()
    CONFIG['clusters']['advanced']['modules']['s3_events'] = [{'wrong_key': 'my-bucket!!!'}]
    result = s3_events.generate_s3_events('advanced', cluster_dict, CONFIG)

    assert_true(mock_logging.error.called)
    assert_false(result)
Example #12
def test_generate_cloudwatch_monitoring_disabled():
    """CLI - Terraform Generate Cloudwatch Monitoring Disabled"""
    cluster_dict = common.infinitedict()
    cluster = 'trusted'
    result = monitoring.generate_monitoring(cluster, cluster_dict, CONFIG)

    assert_true(result)
    assert_true('cloudwatch_monitoring_{}'.format(cluster) not in cluster_dict['module'])
Example #13
def generate_cluster(**kwargs):
    """Generate a StreamAlert cluster file.

    Keyword Args:
        cluster_name (str): The name of the currently generating cluster
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: generated Terraform cluster dictionary
    """
    config = kwargs.get('config')
    cluster_name = kwargs.get('cluster_name')

    modules = config['clusters'][cluster_name]['modules']
    cluster_dict = infinitedict()

    if not generate_stream_alert(cluster_name, cluster_dict, config):
        return

    generate_cloudwatch_metric_filters(cluster_name, cluster_dict, config)

    generate_cloudwatch_metric_alarms(cluster_name, cluster_dict, config)

    if modules.get('cloudwatch_monitoring', {}).get('enabled'):
        if not generate_monitoring(cluster_name, cluster_dict, config):
            return

    if modules.get('kinesis'):
        if not generate_kinesis_streams(cluster_name, cluster_dict, config):
            return

    outputs = config['clusters'][cluster_name].get('outputs')
    if outputs:
        if not generate_outputs(cluster_name, cluster_dict, config):
            return

    if modules.get('kinesis_events'):
        if not generate_kinesis_events(cluster_name, cluster_dict, config):
            return

    cloudtrail_info = modules.get('cloudtrail')
    if cloudtrail_info:
        if not generate_cloudtrail(cluster_name, cluster_dict, config):
            return

    flow_log_info = modules.get('flow_logs')
    if flow_log_info:
        if not generate_flow_logs(cluster_name, cluster_dict, config):
            return

    s3_events_info = modules.get('s3_events')
    if s3_events_info:
        if not generate_s3_events(cluster_name, cluster_dict, config):
            return

    generate_app_integrations(cluster_name, cluster_dict, config)

    return cluster_dict
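Note the fail-fast contract: each generate_* helper returns a falsy value on error, and generate_cluster propagates that by returning None rather than a half-built cluster dict. A sketch of how a caller might drive it (the file layout here is illustrative):

import json

for cluster_name in config['clusters']:
    cluster_dict = generate_cluster(cluster_name=cluster_name, config=config)
    if not cluster_dict:
        raise SystemExit('Terraform generation failed for cluster: {}'.format(cluster_name))
    with open('terraform/{}.tf.json'.format(cluster_name), 'w') as tf_file:
        json.dump(cluster_dict, tf_file, indent=2, sort_keys=True)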
Example #14
def generate_cluster(config, cluster_name):
    """Generate a StreamAlert cluster file.

    Args:
        config (dict): The loaded config from the 'conf/' directory
        cluster_name (str): The name of the currently generating cluster

    Returns:
        dict: generated Terraform cluster dictionary
    """
    modules = config['clusters'][cluster_name]['modules']
    cluster_dict = infinitedict()

    generate_classifier(cluster_name, cluster_dict, config)

    generate_cluster_cloudwatch_metric_filters(cluster_name, cluster_dict,
                                               config)

    generate_cluster_cloudwatch_metric_alarms(cluster_name, cluster_dict,
                                              config)

    if modules.get('cloudwatch_monitoring', {}).get('enabled'):
        if not generate_monitoring(cluster_name, cluster_dict, config):
            return

    if modules.get('kinesis'):
        if not generate_kinesis_streams(cluster_name, cluster_dict, config):
            return

    outputs = config['clusters'][cluster_name].get('outputs')
    if outputs:
        if not generate_outputs(cluster_name, cluster_dict, config):
            return

    if modules.get('kinesis_events'):
        if not generate_kinesis_events(cluster_name, cluster_dict, config):
            return

    if modules.get('cloudtrail'):
        if not generate_cloudtrail(cluster_name, cluster_dict, config):
            return

    if modules.get('cloudwatch'):
        if not generate_cloudwatch(cluster_name, cluster_dict, config):
            return

    if modules.get('flow_logs'):
        if not generate_flow_logs(cluster_name, cluster_dict, config):
            return

    if modules.get('s3_events'):
        if not generate_s3_events(cluster_name, cluster_dict, config):
            return

    generate_apps(cluster_name, cluster_dict, config)

    return cluster_dict
Example #15
def test_generate_cloudwatch_monitoring_invalid_config(mock_logging):
    """CLI - Terraform Generate Cloudwatch Monitoring with Invalid Config"""
    CONFIG['global']['infrastructure'] = {}

    cluster_dict = common.infinitedict()
    result = monitoring.generate_monitoring('test', cluster_dict, CONFIG)

    assert_true(mock_logging.error.called)
    assert_false(result)
Example #16
def generate_rules_engine(config):
    """Generate Terraform for the Rules Engine
    Args:
        config (dict): The loaded config from the 'conf/' directory
    Returns:
        dict: Rules Engine Terraform definition to be marshaled to JSON
    """
    prefix = config['global']['account']['prefix']

    result = infinitedict()

    # Set variables for the rules engine IAM permissions
    result['module']['rules_engine_iam'] = {
        'source': 'modules/tf_rules_engine',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': prefix,
        'function_role_id': '${module.rules_engine_lambda.role_id}',
        'function_alias_arn': '${module.rules_engine_lambda.function_alias_arn}',
        'function_name': '${module.rules_engine_lambda.function_name}',
        'threat_intel_enabled': config.get('threat_intel', {}).get('enabled'),
        'dynamodb_table_name': config.get('threat_intel', {}).get('dynamodb_table_name'),
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
        'classifier_sqs_sse_kms_key_arn': '${module.globals.classifier_sqs_sse_kms_key_arn}',
        'sqs_record_batch_size': min(config.get('sqs_record_batch_size', 10), 10)
    }

    # Set variables for the Lambda module
    result['module']['rules_engine_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(prefix, RULES_ENGINE_FUNCTION_NAME),
        RulesEnginePackage.package_name + '.zip',
        RulesEnginePackage.lambda_handler,
        config['lambda']['rules_engine_config'],
        config,
        environment={
            'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
            'STREAMALERT_PREFIX': prefix
        })

    return result
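The min(config.get('sqs_record_batch_size', 10), 10) expression silently clamps the configured value, since 10 records was the maximum batch size an SQS event source mapping supported when this code was written:

# Values above the cap collapse to 10; values at or below pass through unchanged.
assert min(25, 10) == 10
assert min(3, 10) == 3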
Example #17
    def test_firehose_defaults(self):
        """CLI - Terraform Generate Kinesis Firehose, Defaults"""
        cluster_dict = common.infinitedict()
        firehose.generate_firehose(self._logging_bucket_name, cluster_dict, self.config)

        expected_result = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
            }
        }

        assert_equal(cluster_dict, expected_result)
Example #18
def test_generate_s3_events_legacy():
    """CLI - Terraform - S3 Events - Legacy"""
    cluster_dict = common.infinitedict()
    CONFIG['clusters']['test']['modules']['s3_events'] = {
        's3_bucket_id': 'unit-test-bucket.legacy.data'
    }
    result = s3_events.generate_s3_events('test', cluster_dict, CONFIG)

    assert_true(result)
    assert_equal(CONFIG['clusters']['test']['modules']['s3_events'],
                 [{
                     'bucket_id': 'unit-test-bucket.legacy.data'
                 }])
Example #19
def generate_rule_promotion(config):
    """Generate Terraform for the Rule Promotion function

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Rule Promotion dict to be marshaled to JSON
    """
    result = infinitedict()

    state_param = json.dumps(
        {
            'send_digest_hour_utc': int(
                config['lambda']['rule_promotion_config']['send_digest_hour_utc']),
            'sent_daily_digest': False
        },
        sort_keys=True)

    # Set variables for the IAM permissions, etc module
    result['module']['rule_promotion_iam'] = {
        'source': 'modules/tf_rule_promotion_iam',
        'stats_publisher_state_name': StatsPublisher.SSM_STATE_NAME,
        'stats_publisher_state_value': state_param,
        'digest_sns_topic': StatsPublisher.formatted_sns_topic_arn(config).split(':')[-1],
        'role_id': '${module.rule_promotion_lambda.role_id}',
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'athena_results_bucket_arn': '${module.stream_alert_athena.results_bucket_arn}'
    }

    # Set variables for the Lambda module
    result['module']['rule_promotion_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'],
                                   RULE_PROMOTION_NAME),
        RulePromotionPackage.package_name + '.zip',
        RulePromotionPackage.lambda_handler,
        config['lambda']['rule_promotion_config'], config)

    return result
Example #20
def generate_threat_intel_downloader(config):
    """Generate Threat Intel Downloader Terrafrom

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Threat Intel Downloader dict to be marshaled to JSON
    """
    # Use the monitoring topic as a dead letter queue
    infrastructure_config = config['global'].get('infrastructure')
    dlq_topic = (DEFAULT_SNS_MONITORING_TOPIC
                 if infrastructure_config.get('monitoring', {}).get('create_sns_topic')
                 else infrastructure_config.get('monitoring', {}).get('sns_topic_name',
                                                                      DEFAULT_SNS_MONITORING_TOPIC))

    # Threat Intel Downloader module
    ti_downloader_config = config['lambda']['threat_intel_downloader_config']
    ti_downloader_dict = infinitedict()
    ti_downloader_dict['module']['threat_intel_downloader'] = {
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'source': 'modules/tf_threat_intel_downloader',
        'lambda_function_arn': '${module.threat_intel_downloader.lambda_arn}',
        'lambda_handler': ti_downloader_config['handler'],
        'lambda_memory': ti_downloader_config.get('memory', '128'),
        'lambda_timeout': ti_downloader_config.get('timeout', '60'),
        'lambda_s3_bucket': ti_downloader_config['source_bucket'],
        'lambda_s3_key': ti_downloader_config['source_object_key'],
        'lambda_log_level': ti_downloader_config.get('log_level', 'info'),
        'interval': ti_downloader_config.get('interval', 'rate(1 day)'),
        'current_version': ti_downloader_config['current_version'],
        'prefix': config['global']['account']['prefix'],
        'monitoring_sns_topic': dlq_topic,
        'table_rcu': ti_downloader_config.get('table_rcu', '10'),
        'table_wcu': ti_downloader_config.get('table_wcu', '10'),
        'ioc_keys': ti_downloader_config.get('ioc_keys',
                                             ['expiration_ts', 'itype', 'source', 'type', 'value']),
        'ioc_filters': ti_downloader_config.get('ioc_filters', ['crowdstrike', '@airbnb.com']),
        'ioc_types': ti_downloader_config.get('ioc_types', ['domain', 'ip', 'md5']),
        'excluded_sub_types': ti_downloader_config.get('excluded_sub_types'),
        'max_read_capacity': ti_downloader_config.get('max_read_capacity', '5'),
        'min_read_capacity': ti_downloader_config.get('min_read_capacity', '5'),
        'target_utilization': ti_downloader_config.get('target_utilization', '70')
    }
    return ti_downloader_dict
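The dead-letter-queue selection above falls back to the default monitoring topic in both branches; only an explicit 'sns_topic_name' with 'create_sns_topic' disabled changes the DLQ. A small sketch (the default topic name matches the 'stream_alert_monitoring' value seen in the test ARNs elsewhere in these examples):

DEFAULT_SNS_MONITORING_TOPIC = 'stream_alert_monitoring'

monitoring = {'create_sns_topic': False, 'sns_topic_name': 'ops_alerts'}
dlq_topic = (DEFAULT_SNS_MONITORING_TOPIC
             if monitoring.get('create_sns_topic')
             else monitoring.get('sns_topic_name', DEFAULT_SNS_MONITORING_TOPIC))
assert dlq_topic == 'ops_alerts'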
Example #21
def generate_rule_promotion(config):
    """Generate Terraform for the Rule Promotion function

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Rule Promotion dict to be marshaled to JSON
    """
    # The Rule Promotion Lambda function is dependent on the rule staging feature being
    # enabled, so do not generate the code for this Lambda function if it is not enabled
    if not config['global']['infrastructure']['rule_staging'].get('enabled', False):
        return False

    result = infinitedict()

    athena_config = config['lambda']['athena_partition_refresh_config']
    data_buckets = athena_config['buckets'].keys()

    # Set variables for the IAM permissions, etc module
    result['module']['rule_promotion_iam'] = {
        'source': 'modules/tf_rule_promotion_iam',
        'send_digest_schedule_expression':
            config['lambda']['rule_promotion_config']['send_digest_schedule_expression'],
        'digest_sns_topic': StatsPublisher.formatted_sns_topic_arn(config).split(':')[-1],
        'role_id': '${module.rule_promotion_lambda.role_id}',
        'rules_table_arn': '${module.globals.rules_table_arn}',
        'function_alias_arn': '${module.rule_promotion_lambda.function_alias_arn}',
        'function_name': '${module.rule_promotion_lambda.function_name}',
        'athena_results_bucket_arn': '${module.stream_alert_athena.results_bucket_arn}',
        'athena_data_buckets': data_buckets,
        's3_kms_key_arn': '${aws_kms_key.server_side_encryption.arn}'
    }

    # Set variables for the Lambda module
    result['module']['rule_promotion_lambda'] = generate_lambda(
        '{}_streamalert_{}'.format(config['global']['account']['prefix'], RULE_PROMOTION_NAME),
        RulePromotionPackage.package_name + '.zip',
        RulePromotionPackage.lambda_handler,
        config['lambda']['rule_promotion_config'],
        config
    )

    return result
Example #22
def test_generate_cloudwatch_monitoring():
    """CLI - Terraform Generate Cloudwatch Monitoring"""
    cluster_dict = common.infinitedict()
    result = monitoring.generate_monitoring('test', cluster_dict, CONFIG)

    # Test the default SNS topic option
    expected_cloudwatch_tf = {
        'source': 'modules/tf_stream_alert_monitoring',
        'sns_topic_arn': 'arn:aws:sns:us-west-1:12345678910:stream_alert_monitoring',
        'lambda_functions': ['unit-testing_test_streamalert_classifier'],
        'kinesis_stream': 'unit-testing_test_stream_alert_kinesis',
        'lambda_alarms_enabled': True,
        'kinesis_alarms_enabled': True
    }

    assert_true(result)
    assert_equal(
        cluster_dict['module']['cloudwatch_monitoring_test'],
        expected_cloudwatch_tf)
Example #23
def test_generate_cloudwatch_monitoring_with_settings():
    """CLI - Terraform Generate Cloudwatch Monitoring with Custom Settings"""
    cluster_dict = common.infinitedict()
    result = monitoring.generate_monitoring('advanced', cluster_dict, CONFIG)

    # Test the default SNS topic option
    expected_cloudwatch_tf = {
        'source': 'modules/tf_stream_alert_monitoring',
        'sns_topic_arn': 'arn:aws:sns:us-west-1:12345678910:stream_alert_monitoring',
        'lambda_functions': ['unit-testing_advanced_streamalert_rule_processor'],
        'kinesis_stream': 'unit-testing_advanced_stream_alert_kinesis',
        'lambda_alarms_enabled': True,
        'kinesis_alarms_enabled': True,
        'kinesis_iterator_age_error_threshold': '3000000'
    }

    assert_true(result)
    assert_equal(
        cluster_dict['module']['cloudwatch_monitoring_advanced'],
        expected_cloudwatch_tf)
Example #24
def generate_athena(config):
    """Generate Athena Terraform.

    Args:
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        dict: Athena dict to be marshalled to JSON
    """
    athena_dict = infinitedict()
    athena_config = config['lambda']['athena_partition_refresh_config']

    data_buckets = athena_config['buckets'].keys()

    prefix = config['global']['account']['prefix']
    database = athena_config.get('database_name',
                                 '{}_streamalert'.format(prefix))

    results_bucket_name = athena_config.get(
        'results_bucket',
        '{}.streamalert.athena-results'.format(prefix)).strip()

    queue_name = athena_config.get(
        'queue_name',
        '{}_streamalert_athena_s3_notifications'.format(prefix)).strip()

    athena_dict['module']['stream_alert_athena'] = {
        's3_logging_bucket': '{}.streamalert.s3-logging'.format(prefix),
        'source': 'modules/tf_stream_alert_athena',
        'database_name': database,
        'queue_name': queue_name,
        'results_bucket': results_bucket_name,
        'lambda_handler': AthenaPackage.lambda_handler,
        'lambda_memory': athena_config.get('memory', '128'),
        'lambda_timeout': athena_config.get('timeout', '60'),
        'lambda_log_level': athena_config.get('log_level', 'info'),
        'athena_data_buckets': data_buckets,
        'schedule_expression': athena_config.get('schedule_expression', 'rate(10 minutes)'),
        'enable_metrics': athena_config.get('enable_metrics', False),
        'account_id': config['global']['account']['aws_account_id'],
        'prefix': prefix
    }

    # Cloudwatch monitoring setup
    monitoring_config = config['global'].get('infrastructure', {}).get('monitoring', {})
    sns_topic_name = (DEFAULT_SNS_MONITORING_TOPIC
                      if monitoring_config.get('create_sns_topic')
                      else monitoring_config.get('sns_topic_name'))
    athena_dict['module']['athena_monitoring'] = {
        'source': 'modules/tf_stream_alert_monitoring',
        'sns_topic_arn': 'arn:aws:sns:{region}:{account_id}:{topic}'.format(
            region=config['global']['account']['region'],
            account_id=config['global']['account']['aws_account_id'],
            topic=sns_topic_name),
        'lambda_functions': ['{}_streamalert_athena_partition_refresh'.format(prefix)],
        'kinesis_alarms_enabled': False
    }

    # Metrics setup
    if not athena_config.get('enable_metrics', False):
        return athena_dict

    # Check to see if there are any metrics configured for the athena function
    current_metrics = metrics.MetricLogger.get_available_metrics()
    if metrics.ATHENA_PARTITION_REFRESH_NAME not in current_metrics:
        return athena_dict

    metric_prefix = 'AthenaRefresh'
    filter_pattern_idx, filter_value_idx = 0, 1

    # Add filters for the cluster and aggregate
    # Use a list of strings that represent the following comma-separated values:
    #   <filter_name>,<filter_pattern>,<value>
    filters = [
        '{}-{},{},{}'.format(
            metric_prefix,
            metric,
            settings[filter_pattern_idx],
            settings[filter_value_idx]
        )
        for metric, settings in
        current_metrics[metrics.ATHENA_PARTITION_REFRESH_NAME].iteritems()
    ]

    athena_dict['module']['stream_alert_athena']['athena_metric_filters'] = filters

    return athena_dict
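Each entry in filters packs three values into one comma-separated string of the form <filter_name>,<filter_pattern>,<value>, presumably so the Terraform module can accept them as a flat list of strings. With hypothetical filter settings, the serialization looks like this:

metric_prefix = 'AthenaRefresh'
metric, settings = 'FailedQueries', ('{ $.failure = * }', '1')  # hypothetical settings
print('{}-{},{},{}'.format(metric_prefix, metric, settings[0], settings[1]))
# prints: AthenaRefresh-FailedQueries,{ $.failure = * },1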
Example #25
def generate_main(config, init=False):
    """Generate the main.tf.json Terraform dict

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Terraform is running in the init phase or not (optional)

    Returns:
        dict: main.tf.json Terraform dict
    """
    main_dict = infinitedict()

    # Configure provider along with the minimum version
    main_dict['provider']['aws'] = {
        'version': TERRAFORM_VERSIONS['provider']['aws'],
        'region': config['global']['account']['region']
    }

    # Configure Terraform version requirement
    main_dict['terraform']['required_version'] = TERRAFORM_VERSIONS['application']

    # Setup the Backend depending on the deployment phase.
    # When first setting up StreamAlert, the Terraform statefile
    # is stored locally.  After the first dependencies are created,
    # this moves to S3.
    if init:
        main_dict['terraform']['backend']['local'] = {
            'path': 'terraform.tfstate'}
    else:
        main_dict['terraform']['backend']['s3'] = {
            'bucket': config['global']['terraform']['tfstate_bucket'],
            'key': config['global']['terraform']['tfstate_s3_key'],
            'region': config['global']['account']['region'],
            'encrypt': True,
            'acl': 'private',
            'kms_key_id': 'alias/{}'.format(config['global']['account']['kms_key_alias'])}

    logging_bucket = config['global']['s3_access_logging']['logging_bucket']

    # Configure initial S3 buckets
    main_dict['resource']['aws_s3_bucket'] = {
        'stream_alert_secrets': generate_s3_bucket(
            # FIXME (derek.wang) DRY out by using OutputCredentialsProvider?
            bucket='{}.streamalert.secrets'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        ),
        'streamalerts': generate_s3_bucket(
            bucket='{}.streamalerts'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        )
    }

    # Create bucket for S3 access logs (if applicable)
    if config['global']['s3_access_logging'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['logging_bucket'] = generate_s3_bucket(
            bucket=logging_bucket,
            logging=logging_bucket,
            acl='log-delivery-write',
            lifecycle_rule={
                'prefix': '/',
                'enabled': True,
                'transition': {
                    'days': 365,
                    'storage_class': 'GLACIER'
                }
            },
            sse_algorithm='AES256'  # SSE-KMS doesn't seem to work with access logs
        )

    # Create bucket for Terraform state (if applicable)
    if config['global']['terraform'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['terraform_remote_state'] = generate_s3_bucket(
            bucket=config['global']['terraform']['tfstate_bucket'],
            logging=logging_bucket
        )

    # Setup Firehose Delivery Streams
    generate_firehose(logging_bucket, main_dict, config)

    # Configure global resources like Firehose alert delivery and alerts table
    global_module = {
        'source': 'modules/tf_stream_alert_globals',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'alerts_table_read_capacity': (
            config['global']['infrastructure']['alerts_table']['read_capacity']),
        'alerts_table_write_capacity': (
            config['global']['infrastructure']['alerts_table']['write_capacity']),
        'rules_engine_timeout': config['lambda']['rules_engine_config']['timeout']
    }

    if config['global']['infrastructure']['rule_staging'].get('enabled'):
        global_module['enable_rule_staging'] = True
        global_module['rules_table_read_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['read_capacity'])
        global_module['rules_table_write_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['write_capacity'])

    main_dict['module']['globals'] = global_module

    # KMS Key and Alias creation
    main_dict['resource']['aws_kms_key']['server_side_encryption'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert S3 Server-Side Encryption',
        'policy': json.dumps({
            'Version': '2012-10-17',
            'Statement': [
                {
                    'Sid': 'Enable IAM User Permissions',
                    'Effect': 'Allow',
                    'Principal': {
                        'AWS': 'arn:aws:iam::{}:root'.format(
                            config['global']['account']['aws_account_id']
                        )
                    },
                    'Action': 'kms:*',
                    'Resource': '*'
                },
                {
                    'Sid': 'Allow principals in the account to use the key',
                    'Effect': 'Allow',
                    'Principal': '*',
                    'Action': ['kms:Decrypt', 'kms:GenerateDataKey*', 'kms:Encrypt'],
                    'Resource': '*',
                    'Condition': {
                        'StringEquals': {
                            'kms:CallerAccount': config['global']['account']['aws_account_id']
                        }
                    }
                }
            ]
        })
    }
    main_dict['resource']['aws_kms_alias']['server_side_encryption'] = {
        'name': 'alias/{}_server-side-encryption'.format(config['global']['account']['prefix']),
        'target_key_id': '${aws_kms_key.server_side_encryption.key_id}'
    }

    main_dict['resource']['aws_kms_key']['stream_alert_secrets'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert secret management'
    }
    main_dict['resource']['aws_kms_alias']['stream_alert_secrets'] = {
        'name': 'alias/{}'.format(config['global']['account']['kms_key_alias']),
        'target_key_id': '${aws_kms_key.stream_alert_secrets.key_id}'
    }

    # Global infrastructure settings
    infrastructure_config = config['global'].get('infrastructure')
    if infrastructure_config and 'monitoring' in infrastructure_config:
        if infrastructure_config['monitoring'].get('create_sns_topic'):
            main_dict['resource']['aws_sns_topic']['stream_alert_monitoring'] = {
                'name': DEFAULT_SNS_MONITORING_TOPIC
            }

    return main_dict
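The init flag exists because Terraform cannot bootstrap its own remote state: the S3 bucket and KMS alias backing the backend must be created by a first, locally-backed run. A sketch of the two-phase flow, with a hypothetical file name:

import json

# Phase 1: local backend, so `terraform init` and `apply` can create the state bucket
with open('main.tf.json', 'w') as tf_file:
    json.dump(generate_main(config, init=True), tf_file, indent=2)

# Phase 2, after the bucket exists: regenerate with the S3 backend and re-run init
with open('main.tf.json', 'w') as tf_file:
    json.dump(generate_main(config, init=False), tf_file, indent=2)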
Example #26
def generate_main(config, init=False):
    """Generate the main.tf.json Terraform dict

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Terraform is running in the init phase or not (optional)

    Returns:
        dict: main.tf.json Terraform dict
    """
    main_dict = infinitedict()

    # Configure provider along with the minimum version
    main_dict['provider']['aws'] = {
        'version': TERRAFORM_VERSIONS['provider']['aws']
    }

    # Configure Terraform version requirement
    main_dict['terraform']['required_version'] = TERRAFORM_VERSIONS['application']

    # Setup the Backend depending on the deployment phase.
    # When first setting up StreamAlert, the Terraform statefile
    # is stored locally.  After the first dependencies are created,
    # this moves to S3.
    if init:
        main_dict['terraform']['backend']['local'] = {
            'path': 'terraform.tfstate'
        }
    else:
        main_dict['terraform']['backend']['s3'] = {
            'bucket': config['global']['terraform']['tfstate_bucket'],
            'key': config['global']['terraform']['tfstate_s3_key'],
            'region': config['global']['account']['region'],
            'encrypt': True,
            'acl': 'private',
            'kms_key_id': 'alias/{}'.format(config['global']['account']['kms_key_alias'])
        }

    logging_bucket = config['global']['s3_access_logging']['logging_bucket']

    # Configure initial S3 buckets
    main_dict['resource']['aws_s3_bucket'] = {
        'stream_alert_secrets': generate_s3_bucket(
            bucket='{}.streamalert.secrets'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        ),
        'streamalerts': generate_s3_bucket(
            bucket='{}.streamalerts'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        )
    }

    # Create bucket for S3 access logs (if applicable)
    if config['global']['s3_access_logging'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['logging_bucket'] = generate_s3_bucket(
            bucket=logging_bucket,
            logging=logging_bucket,
            acl='log-delivery-write',
            lifecycle_rule={
                'prefix': '/',
                'enabled': True,
                'transition': {
                    'days': 365,
                    'storage_class': 'GLACIER'
                }
            },
            sse_algorithm='AES256'  # SSE-KMS doesn't seem to work with access logs
        )

    # Create bucket for Terraform state (if applicable)
    if config['global']['terraform'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['terraform_remote_state'] = generate_s3_bucket(
            bucket=config['global']['terraform']['tfstate_bucket'],
            logging=logging_bucket
        )

    # Setup Firehose Delivery Streams
    generate_firehose(config, main_dict, logging_bucket)

    # Configure global resources like Firehose alert delivery and alerts table
    global_module = {
        'source': 'modules/tf_stream_alert_globals',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'alerts_table_read_capacity': (
            config['global']['infrastructure']['alerts_table']['read_capacity']),
        'alerts_table_write_capacity': (
            config['global']['infrastructure']['alerts_table']['write_capacity'])
    }

    if config['global']['infrastructure']['rule_staging'].get('enabled'):
        global_module['enable_rule_staging'] = True
        global_module['rules_table_read_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['read_capacity'])
        global_module['rules_table_write_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['write_capacity'])

    main_dict['module']['globals'] = global_module

    # KMS Key and Alias creation
    main_dict['resource']['aws_kms_key']['server_side_encryption'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert S3 Server-Side Encryption',
        'policy': json.dumps({
            'Version': '2012-10-17',
            'Statement': [
                {
                    'Sid': 'Enable IAM User Permissions',
                    'Effect': 'Allow',
                    'Principal': {
                        'AWS': 'arn:aws:iam::{}:root'.format(
                            config['global']['account']['aws_account_id']
                        )
                    },
                    'Action': 'kms:*',
                    'Resource': '*'
                },
                {
                    'Sid': 'Allow principals in the account to use the key',
                    'Effect': 'Allow',
                    'Principal': '*',
                    'Action': ['kms:Decrypt', 'kms:GenerateDataKey*', 'kms:Encrypt'],
                    'Resource': '*',
                    'Condition': {
                        'StringEquals': {
                            'kms:CallerAccount': config['global']['account']['aws_account_id']
                        }
                    }
                }
            ]
        })
    }
    main_dict['resource']['aws_kms_alias']['server_side_encryption'] = {
        'name': 'alias/{}_server-side-encryption'.format(config['global']['account']['prefix']),
        'target_key_id': '${aws_kms_key.server_side_encryption.key_id}'
    }

    main_dict['resource']['aws_kms_key']['stream_alert_secrets'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert secret management'
    }
    main_dict['resource']['aws_kms_alias']['stream_alert_secrets'] = {
        'name': 'alias/{}'.format(config['global']['account']['kms_key_alias']),
        'target_key_id': '${aws_kms_key.stream_alert_secrets.key_id}'
    }

    # Global infrastructure settings
    infrastructure_config = config['global'].get('infrastructure')
    if infrastructure_config and 'monitoring' in infrastructure_config:
        if infrastructure_config['monitoring'].get('create_sns_topic'):
            main_dict['resource']['aws_sns_topic']['stream_alert_monitoring'] = {
                'name': DEFAULT_SNS_MONITORING_TOPIC
            }

    # Add any global cloudwatch alarms to the main.tf
    monitoring_config = config['global']['infrastructure'].get('monitoring')
    if not monitoring_config:
        return main_dict

    global_metrics = monitoring_config.get('metric_alarms')
    if not global_metrics:
        return main_dict

    sns_topic_arn = monitoring_topic_arn(config)

    formatted_alarms = {}
    # Add global metric alarms for the rule and alert processors
    for func in FUNC_PREFIXES:
        if func not in global_metrics:
            continue

        for name, settings in global_metrics[func].iteritems():
            alarm_info = settings.copy()
            alarm_info['alarm_name'] = name
            alarm_info['namespace'] = 'StreamAlert'
            alarm_info['alarm_actions'] = [sns_topic_arn]
            # Terraform only allows certain characters in resource names
            acceptable_chars = ''.join([string.digits, string.letters, '_-'])
            name = filter(acceptable_chars.__contains__, name)
            formatted_alarms['metric_alarm_{}'.format(name)] = alarm_info

    if formatted_alarms:
        main_dict['resource']['aws_cloudwatch_metric_alarm'] = formatted_alarms

    return main_dict
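The alarm-name sanitization at the end relies on Python 2 behavior: string.letters does not exist in Python 3, and filter() there returns an iterator instead of a str. An equivalent rewrite of just that step for Python 3, as a sketch:

import re

def sanitize_tf_name(name):
    """Keep only the characters Terraform accepts in resource names."""
    return re.sub(r'[^A-Za-z0-9_-]', '', name)

print(sanitize_tf_name('Aggregate Failed Parses Alarm'))  # AggregateFailedParsesAlarm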
Example #27
def generate_main(config, init=False):
    """Generate the main.tf.json Terraform dict

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Terraform is running in the init phase or not (optional)

    Returns:
        dict: main.tf.json Terraform dict
    """
    main_dict = infinitedict()

    # Configure provider along with the minimum version
    main_dict['provider']['aws'] = {'version': TERRAFORM_VERSIONS['provider']['aws']}

    # Configure Terraform version requirement
    main_dict['terraform']['required_version'] = TERRAFORM_VERSIONS['application']

    # Setup the Backend depending on the deployment phase.
    # When first setting up StreamAlert, the Terraform statefile
    # is stored locally.  After the first dependencies are created,
    # this moves to S3.
    if init:
        main_dict['terraform']['backend']['local'] = {
            'path': 'terraform.tfstate'}
    else:
        main_dict['terraform']['backend']['s3'] = {
            'bucket': '{}.streamalert.terraform.state'.format(
                config['global']['account']['prefix']),
            'key': 'stream_alert_state/terraform.tfstate',
            'region': config['global']['account']['region'],
            'encrypt': True,
            'acl': 'private',
            'kms_key_id': 'alias/{}'.format(config['global']['account']['kms_key_alias'])}

    logging_bucket = '{}.streamalert.s3-logging'.format(
        config['global']['account']['prefix'])
    logging_bucket_lifecycle = {
        'prefix': '/',
        'enabled': True,
        'transition': {
            'days': 30,
            'storage_class': 'GLACIER'}}

    # Configure initial S3 buckets
    main_dict['resource']['aws_s3_bucket'] = {
        'lambda_source': generate_s3_bucket(
            bucket=config['lambda']['rule_processor_config']['source_bucket'],
            logging=logging_bucket
        ),
        'stream_alert_secrets': generate_s3_bucket(
            bucket='{}.streamalert.secrets'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        ),
        'terraform_remote_state': generate_s3_bucket(
            bucket=config['global']['terraform']['tfstate_bucket'],
            logging=logging_bucket
        ),
        'logging_bucket': generate_s3_bucket(
            bucket=logging_bucket,
            logging=logging_bucket,
            acl='log-delivery-write',
            lifecycle_rule=logging_bucket_lifecycle
        ),
        'streamalerts': generate_s3_bucket(
            bucket='{}.streamalerts'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        )
    }

    # Setup Firehose Delivery Streams
    generate_firehose(config, main_dict, logging_bucket)

    # Configure global resources like Firehose alert delivery and alerts table
    main_dict['module']['globals'] = {
        'source': 'modules/tf_stream_alert_globals',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'alerts_table_read_capacity': (
            config['global']['infrastructure']['alerts_table']['read_capacity']),
        'alerts_table_write_capacity': (
            config['global']['infrastructure']['alerts_table']['write_capacity']),
        'rules_table_read_capacity': (
            config['global']['infrastructure']['rules_table']['read_capacity']),
        'rules_table_write_capacity': (
            config['global']['infrastructure']['rules_table']['write_capacity'])
    }

    # KMS Key and Alias creation
    main_dict['resource']['aws_kms_key']['stream_alert_secrets'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert secret management'
    }
    main_dict['resource']['aws_kms_alias']['stream_alert_secrets'] = {
        'name': 'alias/{}'.format(config['global']['account']['kms_key_alias']),
        'target_key_id': '${aws_kms_key.stream_alert_secrets.key_id}'
    }

    # Global infrastructure settings
    infrastructure_config = config['global'].get('infrastructure')
    if infrastructure_config and 'monitoring' in infrastructure_config:
        if infrastructure_config['monitoring'].get('create_sns_topic'):
            main_dict['resource']['aws_sns_topic']['stream_alert_monitoring'] = {
                'name': DEFAULT_SNS_MONITORING_TOPIC
            }

    # Add any global cloudwatch alarms to the main.tf
    monitoring_config = config['global']['infrastructure'].get('monitoring')
    if not monitoring_config:
        return main_dict

    global_metrics = monitoring_config.get('metric_alarms')
    if not global_metrics:
        return main_dict

    sns_topic_arn = monitoring_topic_arn(config)

    formatted_alarms = {}
    # Add global metric alarms for the rule and alert processors
    for func in FUNC_PREFIXES:
        if func not in global_metrics:
            continue

        for name, settings in global_metrics[func].iteritems():
            alarm_info = settings.copy()
            alarm_info['alarm_name'] = name
            alarm_info['namespace'] = 'StreamAlert'
            alarm_info['alarm_actions'] = [sns_topic_arn]
            # Terraform only allows certain characters in resource names
            acceptable_chars = ''.join([string.digits, string.letters, '_-'])
            name = filter(acceptable_chars.__contains__, name)
            formatted_alarms['metric_alarm_{}'.format(name)] = alarm_info

    if formatted_alarms:
        main_dict['resource']['aws_cloudwatch_metric_alarm'] = formatted_alarms

    return main_dict
Example #28
    def test_generate_classifier(self):
        """CLI - Terraform Generate, Classifier"""
        cluster_dict = common.infinitedict()
        classifier.generate_classifier('test', cluster_dict, self.config)

        expected_result = {
            'module': {
                'classifier_test_iam': {
                    'source': 'modules/tf_classifier',
                    'account_id': '123456789012',
                    'region': 'us-east-1',
                    'function_role_id': '${module.classifier_test_lambda.role_id}',
                    'function_alias_arn': '${module.classifier_test_lambda.function_alias_arn}',
                    'function_name': '${module.classifier_test_lambda.function_name}',
                    'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
                    'classifier_sqs_sse_kms_key_arn': '${module.globals.classifier_sqs_sse_kms_key_arn}',
                    'input_sns_topics': ['arn:aws:sns:us-east-1:123456789012:foo_bar']
                },
                'classifier_test_lambda': {
                    'alarm_actions': ['arn:aws:sns:us-east-1:123456789012:test_topic'],
                    'description': 'Unit-Test Streamalert Classifier Test',
                    'environment_variables': {
                        'CLUSTER': 'test',
                        'SQS_QUEUE_URL': '${module.globals.classifier_sqs_queue_url}',
                        'LOGGER_LEVEL': 'info',
                        'ENABLE_METRICS': '0'
                    },
                    'tags': {
                        'Cluster': 'test'
                    },
                    'errors_alarm_enabled': True,
                    'errors_alarm_evaluation_periods': 1,
                    'errors_alarm_period_secs': 120,
                    'errors_alarm_threshold': 0,
                    'filename': 'classifier.zip',
                    'function_name': 'unit-test_streamalert_classifier_test',
                    'handler': 'stream_alert.classifier.main.handler',
                    'log_retention_days': 14,
                    'memory_size_mb': 128,
                    'source': 'modules/tf_lambda',
                    'throttles_alarm_enabled': True,
                    'throttles_alarm_evaluation_periods': 1,
                    'throttles_alarm_period_secs': 120,
                    'throttles_alarm_threshold': 0,
                    'timeout_sec': 60,
                    'vpc_security_group_ids': [],
                    'vpc_subnet_ids': [],
                    'input_sns_topics': ['arn:aws:sns:us-east-1:123456789012:foo_bar']
                }
            }
        }

        assert_equal(cluster_dict, expected_result)
Example #29
    def setup(self):
        """Setup before each method"""
        self.cluster_dict = common.infinitedict()
        self.config = CLIConfig(config_path='tests/unit/conf')