Example #1
    def test_firehose_enabled_log_alarm_defaults(self):
        """CLI - Terraform Generate Kinesis Firehose, Enabled Alarm - Default Settings"""
        cluster_dict = common.infinitedict()

        # Add an enabled log, with alarms on (will use terraform default settings)
        self.config['global']['infrastructure']['firehose']['enabled_logs'] = {
            'json:embedded': {
                'enable_alarm': True
            }
        }

        firehose.generate_firehose(self._logging_bucket_name, cluster_dict, self.config)

        expected_result = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
                'kinesis_firehose_json_embedded': {
                    'source': 'modules/tf_stream_alert_kinesis_firehose_delivery_stream',
                    'buffer_size': 128,
                    'buffer_interval': 900,
                    'compression_format': 'GZIP',
                    'log_name': 'json_embedded',
                    'role_arn': '${module.kinesis_firehose_setup.firehose_role_arn}',
                    's3_bucket_name': 'unit-testing.streamalert.data',
                    'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
                    'enable_alarm': True,
                    'alarm_actions': ['arn:aws:sns:us-west-1:12345678910:stream_alert_monitoring']
                }
            }
        }

        assert_equal(cluster_dict, expected_result)
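
These tests build their expected Terraform dicts on top of common.infinitedict(), which lets them assign deeply nested keys without creating the intermediate levels first. A minimal sketch of such a helper, assuming it is just a recursively self-defaulting defaultdict (the real StreamAlert implementation may differ):

from collections import defaultdict


def infinitedict():
    """Sketch of an infinitely nesting dict: any missing key yields another
    infinitedict, so callers can write d['module']['x']['y'] = 1 directly.
    """
    return defaultdict(infinitedict)


# Example: cluster_dict['module']['kinesis_firehose_setup'] = {...} works
# even though 'module' was never explicitly created.
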
Example #2
    def test_firehose_enabled_log(self):
        """CLI - Terraform Generate Kinesis Firehose, Enabled Log"""
        cluster_dict = common.infinitedict()

        # Add an enabled log, with no alarm configuration (aka: alarms disabled)
        self.config['global']['infrastructure']['firehose']['enabled_logs'] = {
            'json:embedded': {}
        }

        firehose.generate_firehose(self._logging_bucket_name, cluster_dict, self.config)

        expected_result = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
                'kinesis_firehose_json_embedded': {
                    'source': 'modules/tf_stream_alert_kinesis_firehose_delivery_stream',
                    'buffer_size': 128,
                    'buffer_interval': 900,
                    'compression_format': 'GZIP',
                    'log_name': 'json_embedded',
                    'role_arn': '${module.kinesis_firehose_setup.firehose_role_arn}',
                    's3_bucket_name': 'unit-testing.streamalert.data',
                    'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}'
                }
            }
        }

        assert_equal(cluster_dict, expected_result)
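
In both tests the enabled log 'json:embedded' surfaces as the module key 'kinesis_firehose_json_embedded' with log_name 'json_embedded', so generate_firehose evidently normalizes the log type into a Terraform-safe name. A hedged sketch of that kind of normalization (the actual helper inside StreamAlert may differ):

import re


def firehose_log_name(log_type):
    # Sketch only: replace any character that Terraform and Firehose resource
    # names reject (such as the ':' in 'json:embedded') with an underscore.
    return re.sub(r'[^a-zA-Z0-9_]', '_', log_type)


# firehose_log_name('json:embedded') -> 'json_embedded'
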
Example #3
    def test_firehose_defaults(self):
        """CLI - Terraform Generate Kinesis Firehose, Defaults"""
        cluster_dict = common.infinitedict()
        firehose.generate_firehose(self._logging_bucket_name, cluster_dict, self.config)

        expected_result = {
            'module': {
                'kinesis_firehose_setup': self._default_firehose_config(),
            }
        }

        assert_equal(cluster_dict, expected_result)
Example #4
def generate_main(config, init=False):
    """Generate the main.tf.json Terraform dict

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Whether Terraform is running in the init phase (optional)

    Returns:
        dict: main.tf.json Terraform dict
    """
    main_dict = infinitedict()

    # Configure provider along with the minimum version
    main_dict['provider']['aws'] = {
        'version': TERRAFORM_VERSIONS['provider']['aws'],
        'region': config['global']['account']['region']
    }

    # Configure Terraform version requirement
    main_dict['terraform']['required_version'] = TERRAFORM_VERSIONS['application']

    # Set up the backend depending on the deployment phase.
    # When first setting up StreamAlert, the Terraform statefile
    # is stored locally.  After the first dependencies are created,
    # this moves to S3.
    if init:
        main_dict['terraform']['backend']['local'] = {
            'path': 'terraform.tfstate'}
    else:
        main_dict['terraform']['backend']['s3'] = {
            'bucket': config['global']['terraform']['tfstate_bucket'],
            'key': config['global']['terraform']['tfstate_s3_key'],
            'region': config['global']['account']['region'],
            'encrypt': True,
            'acl': 'private',
            'kms_key_id': 'alias/{}'.format(config['global']['account']['kms_key_alias'])}

    logging_bucket = config['global']['s3_access_logging']['logging_bucket']

    # Configure initial S3 buckets
    main_dict['resource']['aws_s3_bucket'] = {
        'stream_alert_secrets': generate_s3_bucket(
            # FIXME (derek.wang) DRY out by using OutputCredentialsProvider?
            bucket='{}.streamalert.secrets'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        ),
        'streamalerts': generate_s3_bucket(
            bucket='{}.streamalerts'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        )
    }

    # Create bucket for S3 access logs (if applicable)
    if config['global']['s3_access_logging'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['logging_bucket'] = generate_s3_bucket(
            bucket=logging_bucket,
            logging=logging_bucket,
            acl='log-delivery-write',
            lifecycle_rule={
                'prefix': '/',
                'enabled': True,
                'transition': {
                    'days': 365,
                    'storage_class': 'GLACIER'
                }
            },
            sse_algorithm='AES256'  # SSE-KMS doesn't seem to work with access logs
        )

    # Create bucket for Terraform state (if applicable)
    if config['global']['terraform'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['terraform_remote_state'] = generate_s3_bucket(
            bucket=config['global']['terraform']['tfstate_bucket'],
            logging=logging_bucket
        )

    # Set up Firehose Delivery Streams
    generate_firehose(logging_bucket, main_dict, config)

    # Configure global resources like Firehose alert delivery and alerts table
    global_module = {
        'source': 'modules/tf_stream_alert_globals',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'alerts_table_read_capacity': (
            config['global']['infrastructure']['alerts_table']['read_capacity']),
        'alerts_table_write_capacity': (
            config['global']['infrastructure']['alerts_table']['write_capacity']),
        'rules_engine_timeout': config['lambda']['rules_engine_config']['timeout']
    }

    if config['global']['infrastructure']['rule_staging'].get('enabled'):
        global_module['enable_rule_staging'] = True
        global_module['rules_table_read_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['read_capacity'])
        global_module['rules_table_write_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['write_capacity'])

    main_dict['module']['globals'] = global_module

    # KMS Key and Alias creation
    main_dict['resource']['aws_kms_key']['server_side_encryption'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert S3 Server-Side Encryption',
        'policy': json.dumps({
            'Version': '2012-10-17',
            'Statement': [
                {
                    'Sid': 'Enable IAM User Permissions',
                    'Effect': 'Allow',
                    'Principal': {
                        'AWS': 'arn:aws:iam::{}:root'.format(
                            config['global']['account']['aws_account_id']
                        )
                    },
                    'Action': 'kms:*',
                    'Resource': '*'
                },
                {
                    'Sid': 'Allow principals in the account to use the key',
                    'Effect': 'Allow',
                    'Principal': '*',
                    'Action': ['kms:Decrypt', 'kms:GenerateDataKey*', 'kms:Encrypt'],
                    'Resource': '*',
                    'Condition': {
                        'StringEquals': {
                            'kms:CallerAccount': config['global']['account']['aws_account_id']
                        }
                    }
                }
            ]
        })
    }
    main_dict['resource']['aws_kms_alias']['server_side_encryption'] = {
        'name': 'alias/{}_server-side-encryption'.format(config['global']['account']['prefix']),
        'target_key_id': '${aws_kms_key.server_side_encryption.key_id}'
    }

    main_dict['resource']['aws_kms_key']['stream_alert_secrets'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert secret management'
    }
    main_dict['resource']['aws_kms_alias']['stream_alert_secrets'] = {
        'name': 'alias/{}'.format(config['global']['account']['kms_key_alias']),
        'target_key_id': '${aws_kms_key.stream_alert_secrets.key_id}'
    }

    # Global infrastructure settings
    infrastructure_config = config['global'].get('infrastructure')
    if infrastructure_config and 'monitoring' in infrastructure_config:
        if infrastructure_config['monitoring'].get('create_sns_topic'):
            main_dict['resource']['aws_sns_topic']['stream_alert_monitoring'] = {
                'name': DEFAULT_SNS_MONITORING_TOPIC
            }

    return main_dict
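
generate_main only returns a dict; serializing it is left to the caller. A hedged sketch of how the result might be written out as main.tf.json (the function name and output path here are illustrative, not taken from the source):

import json
import os


def write_main_tf(config, output_dir, init=False):
    """Sketch only: dump the dict from generate_main() as JSON so Terraform
    can consume it as main.tf.json.
    """
    main_dict = generate_main(config, init=init)
    path = os.path.join(output_dir, 'main.tf.json')  # illustrative destination
    with open(path, 'w') as tf_file:
        json.dump(main_dict, tf_file, indent=2, sort_keys=True)
    return path
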
Example #5
def generate_main(config, init=False):
    """Generate the main.tf.json Terraform dict

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Whether Terraform is running in the init phase (optional)

    Returns:
        dict: main.tf.json Terraform dict
    """
    main_dict = infinitedict()

    # Configure provider along with the minimum version
    main_dict['provider']['aws'] = {'version': TERRAFORM_VERSIONS['provider']['aws']}

    # Configure Terraform version requirement
    main_dict['terraform']['required_version'] = TERRAFORM_VERSIONS['application']

    # Set up the backend depending on the deployment phase.
    # When first setting up StreamAlert, the Terraform statefile
    # is stored locally.  After the first dependencies are created,
    # this moves to S3.
    if init:
        main_dict['terraform']['backend']['local'] = {
            'path': 'terraform.tfstate'}
    else:
        main_dict['terraform']['backend']['s3'] = {
            'bucket': '{}.streamalert.terraform.state'.format(
                config['global']['account']['prefix']),
            'key': 'stream_alert_state/terraform.tfstate',
            'region': config['global']['account']['region'],
            'encrypt': True,
            'acl': 'private',
            'kms_key_id': 'alias/{}'.format(config['global']['account']['kms_key_alias'])}

    logging_bucket = '{}.streamalert.s3-logging'.format(
        config['global']['account']['prefix'])
    logging_bucket_lifecycle = {
        'prefix': '/',
        'enabled': True,
        'transition': {
            'days': 30,
            'storage_class': 'GLACIER'}}

    # Configure initial S3 buckets
    main_dict['resource']['aws_s3_bucket'] = {
        'lambda_source': generate_s3_bucket(
            bucket=config['lambda']['rule_processor_config']['source_bucket'],
            logging=logging_bucket
        ),
        'stream_alert_secrets': generate_s3_bucket(
            bucket='{}.streamalert.secrets'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        ),
        'terraform_remote_state': generate_s3_bucket(
            bucket=config['global']['terraform']['tfstate_bucket'],
            logging=logging_bucket
        ),
        'logging_bucket': generate_s3_bucket(
            bucket=logging_bucket,
            logging=logging_bucket,
            acl='log-delivery-write',
            lifecycle_rule=logging_bucket_lifecycle
        ),
        'streamalerts': generate_s3_bucket(
            bucket='{}.streamalerts'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        )
    }

    # Set up Firehose Delivery Streams
    generate_firehose(config, main_dict, logging_bucket)

    # Configure global resources like Firehose alert delivery and alerts table
    main_dict['module']['globals'] = {
        'source': 'modules/tf_stream_alert_globals',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'alerts_table_read_capacity': (
            config['global']['infrastructure']['alerts_table']['read_capacity']),
        'alerts_table_write_capacity': (
            config['global']['infrastructure']['alerts_table']['write_capacity']),
        'rules_table_read_capacity': (
            config['global']['infrastructure']['rules_table']['read_capacity']),
        'rules_table_write_capacity': (
            config['global']['infrastructure']['rules_table']['write_capacity'])
    }

    # KMS Key and Alias creation
    main_dict['resource']['aws_kms_key']['stream_alert_secrets'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert secret management'
    }
    main_dict['resource']['aws_kms_alias']['stream_alert_secrets'] = {
        'name': 'alias/{}'.format(config['global']['account']['kms_key_alias']),
        'target_key_id': '${aws_kms_key.stream_alert_secrets.key_id}'
    }

    # Global infrastructure settings
    infrastructure_config = config['global'].get('infrastructure')
    if infrastructure_config and 'monitoring' in infrastructure_config:
        if infrastructure_config['monitoring'].get('create_sns_topic'):
            main_dict['resource']['aws_sns_topic']['stream_alert_monitoring'] = {
                'name': DEFAULT_SNS_MONITORING_TOPIC
            }

    # Add any global cloudwatch alarms to the main.tf
    monitoring_config = config['global']['infrastructure'].get('monitoring')
    if not monitoring_config:
        return main_dict

    global_metrics = monitoring_config.get('metric_alarms')
    if not global_metrics:
        return main_dict

    sns_topic_arn = monitoring_topic_arn(config)

    formatted_alarms = {}
    # Add global metric alarms for the rule and alert processors
    for func in FUNC_PREFIXES:
        if func not in global_metrics:
            continue

        for name, settings in global_metrics[func].iteritems():
            alarm_info = settings.copy()
            alarm_info['alarm_name'] = name
            alarm_info['namespace'] = 'StreamAlert'
            alarm_info['alarm_actions'] = [sns_topic_arn]
            # Terraform only allows certain characters in resource names
            acceptable_chars = ''.join([string.digits, string.letters, '_-'])
            name = filter(acceptable_chars.__contains__, name)
            formatted_alarms['metric_alarm_{}'.format(name)] = alarm_info

    if formatted_alarms:
        main_dict['resource']['aws_cloudwatch_metric_alarm'] = formatted_alarms

    return main_dict
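
The alarm-name handling above is Python 2 specific: iteritems(), string.letters, and a filter() call that returns a str. A hedged Python 3 equivalent of the same character filtering, shown for comparison only:

import string


def sanitize_alarm_name(name):
    # Keep only the characters Terraform accepts in resource names
    # (digits, ASCII letters, '_' and '-'), mirroring the Python 2 filter call.
    acceptable_chars = set(string.digits + string.ascii_letters + '_-')
    return ''.join(char for char in name if char in acceptable_chars)


# sanitize_alarm_name('Aggregate Alarms: total') -> 'AggregateAlarmstotal'
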
Example #6
def generate_main(config, init=False):
    """Generate the main.tf.json Terraform dict

    Args:
        config (CLIConfig): The loaded CLI config
        init (bool): Whether Terraform is running in the init phase (optional)

    Returns:
        dict: main.tf.json Terraform dict
    """
    main_dict = infinitedict()

    # Configure provider along with the minimum version
    main_dict['provider']['aws'] = {
        'version': TERRAFORM_VERSIONS['provider']['aws']
    }

    # Configure Terraform version requirement
    main_dict['terraform']['required_version'] = TERRAFORM_VERSIONS['application']

    # Set up the backend depending on the deployment phase.
    # When first setting up StreamAlert, the Terraform statefile
    # is stored locally.  After the first dependencies are created,
    # this moves to S3.
    if init:
        main_dict['terraform']['backend']['local'] = {
            'path': 'terraform.tfstate'
        }
    else:
        main_dict['terraform']['backend']['s3'] = {
            'bucket': config['global']['terraform']['tfstate_bucket'],
            'key': config['global']['terraform']['tfstate_s3_key'],
            'region': config['global']['account']['region'],
            'encrypt': True,
            'acl': 'private',
            'kms_key_id': 'alias/{}'.format(config['global']['account']['kms_key_alias'])
        }

    logging_bucket = config['global']['s3_access_logging']['logging_bucket']

    # Configure initial S3 buckets
    main_dict['resource']['aws_s3_bucket'] = {
        'stream_alert_secrets': generate_s3_bucket(
            bucket='{}.streamalert.secrets'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        ),
        'streamalerts': generate_s3_bucket(
            bucket='{}.streamalerts'.format(config['global']['account']['prefix']),
            logging=logging_bucket
        )
    }

    # Create bucket for S3 access logs (if applicable)
    if config['global']['s3_access_logging'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['logging_bucket'] = generate_s3_bucket(
            bucket=logging_bucket,
            logging=logging_bucket,
            acl='log-delivery-write',
            lifecycle_rule={
                'prefix': '/',
                'enabled': True,
                'transition': {
                    'days': 365,
                    'storage_class': 'GLACIER'
                }
            },
            sse_algorithm='AES256'  # SSE-KMS doesn't seem to work with access logs
        )

    # Create bucket for Terraform state (if applicable)
    if config['global']['terraform'].get('create_bucket', True):
        main_dict['resource']['aws_s3_bucket']['terraform_remote_state'] = generate_s3_bucket(
            bucket=config['global']['terraform']['tfstate_bucket'],
            logging=logging_bucket
        )

    # Set up Firehose Delivery Streams
    generate_firehose(config, main_dict, logging_bucket)

    # Configure global resources like Firehose alert delivery and alerts table
    global_module = {
        'source': 'modules/tf_stream_alert_globals',
        'account_id': config['global']['account']['aws_account_id'],
        'region': config['global']['account']['region'],
        'prefix': config['global']['account']['prefix'],
        'kms_key_arn': '${aws_kms_key.server_side_encryption.arn}',
        'alerts_table_read_capacity': (
            config['global']['infrastructure']['alerts_table']['read_capacity']),
        'alerts_table_write_capacity': (
            config['global']['infrastructure']['alerts_table']['write_capacity'])
    }

    if config['global']['infrastructure']['rule_staging'].get('enabled'):
        global_module['enable_rule_staging'] = True
        global_module['rules_table_read_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['read_capacity'])
        global_module['rules_table_write_capacity'] = (
            config['global']['infrastructure']['rule_staging']['table']['write_capacity'])

    main_dict['module']['globals'] = global_module

    # KMS Key and Alias creation
    main_dict['resource']['aws_kms_key']['server_side_encryption'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert S3 Server-Side Encryption',
        'policy': json.dumps({
            'Version': '2012-10-17',
            'Statement': [
                {
                    'Sid': 'Enable IAM User Permissions',
                    'Effect': 'Allow',
                    'Principal': {
                        'AWS': 'arn:aws:iam::{}:root'.format(
                            config['global']['account']['aws_account_id']
                        )
                    },
                    'Action': 'kms:*',
                    'Resource': '*'
                },
                {
                    'Sid': 'Allow principals in the account to use the key',
                    'Effect': 'Allow',
                    'Principal': '*',
                    'Action': ['kms:Decrypt', 'kms:GenerateDataKey*', 'kms:Encrypt'],
                    'Resource': '*',
                    'Condition': {
                        'StringEquals': {
                            'kms:CallerAccount': config['global']['account']['aws_account_id']
                        }
                    }
                }
            ]
        })
    }
    main_dict['resource']['aws_kms_alias']['server_side_encryption'] = {
        'name': 'alias/{}_server-side-encryption'.format(config['global']['account']['prefix']),
        'target_key_id': '${aws_kms_key.server_side_encryption.key_id}'
    }

    main_dict['resource']['aws_kms_key']['stream_alert_secrets'] = {
        'enable_key_rotation': True,
        'description': 'StreamAlert secret management'
    }
    main_dict['resource']['aws_kms_alias']['stream_alert_secrets'] = {
        'name': 'alias/{}'.format(config['global']['account']['kms_key_alias']),
        'target_key_id': '${aws_kms_key.stream_alert_secrets.key_id}'
    }

    # Global infrastructure settings
    infrastructure_config = config['global'].get('infrastructure')
    if infrastructure_config and 'monitoring' in infrastructure_config:
        if infrastructure_config['monitoring'].get('create_sns_topic'):
            main_dict['resource']['aws_sns_topic']['stream_alert_monitoring'] = {
                'name': DEFAULT_SNS_MONITORING_TOPIC
            }

    # Add any global cloudwatch alarms to the main.tf
    monitoring_config = config['global']['infrastructure'].get('monitoring')
    if not monitoring_config:
        return main_dict

    global_metrics = monitoring_config.get('metric_alarms')
    if not global_metrics:
        return main_dict

    sns_topic_arn = monitoring_topic_arn(config)

    formatted_alarms = {}
    # Add global metric alarms for the rule and alert processors
    for func in FUNC_PREFIXES:
        if func not in global_metrics:
            continue

        for name, settings in global_metrics[func].iteritems():
            alarm_info = settings.copy()
            alarm_info['alarm_name'] = name
            alarm_info['namespace'] = 'StreamAlert'
            alarm_info['alarm_actions'] = [sns_topic_arn]
            # Terraform only allows certain characters in resource names
            acceptable_chars = ''.join([string.digits, string.letters, '_-'])
            name = filter(acceptable_chars.__contains__, name)
            formatted_alarms['metric_alarm_{}'.format(name)] = alarm_info

    if formatted_alarms:
        main_dict['resource']['aws_cloudwatch_metric_alarm'] = formatted_alarms

    return main_dict