Esempio n. 1
0
    def setup(self):
        """Build a fake filesystem of config files, then load CLIConfig."""
        config_data = basic_streamalert_config()

        self.fs_patcher = fake_filesystem_unittest.Patcher()
        self.fs_patcher.setUp()

        # Each fake config path maps to the slice of config it should hold
        fake_files = [
            ('/conf/global.json', config_data['global']),
            ('/conf/threat_intel.json', config_data['threat_intel']),
            ('/conf/normalized_types.json', config_data['normalized_types']),
            ('/conf/lambda.json', config_data['lambda']),
            ('/conf/clusters/prod.json', config_data['clusters']['prod']),
        ]
        for path, contents in fake_files:
            self.fs_patcher.fs.create_file(path,
                                           contents=json.dumps(contents))

        # Instantiate only after the fake filesystem is active so CLIConfig
        # reads the mocked config files rather than the real ones.
        self.config = CLIConfig()
Esempio n. 2
0
    def test_v1_config():
        """Test for the v1 config"""
        # Legacy (v1) variables.json content expected to be auto-migrated
        legacy_config = {
            "account_id": "12345678911",
            "clusters": {"prod": "us-east-1"},
            "firehose_s3_bucket_suffix": "streamalert.results",
            "kinesis_settings": {"prod": [1, 24]},
            "kms_key_alias": "stream_alert_secrets",
            "lambda_function_prod_versions": {"prod": "$LATEST"},
            "lambda_handler": "main.handler",
            "lambda_settings": {"prod": [10, 128]},
            "lambda_source_bucket_name": "unit-testing.streamalert.source",
            "lambda_source_current_hash": "auto",
            "lambda_source_key": "auto",
            "flow_log_settings": {},
            "output_lambda_current_hash": "auto",
            "output_lambda_source_key": "auto",
            "prefix": "unit-testing",
            "region": "us-east-1",
            "tfstate_s3_key": "stream_alert_state/terraform.tfstate",
            "tfvars": "terraform.tfvars",
            "third_party_libs": ["jsonpath_rw", "netaddr"]
        }

        serialized = json.dumps(legacy_config,
                                indent=4,
                                separators=(',', ': '),
                                sort_keys=True)

        # mock the opening of `variables.json`
        opener = mock_open(read_data=serialized)
        with patch('__builtin__.open', opener, create=True):
            cli_config = CLIConfig()

            # Migration should bump the version and remap legacy keys
            assert_equal(cli_config.version, 2)
            assert_equal(cli_config['account']['aws_account_id'], '12345678911')
            assert_equal(cli_config['alert_processor_config']['source_bucket'],
                         'unit-testing.streamalert.source')
            assert_equal(cli_config['rule_processor_config']['third_party_libraries'],
                         ['jsonpath_rw', 'netaddr'])
            assert_equal(cli_config['terraform']['tfstate_s3_key'],
                         'stream_alert_state/terraform.tfstate')
Esempio n. 3
0
def test_load_config():
    """CLI - Load config"""
    config_data = {
        'global': {
            'account': {
                'aws_account_id': 'AWS_ACCOUNT_ID_GOES_HERE',
                'kms_key_alias': 'stream_alert_secrets',
                'prefix': 'unit-testing',
                'region': 'us-west-2'
            },
            'terraform': {
                'tfstate_bucket': 'PREFIX_GOES_HERE.streamalert.terraform.state',
                'tfstate_s3_key': 'stream_alert_state/terraform.tfstate',
                'tfvars': 'terraform.tfvars'
            },
            'infrastructure': {
                'monitoring': {
                    'create_sns_topic': True
                }
            }
        },
        'lambda': {
            'alert_processor_config': {
                'handler': 'stream_alert.alert_processor.main.handler',
                'source_bucket': 'PREFIX_GOES_HERE.streamalert.source',
                'source_current_hash': '<auto_generated>',
                'source_object_key': '<auto_generated>',
                'third_party_libraries': []
            },
            'rule_processor_config': {
                'handler': 'stream_alert.rule_processor.main.handler',
                'source_bucket': 'PREFIX_GOES_HERE.streamalert.source',
                'source_current_hash': '<auto_generated>',
                'source_object_key': '<auto_generated>',
                'third_party_libraries': ['jsonpath_rw', 'netaddr']
            }
        }
    }

    global_contents = json.dumps(config_data['global'], indent=2)
    lambda_contents = json.dumps(config_data['lambda'], indent=2)

    # Both config files need to be mocked for CLIConfig to load cleanly
    with mock_open('conf/global.json', global_contents), \
            mock_open('conf/lambda.json', lambda_contents):
        # mock os call

        # test valid and invalid clusters

        config = CLIConfig()
        assert_equal(config['global']['account']['prefix'], 'unit-testing')
Esempio n. 4
0
    def test_aggregate_alarm_creation(self, log_mock):
        """CLI - Adding CloudWatch metric alarm, aggregate"""
        # An empty 'clusters' value targets the aggregate metric namespace
        alarm_info = {
            'alarm_name': 'Aggregate Unit Testing Total Records Alarm',
            'alarm_description': '',
            'metric_target': 'aggregate',
            'metric_name': 'TotalRecords',
            'comparison_operator': 'LessThanThreshold',
            'evaluation_periods': 1,
            'period': 300,
            'threshold': 100.0,
            'statistic': 'Sum',
            'clusters': {}
        }

        with nested(*self.mocked_opens):
            config = CLIConfig()
            config.add_metric_alarm(alarm_info)
            log_mock.assert_called_with(
                'Successfully added \'%s\' metric alarm to '
                '\'conf/global.json\'.',
                'Aggregate Unit Testing Total Records Alarm')
Esempio n. 5
0
    def test_toggle_metric(self, write_mock, log_mock):
        """CLI - Metric toggling"""
        with nested(*self.mocked_opens):
            config = CLIConfig()

            # With the Athena config present, toggling should write the config
            config.toggle_metrics(True, [], ['athena_partition_refresh'])
            write_mock.assert_called()

            # With the Athena config removed, toggling should log an error
            del config.config['lambda']['athena_partition_refresh_config']
            config.toggle_metrics(True, [], ['athena_partition_refresh'])
            log_mock.assert_called_with(
                'No Athena configuration found; please initialize first.')

            # Cluster-scoped toggling should also write the config
            config.toggle_metrics(True, ['prod'], ['alert_processor'])
            write_mock.assert_called()
Esempio n. 6
0
    def test_cluster_alarm_creation(self, log_mock):
        """CLI - Adding CloudWatch metric alarm, cluster"""
        # A non-empty 'clusters' value targets per-cluster metric alarms
        alarm_info = {
            'alarm_name': 'Prod Unit Testing Total Records Alarm',
            'alarm_description': '',
            'metric_target': 'cluster',
            'metric_name': 'TotalRecords',
            'comparison_operator': 'LessThanThreshold',
            'evaluation_periods': 1,
            'period': 300,
            'threshold': 100.0,
            'statistic': 'Sum',
            'clusters': {'prod'}
        }

        with nested(*self.mocked_opens):
            config = CLIConfig()
            config.add_metric_alarm(alarm_info)
            log_mock.assert_called_with(
                'Successfully added \'%s\' metric alarm for the '
                '\'%s\' function to \'conf/clusters/%s.json\'.',
                'Prod Unit Testing Total Records Alarm',
                'rule_processor',
                'prod')
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from nose.tools import assert_equal

from stream_alert_cli.config import CLIConfig
from stream_alert_cli.terraform import athena

# Module-level config shared by the tests below, loaded from the unit-test conf dir
CONFIG = CLIConfig(config_path='tests/unit/conf')


def test_generate_athena():
    """CLI - Terraform Generate Athena"""

    CONFIG['lambda']['athena_partition_refresh_config'] = {
        'current_version': '$LATEST',
        'buckets': {
            'unit-testing.streamalerts': 'alerts',
            'unit-testing.streamalert.data': 'data'
        },
        'handler': 'main.handler',
        'timeout': '60',
        'memory': '128',
        'source_bucket': 'unit-testing.streamalert.source',
 def setup(self):
     """Create the CLIConfig and the expected template for these tests."""
     # pylint: disable=attribute-defined-outside-init
     # dict(...) takes a shallow copy of the top-level config mapping so
     # tests can mutate it without touching the CLIConfig instance itself
     self.config = dict(CLIConfig(config_path='tests/unit/conf'))
     self.rule_promo_config = self.config['lambda']['rule_promotion_config']
"""
from app_integrations.apps.app_base import get_app
from stream_alert.rule_processor.handler import StreamAlert
from stream_alert.alert_processor.outputs import get_output_dispatcher
from stream_alert.athena_partition_refresh.main import StreamAlertAthenaClient
from stream_alert_cli.apps import save_app_auth_info
from stream_alert_cli.config import CLIConfig
from stream_alert_cli.helpers import user_input
from stream_alert_cli.logger import LOGGER_CLI
from stream_alert_cli.manage_lambda.handler import lambda_handler
import stream_alert_cli.outputs as config_outputs
from stream_alert_cli.terraform._common import enabled_firehose_logs
from stream_alert_cli.terraform.handler import terraform_handler
from stream_alert_cli.test import stream_alert_test

# Module-level config loaded from the default 'conf/' directory; used by the CLI handlers
CONFIG = CLIConfig()


def cli_runner(options):
    """Main Stream Alert CLI handler

    Args:
        options (dict): command line arguments passed from the argparser.
            Contains the following keys for terraform commands:
                (command, subcommand, target)
            Contains the following keys for lambda commands:
                (command, subcommand, env, func, source)
    """
    # Surface the issue tracker link up front so users see it before any
    # command output (dispatch of `options` presumably follows below —
    # not visible in this chunk)
    cli_load_message = ('Issues? Report here: '
                        'https://github.com/airbnb/streamalert/issues')
    LOGGER_CLI.info(cli_load_message)
Esempio n. 10
0
class TestCLIConfig(object):
    """Test class for CLIConfig

    Runs every test against a CLIConfig backed by a pyfakefs fake
    filesystem so no real config files are read or written.
    """
    def __init__(self):
        # Assigned in setup() before each test
        self.config = None
        self.fs_patcher = None

    def setup(self):
        """Setup before each method"""
        config_data = basic_streamalert_config()

        self.fs_patcher = fake_filesystem_unittest.Patcher()
        self.fs_patcher.setUp()

        # Use create_file: the CamelCase CreateFile API was deprecated in
        # pyfakefs 3.x and removed in 4.0
        self.fs_patcher.fs.create_file('/conf/global.json',
                                       contents=json.dumps(
                                           config_data['global']))
        self.fs_patcher.fs.create_file('/conf/lambda.json',
                                       contents=json.dumps(
                                           config_data['lambda']))
        self.fs_patcher.fs.create_file('/conf/clusters/prod.json',
                                       contents=json.dumps(
                                           config_data['clusters']['prod']))

        # Create the config instance after creating the fake filesystem so that
        # CLIConfig uses our mocked config files instead of the real ones.
        self.config = CLIConfig()

    def teardown(self):
        """Teardown after each method"""
        self.fs_patcher.tearDown()

    def test_load_config(self):
        """CLI - Load config"""
        assert_equal(self.config['global']['account']['prefix'],
                     'unit-testing')

    @patch('logging.Logger.error')
    @patch('stream_alert_cli.config.CLIConfig.write')
    def test_toggle_metric(self, write_mock, log_mock):
        """CLI - Metric toggling"""
        self.config.toggle_metrics(True, [], ['athena_partition_refresh'])
        write_mock.assert_called()

        # Without an Athena config the toggle should log an error, not write
        del self.config.config['lambda']['athena_partition_refresh_config']
        self.config.toggle_metrics(True, [], ['athena_partition_refresh'])
        log_mock.assert_called_with(
            'No Athena configuration found; please initialize first.')

        self.config.toggle_metrics(True, ['prod'], ['alert_processor'])
        write_mock.assert_called()

    def test_aggregate_alarm_exists(self):
        """CLI - Aggregate alarm check"""
        result = self.config._alarm_exists(
            'Aggregate Unit Testing Failed Parses Alarm')
        assert_true(result)

    def test_cluster_alarm_exists(self):
        """CLI - Cluster alarm check"""
        # Docstring fixed: previously duplicated the aggregate test's text
        result = self.config._alarm_exists(
            'Prod Unit Testing Failed Parses Alarm')
        assert_true(result)

    @patch('stream_alert_cli.config.CLIConfig.write', Mock())
    @patch('logging.Logger.info')
    def test_cluster_alarm_creation(self, log_mock):
        """CLI - Adding CloudWatch metric alarm, cluster"""
        alarm_info = {
            'metric_target': 'cluster',
            'metric_name': 'TotalRecords',
            'evaluation_periods': 1,
            'alarm_description': '',
            'alarm_name': 'Prod Unit Testing Total Records Alarm',
            'period': 300,
            'threshold': 100.0,
            'statistic': 'Sum',
            'clusters': {'prod'},
            'comparison_operator': 'LessThanThreshold'
        }

        self.config.add_metric_alarm(alarm_info)
        log_mock.assert_called_with(
            'Successfully added \'%s\' metric alarm for the '
            '\'%s\' function to \'conf/clusters/%s.json\'.',
            'Prod Unit Testing Total Records Alarm', 'rule_processor', 'prod')

    @patch('stream_alert_cli.config.CLIConfig.write', Mock())
    @patch('logging.Logger.info')
    def test_aggregate_alarm_creation(self, log_mock):
        """CLI - Adding CloudWatch metric alarm, aggregate"""
        alarm_info = {
            'metric_target': 'aggregate',
            'metric_name': 'TotalRecords',
            'evaluation_periods': 1,
            'alarm_description': '',
            'alarm_name': 'Aggregate Unit Testing Total Records Alarm',
            'period': 300,
            'threshold': 100.0,
            'statistic': 'Sum',
            'clusters': {},
            'comparison_operator': 'LessThanThreshold'
        }

        self.config.add_metric_alarm(alarm_info)
        log_mock.assert_called_with(
            'Successfully added \'%s\' metric alarm to '
            '\'conf/global.json\'.',
            'Aggregate Unit Testing Total Records Alarm')

    @patch('logging.Logger.info')
    @patch('stream_alert_cli.config.CLIConfig.write')
    def test_add_threat_intel_with_table_name(self, write_mock, log_mock):
        """CLI - Add Threat Intel config with default dynamodb table name"""
        threat_intel_info = {
            'command': 'threat_intel',
            'debug': 'False',
            'dynamodb_table': 'my_ioc_table',
            'subcommand': 'enable'
        }

        self.config.add_threat_intel(threat_intel_info)

        expected_config = {'enabled': True, 'dynamodb_table': 'my_ioc_table'}

        assert_equal(self.config['global']['threat_intel'], expected_config)
        write_mock.assert_called()
        log_mock.assert_called()

    @patch('logging.Logger.info')
    @patch('stream_alert_cli.config.CLIConfig.write')
    def test_add_threat_intel_without_table_name(self, write_mock, log_mock):
        """CLI - Add Threat Intel config without dynamodb table name from cli"""
        threat_intel_info = {
            'command': 'threat_intel',
            'debug': 'False',
            'subcommand': 'enable'
        }

        self.config.add_threat_intel(threat_intel_info)

        # Table name should default to '<prefix>_streamalert_threat_intel_downloader'
        expected_config = {
            'enabled': True,
            'dynamodb_table':
            'unit-testing_streamalert_threat_intel_downloader'
        }

        assert_equal(self.config['global']['threat_intel'], expected_config)
        write_mock.assert_called()
        log_mock.assert_called()

    @patch('logging.Logger.info')
    @patch('stream_alert_cli.config.CLIConfig.write')
    def test_add_threat_intel_downloader(self, write_mock, log_mock):
        """CLI - Add Threat Intel Downloader config"""
        del self.config['lambda']['threat_intel_downloader_config']
        ti_downloader_info = {
            'autoscale': True,
            'command': 'threat_intel_downloader',
            'debug': False,
            'interval': 'rate(1 day)',
            'memory': '128',
            'subcommand': 'enable',
            'timeout': '240',
            'table_wcu': 25,
            'max_read_capacity': 100,
            'min_read_capacity': 5,
            'target_utilization': 70
        }
        result = self.config.add_threat_intel_downloader(ti_downloader_info)
        assert_true(result)
        expected_config = {
            'autoscale': True,
            'enabled': True,
            'current_version': '$LATEST',
            'handler': 'stream_alert.threat_intel_downloader.main.handler',
            'interval': 'rate(1 day)',
            'ioc_filters': [],
            'ioc_keys': [],
            'ioc_types': [],
            'excluded_sub_types': [],
            'log_level': 'info',
            'memory': '128',
            'source_bucket': 'unit-testing.streamalert.source',
            'source_current_hash': '<auto_generated>',
            'source_object_key': '<auto_generated>',
            'third_party_libraries': ['requests'],
            'table_rcu': 10,
            'table_wcu': 25,
            'timeout': '240',
            'max_read_capacity': 100,
            'min_read_capacity': 5,
            'target_utilization': 70
        }
        assert_equal(self.config['lambda']['threat_intel_downloader_config'],
                     expected_config)
        write_mock.assert_called()
        log_mock.assert_not_called()

        # no config changed if threat intel downloader already been enabled via CLI
        result = self.config.add_threat_intel_downloader(ti_downloader_info)
        assert_false(result)
        write_mock.assert_called_once()
        log_mock.assert_called_with(
            'Threat Intel Downloader has been enabled. '
            'Please edit config/lambda.json if you want to '
            'change lambda function settings.')
 def setUp(self):
     """Create the CLIConfig and the expected template for these tests."""
     # dict(...) takes a shallow copy of the top-level config mapping so
     # tests can mutate it without touching the CLIConfig instance itself
     self.config = dict(CLIConfig(config_path='tests/unit/conf'))
     self.alert_proc_config = self.config['lambda'][
         'alert_processor_config']
Esempio n. 12
0
 def setup(self):
     """Setup before each method"""
     # NOTE(review): fixture bucket name; its consumers are not visible in
     # this chunk — presumably the tests' expected S3 logging target
     self._logging_bucket_name = 'logging-bucket-name'
     self.config = CLIConfig(config_path='tests/unit/conf')
Esempio n. 13
0
 def setup(self):
     """Setup before each method"""
     # common.infinitedict() — presumably an arbitrarily-nestable dict used
     # to accumulate generated Terraform; confirm in the common module
     self.cluster_dict = common.infinitedict()
     self.config = CLIConfig(config_path='tests/unit/conf')
Esempio n. 14
0
class TestTerraformGenerate(object):
    """Test class for the Terraform Cluster Generating"""

    # pylint: disable=no-self-use

    def __init__(self):
        # Both attributes are (re)assigned by setup() before every test
        self.config = None
        self.cluster_dict = None

    def setup(self):
        """Setup before each method"""
        # Fresh config and output accumulator for every test
        self.config = CLIConfig(config_path='tests/unit/conf')
        self.cluster_dict = common.infinitedict()

    @staticmethod
    def test_generate_s3_bucket():
        """CLI - Terraform Generate S3 Bucket """
        result = generate.generate_s3_bucket(
            bucket='unit.test.bucket',
            logging='my.s3-logging.bucket',
            force_destroy=True
        )

        # The generated resource must be a dict with exactly these keys
        assert_equal(type(result), dict)
        assert_equal(result['bucket'], 'unit.test.bucket')
        assert_equal(
            set(result.keys()),
            {'bucket', 'acl', 'force_destroy', 'versioning', 'logging'}
        )

    @staticmethod
    def test_generate_s3_bucket_lifecycle():
        """CLI - Terraform Generate S3 Bucket with Lifecycle"""
        # Transition objects under logs/ to Glacier after 30 days
        lifecycle_rule = {
            'prefix': 'logs/',
            'enabled': True,
            'transition': {
                'days': 30,
                'storage_class': 'GLACIER'
            }
        }
        result = generate.generate_s3_bucket(
            bucket='unit.test.bucket',
            logging='my.s3-logging.bucket',
            force_destroy=False,
            lifecycle_rule=lifecycle_rule
        )

        assert_equal(result['lifecycle_rule']['prefix'], 'logs/')
        assert_equal(result['force_destroy'], False)
        assert_equal(type(result['lifecycle_rule']), dict)
        assert_equal(type(result['versioning']), dict)

    def test_generate_main(self):
        """CLI - Terraform Generate Main"""
        tf_main = generate.generate_main(config=self.config, init=False)

        # Expected output mirrors the fixtures under tests/unit/conf:
        # provider/terraform settings plus KMS, S3 and SNS resources
        tf_main_expected = {
            'provider': {
                'aws': {
                    'version': generate.TERRAFORM_VERSIONS['provider']['aws']
                }
            },
            'terraform': {
                'required_version': generate.TERRAFORM_VERSIONS['application'],
                'backend': {
                    's3': {
                        'bucket': 'unit-testing.streamalert.terraform.state',
                        'key': 'stream_alert_state/terraform.tfstate',
                        'region': 'us-west-1',
                        'encrypt': True,
                        'acl': 'private',
                        'kms_key_id': 'alias/unit-testing'
                    }
                }
            },
            'resource': {
                'aws_kms_key': {
                    'stream_alert_secrets': {
                        'enable_key_rotation': True,
                        'description': 'StreamAlert secret management'
                    }
                },
                'aws_kms_alias': {
                    'stream_alert_secrets': {
                        'name':
                        'alias/unit-testing',
                        'target_key_id':
                        '${aws_kms_key.stream_alert_secrets.key_id}'
                    }
                },
                'aws_s3_bucket': {
                    'lambda_source': {
                        'bucket': 'unit.testing.source.bucket',
                        'acl': 'private',
                        'force_destroy': True,
                        'versioning': {
                            'enabled': True
                        },
                        'logging': {
                            'target_bucket':
                            'unit-testing.streamalert.s3-logging',
                            'target_prefix': 'unit.testing.source.bucket/'
                        }
                    },
                    'stream_alert_secrets': {
                        'bucket': 'unit-testing.streamalert.secrets',
                        'acl': 'private',
                        'force_destroy': True,
                        'versioning': {
                            'enabled': True
                        },
                        'logging': {
                            'target_bucket':
                            'unit-testing.streamalert.s3-logging',
                            'target_prefix':
                            'unit-testing.streamalert.secrets/'
                        }
                    },
                    'terraform_remote_state': {
                        'bucket': 'unit-testing.terraform.tfstate',
                        'acl': 'private',
                        'force_destroy': True,
                        'versioning': {
                            'enabled': True
                        },
                        'logging': {
                            'target_bucket':
                            'unit-testing.streamalert.s3-logging',
                            'target_prefix': 'unit-testing.terraform.tfstate/'
                        }
                    },
                    'logging_bucket': {
                        'bucket': 'unit-testing.streamalert.s3-logging',
                        'acl': 'log-delivery-write',
                        'force_destroy': True,
                        'versioning': {
                            'enabled': True
                        },
                        'logging': {
                            'target_bucket':
                            'unit-testing.streamalert.s3-logging',
                            'target_prefix':
                            'unit-testing.streamalert.s3-logging/'
                        },
                        # Only the logging bucket gets a Glacier lifecycle rule
                        'lifecycle_rule': {
                            'prefix': '/',
                            'enabled': True,
                            'transition': {
                                'days': 30,
                                'storage_class': 'GLACIER'
                            }
                        }
                    },
                    'streamalerts': {
                        'bucket': 'unit-testing.streamalerts',
                        'acl': 'private',
                        'force_destroy': True,
                        'versioning': {
                            'enabled': True
                        },
                        'logging': {
                            'target_bucket':
                            'unit-testing.streamalert.s3-logging',
                            'target_prefix': 'unit-testing.streamalerts/'
                        }
                    }
                },
                'aws_sns_topic': {
                    'stream_alert_monitoring': {
                        'name': 'stream_alert_monitoring'
                    }
                }
            }
        }

        # Compare section-by-section for clearer failure diffs
        assert_equal(tf_main['provider'], tf_main_expected['provider'])
        assert_equal(tf_main['terraform'], tf_main_expected['terraform'])
        assert_equal(tf_main['resource'], tf_main_expected['resource'])

    def test_generate_main_with_firehose(self):
        """CLI - Terraform Generate Main with Firehose Enabled"""
        self.config['global']['infrastructure']['firehose'] = {
            'enabled': True,
            's3_bucket_suffix': 'my-data',
            'buffer_size': 10,
            'buffer_interval': 650,
            'enabled_logs': ['cloudwatch']
        }
        tf_main = generate.generate_main(config=self.config, init=False)

        generated_modules = tf_main['module']
        expected_kinesis_modules = {
            'kinesis_firehose_setup',
            'kinesis_firehose_cloudwatch_test_match_types',
            'kinesis_firehose_cloudwatch_test_match_types_2'
        }

        # Every expected module must be generated; a generator expression
        # replaces the needless intermediate list previously built for all()
        assert_true(all(
            expected_module in generated_modules
            for expected_module in expected_kinesis_modules
        ))

        # Name the module once instead of repeating the backslash-continued
        # subscript lookup for each assertion
        firehose_module = generated_modules[
            'kinesis_firehose_cloudwatch_test_match_types']
        assert_equal(firehose_module['s3_bucket_name'], 'unit-testing.my-data')
        assert_equal(firehose_module['buffer_size'], 10)
        assert_equal(firehose_module['buffer_interval'], 650)

    def test_generate_stream_alert_test(self):
        """CLI - Terraform Generate StreamAlert - Test Cluster"""
        streamalert.generate_stream_alert(
            'test', self.cluster_dict, self.config)

        # Only the module body for this cluster is compared
        expected_module = {
            'source': 'modules/tf_stream_alert',
            'account_id': '12345678910',
            'region': 'us-west-1',
            'prefix': 'unit-testing',
            'cluster': 'test',
            'dynamodb_ioc_table': 'test_table_name',
            'threat_intel_enabled': False,
            'rule_processor_enable_metrics': True,
            'rule_processor_log_level': 'info',
            'rule_processor_memory': 128,
            'rule_processor_timeout': 25,
            'rule_processor_version': '$LATEST',
            'rule_processor_config': '${var.rule_processor_config}',
        }

        assert_equal(self.cluster_dict['module']['stream_alert_test'],
                     expected_module)

    def test_generate_stream_alert_advanced(self):
        """CLI - Terraform Generate StreamAlert - Advanced Cluster"""
        streamalert.generate_stream_alert(
            'advanced', self.cluster_dict, self.config)

        # The advanced cluster additionally wires in SNS input topics
        expected_module = {
            'source': 'modules/tf_stream_alert',
            'account_id': '12345678910',
            'region': 'us-west-1',
            'prefix': 'unit-testing',
            'cluster': 'advanced',
            'dynamodb_ioc_table': 'test_table_name',
            'threat_intel_enabled': False,
            'rule_processor_enable_metrics': True,
            'rule_processor_log_level': 'info',
            'rule_processor_memory': 128,
            'rule_processor_timeout': 25,
            'rule_processor_version': '$LATEST',
            'rule_processor_config': '${var.rule_processor_config}',
            'input_sns_topics': ['my-sns-topic-name'],
        }

        assert_equal(self.cluster_dict['module']['stream_alert_advanced'],
                     expected_module)

    def test_generate_flow_logs(self):
        """CLI - Terraform Generate Flow Logs"""
        flow_logs.generate_flow_logs(
            'advanced', self.cluster_dict, self.config)

        generated = self.cluster_dict['module']['flow_logs_advanced']
        assert_equal(generated['flow_log_group_name'], 'unit-test-advanced')
        assert_equal(generated['vpcs'], ['vpc-id-1', 'vpc-id-2'])

    def test_generate_cloudtrail_basic(self):
        """CLI - Terraform Generate Cloudtrail Module - Legacy"""
        # A bare legacy 'enabled' flag should be expanded by generation
        self.config['clusters']['advanced']['modules']['cloudtrail'] = {
            'enabled': True
        }
        result = cloudtrail.generate_cloudtrail(
            'advanced', self.cluster_dict, self.config)
        # Reload the config
        self.config.load()

        assert_true(result)

        cloudtrail_settings = self.config['clusters']['advanced']['modules'][
            'cloudtrail']
        assert_equal(set(cloudtrail_settings.keys()),
                     {'enable_logging', 'enable_kinesis'})
        assert_equal(
            self.cluster_dict['module']['cloudtrail_advanced'], {
                'account_ids': ['12345678910'],
                'cluster': 'advanced',
                'kinesis_arn': '${module.kinesis_advanced.arn}',
                'prefix': 'unit-testing',
                'enable_logging': True,
                'enable_kinesis': True,
                'source': 'modules/tf_stream_alert_cloudtrail',
                's3_logging_bucket': 'unit-testing.streamalert.s3-logging',
                'existing_trail': False,
                'is_global_trail': True,
                'event_pattern': '{"account": ["12345678910"]}'
            })

    def test_generate_cloudtrail_all_options(self):
        """CLI - Terraform Generate Cloudtrail Module - All Options"""
        # Configure every supported cloudtrail option explicitly, including a
        # valid CloudWatch event pattern
        event_pattern = json.dumps({
            'source': ['aws.ec2'],
            'account': '12345678910',
            'detail': {
                'state': ['running']
            }
        })
        self.config['clusters']['advanced']['modules']['cloudtrail'] = {
            'enable_logging': True,
            'enable_kinesis': True,
            'existing_trail': False,
            'is_global_trail': False,
            'event_pattern': event_pattern
        }

        cloudtrail.generate_cloudtrail('advanced', self.cluster_dict,
                                       self.config)

        generated = self.cluster_dict['module']
        assert_true('cloudtrail_advanced' in generated)
        assert_equal(
            generated['cloudtrail_advanced'], {
                'account_ids': ['12345678910'],
                'cluster': 'advanced',
                'existing_trail': False,
                'is_global_trail': False,
                'kinesis_arn': '${module.kinesis_advanced.arn}',
                'prefix': 'unit-testing',
                'enable_logging': True,
                'enable_kinesis': True,
                'source': 'modules/tf_stream_alert_cloudtrail',
                's3_logging_bucket': 'unit-testing.streamalert.s3-logging',
                'event_pattern': '{"source": ["aws.ec2"], '
                                 '"account": "12345678910", '
                                 '"detail": {"state": ["running"]}}'
            })

    @patch('stream_alert_cli.terraform.cloudtrail.LOGGER_CLI')
    def test_generate_cloudtrail_invalid_event_pattern(self, log_mock):
        """CLI - Terraform Generate Cloudtrail Module - Invalid Event Pattern"""
        # 'invalid' is not a valid event pattern key, so generation should
        # fail and log an error instead of raising
        modules = self.config['clusters']['advanced']['modules']
        modules['cloudtrail'] = {
            'enable_logging': True,
            'enable_kinesis': True,
            'existing_trail': False,
            'is_global_trail': False,
            'event_pattern': json.dumps({'invalid': ['aws.ec2']})
        }

        success = cloudtrail.generate_cloudtrail('advanced', self.cluster_dict,
                                                 self.config)

        assert_false(success)
        assert_true(log_mock.error.called)

    def test_generate_cluster_test(self):
        """CLI - Terraform Generate Test Cluster"""
        tf_cluster = generate.generate_cluster(config=self.config,
                                               cluster_name='test')

        expected_modules = {
            'stream_alert_test', 'cloudwatch_monitoring_test', 'kinesis_test',
            'kinesis_events_test', 's3_events_unit-testing_test_0'
        }

        # Exactly the expected modules should be generated, and the cluster
        # should contain only a 'module' and an 'output' section
        assert_equal(set(tf_cluster['module']), expected_modules)
        assert_equal(set(tf_cluster), {'module', 'output'})

    def test_generate_cluster_advanced(self):
        """CLI - Terraform Generate Advanced Cluster"""
        tf_cluster = generate.generate_cluster(config=self.config,
                                               cluster_name='advanced')

        # The advanced cluster enables the optional flow logs, cloudtrail and
        # multiple s3 events modules on top of the base set
        expected_modules = {
            'stream_alert_advanced', 'cloudwatch_monitoring_advanced',
            'kinesis_advanced', 'kinesis_events_advanced',
            'flow_logs_advanced', 'cloudtrail_advanced',
            's3_events_unit-testing_advanced_1',
            's3_events_unit-testing_advanced_0'
        }

        assert_equal(set(tf_cluster['module']), expected_modules)
        assert_equal(set(tf_cluster), {'module', 'output'})
Esempio n. 15
0
def cli_runner(args):
    """Main StreamAlert CLI handler

    Args:
        args (argparse.Namespace): command line arguments passed from the argparser.
            Contains the following keys for terraform commands:
                (command, subcommand, target)
            Contains the following keys for lambda commands:
                (command, subcommand, env, func, source)

    Returns:
        bool: False if errors occurred, True otherwise
    """
    # Load the project config before dispatching; every handler closes over it
    config = CLIConfig()

    set_logger_levels(args.debug)

    LOGGER.info(
        'Issues? Report here: https://github.com/airbnb/streamalert/issues')

    # Dispatch table mapping each CLI command to its handler. Each entry takes
    # the parsed options; most handlers also need the loaded config.
    cmds = {
        'app': lambda opts: app_handler(opts, config),
        'athena': lambda opts: athena_handler(opts, config),
        'build': lambda opts: terraform_build_handler(opts, config),
        'clean': lambda opts: terraform_clean_handler(),
        'configure': lambda opts: configure_handler(opts, config),
        'create-alarm': lambda opts: _create_alarm_handler(opts, config),
        'create-cluster-alarm': lambda opts: _create_alarm_handler(opts, config),
        'custom-metrics': lambda opts: _custom_metrics_handler(opts, config),
        'deploy': lambda opts: deploy_handler(opts, config),
        'destroy': lambda opts: terraform_destroy_handler(opts, config),
        'generate': lambda opts: terraform_generate_handler(config, check_creds=False),
        'init': lambda opts: terraform_init(opts, config),
        'kinesis': lambda opts: kinesis_handler(opts, config),
        'list-targets': lambda opts: terraform_list_targets(config),
        'output': lambda opts: output_handler(opts, config),
        'rollback': lambda opts: rollback_handler(opts, config),
        'rule-staging': lambda opts: rule_staging_handler(opts, config),
        'status': lambda opts: _status_handler(config),
        'test': lambda opts: test_handler(opts, config),
        'threat-intel': lambda opts: _threat_intel_handler(opts, config),
        'threat-intel-downloader': lambda opts: threat_intel_downloader_handler(opts, config),
    }

    result = cmds[args.command](args)
    LOGGER.info('Completed')
    return result
Esempio n. 16
0
 def test_load_config(self):
     """CLI - Load config"""
     # Build the config from the mocked open() handles rather than real files
     with nested(*self.mocked_opens):
         loaded = CLIConfig()
         prefix = loaded['global']['account']['prefix']
         assert_equal(prefix, 'unit-testing')
Esempio n. 17
0
class TestCLIConfig(object):
    """Test class for CLIConfig"""
    def __init__(self):
        # Populated in setup(); the test runner calls setup() before every
        # test method, so these are never None inside a test
        self.config = None
        self.fs_patcher = None

    def setup(self):
        """Setup before each method"""
        config_data = basic_streamalert_config()

        # Replace the real filesystem with pyfakefs so CLIConfig reads only
        # the /conf/*.json files created below
        self.fs_patcher = fake_filesystem_unittest.Patcher()
        self.fs_patcher.setUp()

        self.fs_patcher.fs.create_file('/conf/global.json',
                                       contents=json.dumps(
                                           config_data['global']))
        self.fs_patcher.fs.create_file('/conf/threat_intel.json',
                                       contents=json.dumps(
                                           config_data['threat_intel']))
        self.fs_patcher.fs.create_file('/conf/normalized_types.json',
                                       contents=json.dumps(
                                           config_data['normalized_types']))
        self.fs_patcher.fs.create_file('/conf/lambda.json',
                                       contents=json.dumps(
                                           config_data['lambda']))
        self.fs_patcher.fs.create_file('/conf/clusters/prod.json',
                                       contents=json.dumps(
                                           config_data['clusters']['prod']))

        # Create the config instance after creating the fake filesystem so that
        # CLIConfig uses our mocked config files instead of the real ones.
        self.config = CLIConfig()

    def teardown(self):
        """Teardown after each method"""
        # Restore the real filesystem modules patched in setup()
        self.fs_patcher.tearDown()

    def test_load_config(self):
        """CLI - Load config"""
        assert_equal(self.config['global']['account']['prefix'],
                     'unit-testing')

    def test_toggle_metric(self):
        """CLI - Metric toggling"""
        # Toggling should set 'enable_custom_metrics' on the corresponding
        # lambda function's config section
        self.config.toggle_metrics('athena_partition_refresh', enabled=True)
        assert_equal(
            self.config['lambda']['athena_partition_refresh_config']
            ['enable_custom_metrics'], True)

        self.config.toggle_metrics('alert_processor', enabled=False)
        assert_equal(
            self.config['lambda']['alert_processor_config']
            ['enable_custom_metrics'], False)

    def test_aggregate_alarm_exists(self):
        """CLI - Aggregate alarm check"""
        # This alarm name is present in the basic_streamalert_config fixture
        result = self.config._alarm_exists(
            'Aggregate Unit Testing Failed Parses Alarm')
        assert_true(result)

    def test_cluster_alarm_exists(self):
        """CLI - Cluster alarm check"""
        # Cluster-scoped alarm from the 'prod' cluster fixture
        result = self.config._alarm_exists(
            'Prod Unit Testing Failed Parses Alarm')
        assert_true(result)

    def test_cluster_alarm_creation(self):
        """CLI - Adding CloudWatch metric alarm, cluster"""
        alarm_info = {
            'function': 'classifier',
            'metric_name': 'TotalRecords',
            'evaluation_periods': 1,
            'alarm_description': '',
            'alarm_name': 'Prod Unit Testing Total Records Alarm',
            'period': 300,
            'threshold': 100.0,
            'statistic': 'Sum',
            'clusters': {'prod'},
            'comparison_operator': 'LessThanThreshold'
        }

        # The new alarm is stored alongside the pre-existing fixture alarm;
        # note the metric name gains the function and cluster qualifiers
        expected_result = {
            'Prod Unit Testing Total Records Alarm': {
                'metric_name': 'Classifier-TotalRecords-PROD',
                'evaluation_periods': 1,
                'alarm_description': '',
                'period': 300,
                'threshold': 100.0,
                'statistic': 'Sum',
                'comparison_operator': 'LessThanThreshold'
            },
            'Prod Unit Testing Failed Parses Alarm': {
                'alarm_description': '',
                'comparison_operator': 'GreaterThanOrEqualToThreshold',
                'evaluation_periods': 1,
                'metric_name': 'Classifier-FailedParses-PROD',
                'period': 300,
                'statistic': 'Sum',
                'threshold': 1.0
            }
        }

        self.config.add_metric_alarm(alarm_info)
        result = (self.config['clusters']['prod']['modules']['stream_alert']
                  ['classifier_config']['custom_metric_alarms'])

        assert_equal(result, expected_result)

    def test_aggregate_alarm_creation(self):
        """CLI - Adding CloudWatch metric alarm, aggregate"""
        # No 'clusters' key -> the alarm is aggregate and lands under the
        # top-level lambda config instead of a cluster module
        alarm_info = {
            'function': 'classifier',
            'metric_name': 'TotalRecords',
            'evaluation_periods': 1,
            'alarm_description': '',
            'alarm_name': 'Aggregate Unit Testing Total Records Alarm',
            'period': 300,
            'threshold': 100.0,
            'statistic': 'Sum',
            'comparison_operator': 'LessThanThreshold'
        }

        expected_result = {
            'Aggregate Unit Testing Total Records Alarm': {
                'metric_name': 'Classifier-TotalRecords',
                'evaluation_periods': 1,
                'alarm_description': '',
                'period': 300,
                'threshold': 100.0,
                'statistic': 'Sum',
                'comparison_operator': 'LessThanThreshold'
            }
        }

        self.config.add_metric_alarm(alarm_info)
        result = self.config['lambda']['classifier_config'][
            'custom_metric_alarms']

        assert_equal(result, expected_result)

    def test_add_threat_intel_with_table_name(self):
        """CLI - Add Threat Intel config with default dynamodb table name"""
        threat_intel_info = {
            'command': 'threat-intel',
            'debug': 'False',
            'dynamodb_table_name': 'my_ioc_table',
            'enable': True
        }

        self.config.add_threat_intel(threat_intel_info)

        # The explicitly supplied table name should be used verbatim
        expected_config = {
            'enabled': True,
            'dynamodb_table_name': 'my_ioc_table',
            'excluded_iocs': {},
            'normalized_ioc_types': {
                'ip': ['sourceAddress', 'destinationAddress']
            }
        }

        assert_equal(self.config['threat_intel'], expected_config)

    def test_add_threat_intel_without_table_name(self):
        """CLI - Add Threat Intel config without dynamodb table name from cli"""
        threat_intel_info = {
            'command': 'threat-intel',
            'debug': 'False',
            'dynamodb_table_name': None,
            'enable': True
        }

        # Remove any pre-existing table name so the default path is exercised
        del self.config['threat_intel']['dynamodb_table_name']

        self.config.add_threat_intel(threat_intel_info)

        # With no name supplied, a default derived from the account prefix
        # is expected
        expected_config = {
            'enabled': True,
            'dynamodb_table_name':
            'unit-testing_streamalert_threat_intel_downloader',
            'excluded_iocs': {},
            'normalized_ioc_types': {
                'ip': ['sourceAddress', 'destinationAddress']
            }
        }

        assert_equal(self.config['threat_intel'], expected_config)

    @patch('logging.Logger.info')
    @patch('stream_alert_cli.config.CLIConfig.write')
    def test_add_threat_intel_downloader(self, write_mock, log_mock):
        """CLI - Add Threat Intel Downloader config"""
        # Start from a config without the downloader section so the first
        # call is treated as a fresh enable
        del self.config['lambda']['threat_intel_downloader_config']
        ti_downloader_info = {
            'autoscale': True,
            'command': 'threat_intel_downloader',
            'debug': False,
            'interval': 'rate(1 day)',
            'memory': '128',
            'subcommand': 'enable',
            'timeout': '240',
            'table_wcu': 25,
            'max_read_capacity': 100,
            'min_read_capacity': 5,
            'target_utilization': 70
        }
        result = self.config.add_threat_intel_downloader(ti_downloader_info)
        assert_true(result)
        expected_config = {
            'autoscale': True,
            'enabled': True,
            'interval': 'rate(1 day)',
            'ioc_filters': [],
            'ioc_keys': [],
            'ioc_types': [],
            'excluded_sub_types': [],
            'log_level': 'info',
            'memory': '128',
            'third_party_libraries': ['requests'],
            'table_rcu': 10,
            'table_wcu': 25,
            'timeout': '240',
            'max_read_capacity': 100,
            'min_read_capacity': 5,
            'target_utilization': 70
        }
        assert_equal(self.config['lambda']['threat_intel_downloader_config'],
                     expected_config)
        write_mock.assert_called()
        log_mock.assert_not_called()

        # no config changed if threat intel downloader already been enabled via CLI
        result = self.config.add_threat_intel_downloader(ti_downloader_info)
        assert_false(result)
        write_mock.assert_called_once()
        log_mock.assert_called_with(
            'Threat Intel Downloader has been enabled. '
            'Please edit config/lambda.json if you want to '
            'change lambda function settings.')
Esempio n. 18
0
 def test_cluster_alarm_exists(self):
     """CLI - Cluster alarm check"""
     # Docstring corrected: this checks the Prod cluster alarm, not the
     # aggregate one. Config is built from mocked open() handles.
     with nested(*self.mocked_opens):
         config = CLIConfig()
         result = config._alarm_exists('Prod Unit Testing Failed Parses Alarm')
         assert_true(result)