Example #1
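 # mock_helper is presumably injected by a @mock.patch decorator on this test in the original source (not shown in this excerpt).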
 def test_rollback_all(self, mock_helper):
     """CLI - Lambda rollback all"""
     mock_helper.return_value = True
     funcs = [
         'alert', 'alert_merger', 'apps', 'athena', 'classifier', 'rule',
         'rule_promo', 'scheduled_queries', 'threat_intel_downloader'
     ]
     assert_equal(
         rollback.RollbackCommand.handler(
             MockOptions(None, funcs),
             MockCLIConfig(config=basic_streamalert_config())), True)
     mock_helper.assert_has_calls([
         mock.call(mock.ANY, 'unit-test_streamalert_alert_processor'),
         mock.call(mock.ANY, 'unit-test_streamalert_alert_merger'),
         mock.call(mock.ANY,
                   'unit-test_corp_box_admin_events_box_collector_app'),
         mock.call(mock.ANY,
                   'unit-test_corp_duo_admin_duo_admin_collector_app'),
         mock.call(mock.ANY, 'unit-test_streamalert_athena_partitioner'),
         mock.call(mock.ANY, 'unit-test_corp_streamalert_classifier'),
         mock.call(mock.ANY, 'unit-test_prod_streamalert_classifier'),
         mock.call(mock.ANY, 'unit-test_streamalert_rules_engine'),
         mock.call(mock.ANY, 'unit-test_streamalert_rule_promotion'),
         mock.call(mock.ANY,
                   'unit-test_streamalert_scheduled_queries_runner'),
         mock.call(mock.ANY,
                   'unit-test_streamalert_threat_intel_downloader')
     ])
Example #2
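    # A matching teardown presumably calls self.fs_patcher.tearDown() to restore the real filesystem.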
    def setup(self):
        """Setup before each method"""
        config_data = basic_streamalert_config()

        self.fs_patcher = fake_filesystem_unittest.Patcher()
        self.fs_patcher.setUp()

        self.fs_patcher.fs.create_file('./conf/global.json',
                                       contents=json.dumps(
                                           config_data['global']))
        self.fs_patcher.fs.create_file('./conf/threat_intel.json',
                                       contents=json.dumps(
                                           config_data['threat_intel']))
        self.fs_patcher.fs.create_file('./conf/normalized_types.json',
                                       contents=json.dumps(
                                           config_data['normalized_types']))
        self.fs_patcher.fs.create_file('./conf/lambda.json',
                                       contents=json.dumps(
                                           config_data['lambda']))
        self.fs_patcher.fs.create_file('./conf/clusters/prod.json',
                                       contents=json.dumps(
                                           config_data['clusters']['prod']))

        # Create the config instance after creating the fake filesystem so that
        # CLIConfig uses our mocked config files instead of the real ones.
        self.config = CLIConfig('./conf/')
Example #3
    def test_config_no_logs_key(self):
        """Shared - Config Validator - No Logs Key in Source"""
        # Load a valid config
        config = basic_streamalert_config()

        # Remove everything from the sources entry
        config['clusters']['prod']['data_sources']['kinesis']['stream_1'] = {}

        assert_raises(ConfigError, _validate_config, config)
Example #4
    def test_config_invalid_datasources(self):
        """Shared - Config Validator - Invalid Datasources"""
        # Load a valid config
        config = basic_streamalert_config()

        # Set the sources value to contain an invalid data source ('sqs')
        config['clusters']['prod']['data_sources'] = {'sqs': {'queue_1': {}}}

        assert_raises(ConfigError, _validate_config, config)
Example #5
    def test_config_empty_logs_list(self):
        """Shared - Config Validator - Empty Logs List in Source"""
        # Load a valid config
        config = basic_streamalert_config()

        # Set the logs key to an empty list
        config['clusters']['prod']['data_sources']['kinesis']['stream_1'] = []

        assert_raises(ConfigError, _validate_config, config)
Example #6
    def test_config_no_parsers(self):
        """Shared - Config Validator - No Parser in Log"""
        # Load a valid config
        config = basic_streamalert_config()

        # Remove the 'parser' keys from the config
        config['logs']['json_log'].pop('parser')
        config['logs']['csv_log'].pop('parser')

        assert_raises(ConfigError, _validate_config, config)
Example #7
    def test_config_ioc_types_no_normalized_types(self):
        """Shared - Config Validator - IOC Types, Without Normalized Types"""
        # Load a valid config
        config = basic_streamalert_config()

        # Define threat_intel IOC types while removing the normalized_types section
        config['threat_intel'] = {'normalized_ioc_types': {'ip': ['foobar']}}
        if 'normalized_types' in config:
            del config['normalized_types']

        assert_raises(ConfigError, _validate_config, config)
Example #8
    def test_config_invalid_ioc_types(self):
        """Shared - Config Validator - IOC Types, Invalid"""
        # Load a valid config
        config = basic_streamalert_config()

        # Map the 'ip' IOC type to a normalized key ('foobar') that has no entry in normalized_types
        config['threat_intel'] = {
            'normalized_ioc_types': {'ip': ['foobar']}
        }

        config['normalized_types'] = {'log_type': {'sourceAddress': ['ip_address']}}

        assert_raises(ConfigError, _validate_config, config)
Example #9
 def test_lambda_terraform_targets(self):
     """CLI - Deploy, Lambda Terraform Targets"""
     config = basic_streamalert_config()
     functions = ['rule', 'classifier']
     clusters = ['prod']
     result = deploy._lambda_terraform_targets(config, functions, clusters)
     expected_result = {
         'module.rules_engine_iam',
         'module.rules_engine_lambda',
         'module.classifier_prod_iam',
         'module.classifier_prod_lambda',
     }
     assert_equal(result, expected_result)
Example #10
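 # As in the other rollback tests, mock_helper presumably comes from a @mock.patch decorator omitted from this snippet.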
 def test_rollback_subset(self, mock_helper):
     """CLI - Lambda rollback apps and rule"""
     mock_helper.return_value = True
     assert_equal(
         rollback.rollback_handler(
             MockOptions(None, ['apps', 'rule']),
             MockCLIConfig(config=basic_streamalert_config())), True)
     mock_helper.assert_has_calls([
         mock.call(mock.ANY,
                   'unit-testing_corp_box_admin_events_box_collector_app'),
         mock.call(mock.ANY,
                   'unit-testing_corp_duo_admin_duo_admin_collector_app'),
         mock.call(mock.ANY, 'unit-testing_streamalert_rules_engine')
     ])
Example #11
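    # log_mock is presumably supplied by a @mock.patch decorator that patches the logging call asserted below (not shown here).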
    def test_lambda_terraform_targets_invalid_target(self, log_mock):
        """CLI - Deploy, Lambda Terraform Targets, Invalid Target"""
        config = basic_streamalert_config()

        # The scheduled_queries function is not enabled
        functions = ['scheduled_queries']
        clusters = []
        result = deploy._lambda_terraform_targets(config, functions, clusters)

        assert_equal(result, set())
        log_mock.assert_called_with(
            'Function is not enabled and will be ignored: %s',
            'scheduled_queries'
        )
Example #12
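    # Presumably a fake_filesystem_unittest.TestCase subclass; setUpPyfakefs() activates the fake filesystem and exposes it as self.fs.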
    def setUp(self):
        self.setUpPyfakefs()

        config_data = basic_streamalert_config()

        mock_cluster_contents = '{"data_sources": {}, "classifier_config": {"foo": "bar"}}'

        # Add config files which should be loaded
        self.fs.create_file('conf/clusters/prod.json',
                            contents=mock_cluster_contents)
        self.fs.create_file('conf/clusters/dev.json',
                            contents=mock_cluster_contents)
        self.fs.create_file('conf/global.json', contents='{}')
        self.fs.create_file('conf/lambda.json', contents='{}')
        self.fs.create_file('conf/logs.json', contents='{}')
        self.fs.create_file('conf/outputs.json', contents='{}')
        self.fs.create_file('conf/threat_intel.json',
                            contents=json.dumps(config_data['threat_intel']))
        self.fs.create_file('conf/normalized_types.json',
                            contents=json.dumps(
                                config_data['normalized_types']))
        self.fs.create_file(
            'conf/schemas/csv.json',
            contents=
            '{"csv_log2": {"schema": {"data": "string","uid": "integer"},"parser": "csv"}}'
        )

        # Create a similar structure, but with a schemas/ directory in place of logs.json, plus the same 2 clusters.
        self.fs.create_file('conf_schemas/clusters/prod.json',
                            contents=mock_cluster_contents)
        self.fs.create_file('conf_schemas/clusters/dev.json',
                            contents=mock_cluster_contents)
        self.fs.create_file('conf_schemas/global.json', contents='{}')
        self.fs.create_file('conf_schemas/lambda.json', contents='{}')
        self.fs.create_file('conf_schemas/outputs.json', contents='{}')
        self.fs.create_file(
            'conf_schemas/schemas/csv.json',
            contents=
            '{"csv_log": {"schema": {"data": "string","uid": "integer"},"parser": "csv"}}'
        )
        self.fs.create_file(
            'conf_schemas/schemas/json.json',
            contents=
            '{"json_log": {"schema": {"name": "string"},"parser": "json"}}')
        self.fs.create_file(
            'conf_schemas/schemas/json_log_with_dots.json',
            contents=
            '{"json:log.with.dots": {"schema": {"name": "string"},"parser": "json"}}'
        )
Example #13
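    # As in the previous example, setUpPyfakefs() is presumably provided by a fake_filesystem_unittest.TestCase base class.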
    def setUp(self):
        self.setUpPyfakefs()

        config_data = basic_streamalert_config()

        # Add config files which should be loaded
        self.fs.create_file('conf/clusters/prod.json', contents='{}')
        self.fs.create_file('conf/clusters/dev.json', contents='{}')
        self.fs.create_file('conf/global.json', contents='{}')
        self.fs.create_file('conf/lambda.json', contents='{}')
        self.fs.create_file('conf/logs.json', contents='{}')
        self.fs.create_file('conf/outputs.json', contents='{}')
        self.fs.create_file('conf/sources.json', contents='{}')
        self.fs.create_file(
            'conf/threat_intel.json',
            contents=json.dumps(config_data['threat_intel'])
        )
        self.fs.create_file(
            'conf/normalized_types.json',
            contents=json.dumps(config_data['normalized_types'])
        )
Example #14
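 # mock_helper again presumably comes from a @mock.patch decorator not shown in this excerpt.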
 def test_rollback_all(self, mock_helper):
     """CLI - Lambda rollback all"""
     mock_helper.return_value = True
     assert_equal(
         rollback.rollback_handler(
             MockOptions(None, ['all']),
             MockCLIConfig(config=basic_streamalert_config())), True)
     mock_helper.assert_has_calls([
         mock.call(mock.ANY, 'unit-testing_streamalert_alert_processor'),
         mock.call(mock.ANY, 'unit-testing_streamalert_alert_merger'),
         mock.call(mock.ANY,
                   'unit-testing_corp_box_admin_events_box_collector_app'),
         mock.call(mock.ANY,
                   'unit-testing_corp_duo_admin_duo_admin_collector_app'),
         mock.call(mock.ANY,
                   'unit-testing_streamalert_athena_partition_refresh'),
         mock.call(mock.ANY, 'unit-testing_corp_streamalert_classifier'),
         mock.call(mock.ANY, 'unit-testing_prod_streamalert_classifier'),
         mock.call(mock.ANY, 'unit-testing_streamalert_rules_engine'),
         mock.call(mock.ANY,
                   'unit-testing_streamalert_threat_intel_downloader')
     ])
Example #15
 def test_config_duplicate_sources(self):
     """Shared - Config Validator - Duplicate Data Sources in Cluster Configs"""
     config = basic_streamalert_config()
     config['clusters']['dev'] = config['clusters']['prod']
     assert_raises(ConfigError, _validate_config, config)
Example #16
 def test_missing_streamalert_module(self):
     """Shared - Config Validator, Missing streamalert Module"""
     config = basic_streamalert_config()
     del config['clusters']['prod']['classifier_config']
     assert_raises(ConfigError, _validate_config, config)
Example #17
 def test_load_schemas():
     """Shared - Config Loading - Schemas"""
     # Load from separate dir where logs.json doesn't exist
     config = load_config(conf_dir='conf_schemas')
     basic_config = basic_streamalert_config()
     assert_equal(config['logs'], basic_config['logs'])