Example #1
def test_load_config_invalid():
    """Config Validator - Load Config - Invalid"""
    m = mock_open()
    with patch('__builtin__.open', m, create=True):
        with open('conf/logs.json', 'w') as conf_logs:
            conf_logs.write('test logs string that will throw an error')
        with open('conf/sources.json', 'w') as conf_sources:
            conf_sources.write('test sources string that will throw an error')
        load_config()
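The test above only makes sense if load_config surfaces bad JSON as an error. A minimal sketch of that behavior, assuming a load_config that reads conf/logs.json and conf/sources.json and wraps parse failures in a ConfigError; the names mirror the tests here, but the implementation is illustrative, not the project's actual code:

import json

class ConfigError(Exception):
    """Raised when a configuration file cannot be parsed"""

def load_config(conf_dir='conf'):
    # Illustrative sketch: parse each expected config file and
    # convert invalid JSON into a ConfigError
    config = {}
    for name in ('logs', 'sources'):
        path = '{}/{}.json'.format(conf_dir, name)
        with open(path) as conf_file:
            try:
                config[name] = json.load(conf_file)
            except ValueError:
                raise ConfigError('Invalid JSON in {}'.format(path))
    return config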
Example #2
    def __init__(self, context, enable_alert_processor=True):
        """
        Args:
            context: An AWS context object which provides metadata on the currently
                executing lambda function.
            enable_alert_processor (bool): If the user wants to send the alerts using their
                own methods, 'enable_alert_processor' can be set to False to suppress
                sending with the StreamAlert alert processor.
        """
        # Load the config. Validation occurs during load, which will
        # raise exceptions on any ConfigErrors
        config = load_config()

        # Load the environment from the context arn
        self.env = load_env(context)

        # Instantiate the sink here to handle sending the triggered alerts to the
        # alert processor
        self.sinker = StreamSink(self.env)

        # Instantiate a classifier that is used for this run
        self.classifier = StreamClassifier(config=config)

        self.enable_alert_processor = enable_alert_processor
        self._failed_record_count = 0
        self._alerts = []
Example #3
    def test_load_from_config_with_cluster_env(self):
        """Threat Intel - Test load_from_config to read cluster env variable"""
        with patch.dict('os.environ', {'CLUSTER': 'advanced'}):
            config = load_config('tests/unit/conf')
            config['global']['threat_intel']['enabled'] = True
            threat_intel = StreamThreatIntel.load_from_config(config)
            assert_is_instance(threat_intel, StreamThreatIntel)
            assert_equal(config['clusters'].keys(), ['advanced'])
Example #4
    def test_load_from_config_with_cluster_env_2(self):
        """Threat Intel - Test load_from_config with threat intel disabled in cluster"""
        with patch.dict('os.environ', {'CLUSTER': 'test'}):
            config = load_config('tests/unit/conf')
            config['global']['threat_intel']['enabled'] = True
            threat_intel = StreamThreatIntel.load_from_config(config)
            assert_false(isinstance(threat_intel, StreamThreatIntel))
            assert_equal(config['clusters'].keys(), ['test'])
Example #5
    def setup(self):
        """Setup before each method"""
        # Clear out the cached matchers and rules to avoid conflicts with production code
        Matcher._matchers.clear()
        Rule._rules.clear()
        self.config = load_config('tests/unit/conf')
        self.config['global']['threat_intel']['enabled'] = False
        self.rules_engine = RulesEngine(self.config)
Example #6
    def setup(self):
        """Setup before each method"""
        # Clear out the cached matchers and rules to avoid conflicts with production code
        StreamRules._StreamRules__matchers.clear()  # pylint: disable=no-member
        StreamRules._StreamRules__rules.clear()  # pylint: disable=no-member
        self.config = load_config('tests/unit/conf')
        self.config['global']['threat_intel']['enabled'] = False
        self.rules_engine = StreamRules(self.config)
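The _StreamRules__matchers and _StreamRules__rules spellings above are not typos: double-underscore class attributes are name-mangled by Python, so outside the class body they are only reachable under the mangled name. A standalone demonstration of the mechanism:

class StreamRules(object):
    __rules = {}  # name-mangled to _StreamRules__rules

# Outside the class the plain name does not exist, which is why
# the test clears the cache through the mangled attribute:
StreamRules._StreamRules__rules['example_rule'] = lambda rec: True
StreamRules._StreamRules__rules.clear()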
Example #7
    def setup(self):
        """Setup before each method"""
        self.env = {
            'lambda_region': 'us-east-1',
            'account_id': '123456789012',
            'lambda_function_name': 'test_kinesis_stream',
            'lambda_alias': 'production'
        }
        self.config = load_config('test/unit/conf')
        self.log_metadata = self.config['logs']
Example #8
    def setup(self):
        self.env = {
            'lambda_region': 'us-east-1',
            'account_id': '123456789012',
            'lambda_function_name': 'stream_alert_test',
            'lambda_alias': 'production'
        }
        self.config = load_config('test/unit/conf')
        self.log_metadata = self.config['logs']
Example #9
    def test_load_enabled_sources_invalid_log(self, mock_logging):
        """StreamAlertFirehose - Load Enabled Sources - Invalid Log"""
        config = load_config('tests/unit/conf')
        firehose_config = {'enabled_logs': ['log-that-doesnt-exist']}

        sa_firehose = StreamAlertFirehose(region='us-east-1',
                                          firehose_config=firehose_config,
                                          log_sources=config['logs'])

        assert_equal(len(sa_firehose._enabled_logs), 0)
        assert_true(mock_logging.error.called)
Example #10
    def test_load_enabled_sources(self):
        """StreamAlertFirehose - Load Enabled Sources"""
        config = load_config('tests/unit/conf')
        firehose_config = {
            'enabled_logs':
            ['json:regex_key_with_envelope', 'test_cloudtrail', 'cloudwatch']
        }  # these three entries expand to 4 enabled logs

        sa_firehose = StreamAlertFirehose(region='us-east-1',
                                          firehose_config=firehose_config,
                                          log_sources=config['logs'])

        assert_equal(len(sa_firehose._enabled_logs), 4)
        # Make sure the substitution works properly
        assert_true(all([':' not in log for log in sa_firehose.enabled_logs]))
        assert_false(sa_firehose.enabled_log_source('test_inspec'))
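A hedged sketch of the name handling these assertions depend on: a bare parent type (such as 'cloudwatch') appears to expand to all of its child log types, and the ':' separator has to be rewritten because it is not valid in a Firehose delivery stream name. The helper below is illustrative only; the real logic lives in StreamAlertFirehose:

def expand_and_sanitize(enabled, log_sources):
    # enabled: names from firehose_config['enabled_logs']
    # log_sources: keys of config['logs'], e.g. 'cloudwatch:events'
    enabled_logs = set()
    for name in enabled:
        if ':' in name:
            # exact 'parent:child' entries are taken as-is
            matches = [log for log in log_sources if log == name]
        else:
            # bare names match themselves and all of their children
            matches = [log for log in log_sources
                       if log == name or log.startswith(name + ':')]
        # ':' is assumed to be replaced so the resulting names are
        # safe to embed in delivery stream names
        enabled_logs.update(log.replace(':', '_') for log in matches)
    return enabled_logs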
Example #11
    def run(self, event, context):
        """StreamAlert Lambda function handler.

        Loads the configuration for the StreamAlert function which contains:
        available data sources, log formats, parser modes, and sinks.  Classifies
        logs sent into the stream into a parsed type.  Matches records against
        rules.

        Args:
            event: An AWS event mapped to a specific source/entity (kinesis stream or
                an s3 bucket event) containing data emitted to the stream.
            context: An AWS context object which provides metadata on the currently
                executing lambda function.

        Returns:
            None
        """
        logger.debug('Number of Records: %d', len(event.get('Records', [])))

        config = load_config()
        env = load_env(context)

        for record in event.get('Records', []):
            payload = StreamPayload(raw_record=record)
            classifier = StreamClassifier(config=config)
            classifier.map_source(payload)

            # If the kinesis stream or s3 bucket is not in our config,
            # go onto the next record
            if not payload.valid_source:
                continue

            if payload.service == 's3':
                self.s3_process(payload, classifier)
            elif payload.service == 'kinesis':
                self.kinesis_process(payload, classifier)
            else:
                logger.info('Unsupported service: %s', payload.service)

        # returns the list of generated alerts
        if self.return_alerts:
            return self.alerts
        # send alerts to SNS
        self.send_alerts(env, payload)
Example #12
    def run(self, event):
        """StreamAlert Lambda function handler.

        Loads the configuration for the StreamAlert function which contains:
        available data sources, log formats, parser modes, and sinks.  Classifies
        logs sent into the stream into a parsed type.  Matches records against
        rules.

        Args:
            event: An AWS event mapped to a specific source/entity (kinesis stream or
                an s3 bucket event) containing data emitted to the stream.

        Returns:
            None
        """
        LOGGER.debug('Number of Records: %d', len(event.get('Records', [])))

        config = load_config()

        for record in event.get('Records', []):
            payload = StreamPayload(raw_record=record)
            classifier = StreamClassifier(config=config)

            # If the kinesis stream, s3 bucket, or sns topic is not in our config,
            # go onto the next record
            if not classifier.map_source(payload):
                continue

            if payload.service == 's3':
                self._s3_process(payload, classifier)
            elif payload.service == 'kinesis':
                self._kinesis_process(payload, classifier)
            elif payload.service == 'sns':
                self._sns_process(payload, classifier)
            else:
                LOGGER.info('Unsupported service: %s', payload.service)

        LOGGER.debug('%s alerts triggered', len(self.alerts))
        LOGGER.debug('\n%s\n', json.dumps(self.alerts, indent=4))

        if self.return_alerts:
            return self.alerts
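A usage sketch for the handler above, built around the documented shape of an AWS Kinesis event record; the stream ARN and payload are placeholders, and the surrounding StreamAlert instance is assumed to be constructed elsewhere:

import base64
import json

record_data = json.dumps({'key': 'value'})
event = {
    'Records': [{
        'eventSource': 'aws:kinesis',
        'eventSourceARN': ('arn:aws:kinesis:us-east-1:'
                           '123456789012:stream/example_stream'),
        'kinesis': {'data': base64.b64encode(record_data)},
    }]
}
# stream_alert.run(event) would classify this record against the
# configured sources and, with return_alerts set, hand back any
# triggered alerts.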
Example #13
    def test_rule(self, rule_name, test_record, formatted_record):
        """Feed formatted records into StreamAlert and check for alerts
        Args:
            rule_name [str]: The rule name being tested
            test_record [dict]: A single record to test
            formatted_record [dict]: A dictionary that includes the 'data' from the
                test record, formatted into a structure that resembles how
                an incoming record from a service would be formatted.
                See test/integration/templates for examples of how each
                service formats records.

        Returns:
            [list] alerts that hit for this rule
            [integer] count of expected alerts for this rule
            [bool] boolean where False indicates errors occurred during processing
        """
        event = {'Records': [formatted_record]}

        expected_alert_count = test_record.get('trigger_count')
        if not expected_alert_count:
            expected_alert_count = 1 if test_record['trigger'] else 0

        # Run the rule processor. Passing mocked context object with fake
        # values and False for suppressing sending of alerts
        processor = StreamAlert(self.context, False)
        all_records_matched_schema = processor.run(event)

        if not all_records_matched_schema:
            payload = StreamPayload(raw_record=formatted_record)
            classifier = StreamClassifier(config=load_config())
            classifier.map_source(payload)
            logs = classifier._log_metadata()
            self.analyze_record_delta(logs, rule_name, test_record)

        alerts = processor.get_alerts()

        # we only want alerts for the specific rule being tested
        alerts = [alert for alert in alerts
                  if alert['rule_name'] == rule_name]

        return alerts, expected_alert_count, all_records_matched_schema
Example #14
    def __init__(self, context, enable_alert_processor=True):
        """Initializer

        Args:
            context (dict): An AWS context object which provides metadata on the currently
                executing lambda function.
            enable_alert_processor (bool): If the user wants to send the alerts using their
                own methods, 'enable_alert_processor' can be set to False to suppress
                sending with the StreamAlert alert processor.
        """
        # Load the config. Validation occurs during load, which will
        # raise exceptions on any ConfigErrors
        StreamAlert.config = StreamAlert.config or load_config()

        # Load the environment from the context arn
        self.env = load_env(context)

        # Instantiate the sink here to handle sending the triggered alerts to the
        # alert processor
        self.sinker = StreamSink(self.env)

        # Instantiate a classifier that is used for this run
        self.classifier = StreamClassifier(config=self.config)

        self.enable_alert_processor = enable_alert_processor
        self._failed_record_count = 0
        self._processed_size = 0
        self._alerts = []

        # Create a dictionary to hold parsed payloads by log type.
        # Firehose needs this information to send to its corresponding
        # delivery stream.
        self.categorized_payloads = defaultdict(list)

        # Firehose client initialization
        self.firehose_client = None

        # create an instance of the StreamRules class that gets cached in the
        # StreamAlert class as an instance property
        self._rule_engine = StreamRules(self.config)
Example #15
    def __init__(self, context):
        """Initializer

        Args:
            context (dict): An AWS context object which provides metadata on the currently
                executing lambda function.
        """
        # Load the config. Validation occurs during load, which will
        # raise exceptions on any ConfigErrors
        StreamAlert.config = StreamAlert.config or load_config()

        # Load the environment from the context arn
        self.env = load_env(context)

        # Instantiate the alert forwarder to handle sending the
        # triggered alerts to the alert processor
        self.alert_forwarder = AlertForwarder()

        # Instantiate a classifier that is used for this run
        self.classifier = StreamClassifier(config=self.config)

        self._failed_record_count = 0
        self._processed_record_count = 0
        self._processed_size = 0
        self._alerts = []

        rule_import_paths = [
            item for location in {'rule_locations', 'matcher_locations'}
            for item in self.config['global']['general'][location]
        ]

        # Create an instance of the StreamRules class that gets cached in the
        # StreamAlert class as an instance property
        self._rules_engine = RulesEngine(self.config, *rule_import_paths)

        # Firehose client attribute
        self._firehose_client = None
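The StreamAlert.config = StreamAlert.config or load_config() line in the two initializers above caches the parsed config as a class attribute, so repeated instantiations inside a warm Lambda container skip re-reading and re-validating the files. A minimal standalone illustration of the pattern (Loader is a stand-in, not project code):

class Loader(object):
    config = None  # shared across every instance of the class

    def __init__(self):
        # Only the first instantiation pays the load cost; later
        # ones reuse the class-level cache
        Loader.config = Loader.config or self._expensive_load()

    @staticmethod
    def _expensive_load():
        print('loading config...')
        return {'loaded': True}

Loader()  # prints 'loading config...'
Loader()  # silent: the cached config is reused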
Example #16
    def setup(self):
        """Setup before each method"""
        # load config
        self.config = load_config('test/unit/conf')
        # load JSON parser class
        self.parser_class = get_parser('gzip-json')
Example #17
    def setup(self):
        """Setup before each method"""
        config = load_config('tests/unit/conf')
        self.classifier = sa_classifier.StreamClassifier(config)
Example #18
    def setup_class(cls):
        """Setup the class before any methods"""
        # load config
        cls.config = load_config('tests/unit/conf')
        # load the parser class
        cls.parser_class = get_parser(cls._parser_type())
Example #19
def test_config_valid_types():
    """Config Validator - valid normalized types"""
    # Load a valid config
    config = load_config()

    _validate_config(config)
Example #20
    def setup_class(cls):
        """Setup the class before any methods"""
        # load config
        cls.config = load_config('test/unit/conf')
        # load the key-value parser class
        cls.parser_class = get_parser('kv')
Example #21
    def setup(self):
        """Setup before each method"""
        self.config = load_config('tests/unit/conf')
        self.config['global']['threat_intel']['enabled'] = True
        self.threat_intel = StreamThreatIntel.load_from_config(self.config)
Example #22
    def setup_class(cls):
        """Setup the class before any methods"""
        context = get_mock_context()
        cls.env = load_env(context)
        cls.config = load_config('tests/unit/conf')
Example #23
    def setup(self):
        """Setup before each method"""
        self.config = load_config('test/unit/conf')
Example #24
def test_load_config_invalid():
    """Config Validator - Load Config - Invalid"""
    mocker = mock_open(read_data='test string that will throw an error')
    with patch('__builtin__.open', mocker):
        load_config()
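A short demonstration of why the mocked read data makes load_config blow up, assuming it ultimately hands the opened file to json.load; mock_open and the '__builtin__' patch target match the Python 2 usage in the example:

import json
from mock import mock_open, patch

mocker = mock_open(read_data='test string that will throw an error')
with patch('__builtin__.open', mocker):
    with open('conf/logs.json') as conf:
        try:
            json.load(conf)  # the read data is not valid JSON
        except ValueError as err:
            print('load failed as expected: {}'.format(err))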