def test_run_threat_intel_enabled(self, mock_threat_intel, mock_query):  # pylint: disable=no-self-use
    """StreamAlert Class - Run SA when threat intel enabled"""
    @rule(datatypes=['sourceAddress'], outputs=['s3:sample_bucket'])
    def match_ipaddress(_):  # pylint: disable=unused-variable
        """Testing dummy rule"""
        return True

    # Mock out the threat intel backend and its DynamoDB query
    mock_threat_intel.return_value = StreamThreatIntel('test_table_name', 'us-east-1')
    mock_query.return_value = ([], [])

    sa_handler = StreamAlert(get_mock_context(), False)
    event = {
        'account': 123456,
        # NOTE(review): '123456123456' looks like an account id used as a
        # region value — confirm this is intentional test data
        'region': '123456123456',
        'source': '1.1.1.2',
        'detail': {
            'eventName': 'ConsoleLogin',
            'sourceIPAddress': '1.1.1.2',
            'recipientAccountId': '654321'
        }
    }

    # Build ten events with distinct source addresses. The original code
    # mutated and appended the *same* dict object each iteration, so every
    # serialized record ended up with source '1.1.1.9'; dict(event, ...)
    # creates a fresh copy per iteration with only 'source' overridden.
    events = [dict(event, source='1.1.1.{}'.format(i)) for i in range(10)]

    kinesis_events = {
        'Records': [make_kinesis_raw_record('test_kinesis_stream', json.dumps(evt))
                    for evt in events]
    }

    passed = sa_handler.run(kinesis_events)
    assert_true(passed)
    # The threat intel DynamoDB lookup should be batched into a single query
    assert_equal(mock_query.call_count, 1)
def test_rule(rule_name, test_record, formatted_record):
    """Feed formatted records into StreamAlert and check for alerts

    Args:
        rule_name (str): The rule name being tested
        test_record (dict): A single record to test
        formatted_record (dict): A properly formatted version of record for the
            service to be tested

    Returns:
        tuple: (list) alerts that triggered for this rule,
               (int) count of expected alerts for this rule
    """
    event = {'Records': [formatted_record]}

    # An explicit 'trigger_count' takes precedence; otherwise expect exactly
    # one alert when the record is marked as triggering, and zero when not.
    # (Replaces the opaque (0, 1)[flag] tuple-index idiom.)
    expected_alert_count = test_record.get('trigger_count')
    if not expected_alert_count:
        expected_alert_count = 1 if test_record['trigger'] else 0

    # Run the rule processor. Passing 'None' for context
    # will load a mocked object later
    alerts = StreamAlert(None, True).run(event)

    # we only want alerts for the specific rule being tested
    alerts = [alert for alert in alerts
              if alert['metadata']['rule_name'] == rule_name]

    return alerts, expected_alert_count
def __init__(self, context, config, print_output): """RuleProcessorTester initializer Args: print_output (bool): Whether this processor test should print results to stdout. This is set to false when the alert processor is explicitly being testing alone, and set to true for rule processor tests and end-to-end tests. Warnings and errors captrued during rule processor testing will still be written to stdout regardless of this setting. """ # Create the RuleProcessor. Passing a mocked context object with fake # values and False for suppressing sending of alerts to alert processor self.processor = StreamAlert(context) self.cli_config = config # Use a list of status_messages to store pass/fail/warning info self.status_messages = [] self.total_tests = 0 self.all_tests_passed = True self.print_output = print_output # Configure mocks for Firehose and DDB helpers.setup_mock_firehose_delivery_streams(config) helpers.setup_mock_dynamodb_ioc_table(config) # Create a cache map of parsers to parser classes self.parsers = {} # Patch the tmp shredding as to not slow down testing patch( 'stream_alert.rule_processor.payload.S3Payload._shred_temp_directory' ).start() # Patch random_bool to always return true patch('helpers.base.random_bool', return_value=True).start()
def test_rule(rule_name, test_record, formatted_record):
    """Feed formatted records into StreamAlert and check for alerts

    Args:
        rule_name: The rule name being tested
        test_record: A single record to test
        formatted_record: A properly formatted version of record for the
            service to be tested

    Returns:
        boolean indicating if this rule passed
    """
    event = {'Records': [formatted_record]}

    # An explicit trigger_count wins; a falsy/missing value falls back to
    # one expected alert when 'trigger' is set, otherwise zero
    expected_alert_count = (test_record.get('trigger_count')
                            or (0, 1)[test_record['trigger']])

    alerts = StreamAlert(return_alerts=True).run(event, None)

    # we only want alerts for the specific rule passed in
    matched_alert_count = sum(
        1 for alert in alerts if alert['rule_name'] == rule_name)

    report_output([test_record['service'], test_record['description']],
                  matched_alert_count != expected_alert_count)

    return matched_alert_count == expected_alert_count
def handler(event, context):
    """Main Lambda handler function

    Args:
        event: The Lambda invocation payload (records to process).
        context: The Lambda context object, passed through to StreamAlert.
    """
    try:
        StreamAlert(context).run(event)
    except Exception:
        # Log the full triggering event for post-mortem debugging, then
        # re-raise so Lambda still records the invocation as a failure
        LOGGER.error('Invocation event: %s', json.dumps(event))
        raise
def __init__(self, context, print_output): """RuleProcessorTester initializer Args: print_output (bool): Whether this processor test should print results to stdout. This is set to false when the alert processor is explicitly being testing alone, and set to true for rule processor tests and end-to-end tests. Warnings and errors captrued during rule processor testing will still be written to stdout regardless of this setting. """ # Create the RuleProcessor. Passing a mocked context object with fake # values and False for suppressing sending of alerts to alert processor self.processor = StreamAlert(context, False) # Use a list of status_messages to store pass/fail/warning info self.status_messages = [] self.total_tests = 0 self.all_tests_passed = True self.print_output = print_output
def test_rule(self, rule_name, test_record, formatted_record):
    """Feed formatted records into StreamAlert and check for alerts

    Args:
        rule_name [str]: The rule name being tested
        test_record [dict]: A single record to test
        formatted_record [dict]: A dictionary that includes the 'data' from the
            test record, formatted into a structure that is resemblant of how
            an incoming record from a service would format it.
            See test/integration/templates for example of how each service
            formats records.

    Returns:
        [list] alerts that hit for this rule
        [integer] count of expected alerts for this rule
        [bool] boolean where False indicates errors occurred during processing
    """
    event = {'Records': [formatted_record]}

    # An explicit trigger_count wins; otherwise a triggering record is
    # expected to produce exactly one alert, a non-triggering one zero
    expected_alert_count = (test_record.get('trigger_count')
                            or (1 if test_record['trigger'] else 0))

    # Run the rule processor. Passing mocked context object with fake
    # values and False for suppressing sending of alerts
    rule_proc = StreamAlert(self.context, False)
    all_records_matched_schema = rule_proc.run(event)

    if not all_records_matched_schema:
        # Classification failed: re-classify the raw record so the delta
        # between record and log schema can be reported
        raw_payload = StreamPayload(raw_record=formatted_record)
        source_classifier = StreamClassifier(config=load_config())
        source_classifier.map_source(raw_payload)
        log_metadata = source_classifier._log_metadata()
        self.analyze_record_delta(log_metadata, rule_name, test_record)

    # we only want alerts for the specific rule being tested
    matched_alerts = [alert for alert in rule_proc.get_alerts()
                      if alert['rule_name'] == rule_name]

    return matched_alerts, expected_alert_count, all_records_matched_schema
def test_rule(rule_name, test_record, formatted_record):
    """Feed formatted records into StreamAlert and check for alerts

    Args:
        rule_name: The rule name being tested
        test_record: A single record to test
        formatted_record: A properly formatted version of record for the
            service to be tested

    Returns:
        boolean indicating if this rule passed
    """
    event = {'Records': [formatted_record]}

    trigger_count = test_record.get('trigger_count')
    if trigger_count:
        expected_alert_count = trigger_count
    else:
        expected_alert_count = (0, 1)[test_record['trigger']]

    # Start mocked sns
    BOTO_MOCKER_SNS.start()
    try:
        # Create the topic used for the mocking of alert sending
        boto3.client('sns', region_name='us-east-1').create_topic(Name='test_streamalerts')

        # Run the rule processor. Passing 'None' for context will load a mocked object later
        alerts = StreamAlert(None, True).run(event)
    finally:
        # Always stop the mocked sns, even if the processor raises, so mock
        # state does not leak into subsequent tests. (The original skipped
        # stop() on any exception raised above.)
        BOTO_MOCKER_SNS.stop()

    # we only want alerts for the specific rule passed in
    matched_alert_count = len([x for x in alerts if x['metadata']['rule_name'] == rule_name])

    report_output([test_record['service'], test_record['description']],
                  matched_alert_count != expected_alert_count)

    return matched_alert_count == expected_alert_count
def handler(event, context):
    """Main Lambda handler function

    Builds a StreamAlert processor from the Lambda context and runs it
    against the incoming event.
    """
    stream_alert = StreamAlert(context)
    stream_alert.run(event)
def setup(self):
    """Setup before each method"""
    # Fresh StreamAlert handler per test: mocked Lambda context, and False
    # to suppress sending alerts to the alert processor
    self.__sa_handler = StreamAlert(get_mock_context(), False)
def test_do_not_invoke_threat_intel(self, load_intelligence_mock):
    """StreamAlert Class - Invoke load_intelligence"""
    # Constructing the handler is expected to invoke the patched
    # load_intelligence (presumably during __init__ — the assertion runs
    # immediately after construction; confirm against StreamAlert.__init__)
    self.__sa_handler = StreamAlert(get_mock_context(), False)
    load_intelligence_mock.assert_called()
def test_run_config_error():
    """StreamAlert Class - Run, Config Error"""
    # Patch the Python 2 builtin open() so config loading reads invalid JSON.
    # NOTE(review): no assert_raises here — the test only exercises the
    # construction path; confirm whether the error is expected to propagate.
    bad_config = mock_open(
        read_data='non-json string that will raise an exception')
    with patch('__builtin__.open', bad_config):
        StreamAlert(get_mock_context())