def test_process_types_config(self):
    """Threat Intel - Test process_types_config method"""
    types = {
        'log_src1': {
            'normalizedTypeFoo:ioc_foo': ['foo1', 'foo2'],
            'normalizedTypeBar:ioc_bar': ['bar1', 'bar2'],
            'normalizedTypePan': ['pan1']
        },
        'log_src2': {
            'normalizedTypePing:ioc_ping': ['ping1', 'ping2'],
            'normalizedTypePong:ioc_pong': ['pong1', 'pong2']
        }
    }

    # Processing should strip any ':ioc_type' suffix from the type keys
    StreamThreatIntel._process_types_config(types)

    expected_result = {
        'log_src1': {
            'normalizedTypeBar': ['bar1', 'bar2'],
            'normalizedTypeFoo': ['foo1', 'foo2'],
            'normalizedTypePan': ['pan1']
        },
        'log_src2': {
            'normalizedTypePing': ['ping1', 'ping2'],
            'normalizedTypePong': ['pong1', 'pong2']
        }
    }
    assert_equal(StreamThreatIntel.normalized_type_mapping(), expected_result)
def is_ioc(rec, lowercase_ioc=True):
    """Detect if any data in a record matches a known IOC.

    Args:
        rec (dict): The parsed payload of any log
        lowercase_ioc (bool): True when IOCs in the IOC files are lowercase,
            in which case record values are lowercased (uppercased otherwise)
            before comparison, making the IOC match case-insensitive.

    Returns:
        (bool): True if any data matches a known IOC, False otherwise.
    """
    intel = StreamThreatIntel.get_intelligence()
    datatypes_ioc_mapping = StreamThreatIntel.get_config()

    # Nothing to do without a mapping or without normalized data on the record
    if not (datatypes_ioc_mapping and rec.get(NORMALIZATION_KEY)):
        return False

    for datatype in rec[NORMALIZATION_KEY]:
        if datatype not in datatypes_ioc_mapping:
            continue
        ioc_type = datatypes_ioc_mapping[datatype]
        for value in fetch_values_by_datatype(rec, datatype):
            if isinstance(value, str):
                # Fold case to match how the IOC files store their values
                value = value.lower() if lowercase_ioc else value.upper()
            if intel.get(ioc_type) and value in intel[ioc_type]:
                insert_ioc_info(rec, ioc_type, value)

    return StreamThreatIntel.IOC_KEY in rec
def is_ioc(rec):
    """Detect if any data in a record matches a known IOC.

    Matched IOC info is recorded on the record under
    StreamThreatIntel.IOC_KEY as a list of {'type': ..., 'value': ...}
    dicts, one entry per match.

    Args:
        rec (dict): The parsed payload of any log.

    Returns:
        (bool): True if any data matches a known IOC, False otherwise.
    """
    intel = StreamThreatIntel.get_intelligence()
    datatypes_ioc_mapping = StreamThreatIntel.get_config()

    if not (datatypes_ioc_mapping and rec.get('normalized_types')):
        return False

    for datatype in rec['normalized_types']:
        if datatype not in datatypes_ioc_mapping:
            continue
        results = fetch_values_by_datatype(rec, datatype)
        for result in results:
            if (intel.get(datatypes_ioc_mapping[datatype])
                    and result in intel[datatypes_ioc_mapping[datatype]]):
                # BUG FIX: the first match used to store a bare dict under
                # IOC_KEY, so a second match would call .append() on a dict
                # and raise AttributeError. Always accumulate matches in a
                # list so multiple IOC hits on one record work correctly.
                rec.setdefault(StreamThreatIntel.IOC_KEY, []).append({
                    'type': datatypes_ioc_mapping[datatype],
                    'value': result
                })

    return StreamThreatIntel.IOC_KEY in rec
def test_from_config(self):
    """Threat Intel - Test load_config method"""
    # Enabled threat intel should produce a StreamThreatIntel instance
    enabled_config = {
        'global': {
            'account': {
                'region': 'us-east-1'
            },
            'threat_intel': {
                'dynamodb_table': 'test_table_name',
                'enabled': True
            }
        }
    }
    assert_true(isinstance(StreamThreatIntel.load_from_config(enabled_config),
                           StreamThreatIntel))

    # Disabled threat intel should produce a falsy result
    disabled_config = {
        'global': {
            'account': {
                'region': 'us-east-1'
            },
            'threat_intel': {
                'dynamodb_table': 'test_table_name',
                'enabled': False
            }
        }
    }
    assert_false(StreamThreatIntel.load_from_config(disabled_config))

    # A 'types' section should populate the normalized type mapping with
    # the ':ioc_type' suffixes stripped from the keys
    types_config = {
        'types': {
            'log_src1': {
                'normalizedTypeFoo:ioc_foo': ['foo1', 'foo2'],
                'normalizedTypeBar:ioc_bar': ['bar1', 'bar2']
            },
            'log_src2': {
                'normalizedTypePing:ioc_ping': ['ping1', 'ping2'],
                'normalizedTypePong:ioc_pong': ['pong1', 'pong2']
            }
        }
    }
    StreamThreatIntel.load_from_config(types_config)
    expected_result = {
        'log_src1': {
            'normalizedTypeBar': ['bar1', 'bar2'],
            'normalizedTypeFoo': ['foo1', 'foo2']
        },
        'log_src2': {
            'normalizedTypePing': ['ping1', 'ping2'],
            'normalizedTypePong': ['pong1', 'pong2']
        }
    }
    assert_equal(StreamThreatIntel.normalized_type_mapping(), expected_result)
def setup():
    """Setup before each method"""
    config = {
        'threat_intel': {
            'enabled': True,
            'mapping': {
                'sourceAddress': 'ip',
                'destinationDomain': 'domain',
                'fileHash': 'md5'
            }
        }
    }
    # Pre-load intelligence from the test fixture directory for every test
    StreamThreatIntel.load_intelligence(config, 'tests/unit/fixtures')
def test_no_config_loaded(self):
    """Threat Intel - No datatypes_ioc_mapping config loaded if it is disabled"""
    config = {
        'threat_intel': {
            'enabled': False,
            'mapping': {
                'sourceAddress': 'ip',
                'destinationDomain': 'domain',
                'fileHash': 'md5'
            }
        }
    }
    StreamThreatIntel.load_intelligence(config, 'tests/unit/fixtures')

    # With threat intel disabled, the datatype-to-IOC mapping stays empty
    assert_equal(len(StreamThreatIntel.get_config()), 0)
def test_do_not_load_intelligence(self):
    """Threat Intel - Do not load intelligence to memory when it is disabled"""
    config = {
        'threat_intel': {
            'enabled': False,
            'mapping': {
                'sourceAddress': 'ip',
                'destinationDomain': 'domain',
                'fileHash': 'md5'
            }
        }
    }
    StreamThreatIntel.load_intelligence(config, 'tests/unit/fixtures')

    # Peek at the name-mangled private class attribute to confirm it is empty
    intelligence = StreamThreatIntel._StreamThreatIntel__intelligence  # pylint: disable=no-member
    assert_equal(len(intelligence), 0)
def test_run_threat_intel_enabled(self, mock_threat_intel, mock_query):  # pylint: disable=no-self-use
    """StreamAlert Class - Run SA when threat intel enabled"""
    @rule(datatypes=['sourceAddress'], outputs=['s3:sample_bucket'])
    def match_ipaddress(_):  # pylint: disable=unused-variable
        """Testing dummy rule"""
        return True

    mock_threat_intel.return_value = StreamThreatIntel('test_table_name', 'us-east-1')
    mock_query.return_value = ([], [])

    sa_handler = StreamAlert(get_mock_context(), False)
    event = {
        'account': 123456,
        'region': '123456123456',
        'source': '1.1.1.2',
        'detail': {
            'eventName': 'ConsoleLogin',
            'sourceIPAddress': '1.1.1.2',
            'recipientAccountId': '654321'
        }
    }

    events = []
    for i in range(10):
        # BUG FIX: previously the same dict was mutated and appended 10
        # times, leaving 10 references to one object whose source was
        # '1.1.1.9'. Copy per iteration so each event keeps its own IP.
        event_copy = dict(event)
        event_copy['source'] = '1.1.1.{}'.format(i)
        events.append(event_copy)

    kinesis_events = {
        'Records': [make_kinesis_raw_record('test_kinesis_stream', json.dumps(evt))
                    for evt in events]
    }

    passed = sa_handler.run(kinesis_events)
    assert_true(passed)
    assert_equal(mock_query.call_count, 1)
def test_threat_detection(self, mock_client):
    """Threat Intel - Test threat_detection method"""
    mock_client.return_value = MockDynamoDBClient()
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    # Two of the mocked normalized records should be flagged as threats
    detected = threat_intel.threat_detection(mock_normalized_records())
    assert_equal(len(detected), 2)
def test_segment(self):
    """Threat Intel - Test _segment method to segment a list to sub-list"""
    # It should only return 1 sub-list when the list length is less than
    # MAX_QUERY_CNT (100). list(range(n)) replaces the redundant
    # [item for item in range(n)] comprehension copy.
    result = StreamThreatIntel._segment(list(range(55)))
    assert_equal(len(result), 1)
    assert_equal(len(result[0]), 55)

    # It should return multiple sub-lists, each capped at MAX_QUERY_CNT,
    # when the list length exceeds 100
    result = StreamThreatIntel._segment(list(range(345)))
    assert_equal(len(result), 4)
    for segment, expected_len in zip(result, [100, 100, 100, 45]):
        assert_equal(len(segment), expected_len)
def test_process_ioc_with_clienterror(self, mock_client):
    """Threat Intel - Test private method process_ioc"""
    # The ClientError raised by the mocked client must be handled inside
    # _process_ioc rather than propagating out of this call
    mock_client.return_value = MockDynamoDBClient(exception=True)
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    threat_intel._process_ioc([StreamIoc(value='1.1.1.2', ioc_type='ip')])
def test_deserialize(self):
    """Threat Intel - Test method to convert dynamodb types to python types"""
    dynamodb_data = [
        {'ioc_value': {'S': '1.1.1.2'}, 'sub_type': {'S': 'mal_ip'}},
        {'test_number': {'N': 10}, 'test_type': {'S': 'test_type'}}
    ]
    # The DynamoDB type descriptors ('S', 'N') should be unwrapped into
    # plain Python values
    expected = [
        {'ioc_value': '1.1.1.2', 'sub_type': 'mal_ip'},
        {'test_number': 10, 'test_type': 'test_type'}
    ]
    assert_equal(StreamThreatIntel._deserialize(dynamodb_data), expected)
def test_short_circuit_without_exclude_list(self, is_excluded_ioc):
    """Threat Intel - ensure we skip threat intel exclusion if there are no excluded_iocs"""
    self.config = load_config('tests/unit/conf')
    self.config['global']['threat_intel']['enabled'] = True
    # Remove the exclusion list entirely so the short-circuit path is exercised
    del self.config['global']['threat_intel']['excluded_iocs']
    self.threat_intel = StreamThreatIntel.load_from_config(self.config)

    raw_record = {
        'account': 12345,
        'region': '123456123456',
        'detail': {
            'eventType': 'AwsConsoleSignIn',
            'eventName': 'ConsoleLogin',
            'userIdentity': {
                'userName': '******',
                'type': 'Root',
                'principalId': '12345',
            },
            'sourceIPAddress': '8.8.8.8',
            'recipientAccountId': '12345'
        },
        'source': '8.8.8.8',
        'streamalert:normalization': {
            'sourceAddress': [['detail', 'sourceIPAddress'], ['source']],
            'usernNme': [['detail', 'userIdentity', 'userName']]
        },
        'id': '12345'
    }

    for record in mock_normalized_records([raw_record]):
        iocs = self.threat_intel._extract_ioc_from_record(record)
        # The duplicated 8.8.8.8 values collapse to a single extracted IOC
        assert_equal(len(iocs), 1)
        assert_equal(iocs[0].value, '8.8.8.8')

    # Without an excluded_iocs config, the exclusion check is never invoked
    assert not is_excluded_ioc.called
def test_load_from_config_with_cluster_env(self):
    """Threat Intel - Test load_from_config to read cluster"""
    config = load_config('tests/unit/conf')
    config['global']['threat_intel']['enabled'] = True

    threat_intel = StreamThreatIntel.load_from_config(config)

    assert_is_instance(threat_intel, StreamThreatIntel)
    # Membership test directly on the dict; the .keys() call was redundant
    assert_true('advanced' in config['clusters'])
def test_query_with_duplicated_value(self, mock_client):
    """Threat Intel - Test query value includes duplicated value"""
    mock_client.return_value = MockDynamoDBClient()
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    # 'EVIL.com' and 'evil.com' differ only by case; the query must not
    # raise when handed such duplicates
    threat_intel._query(['1.1.1.2', 'EVIL.com', 'evil.com', 'abcdef0123456789'])
def test_load_from_config_with_cluster_env(self):
    """Threat Intel - Test load_from_config to read cluster env variable"""
    with patch.dict('os.environ', {'CLUSTER': 'advanced'}):
        config = load_config('tests/unit/conf')
        config['global']['threat_intel']['enabled'] = True

        threat_intel = StreamThreatIntel.load_from_config(config)

        assert_is_instance(threat_intel, StreamThreatIntel)
        # Wrap in list() so the comparison holds on Python 3, where
        # dict.keys() returns a view that never compares equal to a list
        assert_equal(list(config['clusters']), ['advanced'])
def test_query_with_empty_value(self, mock_client):
    """Threat Intel - Test query value includes empty value"""
    mock_client.return_value = MockDynamoDBClient()
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    # The empty string should be dropped, leaving three values queried
    result, _ = threat_intel._query(['1.1.1.2', '', 'evil.com', 'abcdef0123456789'])
    assert_equal(len(result), 3)
def test_load_from_config_with_cluster_env_2(self):
    """Threat Intel - Test load_from_config with threat intel disabled in cluster"""
    with patch.dict('os.environ', {'CLUSTER': 'test'}):
        config = load_config('tests/unit/conf')
        config['global']['threat_intel']['enabled'] = True

        threat_intel = StreamThreatIntel.load_from_config(config)

        # The 'test' cluster disables threat intel, so no instance is returned
        assert_false(isinstance(threat_intel, StreamThreatIntel))
        # Wrap in list() so the comparison holds on Python 3, where
        # dict.keys() returns a view that never compares equal to a list
        assert_equal(list(config['clusters']), ['test'])
def test_read_compressed_files(self):
    """Threat Intel - Read compressed csv.gz files into a dictionary"""
    intelligence = StreamThreatIntel.read_compressed_files('tests/unit/fixtures')

    assert_is_instance(intelligence, dict)
    assert_list_equal(sorted(intelligence.keys()), sorted(['domain', 'md5', 'ip']))
    # Each fixture file contributes exactly 10 entries
    for ioc_type in ('domain', 'md5', 'ip'):
        assert_equal(len(intelligence[ioc_type]), 10)
def _parse(self, payload):
    """Parse a record into a declared type.

    Args:
        payload: A StreamAlert payload object

    Sets:
        payload.log_source: The detected log name from the data_sources config.
        payload.type: The record's type.
        payload.records: The parsed records as a list.

    Returns:
        bool: the success of the parse.
    """
    schema_matches = self._process_log_schemas(payload)
    if not schema_matches:
        return False

    if LOGGER_DEBUG_ENABLED:
        LOGGER.debug(
            'Schema Matched Records:\n%s',
            json.dumps([match.parsed_data for match in schema_matches], indent=2))

    schema_match = self._check_schema_match(schema_matches)

    if LOGGER_DEBUG_ENABLED:
        LOGGER.debug('Log name: %s', schema_match.log_name)
        LOGGER.debug('Parsed data:\n%s', json.dumps(schema_match.parsed_data, indent=2))

    # Convert data types per the schema. The root schema is used for the
    # parser due to updates caused by configuration settings such as
    # envelope_keys and optional_keys
    for parsed_data_value in schema_match.parsed_data:
        try:
            if not self._convert_type(parsed_data_value, schema_match.root_schema):
                return False
        except KeyError:
            LOGGER.error('The payload is mis-classified. Payload [%s]', parsed_data_value)
            return False

    normalized_types = StreamThreatIntel.normalized_type_mapping()

    payload.log_source = schema_match.log_name
    payload.type = schema_match.parser.type()
    payload.records = schema_match.parsed_data
    # Normalized types are keyed by the log source prefix (before any ':')
    payload.normalized_types = normalized_types.get(payload.log_source.split(':')[0])

    return True
def test_load_intelligence(self):
    """Threat Intel - Load intelligence to memory"""
    config = {
        'threat_intel': {
            'enabled': True,
            'mapping': {
                'sourceAddress': 'ip',
                'destinationDomain': 'domain',
                'fileHash': 'md5'
            }
        }
    }
    StreamThreatIntel.load_intelligence(config, 'tests/unit/fixtures')

    # Peek at the name-mangled private class attribute
    intelligence = StreamThreatIntel._StreamThreatIntel__intelligence  # pylint: disable=no-member
    assert_items_equal(intelligence.keys(), ['domain', 'md5', 'ip'])
    for ioc_type in ('domain', 'md5', 'ip'):
        assert_equal(len(intelligence[ioc_type]), 10)
def test_get_intelligence(self):
    """Threat Intel - get intelligence dictionary"""
    config = {
        'threat_intel': {
            'enabled': True,
            'mapping': {
                'sourceAddress': 'ip',
                'destinationDomain': 'domain',
                'fileHash': 'md5'
            }
        }
    }
    StreamThreatIntel.load_intelligence(config, 'tests/unit/fixtures')

    intelligence = StreamThreatIntel.get_intelligence()
    assert_items_equal(intelligence.keys(), ['domain', 'md5', 'ip'])
    for ioc_type in ('domain', 'md5', 'ip'):
        assert_equal(len(intelligence[ioc_type]), 10)
def __init__(self, config, *rule_paths):
    """Initialize a RulesEngine instance to cache a StreamThreatIntel instance."""
    self._threat_intel = StreamThreatIntel.load_from_config(config)
    self._required_outputs_set = resources.get_required_outputs()

    import_folders(*rule_paths)
    self._load_rule_table(config)

    # Lookup tables are optional; only download their S3 objects when the
    # config actually declares any
    tables = LookupTables.load_lookup_tables(config)
    if tables:
        RulesEngine._LOOKUP_TABLES = tables.download_s3_objects()
def test_get_config(self):
    """Threat Intel - get datatype to IOC type mapping"""
    config = {
        'threat_intel': {
            'enabled': True,
            'mapping': {
                'sourceAddress': 'ip',
                'destinationDomain': 'domain',
                'fileHash': 'md5'
            }
        }
    }
    StreamThreatIntel.load_intelligence(config, 'tests/unit/fixtures')

    mapping = StreamThreatIntel.get_config()
    assert_items_equal(mapping.keys(), ['sourceAddress', 'destinationDomain', 'fileHash'])
    assert_equal(mapping['sourceAddress'], 'ip')
    assert_equal(mapping['destinationDomain'], 'domain')
    assert_equal(mapping['fileHash'], 'md5')
def test_query(self, mock_client):
    """Threat Intel - Test DynamoDB query method with batch_get_item"""
    mock_client.return_value = MockDynamoDBClient()
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    result, unprocessed_keys = threat_intel._query(
        ['1.1.1.2', '2.2.2.2', 'evil.com', 'abcdef0123456789'])

    # Only the two known-bad values come back, and nothing is left unprocessed
    assert_false(unprocessed_keys)
    assert_equal(result, [
        {'ioc_value': '1.1.1.2', 'sub_type': 'mal_ip'},
        {'ioc_value': 'evil.com', 'sub_type': 'c2_domain'}
    ])
def test_process_ioc_with_clienterror(self, log_mock, mock_client):
    """Threat Intel - Test private method process_ioc with Error"""
    mock_client.return_value = MockDynamoDBClient(exception=True)
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    threat_intel._process_ioc([StreamIoc(value='1.1.1.2', ioc_type='ip')])

    # The ClientError is caught and logged rather than raised; the message
    # below must match the production log call byte-for-byte
    expected_error = {'Error': {'Code': 400, 'Message': 'raising test exception'}}
    log_mock.assert_called_with(
        'An error occurred while quering dynamodb table. Error is: %s',
        expected_error)
def test_process_ioc(self, mock_client):
    """Threat Intel - Test private method process_ioc"""
    mock_client.return_value = MockDynamoDBClient()
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    iocs = [
        StreamIoc(value='1.1.1.2', ioc_type='ip'),
        StreamIoc(value='2.2.2.2', ioc_type='ip'),
        StreamIoc(value='evil.com', ioc_type='domain')
    ]
    threat_intel._process_ioc(iocs)

    # Only values present in the mocked DynamoDB table get flagged
    assert_true(iocs[0].is_ioc)
    assert_false(iocs[1].is_ioc)
    assert_true(iocs[2].is_ioc)
def test_process_ioc_with_unprocessed_keys(self, mock_client):
    """Threat Intel - Test private method process_ioc when response has UnprocessedKeys"""
    # NOTE: 'unprocesed_keys' matches the mock's (misspelled) keyword argument
    mock_client.return_value = MockDynamoDBClient(unprocesed_keys=True)
    threat_intel = StreamThreatIntel.load_from_config(self.config)

    iocs = [
        StreamIoc(value='1.1.1.2', ioc_type='ip'),
        StreamIoc(value='foo', ioc_type='domain'),
        StreamIoc(value='bar', ioc_type='domain')
    ]
    threat_intel._process_ioc(iocs)

    # Keys left unprocessed by DynamoDB must not be flagged as IOCs
    assert_true(iocs[0].is_ioc)
    assert_false(iocs[1].is_ioc)
    assert_false(iocs[2].is_ioc)
def __init__(self, context, enable_alert_processor=True): """Initializer Args: context (dict): An AWS context object which provides metadata on the currently executing lambda function. enable_alert_processor (bool): If the user wants to send the alerts using their own methods, 'enable_alert_processor' can be set to False to suppress sending with the StreamAlert alert processor. """ # Load the config. Validation occurs during load, which will # raise exceptions on any ConfigErrors StreamAlert.config = StreamAlert.config or load_config() # Load the environment from the context arn self.env = load_env(context) # Instantiate the sink here to handle sending the triggered alerts to the # alert processor self.sinker = StreamSink(self.env) # Instantiate a classifier that is used for this run self.classifier = StreamClassifier(config=self.config) self.enable_alert_processor = enable_alert_processor self._failed_record_count = 0 self._processed_size = 0 self._alerts = [] # Create a dictionary to hold parsed payloads by log type. # Firehose needs this information to send to its corresponding # delivery stream. self.categorized_payloads = defaultdict(list) # Firehose client initialization self.firehose_client = None StreamThreatIntel.load_intelligence(self.config)
def test_threat_detection_with_empty_ioc_value(self, mock_client):
    """Threat Intel - Test threat_detection with record contains empty/duplicated value"""
    # Three normalized records: one whose sourceIPAddress is empty (None),
    # and two whose domains differ only by case ('evil.com' vs 'EVIL.com')
    records = [
        {
            'account': 12345,
            'region': '123456123456',
            'detail': {
                'eventName': 'ConsoleLogin',
                'userIdentity': {
                    'userName': '******',
                    'accountId': '12345'
                },
                'sourceIPAddress': None,
                'recipientAccountId': '12345'
            },
            'source': '1.1.1.2',
            'streamalert:normalization': {
                'sourceAddress': [['detail', 'sourceIPAddress'], ['source']],
                'usernNme': [['detail', 'userIdentity', 'userName']]
            }
        },
        {
            'domain': 'evil.com',
            'pc_name': 'test-pc',
            'date': 'Dec 1st, 2016',
            'data': 'ABCDEF',
            'streamalert:normalization': {
                'destinationDomain': [['domain']]
            }
        },
        {
            'domain': 'EVIL.com',
            'pc_name': 'test-pc',
            'date': 'Dec 1st, 2016',
            'data': 'ABCDEF',
            'streamalert:normalization': {
                'destinationDomain': [['domain']]
            }
        },
    ]
    mock_client.return_value = MockDynamoDBClient()
    threat_intel = StreamThreatIntel.load_from_config(self.config)
    records = mock_normalized_records(records)
    # All three records should still be flagged despite the empty and
    # case-duplicated IOC values
    assert_equal(len(threat_intel.threat_detection(records)), 3)