def _test_cases(cls):
    """Yield test params: input `data` plus the `expected` nested dict
    (or the expected exception type)."""
    # Non-iterable input -> a TypeError is expected.
    yield param(data=None, expected=TypeError)
    # Typical input: dotted `section.option=value` strings grouped
    # into a dict of per-section option dicts.
    yield param(
        data=[
            'section1.option1=value1',
            'section1.option2=value2',
            'section2.option1=value3',
        ],
        expected={
            'section1': {'option1': 'value1', 'option2': 'value2'},
            'section2': {'option1': 'value3'},
        })
    # Empty input -> empty result.
    yield param(data=[], expected={})
class Test_monkeypached_Formatter_formats_utc_time(unittest.TestCase):
    # NOTE(review): "monkeypached" looks like a typo for "monkeypatched";
    # kept as-is because renaming would change the test id.

    # note: Formatter is monkey-patched when n6lib is being imported
    # [see: the n6lib.log_helpers.early_Formatter_class_monkeypatching()
    # function and its call in n6lib/__init__.py]

    # TODO: cover also possibilities of:
    # * customized `Formatter.default_time_format`
    # * customized `Formatter.default_msec_format`

    @foreach(
        param(
            expected='2015-05-14 20:36:58,123 UTC',
        ).label('default'),
        param(
            expected='15-05-14T20:36:58 UTC',
            custom_datefmt='%y-%m-%dT%H:%M:%S',
        ).label('with custom datefmt'),
        param(
            expected='2015-05-14 20:36:58,123 <UNCERTAIN TIMEZONE>',
            custom_converter=(lambda t: time.gmtime(t)),
        ).label('with custom converter'),
        param(
            expected='15-05-14T20:36:58 <UNCERTAIN TIMEZONE>',
            custom_datefmt='%y-%m-%dT%H:%M:%S',
            custom_converter=(lambda t: time.gmtime(t)),
        ).label('with custom datefmt and custom converter'),
    )
    def test(self, expected, custom_datefmt=None, custom_converter=None):
        # `created`/`msecs` below correspond to 2015-05-14 20:36:58.123 UTC.
        record = MagicMock()
        record.created = 1431635818
        record.msecs = 123
        formatter = logging.Formatter(datefmt=custom_datefmt)
        if custom_converter is not None:
            # Overriding the converter makes the (monkey-patched) Formatter
            # unable to guarantee UTC -- hence "<UNCERTAIN TIMEZONE>".
            formatter.converter = custom_converter
        formatted = formatter.formatTime(record, custom_datefmt)
        self.assertEqual(expected, formatted)
def initial_state_and_orig_data_variants():
    # Variants of (saved collector state, freshly downloaded raw CSV data)
    # used to exercise the collector's row deduplication.
    yield param(
        # State remembering the newest already-published rows: rows with
        # times <= 'newest_row_time' that are in 'newest_rows' should be
        # skipped on the next run.
        initial_state={
            'newest_row_time': '2019-08-20 01:00:00',
            'newest_rows': {
                '"2019-08-20 01:00:00","http://www.example2.com","XX2",'
                '"222b2222b222bb2b22b22222bb222222",'
                '"2b2b22bb22b2b22bb22b2222222bbbbbbbbbbbbbb222222b2b2b2b2b2b2b2b22",'
                '"ExampleNick2"',
                '"2019-08-20 01:00:00","http://www.example1.com","XX1",'
                '"111a1111a111aa1a11a11111aa111111",'
                '"1a1a11aa11a1a11aa11a1111111aaaaaaaaaaaaaa111111a1a1a1a1a1a1a1a11",'
                '"ExampleNick1"'
            },
        },
        # Raw data: a comment line (to be ignored) + rows in
        # newest-first order, some older than the saved state.
        orig_data=(
            '# row which should be ignored by collector\n'
            '"2019-08-20 03:00:00","http://www.example5.com","XX5",'
            '"555e5555e555ee5e55e55555ee555555",'
            '"5e5e55ee55e5e55ee55e5555555eeeeeeeeeeeeee555555e5e5e5e5e5e5e5e55",'
            '"ExampleNick5"\n'
            '"2019-08-20 03:00:00","http://www.example4.com","XX4",'
            '"444d4444d444dd4d44d44444dd444444",'
            '"4d4d44dd44d4d44dd44d4444444dddddddddddddd444444d4d4d4d4d4d4d4d44",'
            '"ExampleNick4"\n'
            '"2019-08-20 02:00:00","http://www.example3.com","XX3",'
            '"333c3333c333cc3c33c33333cc333333",'
            '"3c3c33cc33c3c33cc33c3333333cccccccccccccc333333c3c3c3c3c3c3c3c33",'
            '"ExampleNick3"\n'
            '"2019-08-20 01:00:00","http://www.example2.com","XX2",'
            '"222b2222b222bb2b22b22222bb222222",'
            '"2b2b22bb22b2b22bb22b2222222bbbbbbbbbbbbbb222222b2b2b2b2b2b2b2b22",'
            '"ExampleNick2"\n'
            '"2019-08-20 01:00:00","http://www.example1.com","XX1",'
            '"111a1111a111aa1a11a11111aa111111",'
            '"1a1a11aa11a1a11aa11a1111111aaaaaaaaaaaaaa111111a1a1a1a1a1a1a1a11",'
            '"ExampleNick1"\n'
            '"2019-08-20 00:00:00","http://www.example0.com","XX0",'
            '"000a0000a000aa0a00a00000aa000000",'
            '"0a0a00aa00a0a00aa00a0000000aaaaaaaaaaaaaa000000a0a0a0a0a0a0a0a00",'
            '"ExampleNick0"'),
    )
    yield param(
        # No previously saved state -- all (non-comment) rows are new.
        initial_state=sentinel.NO_STATE,
        orig_data=(
            '# row which should be ignored by collector\n'
            '"2019-08-20 03:00:00","http://www.example5.com","XX5",'
            '"555e5555e555ee5e55e55555ee555555",'
            '"5e5e55ee55e5e55ee55e5555555eeeeeeeeeeeeee555555e5e5e5e5e5e5e5e55",'
            '"ExampleNick5"\n'
            '"2019-08-20 03:00:00","http://www.example4.com","XX4",'
            '"444d4444d444dd4d44d44444dd444444",'
            '"4d4d44dd44d4d44dd44d4444444dddddddddddddd444444d4d4d4d4d4d4d4d44",'
            '"ExampleNick4"\n'
            '"2019-08-20 02:00:00","http://www.example3.com","XX3",'
            '"333c3333c333cc3c33c33333cc333333",'
            '"3c3c33cc33c3c33cc33c3333333cccccccccccccc333333c3c3c3c3c3c3c3c33",'
            '"ExampleNick3"\n'),
    )
def initial_state_and_orig_data_variants():
    """Yield (initial_state, orig_data) variants for the feodotracker
    collector's row-deduplication tests."""
    comment_line = '# row which should be ignored by collector\n'
    row_5 = '2019-08-20 03:00:00,5.5.5.5,447,2019-08-20,ExampleName5\n'
    row_4 = '2019-08-20 03:00:00,4.4.4.4,447,2019-08-20,ExampleName4\n'
    row_3 = '2019-08-20 02:00:00,3.3.3.3,447,2019-08-20,ExampleName3\n'
    row_2 = '2019-08-20 01:00:00,2.2.2.2,447,2019-08-20,ExampleName2\n'
    row_1 = '2019-08-20 01:00:00,1.1.1.1,447,2019-08-20,ExampleName1\n'
    row_0 = '2019-08-20 00:00:00,0.0.0.0,447,2019-08-20,ExampleName0'
    # Variant 1: saved state present -- newest already-published rows
    # are remembered, so they should be skipped this time.
    yield param(
        initial_state={
            'newest_row_time': '2019-08-20 01:00:00',
            'newest_rows': {
                row_1.rstrip('\n'),
                row_2.rstrip('\n'),
            },
        },
        orig_data=(comment_line
                   + row_5 + row_4 + row_3 + row_2 + row_1 + row_0),
    )
    # Variant 2: no saved state -- every non-comment row is new.
    yield param(
        initial_state=sentinel.NO_STATE,
        orig_data=(comment_line + row_5 + row_4 + row_3),
    )
def _asn_cc_params():
    """Yield labelled params covering asn-only, cc-only and combined
    asn+cc query parameter dicts."""
    labelled_dicts = [
        ('asn', {'asn': [3213, 1234, 3333]}),
        ('cc', {'cc': ['DE', 'UK']}),
        ('asn_cc', {'asn': [3213, 1234, 3333],
                    'cc': ['PL', 'EN']}),
    ]
    for label_text, params_dict in labelled_dicts:
        yield param(params_dict).label(label_text)
def _get_possible_vals():
    """Yield params pairing each configured fqdn/url value with the full
    list it was drawn from (config read under `standard_config_patch`)."""
    with standard_config_patch:
        config = RandomEvent().config
        fqdn_choices = config.get('possible_domains')
        url_choices = config.get('possible_url')
        for fqdn in fqdn_choices:
            yield param(val=fqdn, possible_vals=fqdn_choices).label('fqdn')
        for url in url_choices:
            yield param(val=url, possible_vals=url_choices).label('url')
class Test_EventsQueryProcessor___get_key_to_query_func(unittest.TestCase):
    """
    Checks that `_EventsQueryProcessor._get_key_to_query_func()` maps every
    supported query key to the proper query-building method of the queried
    model class -- for both data-spec classes.
    """

    @foreach(
        param(data_spec_class=N6DataSpec),
        param(data_spec_class=N6InsideDataSpec),
    )
    def test(self, data_spec_class):
        cls = _EventsQueryProcessor
        data_spec = data_spec_class()
        _get_key_to_query_func = cls._get_key_to_query_func.func   # getting it without memoization
        with patch.object(cls, 'queried_model_class') as qmc_mock:
            key_to_query_func = _get_key_to_query_func(cls, data_spec)
        key_query = qmc_mock.key_query
        # Keys not listed with a dedicated query method fall back
        # to the generic `key_query`.
        self.assertEqual(
            key_to_query_func, {
                'active.max': qmc_mock.active_bl_query,
                'active.min': qmc_mock.active_bl_query,
                'active.until': qmc_mock.active_bl_query,
                'asn': key_query,
                'category': key_query,
                'cc': key_query,
                'confidence': key_query,
                'dip': key_query,
                'dport': key_query,
                'fqdn': key_query,
                'fqdn.sub': qmc_mock.like_query,
                'id': key_query,
                'ip': key_query,
                'ip.net': qmc_mock.ip_net_query,
                'md5': key_query,
                'modified.max': qmc_mock.modified_query,
                'modified.min': qmc_mock.modified_query,
                'modified.until': qmc_mock.modified_query,
                'name': key_query,
                'origin': key_query,
                'proto': key_query,
                'replaces': key_query,
                'restriction': key_query,
                'rid': key_query,
                'sha1': key_query,
                'sha256': key_query,
                'source': key_query,
                'sport': key_query,
                'status': key_query,
                'target': key_query,
                'url': key_query,
                'url.sub': qmc_mock.like_query,
                'url.b64': qmc_mock.url_b64_experimental_query,
            })
def ok_cases(cls):
    # Multiply each basic case by every combination of request-parameter
    # "presentation" variants (Python-2 idioms -- `iteritems()`,
    # `unicode`/`str` -- are used here).  NOTE: the order of the three
    # transformations matters (whitespace is added *before* encoding).
    for str_values in (True, False):
        for whitespace_surrounded_values in (True, False):
            for unpacked_single_values in (True, False):
                for request_params, expected_db_obj in cls.basic_cases():
                    if whitespace_surrounded_values:
                        # surround each value with some whitespace
                        # (expected to be stripped by the tested code)
                        request_params = {
                            key: [u' \t {} \n '.format(v) for v in val]
                            for key, val in request_params.iteritems()}
                    if str_values:
                        # UTF-8-encoded `str` values instead of `unicode`
                        request_params = {
                            key: [v.encode('utf-8') for v in val]
                            for key, val in request_params.iteritems()}
                    if unpacked_single_values:
                        # a single value given bare, not wrapped in a list
                        request_params = {
                            key: (val[0] if len(val) == 1 else val)
                            for key, val in request_params.iteritems()}
                    # label encodes which variants are active, e.g. 's-w/3'
                    yield param(
                        request_params=request_params,
                        expected_db_obj=expected_db_obj,
                    ).label('ok:{}{}{}/{}'.format(
                        's' if str_values else '-',
                        'u' if unpacked_single_values else '-',
                        'w' if whitespace_surrounded_values else '-',
                        len(request_params)))
def get_password_hash_or_none_cases(cls):
    """Yield (passphrase, expect_verifies_ok) params; `None` means no
    hash is expected to be produced at all."""
    # Non-empty passphrases: a verifiable hash should be produced.
    yield param(passphrase='password', expect_verifies_ok=True)
    yield param(passphrase='wXzxsa23}pX', expect_verifies_ok=True)
    # Missing or empty passphrase: no hash.
    yield param(passphrase=None, expect_verifies_ok=None)
    yield param(passphrase='', expect_verifies_ok=None)
def initial_state_and_orig_data_variants():
    # Variants of (saved collector state, downloaded raw CSV data) for the
    # ssl-blacklist collector's deduplication tests -- including the
    # legacy single-timestamp form of state.
    yield param(
        # Current form of state: newest row time + the exact newest rows.
        initial_state={
            'newest_row_time': '2019-08-20 01:00:00',
            'newest_rows': {
                '2019-08-20 01:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName1',
                '2019-08-20 01:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName2'
            },
        },
        orig_data=(
            '# row which should be ignored by collector\n'
            '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName5\n'
            '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName4\n'
            '2019-08-20 02:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName3\n'
            '2019-08-20 01:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName2\n'
            '2019-08-20 01:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName1\n'
            '2019-08-20 00:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName0'
        ),
    )
    yield param(
        # No previously saved state -- all non-comment rows are new.
        initial_state=sentinel.NO_STATE,
        orig_data=(
            '# row which should be ignored by collector\n'
            '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName5\n'
            '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName4\n'
            '2019-08-20 02:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName3\n'
        ),
    )
    yield param(
        initial_state={
            # legacy form of state
            'time': '2019-08-20 02:00:00',
        },
        orig_data=(
            '# row which should be ignored by collector\n'
            '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName5\n'
            '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName4\n'
            '2019-08-20 02:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName3\n'
            '2019-08-20 01:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName2\n'
            '2019-08-20 01:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName1\n'
            '2019-08-20 00:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName0'
        ),
    )
class TestBaseUrlDownloaderCollector___try_to_set_http_last_modified(
        unittest.TestCase):
    """
    Tests of `BaseUrlDownloaderCollector._try_to_set_http_last_modified()`:
    parsing of the HTTP `Last-Modified` response header in its several
    permitted date formats.
    """

    def setUp(self):
        # Bypass __init__ (which needs full configuration); set only the
        # attribute the tested method reads/writes.
        self.instance = object.__new__(BaseUrlDownloaderCollector)
        self.instance._http_last_modified = None   # as in BaseUrlDownloaderCollector.__init__()

    # related to the fixed bug #6673
    @foreach(
        param(
            headers={
                'Last-Modified': 'Sun, 06 Nov 2019 08:49:37 GMT'
            },
            expected__http_last_modified=datetime.datetime(
                2019, 11, 6, 8, 49, 37),
        ).label('preferred RFC-7231 format of header Last-Modified'),
        param(
            headers={
                'Last-Modified': 'Sunday, 06-Nov-19 08:49:37 GMT'
            },
            expected__http_last_modified=datetime.datetime(
                2019, 11, 6, 8, 49, 37),
        ).label('old RFC-850 format of header Last-Modified'),
        param(
            headers={
                'Last-Modified': 'Sun Nov  6 08:49:37 2019'
            },
            expected__http_last_modified=datetime.datetime(
                2019, 11, 6, 8, 49, 37),
        ).label('old ANSI C asctime() format of header Last-Modified'),
        param(
            # unparseable value -> attribute stays None (no exception)
            headers={
                'Last-Modified': 'foo bar'
            },
            expected__http_last_modified=None,
        ).label('unsupported format of header Last-Modified'),
        param(
            headers={},
            expected__http_last_modified=None,
        ).label('no header Last-Modified'),
    )
    def test(self, headers, expected__http_last_modified):
        self.instance._try_to_set_http_last_modified(headers)
        self.assertEqual(self.instance._http_last_modified,
                         expected__http_last_modified)
def cases():
    # One full urlhaus-payloads-urls collector run: given the config and
    # the downloaded data (patched elsewhere), we expect one AMQP publish
    # of the rows newer than the saved state, plus an updated saved state.
    yield param(
        config_content='''
[abusech_urlhaus_payloads_urls]
source=abuse-ch
cache_dir=~/.n6cache
url=https://www.example.com
download_retries=5
''',
        expected_publish_output_calls=[
            call(
                # routing_key
                'abuse-ch.urlhaus-payloads-urls',
                # body
                ('"2019-08-20 03:00:00","http://www.example5.com","XX5",'
                 '"555e5555e555ee5e55e55555ee555555",'
                 '"5e5e55ee55e5e55ee55e5555555eeeeeeeeeeeeee555555e5e5e5e5e5e5e5e55",'
                 '"ExampleNick5"\n'
                 '"2019-08-20 03:00:00","http://www.example4.com","XX4",'
                 '"444d4444d444dd4d44d44444dd444444",'
                 '"4d4d44dd44d4d44dd44d4444444dddddddddddddd444444d4d4d4d4d4d4d4d44",'
                 '"ExampleNick4"\n'
                 '"2019-08-20 02:00:00","http://www.example3.com","XX3",'
                 '"333c3333c333cc3c33c33333cc333333",'
                 '"3c3c33cc33c3c33cc33c3333333cccccccccccccc333333c3c3c3c3c3c3c3c33",'
                 '"ExampleNick3"'),
                # prop_kwargs
                {
                    'timestamp': ANY,
                    'message_id': ANY,
                    'type': 'file',
                    'content_type': 'text/csv',
                    'headers': {}
                },
            ),
        ],
        # State to be saved: newest row time and the rows bearing it
        # (used for deduplication on the next run).
        expected_saved_state={
            'newest_row_time': '2019-08-20 03:00:00',
            'newest_rows': {
                '"2019-08-20 03:00:00","http://www.example5.com","XX5",'
                '"555e5555e555ee5e55e55555ee555555",'
                '"5e5e55ee55e5e55ee55e5555555eeeeeeeeeeeeee555555e5e5e5e5e5e5e5e55",'
                '"ExampleNick5"',
                '"2019-08-20 03:00:00","http://www.example4.com","XX4",'
                '"444d4444d444dd4d44d44444dd444444",'
                '"4d4d44dd44d4d44dd44d4444444dddddddddddddd444444d4d4d4d4d4d4d4d44",'
                '"ExampleNick4"'
            },
        },
    )
class TestBadIpsCollector(unittest.TestCase):
    """Tests of `BadipsServerExploitCollector._add_fields_name()`:
    formatting of downloaded IP lists into `<ip>;<leaf> <root> attack`
    lines."""

    @foreach([
        param(
            ips_string='12.12.12.12\n',
            category_root='sql',
            category_leaf='sql-injection',
            result='12.12.12.12;sql-injection sql attack'),
        param(
            ips_string='123.123.123.123\n321.321.321.321\n99.88.77.66\n',
            category_root='ssh',
            category_leaf='test',
            result=('123.123.123.123;test ssh attack\n'
                    '321.321.321.321;test ssh attack\n'
                    '99.88.77.66;test ssh attack')),
    ])
    def test_badips_collector_ip_list_formatting(self, ips_string,
                                                 category_root,
                                                 category_leaf, result):
        actual = BadipsServerExploitCollector._add_fields_name(
            ips=ips_string,
            category_root=category_root,
            category_leaf=category_leaf)
        self.assertEqual(result, actual)
def cases():
    # One full ransomware-tracker collector run: config + expected single
    # AMQP publish (rows newer than the saved state) + expected new state.
    yield param(
        config_content='''
[abusech_ransomware]
source=abuse-ch
cache_dir=~/.n6cache
url=https://www.example.com
download_retries=5
''',
        expected_publish_output_calls=[
            call(
                # routing_key
                'abuse-ch.ransomware',
                # body
                ('"2018-08-09 03:00:00", "ZZ5", "XX5", "4.4.4.4", '
                 '"http://www.example_5.com", "offline", "", '
                 '"4.4.4.4", "55555", "YY5"\n'
                 '"2018-08-09 03:00:00", "ZZ4", "XX4", "3.3.3.3", '
                 '"http://www.example_4.com", "offline", "", '
                 '"3.3.3.3", "44444", "YY4"\n'
                 '"2018-08-09 02:00:00", "ZZ3", "XX3", "2.2.2.2", '
                 '"http://www.example_3.com", "offline", "", '
                 '"2.2.2.2", "33333", "YY3"'),
                # prop_kwargs
                {
                    'timestamp': ANY,
                    'message_id': ANY,
                    'type': 'file',
                    'content_type': 'text/csv',
                    'headers': {}
                },
            ),
        ],
        # State to be saved: the newest row time + all rows bearing it.
        expected_saved_state={
            'newest_row_time': '2018-08-09 03:00:00',
            'newest_rows': {
                '"2018-08-09 03:00:00", "ZZ4", "XX4", "3.3.3.3", '
                '"http://www.example_4.com", "offline", "", '
                '"3.3.3.3", "44444", "YY4"',
                '"2018-08-09 03:00:00", "ZZ5", "XX5", "4.4.4.4", '
                '"http://www.example_5.com", "offline", "", '
                '"4.4.4.4", "55555", "YY5"'
            },
        },
    )
def _publishing_test_cases():
    # Cases combining the saved publishing state (datetimes of last
    # published events/samples + ids of samples already published) with
    # the events and samples expected to be published on the next run.
    newer_event = [json.loads(event_14_02)]
    all_events = [json.loads(event_10_02), json.loads(event_14_02)]
    yield param(
        state=None,
        expected_events=all_events,
        expected_samples=all_samples,
    ).label('All events and samples, no previous state.')
    yield param(
        state={'events_publishing_datetime': _str_to_datetime('2017-02-09 12:00:00'),
               'samples_publishing_datetime': _str_to_datetime('2017-02-09 12:00:00'),
               'last_published_samples': []},
        expected_events=all_events,
        expected_samples=all_samples,
    ).label('All events and samples with a state.')
    yield param(
        state={'events_publishing_datetime': _str_to_datetime('2017-02-12 12:00:00'),
               'samples_publishing_datetime': _str_to_datetime('2017-02-09 12:00:00'),
               'last_published_samples': []},
        expected_events=newer_event,
        expected_samples=all_samples,
    ).label('Events after 2017-02-12 and overdue samples.')
    yield param(
        state={'events_publishing_datetime': _str_to_datetime('2017-02-15 12:00:00'),
               'samples_publishing_datetime': _str_to_datetime('2017-02-09 12:00:00'),
               'last_published_samples': []},
        expected_events=None,
        expected_samples=all_samples,
    ).label('No new events, overdue samples after '
            '2017-02-09.')
    yield param(
        state={'events_publishing_datetime': _str_to_datetime('2017-02-15 12:00:00'),
               'samples_publishing_datetime': _str_to_datetime('2017-02-12 12:00:00'),
               'last_published_samples': []},
        expected_events=None,
        expected_samples=samples_14_02,
    ).label('No new events, overdue samples after '
            '2017-02-12.')
    # Already-published sample ids are excluded from the expectation.
    yield param(
        state={'events_publishing_datetime': _str_to_datetime('2017-02-15 12:00:00'),
               'samples_publishing_datetime': _str_to_datetime('2017-02-11 12:00:00'),
               'last_published_samples': [144646, 144653]},
        expected_events=None,
        expected_samples={x: samples_14_02[x]
                          for x in samples_14_02_ids - {144646, 144653}},
    ).label(
        'No new events, overdue samples after 2017-02-11, partially downloaded.')
    yield param(
        state={'events_publishing_datetime': _str_to_datetime('2017-02-11 12:00:00'),
               'samples_publishing_datetime': _str_to_datetime('2017-02-09 12:00:00'),
               'last_published_samples': [144656, 144659]},
        expected_events=newer_event,
        expected_samples={x: all_samples[x]
                          for x in all_samples_ids - {144656, 144659}},
    ).label(
        'Events after 2017-02-11, overdue samples after 2017-02-09, partially downloaded.')
def cases():
    """Yield one full ssl-blacklist collector-run scenario: config,
    the expected single AMQP publish and the state to be saved."""
    row_5 = '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName5'
    row_4 = '2019-08-20 03:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName4'
    row_3 = '2019-08-20 02:00:00,f0a0k0e0d0s0h0a010000000000a0a0a00000000,ExampleName3'
    yield param(
        config_content='''
[abusech_ssl_blacklist]
source=abuse-ch
cache_dir=~/.n6cache
url=https://www.example.com
download_retries=5
''',
        expected_publish_output_calls=[
            call(
                # routing_key
                'abuse-ch.ssl-blacklist.201902',
                # body: the rows newer than the saved state, newest first
                row_5 + '\n' + row_4 + '\n' + row_3,
                # prop_kwargs
                {
                    'timestamp': ANY,
                    'message_id': ANY,
                    'type': 'file',
                    'content_type': 'text/csv',
                    'headers': {}
                },
            ),
        ],
        # State: newest row time + all rows bearing that time.
        expected_saved_state={
            'newest_row_time': '2019-08-20 03:00:00',
            'newest_rows': {row_5, row_4},
        },
    )
def cases():
    """Yield one full feodotracker collector-run scenario: config,
    the expected single AMQP publish and the state to be saved."""
    row_5 = '2019-08-20 03:00:00,5.5.5.5,447,2019-08-20,ExampleName5'
    row_4 = '2019-08-20 03:00:00,4.4.4.4,447,2019-08-20,ExampleName4'
    row_3 = '2019-08-20 02:00:00,3.3.3.3,447,2019-08-20,ExampleName3'
    yield param(
        config_content='''
[abusech_feodotracker]
source=abuse-ch
cache_dir=~/.n6cache
url=https://www.example.com
download_retries=5
''',
        expected_publish_output_calls=[
            call(
                # routing_key
                'abuse-ch.feodotracker.201908',
                # body: the rows newer than the saved state, newest first
                row_5 + '\n' + row_4 + '\n' + row_3,
                # prop_kwargs
                {
                    'timestamp': ANY,
                    'message_id': ANY,
                    'type': 'file',
                    'content_type': 'text/csv',
                    'headers': {}
                },
            ),
        ],
        # State: newest row time + all rows bearing that time.
        expected_saved_state={
            'newest_row_time': '2019-08-20 03:00:00',
            'newest_rows': {row_5, row_4},
        },
    )
class Test__n6NormalizedData(unittest.TestCase):
    """
    Tests of the `n6NormalizedData` ORM model: column definitions,
    attribute initialization, and the query-construction class methods
    (`key_query`, `like_query`, `ip_net_query`, `active_bl_query`,
    `modified_query`) plus `to_raw_result_dict()`.
    """

    def setUp(self):
        # `MethodProxy` lets us call the model's methods with a MagicMock
        # standing in for the model, so SQLAlchemy operator calls are
        # recorded instead of executed.
        self.mock = MagicMock()
        self.meth = MethodProxy(n6NormalizedData, self.mock)

    def test_class_attrs(self):
        instrumented_attr_names = {
            name
            for name, obj in vars(n6NormalizedData).items()
            if isinstance(obj, sqlalchemy.orm.attributes.InstrumentedAttribute)}
        column_names_to_sql_reprs = {
            str(name): str(self._get_sql_repr(obj))
            for name, obj in n6NormalizedData._n6columns.items()}
        self.assertEqual(
            n6NormalizedData.__tablename__,
            'event')
        # `###`-commented names are columns that are not (yet?) supported.
        self.assertEqual(
            instrumented_attr_names, {
                'clients',
                'address',
                'ip',
                'asn',
                'cc',
                ###'ipv6',
                ###'rdns',
                ###'dir',
                'category',
                'confidence',
                'count',
                'custom',
                'dip',
                'dport',
                ###'email',
                'expires',
                'fqdn',
                ###'iban',
                'id',
                ###'injects',
                'md5',
                'modified',
                'name',
                'origin',
                ###'phone',
                'proto',
                ###'registrar',
                'replaces',
                'restriction',
                'rid',
                'sha1',
                'sha256',
                'source',
                'sport',
                'status',
                'target',
                'time',
                'until',
                'url',
                ###'url_pattern',
                ###'username',
                ###'x509fp_sha1',
            })
        # 'clients' is a relationship, not a column.
        self.assertEqual(
            instrumented_attr_names - {'clients'},
            set(n6NormalizedData._n6columns))
        self.assertEqual(
            column_names_to_sql_reprs, {
                'address': 'address TEXT',  # note: in actual db it is MEDIUMTEXT
                'ip': 'ip INTEGER UNSIGNED NOT NULL',
                'asn': 'asn INTEGER UNSIGNED',
                'cc': 'cc VARCHAR(2)',
                ###'ipv6': '',
                ###'rdns': '',
                ###'dir': '',
                'category': (
                    "category ENUM('amplifier','bots','backdoor','cnc',"
                    "'deface','dns-query','dos-attacker','dos-victim','flow',"
                    "'flow-anomaly','fraud','leak','malurl','malware-action','other','phish',"
                    "'proxy','sandbox-url','scam','scanning','server-exploit','spam',"
                    "'spam-url','tor','vulnerable','webinject') NOT NULL"),
                'confidence': "confidence ENUM('low','medium','high') NOT NULL",
                'count': 'count SMALLINT',
                'custom': 'custom TEXT',  # note: in actual db it is MEDIUMTEXT
                'dip': 'dip INTEGER UNSIGNED',
                'dport': 'dport INTEGER',
                ###'email': '',
                'expires': 'expires DATETIME',
                'fqdn': 'fqdn VARCHAR(255)',
                ###'iban': '',
                'id': 'id BINARY(16) NOT NULL',
                ###'injects': '',
                'md5': 'md5 BINARY(16)',
                'modified': 'modified DATETIME',
                'name': 'name VARCHAR(255)',
                'origin': (
                    "origin ENUM('c2','dropzone','proxy','p2p-crawler',"
                    "'p2p-drone','sinkhole','sandbox','honeypot',"
                    "'darknet','av','ids','waf')"),
                ###'phone': '',
                'proto': "proto ENUM('tcp','udp','icmp')",
                ###'registrar': '',
                'replaces': 'replaces BINARY(16)',
                'restriction': "restriction ENUM('public','need-to-know','internal') NOT NULL",
                'rid': 'rid BINARY(16) NOT NULL',
                'sha1': 'sha1 BINARY(20)',
                'sha256': 'sha256 BINARY(32)',
                'source': 'source VARCHAR(32) NOT NULL',
                'sport': 'sport INTEGER',
                'status': "status ENUM('active','delisted','expired','replaced')",
                'target': 'target VARCHAR(100)',
                'time': 'time DATETIME NOT NULL',
                'until': 'until DATETIME',
                'url': 'url VARCHAR(2048)',
                ###'url_pattern': '',
                ###'username': '',
                ###'x509fp_sha1': '',
            })

    def _get_sql_repr(self, col):
        # Build a minimal SQL-ish textual representation of a column
        # (name, type, UNSIGNED for IPAddress, NOT NULL for non-nullable).
        type_name = (
            str(col.type) if not isinstance(col.type, sqlalchemy.types.Enum)
            else 'ENUM({0})'.format(','.join(
                "'{0}'".format(e) for e in col.type.enums)))
        r = '{0} {1}'.format(col.name, type_name)
        if isinstance(col.type, IPAddress):
            self.assertTrue(col.type.impl.mapping['mysql'].unsigned)
            r += ' UNSIGNED'
        self.assertIsInstance(col.nullable, bool)
        if not col.nullable:
            r += ' NOT NULL'
        return r

    def test_init_and_attrs_1(self):
        # Explicitly given attributes keep their values; `time` is
        # normalized to naive UTC; all other columns default to None.
        obj = self.obj = n6NormalizedData(
            id=sen.event_id,
            ip=sen.some_ip_addr,
            dport=sen.some_port_number,
            time='2014-04-01 01:07:42+02:00',
        )
        self.assertEqual(obj.id, sen.event_id)
        self.assertEqual(obj.ip, sen.some_ip_addr)
        self.assertEqual(obj.dport, sen.some_port_number)
        self.assertEqual(
            obj.time,
            datetime.datetime(2014, 3, 31, 23, 7, 42))
        for name in n6NormalizedData._n6columns:
            if name in ('id', 'ip', 'dport', 'time'):
                continue
            val = getattr(obj, name)
            self.assertIsNone(val)
        self.assertIsInstance(
            obj.clients,
            sqlalchemy.orm.collections.InstrumentedList)
        self.assertEqual(obj.clients, [])
        self.client1 = MagicMock()
        self.client1.client = 'c1'
        self.client2 = MagicMock()
        self.client2.client = 'c2'
        obj.clients.append(self.client2)
        obj.clients.append(self.client1)
        self.assertEqual(obj.clients, [self.client2, self.client1])

    def test_init_and_attrs_2(self):
        obj = self.obj = n6NormalizedData(
            time='2014-04-01 01:07:42+02:00',
            expires='2015-04-01 01:07:43+02:00',
            until='2015-04-01 01:07:43+02:00',
        )
        self.assertIsNone(obj.id)
        self.assertEqual(obj.ip, '0.0.0.0')   # "no IP" placeholder
        self.assertEqual(
            obj.time,
            datetime.datetime(2014, 3, 31, 23, 7, 42))
        self.assertEqual(
            obj.expires,
            datetime.datetime(2015, 3, 31, 23, 7, 43))
        ### THIS IS A PROBLEM -- TO BE SOLVED IN #3113:
        # (`until` is *not* normalized, unlike `time`/`expires`)
        self.assertEqual(
            obj.until,
            '2015-04-01 01:07:43+02:00')

    def test__key_query(self):
        # `key_query` should delegate to the column's `in_()` operator.
        self.mock.some_key.in_.return_value = sen.result
        act_result = self.meth.key_query('some_key', sen.value)
        self.assertIs(act_result, sen.result)
        self.mock.some_key.in_.assert_called_once_with(sen.value)

    @foreach(
        param(
            key='url.sub',
            mapped_to='url',
            result=sen.or_result),
        param(
            key='fqdn.sub',
            mapped_to='fqdn',
            result=sen.or_result),
        param(
            key='fqdn.illegal',
            exc_type=KeyError),
        param(
            key='illegal',
            exc_type=KeyError),
    )
    @patch('n6lib.db_events.or_', return_value=sen.or_result)
    def test__like_query(self, or_mock, key, mapped_to=None, result=None,
                         exc_type=None, **kwargs):
        value = [
            u'val',
            u'ążź',  # (ticket #8043 - `UnicodeEncodeError: 'ascii' codec can't encode...`)
        ]
        if exc_type is None:
            assert result is not None
            getattr(self.mock, mapped_to).like.side_effect = [sen.term1, sen.term2]
            act_result = self.meth.like_query(key, value)
            self.assertIs(act_result, result)
            # each value wrapped in '%...%' and OR-ed together
            or_mock.assert_called_once_with(sen.term1, sen.term2)
            self.assertEqual(self.mock.mock_calls, [
                getattr(call, mapped_to).like(u'%val%'),
                getattr(call, mapped_to).like(u'%ążź%'),
            ])
        else:
            with self.assertRaises(exc_type):
                self.meth.like_query(key, value)

    @foreach(
        param(
            value=[('10.20.30.41', 24), ('10.20.30.41', 32)],
            min_max_ips=[(169090560, 169090815), (169090601, 169090601)],
            result=sen.or_result),
        param(
            value=[('10.20.30.41', 24)],
            min_max_ips=[(169090560, 169090815)],
            result=sen.or_result),
        param(
            value=[('10.20.30.441', 24), ('10.20.30.41', 32)],
            exc_type=socket.error),
        param(
            value=[('10.20.30.441', 24)],
            exc_type=socket.error),
        param(
            value=[None],
            exc_type=TypeError),
        param(
            value=('10.20.30.41', 24),
            exc_type=ValueError),
        param(
            value=None,
            exc_type=TypeError),
    )
    @patch('n6lib.db_events.and_', return_value=sen.and_result)
    @patch('n6lib.db_events.or_', return_value=sen.or_result)
    def test__ip_net_query(self, or_mock, and_mock, value=None,
                           min_max_ips=None, result=None,
                           exc_type=None, **kwargs):
        key = MagicMock()
        key.__ne__.side_effect = (lambda k: k != 'ip.net')
        if exc_type is None:
            assert result is not None
            # ip >= min and ip <= max for each network, OR-ed together
            self.mock.ip.__ge__.side_effect = (lambda min_ip: (sen.term_ge, min_ip))
            self.mock.ip.__le__.side_effect = (lambda max_ip: (sen.term_le, max_ip))
            act_result = self.meth.ip_net_query(key, value)
            self.assertIs(act_result, result)
            or_mock.assert_called_once_with(*(len(value) * [sen.and_result]))
            self.assertEqual(
                and_mock.mock_calls,
                [call(
                    (sen.term_ge, min_ip),
                    (sen.term_le, max_ip))
                 for min_ip, max_ip in min_max_ips])
        else:
            with self.assertRaises(exc_type):
                self.meth.ip_net_query(key, value)
        # the only operation on the key was one unequality test (against 'ip.net')
        key.__ne__.assert_called_once_with('ip.net')

    @foreach(
        param(key='active.min', cmp_meth_name='__ge__'),
        param(key='active.max', cmp_meth_name='__le__'),
        param(key='active.until', cmp_meth_name='__lt__'),
        param(key='active.illegal', exc_type=AssertionError),
        param(key='illegal', exc_type=AssertionError),
    )
    @patch('n6lib.db_events.null', return_value=sen.Null)
    @patch('n6lib.db_events.or_', return_value=sen.or_result)
    @patch('n6lib.db_events.and_', return_value=sen.and_result)
    def test__active_bl_query(self, and_mock, or_mock, null_mock, key,
                              cmp_meth_name=None, exc_type=None, **kwargs):
        value = [sen.val]
        if exc_type is None:
            self.mock.expires.is_.return_value = sen.expires_is_result
            self.mock.expires.isnot.return_value = sen.expires_isnot_result
            getattr(self.mock.expires, cmp_meth_name).return_value = sen.expires_cmp_result
            getattr(self.mock.time, cmp_meth_name).return_value = sen.time_cmp_result
            act_result = self.meth.active_bl_query(key, value)
            self.assertIs(act_result, sen.or_result)
            if key == 'active.min':
                assert cmp_meth_name == '__ge__'
                # simple OR of comparisons on `expires` and `time`
                or_mock.assert_called_once_with(sen.expires_cmp_result,
                                                sen.time_cmp_result)
                self.assertEqual(self.mock.expires.is_.mock_calls, [])
                self.assertEqual(self.mock.expires.isnot.mock_calls, [])
            else:
                assert (
                    (key == 'active.max' and cmp_meth_name == '__le__')
                    or (key == 'active.until' and cmp_meth_name == '__lt__'))
                # `expires` compared when set, `time` compared when
                # `expires` is NULL
                or_mock.assert_called_once_with(sen.and_result, sen.and_result)
                self.assertEqual(and_mock.mock_calls, [
                    call(sen.expires_isnot_result, sen.expires_cmp_result),
                    call(sen.expires_is_result, sen.time_cmp_result),
                ])
                self.mock.expires.is_.assert_called_once_with(sen.Null)
                self.mock.expires.isnot.assert_called_once_with(sen.Null)
            getattr(self.mock.expires, cmp_meth_name).assert_called_once_with(sen.val)
            getattr(self.mock.time, cmp_meth_name).assert_called_once_with(sen.val)
        else:
            with self.assertRaises(exc_type):
                self.meth.active_bl_query(key, value)

    @foreach(
        param('modified.min', cmp_meth_name='__ge__'),
        param('modified.max', cmp_meth_name='__le__'),
        param('modified.until', cmp_meth_name='__lt__'),
        param('modified.illegal', exc_type=AssertionError),
        param('illegal', exc_type=AssertionError),
    )
    def test__modified_query(self, key, cmp_meth_name=None, exc_type=None):
        value = [sen.val]
        if exc_type is None:
            getattr(self.mock.modified, cmp_meth_name).return_value = sen.result
            act_result = self.meth.modified_query(key, value)
            self.assertIs(act_result, sen.result)
            getattr(self.mock.modified, cmp_meth_name).assert_called_once_with(sen.val)
        else:
            with self.assertRaises(exc_type):
                self.meth.modified_query(key, value)

    def test__to_raw_result_dict__1(self):
        # (reuses the object built by test_init_and_attrs_1)
        self.test_init_and_attrs_1()
        d = self.obj.to_raw_result_dict()
        self.assertEqual(d, {
            'id': sen.event_id,
            'ip': sen.some_ip_addr,
            'dport': sen.some_port_number,
            'time': datetime.datetime(2014, 3, 31, 23, 7, 42),
            'client': ['c1', 'c2'],
        })

    def test__to_raw_result_dict__2(self):
        # (reuses the object built by test_init_and_attrs_2)
        self.test_init_and_attrs_2()
        d = self.obj.to_raw_result_dict()
        self.assertEqual(d, {
            # note that ip='0.0.0.0' has been removed
            'time': datetime.datetime(2014, 3, 31, 23, 7, 42),
            'expires': datetime.datetime(2015, 3, 31, 23, 7, 43),
            ### THIS IS A PROBLEM -- TO BE SOLVED IN #3113:
            'until': '2015-04-01 01:07:43+02:00',
        })
class Test__exiting_on_exception(unittest.TestCase):
    """
    Tests of the `exiting_on_exception` decorator: ordinary exceptions
    are converted to `SystemExit` with a 'FATAL ERROR!...' message;
    `SystemExit`/`KeyboardInterrupt` pass through untouched; normal
    returns are unaffected.
    """

    @foreach(
        param(
            raised_exc=ValueError,
            expected_regex_pattern=r'^FATAL ERROR!.*\bValueError\b',
        ),
        param(
            raised_exc=ValueError('foobar'),
            expected_regex_pattern=r'^FATAL ERROR!.*\bValueError\b.*\bfoobar\b',
        ),
        param(
            raised_exc=Exception,
            expected_regex_pattern=r'^FATAL ERROR!.*\bException\b',
        ),
        param(
            raised_exc=Exception('foobar'),
            expected_regex_pattern=r'^FATAL ERROR!.*\bException\b.*\bfoobar\b',
        ),
        param(
            raised_exc=BaseException,
            expected_regex_pattern=r'^FATAL ERROR!.*\bBaseException\b',
        ),
        param(
            raised_exc=BaseException('foobar'),
            expected_regex_pattern=
                r'^FATAL ERROR!.*\bBaseException\b.*\bfoobar\b',
        ),
    )
    def test_with_various_exceptions(self, raised_exc, expected_regex_pattern):
        m = MagicMock()
        expected_regex = re.compile(expected_regex_pattern, re.DOTALL)
        @exiting_on_exception
        def some_callable(*args, **kwargs):
            m(*args, **kwargs)
            raise raised_exc
        with self.assertRaisesRegexp(SystemExit, expected_regex) as cm:
            some_callable(42, b='spam')
        # the wrapped callable was actually invoked with the given args
        self.assertEqual(m.mock_calls, [call(42, b='spam')])
        assert 'FATAL ERROR' in str(cm.exception), 'bug in the test'

    @foreach(
        param(
            raised_exc=SystemExit,
            expected_exc_class=SystemExit,
            expected_exc_args=(),
            is_expected_the_same_exc=False,
        ),
        param(
            raised_exc=SystemExit(0),
            expected_exc_class=SystemExit,
            expected_exc_args=(0, ),
            is_expected_the_same_exc=True,
        ),
        param(
            raised_exc=SystemExit(1),
            expected_exc_class=SystemExit,
            expected_exc_args=(1, ),
            is_expected_the_same_exc=True,
        ),
        param(
            raised_exc=SystemExit('foobar'),
            expected_exc_class=SystemExit,
            expected_exc_args=('foobar', ),
            is_expected_the_same_exc=True,
        ),
        param(
            raised_exc=KeyboardInterrupt,
            expected_exc_class=KeyboardInterrupt,
            expected_exc_args=(),
            is_expected_the_same_exc=False,
        ),
        param(
            raised_exc=KeyboardInterrupt('foobar', 'spamham'),
            expected_exc_class=KeyboardInterrupt,
            expected_exc_args=('foobar', 'spamham'),
            is_expected_the_same_exc=True,
        ),
    )
    def test_with_SystemExit_or_KeyboardInterrupt(self, raised_exc,
                                                  expected_exc_class,
                                                  expected_exc_args,
                                                  is_expected_the_same_exc):
        m = MagicMock()
        @exiting_on_exception
        def some_callable(*args, **kwargs):
            m(*args, **kwargs)
            raise raised_exc
        with self.assertRaises(expected_exc_class) as cm:
            some_callable(42, b='spam')
        self.assertEqual(m.mock_calls, [call(42, b='spam')])
        self.assertEqual(cm.exception.args, expected_exc_args)
        # raising an exception *class* makes the interpreter instantiate
        # a new exception object -- hence not "the same" instance
        if is_expected_the_same_exc:
            self.assertIs(cm.exception, raised_exc)
        else:
            self.assertIsNot(cm.exception, raised_exc)
        # these exceptions pass through without the 'FATAL ERROR' wrapping
        self.assertNotIn('FATAL ERROR', str(cm.exception))

    def test_without_any_exception(self):
        m = MagicMock()
        @exiting_on_exception
        def some_callable(*args, **kwargs):
            m(*args, **kwargs)
            return sentinel.result
        result = some_callable(42, b='spam')
        self.assertIs(result, sentinel.result)
        self.assertEqual(m.mock_calls, [call(42, b='spam')])
class TestDevFakeUserAuthenticationPolicy(unittest.TestCase):
    """
    Tests of `DevFakeUserAuthenticationPolicy`: the fake userid is built
    from the `dev_fake_auth.org_id`/`dev_fake_auth.user_id` settings
    (with documented defaults); unrelated settings are ignored; unknown
    `dev_fake_auth.*` options raise `ConfigError`.
    """

    # settings that should not affect the policy's behavior
    _param_side_settings = [
        param(side_settings={}),
        param(side_settings={'foo': 'bar'}),
        param(side_settings={'foo.bar': 'spam.ham'}),
        param(side_settings={'dev_fake_auth': 'true'}),
        param(side_settings={'dev_fake_auth': 'false'}),
        param(side_settings={'dev_fake_auth': "doesn't matter"}),
    ]

    @foreach(_param_side_settings)
    @foreach(
        param(
            settings={},
            expected_unauthenticated_userid='example.org,[email protected]',
        ),
        param(
            settings={'dev_fake_auth.org_id': 'nask.waw.pl'},
            expected_unauthenticated_userid='nask.waw.pl,[email protected]',
        ),
        param(
            settings={'dev_fake_auth.user_id': '*****@*****.**'},
            expected_unauthenticated_userid='example.org,[email protected]',
        ),
        param(
            settings={
                'dev_fake_auth.org_id': 'nask.waw.pl',
                'dev_fake_auth.user_id': '*****@*****.**',
            },
            expected_unauthenticated_userid='nask.waw.pl,[email protected]',
        ),
    )
    def test_ok(self, settings, side_settings, expected_unauthenticated_userid):
        given_settings = dict(settings, **side_settings)
        policy_instance = DevFakeUserAuthenticationPolicy(given_settings)
        self.assertIsInstance(policy_instance, DevFakeUserAuthenticationPolicy)
        self.assertEqual(policy_instance.unauthenticated_userid(sen.request),
                         expected_unauthenticated_userid)

    @foreach(_param_side_settings)
    @foreach(
        param(settings={}),
        param(settings={'dev_fake_auth.org_id': 'nask.waw.pl'}),
        param(settings={'dev_fake_auth.user_id': '*****@*****.**'}),
        param(
            settings={
                'dev_fake_auth.org_id': 'nask.waw.pl',
                'dev_fake_auth.user_id': '*****@*****.**',
            }),
    )
    def test_config_error(self, settings, side_settings):
        given_settings = dict(settings, **side_settings)
        # an unknown `dev_fake_auth.*` option must be rejected
        given_settings['dev_fake_auth.illegal_opt'] = 'whatever'
        with self.assertRaises(ConfigError):
            DevFakeUserAuthenticationPolicy(given_settings)
class TestInstantiationOfAuthenticationPolicies(unittest.TestCase):

    """
    Tests of the `dev_fake_auth`-driven behavior common to the real
    authentication policy classes: depending on the boolean value of the
    `dev_fake_auth` setting, instantiation yields either the requested
    policy class or a `DevFakeUserAuthenticationPolicy` substitute; an
    unparseable flag value raises ConfigError.
    """

    _param_policy_class = [
        param(policy_class=BaseUserAuthenticationPolicy),
        param(policy_class=SSLUserAuthenticationPolicy),
        param(policy_class=LoginOrSSLUserAuthenticationPolicy),
    ]

    # Extra settings that must not influence the outcome (including
    # `dev_fake_auth.org_id`/`.user_id` without the flag itself).
    _param_side_settings = [
        param(side_settings={}),
        param(side_settings={'foo': 'bar'}),
        param(
            side_settings={
                'foo': 'bar',
                'foo.bar': 'spam.ham',
                'dev_fake_auth.org_id': 'nask.waw.pl',
                'dev_fake_auth.user_id': "all that doesn't matter",
            }),
    ]

    @foreach(_param_policy_class)
    @foreach(_param_side_settings)
    @foreach(
        # Missing flag or any "false-ish" spelling of it.
        param(settings={}),
        param(settings={'dev_fake_auth': 'false'}),
        param(settings={'dev_fake_auth': 'No'}),
        param(settings={'dev_fake_auth': 'OFF'}),
    )
    def test_no_dev_fake_auth(self, policy_class, settings, side_settings):
        given_settings = dict(settings, **side_settings)
        policy_instance = policy_class(given_settings)
        # The instance is of the requested class itself (not a subclass
        # and not the dev-fake substitute).
        self.assertIs(policy_instance.__class__, policy_class)

    @foreach(_param_policy_class)
    @foreach(_param_side_settings)
    @foreach(
        # Various "true-ish" spellings of the flag.
        param(settings={'dev_fake_auth': 'true'}),
        param(settings={'dev_fake_auth': 'YES'}),
        param(settings={'dev_fake_auth': 'On'}),
    )
    def test_with_dev_fake_auth(self, policy_class, settings, side_settings):
        given_settings = dict(settings, **side_settings)
        with patch(
                'n6lib.pyramid_commons._pyramid_commons.DevFakeUserAuthenticationPolicy',
                return_value=sen.DevFakeUserAuthenticationPolicy_instance,
        ) as DevFakeUserAuthenticationPolicy_mock:
            policy_instance = policy_class(given_settings)
        # Instantiation was redirected to DevFakeUserAuthenticationPolicy,
        # called once with the full settings dict.
        self.assertIs(policy_instance, sen.DevFakeUserAuthenticationPolicy_instance)
        self.assertEqual(DevFakeUserAuthenticationPolicy_mock.mock_calls, [
            call(given_settings),
        ])

    @foreach(_param_policy_class)
    @foreach(_param_side_settings)
    def test_config_error(self, policy_class, side_settings):
        # A `dev_fake_auth` value that is neither true-ish nor false-ish
        # must raise ConfigError.
        settings = {'dev_fake_auth': 'illegalvalue'}
        given_settings = dict(settings, **side_settings)
        with self.assertRaises(ConfigError):
            policy_class(given_settings)
class TestDeviceRequestPostViewBase(unittest.TestCase):

    """
    Tests of `DeviceRequestPostViewBase`: the default HTTP method and
    the responses produced by `make_response()` -- both the successful
    JSON response and the error responses mapped from ManageAPI errors.
    """

    SOME_REQUEST_ID = '0123456789abcdef0123456789ab'

    def test__get_default_http_methods(self):
        result = DeviceRequestPostViewBase.get_default_http_methods()
        self.assertEqual(result, 'POST')

    def test__make_response__ok(self):
        expected_status_code = 200
        expected_content_type = 'application/json'
        expected_deserialized_body = {'request_id': self.SOME_REQUEST_ID}
        self._prepare_for_make_response()
        self.manage_api.make_new_request_case.side_effect = (
            self._side_effect__make_new_request_case__ok)
        response = self.view.make_response()
        self.assertIsInstance(response, Response)
        self.assertEqual(response.status_code, expected_status_code)
        self.assertEqual(response.content_type, expected_content_type)
        self.assertEqual(json.loads(response.body), expected_deserialized_body)
        # Exactly one ManageAPI call, with the request body and auth data.
        self.assertEqual(self.manage_api.mock_calls, [
            call.make_new_request_case(
                sen.csr_pem,
                sen.auth_data,
            ),
        ])

    @foreach(
        # Each case: the exception raised by ManageAPI and the exception
        # expected to propagate from the view (with matching message and,
        # for HTTP errors, status code).
        param(
            ManageAPIClientError(remote_user_error_label='not-a-csr'),
            expected_exc_class=HTTPBadRequest,
            expected_exc_regexp='not a valid CSR',
            expected_status_code=400,
        ),
        param(
            ManageAPIClientError(
                remote_user_error_label='csr-non-compliance'),
            expected_exc_class=HTTPForbidden,
            expected_exc_regexp='CSR does not comply',
            expected_status_code=403,
        ),
        param(
            ManageAPIClientError(
                remote_user_error_label='csr-for-different-user'),
            expected_exc_class=HTTPForbidden,
            expected_exc_regexp='CSR subject does not match.*user',
            expected_status_code=403,
        ),
        param(
            # A non-client ManageAPI error is re-raised as-is (no HTTP
            # error translation, hence no expected status code).
            ManageAPIError('tralalala'),
            expected_exc_class=ManageAPIError,
            expected_exc_regexp='tralalala',
        ),
    )
    def test__make_response__err(self,
                                 side_effect__make_new_request_case,
                                 expected_exc_class,
                                 expected_exc_regexp,
                                 expected_status_code=None):
        self._prepare_for_make_response()
        self.manage_api.make_new_request_case.side_effect = side_effect__make_new_request_case
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp) as cm:
            self.view.make_response()
        if expected_status_code is not None:
            # Pyramid HTTP exceptions double as responses.
            self.assertIsInstance(cm.exception, Response)
            self.assertEqual(cm.exception.status_code, expected_status_code)
        self.assertEqual(self.manage_api.mock_calls, [
            call.make_new_request_case(
                sen.csr_pem,
                sen.auth_data,
            ),
        ])

    def _prepare_for_make_response(self):
        # Prepare `self.request`, `self.manage_api` and `self.view`
        # (a concrete view instance built on a mocked configurator).
        # request mock
        self.request = MagicMock()
        self.request.body = sen.csr_pem
        self.request.auth_data = sen.auth_data
        self.manage_api = self.request.registry.manage_api
        # view instance
        configurator = MagicMock()
        configurator.registry.settings = {}
        concrete_cls = DeviceRequestPostViewBase.concrete_view_class(
            resource_id=sen.resource_id,
            config=configurator)
        self.view = concrete_cls(sen.context, self.request)

    def _side_effect__make_new_request_case__ok(self, csr_pem, auth_data):
        # Successful ManageAPI behavior: return a fresh request case.
        return RequestCase(self.SOME_REQUEST_ID, csr_pem, sen.sender_dn)
class TestDeviceRequestGetViewBase(unittest.TestCase):

    """
    Tests of `DeviceRequestGetViewBase`: URL pattern validation, the
    default HTTP method, and `make_response()` outcomes for the various
    request case statuses, expiry settings and certificate states.
    """

    SOME_URL_PREFIX = '/some-url/path'
    SOME_CERT_CA_LABEL = 'client-2'
    SOME_CERT_SERIAL_NUMBER = '0123456789abcdef0123'
    SOME_CERT_DN = 'n6cert-serial-hex={0},cn={1},cn=root,dc=n6,dc=cert,dc=pl'.format(
        SOME_CERT_SERIAL_NUMBER,
        SOME_CERT_CA_LABEL)
    SOME_CERT_PEM = '<<< (-: \n instead of some certificate pem \r\n :-) >>>'

    # Reference datetimes, relative to test run time (naive UTC).
    DT_TODAY = datetime.datetime.utcnow()
    DT_TOMMOROW = DT_TODAY + datetime.timedelta(days=1)
    DT_YESTERDAY = DT_TODAY - datetime.timedelta(days=1)
    DT_DAY_BEFORE_YESTERDAY = DT_TODAY - datetime.timedelta(days=2)

    # Expiry-setting/status-change combinations for which a finalized
    # request case is still considered *non*-expired.
    PARAMS__REQ_EXPIRY_SETTING__OK = [
        param(settings={},
        ),
        param(
            # finalized_request_case_expiry_days<=0 means: no expiration
            settings={
                'device_request_resource.finalized_request_case_expiry_days': '0'
            },
        ),
        param(settings={
                'device_request_resource.finalized_request_case_expiry_days': '1'
            },
        ),
        param(settings={
                'device_request_resource.finalized_request_case_expiry_days': '30'
            },
        ),
        param(
            # finalized_request_case_expiry_days<=0 means: no expiration
            settings={
                'device_request_resource.finalized_request_case_expiry_days': '0'
            },
            status_changed_on=DT_DAY_BEFORE_YESTERDAY,
        ),
        param(
            # finalized_request_case_expiry_days<=0 means: no expiration
            settings={
                'device_request_resource.finalized_request_case_expiry_days': '-1'
            },
            status_changed_on=DT_DAY_BEFORE_YESTERDAY,
        ),
        param(
            settings={
                'device_request_resource.finalized_request_case_expiry_days': '30'
            },
            status_changed_on=DT_DAY_BEFORE_YESTERDAY,
        ),
    ]

    # The combination for which a finalized request case counts as expired
    # (status changed 2 days ago, expiry set to 1 day).
    PARAMS__REQ_EXPIRY_SETTING__EXPIRED = [
        param(
            settings={
                'device_request_resource.finalized_request_case_expiry_days': '1'
            },
            status_changed_on=DT_DAY_BEFORE_YESTERDAY,
        ),
    ]

    def test__validate_url_pattern__ok(self):
        # A pattern ending with the '/{request_id}' placeholder is valid.
        url_pattern = self.SOME_URL_PREFIX + '/{request_id}'
        none = DeviceRequestGetViewBase.validate_url_pattern(url_pattern)
        self.assertIsNone(none)

    @foreach(
        # Suffixes that make the URL pattern invalid (missing leading
        # slash, extra text around the placeholder, wrong/absent name).
        '{request_id}',
        '/foo{request_id}',
        '/{request_id}foo',
        '/{foo}',
        '/foo',
    )
    def test__validate_url_pattern__err(self, suffix):
        url_pattern = self.SOME_URL_PREFIX + suffix
        with self.assertRaises(HTTPServerError):
            DeviceRequestGetViewBase.validate_url_pattern(url_pattern)

    def test__get_default_http_methods(self):
        result = DeviceRequestGetViewBase.get_default_http_methods()
        self.assertEqual(result, 'GET')

    @foreach(PARAMS__REQ_EXPIRY_SETTING__OK)
    def test__make_response__ok_200(self, settings, status_changed_on=sen.NOT_SET):
        # Finalized and non-expired case with an active cert -> 200 + PEM body.
        expected_status_code = 200
        expected_content_type = 'text/plain'
        expected_body = self.SOME_CERT_PEM
        self._prepare_for_make_response_calling_get_cert(settings)
        self.cert_dn = self.SOME_CERT_DN
        self.status = 'finalized'
        self.status_changed_on = status_changed_on
        response = self.view.make_response()
        self.assertIsInstance(response, Response)
        self.assertEqual(response.status_code, expected_status_code)
        self.assertEqual(response.content_type, expected_content_type)
        self.assertEqual(response.body, expected_body)
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)
        self.assertEqual(self.cert_data.mock_calls, [call.ensure_cert_verified()])

    @foreach(PARAMS__REQ_EXPIRY_SETTING__OK +
             PARAMS__REQ_EXPIRY_SETTING__EXPIRED)
    @foreach(
        param(status='new'),
        param(status='registered'),
    )
    def test__make_response__ok_202(self, status, settings, status_changed_on=sen.NOT_SET):
        # A not-yet-finalized case -> 202 with an empty body (regardless
        # of the expiry settings).
        expected_status_code = 202
        expected_content_type = 'text/plain'
        expected_body = ''
        self._prepare_for_make_response(settings)
        self.status = status
        self.status_changed_on = status_changed_on
        response = self.view.make_response()
        self.assertIsInstance(response, Response)
        self.assertEqual(response.status_code, expected_status_code)
        self.assertEqual(response.content_type, expected_content_type)
        self.assertEqual(response.body, expected_body)
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)

    def test__make_response__err_404(self):
        # Unknown request id -> 404.
        expected_exc_class = HTTPNotFound
        expected_exc_regexp = 'remote certificate request case not found'
        expected_status_code = 404
        self._prepare_for_make_response(
            settings={},
            expected_exc_class=expected_exc_class)
        self.manage_api.get_request_case.side_effect = ManageAPIClientError(
            remote_user_error_label='request-id-not-found')
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp) as cm:
            self.view.make_response()
        self.assertEqual(cm.exception.status_code, expected_status_code)
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)

    @foreach(PARAMS__REQ_EXPIRY_SETTING__OK +
             PARAMS__REQ_EXPIRY_SETTING__EXPIRED)
    def test__make_response__err_410_req_cancelled(self, settings,
                                                   status_changed_on=sen.NOT_SET):
        # Cancelled case -> 410 (regardless of the expiry settings).
        expected_exc_class = HTTPGone
        expected_exc_regexp = 'remote certificate request case is cancelled'
        expected_status_code = 410
        self._prepare_for_make_response(settings, expected_exc_class)
        self.status = 'cancelled'
        self.status_changed_on = status_changed_on
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp) as cm:
            self.view.make_response()
        self.assertEqual(cm.exception.status_code, expected_status_code)
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)

    @foreach(PARAMS__REQ_EXPIRY_SETTING__EXPIRED)
    def test__make_response__err_410_req_expired(self, settings,
                                                 status_changed_on=sen.NOT_SET):
        # Finalized-but-expired case -> 410 (before any cert is fetched).
        expected_exc_class = HTTPGone
        expected_exc_regexp = 'remote certificate request case expired'
        expected_status_code = 410
        self._prepare_for_make_response(settings, expected_exc_class)
        self.cert_dn = self.SOME_CERT_DN
        self.status = 'finalized'
        self.status_changed_on = status_changed_on
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp) as cm:
            self.view.make_response()
        self.assertEqual(cm.exception.status_code, expected_status_code)
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)

    @foreach(PARAMS__REQ_EXPIRY_SETTING__OK)
    @foreach(
        # Each case: the expected error message fragment and the cert's
        # expiry/revocation dates making it inactive.
        param(
            'certificate already expired',
            cert_expires_on=DT_YESTERDAY,  # <- expired
            cert_revoked_on=None,
        ),
        param(
            'certificate has been revoked',
            cert_expires_on=DT_TOMMOROW,
            cert_revoked_on=DT_YESTERDAY,  # <- revoked
        ),
    )
    def test__make_response__err_410_cert_inactive(
            self,
            expected_exc_regexp,
            cert_expires_on,
            cert_revoked_on,
            settings,
            status_changed_on=sen.NOT_SET):
        # Finalized, non-expired case but an inactive cert -> 410.
        expected_exc_class = HTTPGone
        expected_status_code = 410
        self._prepare_for_make_response_calling_get_cert(
            settings,
            expected_exc_class)
        self.cert_expires_on = cert_expires_on
        self.cert_revoked_on = cert_revoked_on
        self.cert_dn = self.SOME_CERT_DN
        self.status = 'finalized'
        self.status_changed_on = status_changed_on
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp) as cm:
            self.view.make_response()
        self.assertEqual(cm.exception.status_code, expected_status_code)
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)
        self.assertEqual(self.cert_data.mock_calls, [call.ensure_cert_verified()])

    def test__make_response__err_ManageAPIError_from_get_request_case(
            self):
        # A non-client ManageAPI error from get_request_case() propagates.
        expected_exc_class = ManageAPIError
        expected_exc_regexp = 'tralalala'
        self._prepare_for_make_response(
            settings={},
            expected_exc_class=expected_exc_class)
        self.manage_api.get_request_case.side_effect = expected_exc_class(
            expected_exc_regexp)
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp):
            self.view.make_response()
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)

    def test__make_response__err_ManageAPIError_from_get_cert(self):
        # A non-client ManageAPI error from get_cert() propagates.
        expected_exc_class = ManageAPIError
        expected_exc_regexp = 'tralalala'
        self._prepare_for_make_response_calling_get_cert(
            settings={},
            expected_exc_class=expected_exc_class)
        self.cert_dn = self.SOME_CERT_DN
        self.status = 'finalized'
        self.manage_api.get_cert.side_effect = expected_exc_class(
            expected_exc_regexp)
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp):
            self.view.make_response()
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)

    def test__make_response__err_ManageAPIError_from_ensure_cert_verified(
            self):
        # A non-client ManageAPI error from ensure_cert_verified() propagates.
        expected_exc_class = ManageAPIError
        expected_exc_regexp = 'tralalala'
        self._prepare_for_make_response_calling_get_cert(
            settings={},
            expected_exc_class=expected_exc_class)
        self.cert_dn = self.SOME_CERT_DN
        self.status = 'finalized'
        self.cert_data.ensure_cert_verified.side_effect = expected_exc_class(
            expected_exc_regexp)
        with self.assertRaisesRegexp(expected_exc_class, expected_exc_regexp):
            self.view.make_response()
        self.assertEqual(self.manage_api.mock_calls,
                         self.expected_manage_api_calls)
        self.assertEqual(self.cert_data.mock_calls, [call.ensure_cert_verified()])

    def _prepare_for_make_response(self, settings, expected_exc_class=None):
        # Prepare `self.request`, `self.manage_api`, `self.view` and the
        # expected ManageAPI (context-manager) call sequence; the class
        # of the exception expected to pass through __exit__() is given
        # as `expected_exc_class` (None for the no-exception paths).
        # request mock
        self.request = MagicMock()
        self.request.matchdict = {'request_id': sen.request_id}
        self.manage_api = self.request.registry.manage_api
        self.manage_api.__enter__.return_value = self.manage_api
        self.manage_api.get_request_case.side_effect = self._side_effect__get_request_case
        # view instance
        self.configurator = MagicMock()
        self.configurator.registry.settings = settings
        concrete_cls = DeviceRequestGetViewBase.concrete_view_class(
            resource_id=sen.resource_id,
            config=self.configurator)
        self.view = concrete_cls(sen.context, self.request)
        # defaults for stuff used in _side_effect__get_request_case()
        self.cert_dn = None
        self.status = sen.NOT_SET
        self.status_changed_on = sen.NOT_SET
        # expected ManageAPI method calls
        self.expected_manage_api_calls = [
            call.__enter__(),
            call.get_request_case(sen.request_id),
            call.__exit__(expected_exc_class, ANY, ANY),
        ]

    def _prepare_for_make_response_calling_get_cert(
            self,
            settings=None,
            expected_exc_class=None):
        # Like _prepare_for_make_response() but also wiring get_cert()
        # (for tests whose scenario reaches the certificate fetch).
        self._prepare_for_make_response(settings)
        self.manage_api.get_cert.side_effect = self._side_effect__get_cert
        # to be returned by _side_effect__get_cert()
        self.cert_data = MagicMock()
        # defaults for stuff used in _side_effect__get_cert()
        self.cert_pem = self.SOME_CERT_PEM
        self.cert_expires_on = self.DT_TOMMOROW
        self.cert_revoked_on = None
        # expected ManageAPI method calls (overwritten)
        self.expected_manage_api_calls = [
            call.__enter__(),
            call.get_request_case(sen.request_id),
            call.get_cert(self.SOME_CERT_CA_LABEL,
                          self.SOME_CERT_SERIAL_NUMBER),
            call.__exit__(expected_exc_class, ANY, ANY),
        ]

    def _side_effect__get_request_case(self, request_id):
        # Build the request case from the per-test attributes set by the
        # test methods after the _prepare_* call.
        request_case = RequestCase(request_id, sen.csr_pem, sen.sender_dn)
        request_case.cert_dn = self.cert_dn
        request_case.status = self.status
        if self.status_changed_on is not sen.NOT_SET:
            request_case._status_changed_on = self.status_changed_on
        return request_case

    def _side_effect__get_cert(self, ca_label, serial_number):
        # Return the prepared cert-data mock with the per-test attributes.
        cert_data = self.cert_data
        cert_data.cert_pem = self.cert_pem
        cert_data.expires_on = self.cert_expires_on
        cert_data.revoked_on = self.cert_revoked_on
        return cert_data
sample_access_info_items = { 'queries_limit': None, 'window': 3600, 'request_parameters': None, 'max_days_old': 100, 'results_limit': None, } access_info_to_response = [ param( res_limits={ '/search/events': sample_access_info_items, }, full_access=True, is_authenticated=True, is_cert_available=True, expected_response={ 'available_resources': ['/search/events'], 'certificate_fetched': True, 'full_access': True, 'authenticated': True, }, ), # `full_access` is hidden if False param( res_limits={ '/search/events': sample_access_info_items, '/report/inside': sample_access_info_items, }, full_access=False, is_authenticated=True, is_cert_available=False,
def _min_tlp_and_expected_restriction():
    """Yield params pairing a minimum TLP value with the expected restriction."""
    tlp_to_restriction = [
        ('white', 'need-to-know'),
        ('green', 'need-to-know'),
        ('amber', 'internal'),
        ('red', 'internal'),
    ]
    for min_tlp, restriction in tlp_to_restriction:
        yield param(min_tlp, restriction)
def _categories():
    """Yield labeled params with category lists (non-DIP vs DIP categories)."""
    labeled_category_lists = (
        (['flow', 'scam'], 'non_dip_categories'),
        (['bots', 'cnc'], 'dip_categories'),
    )
    for categories, case_label in labeled_category_lists:
        yield param({'category': categories}).label(case_label)
class TestN6RegistrationView(RequestHelperMixin, DBConnectionPatchMixin, unittest.TestCase):

    """
    Tests of `N6RegistrationView`: successful registration requests (and
    the resultant auth-db objects added to the session), parameter
    cleaning errors, and errors raised during the database operation.
    """

    def setUp(self):
        # Objects passed to `session.add()` are recorded here.
        self.added_to_session = []
        self.session_mock = MagicMock()
        self.session_mock.add.side_effect = self.added_to_session.append
        self.auth_db_connector_mock = MagicMock()
        self.auth_db_connector_mock.get_current_session.return_value = self.session_mock
        self.pyramid_config = self.prepare_pyramid_testing()
        self._set_up_auth_apis()

    def _set_up_auth_apis(self):
        # Patch the DB connector and attach auth APIs to the registry.
        self.patch('n6lib.auth_db.api.SQLAuthDBConnector',
                   return_value=self.auth_db_connector_mock)
        self.pyramid_config.registry.auth_query_api = AuthQueryAPI(sen.settings)
        self.pyramid_config.registry.auth_manage_api = AuthManageAPI(sen.settings)

    @staticmethod
    def basic_cases():
        # Yield (request params, expected db obj matcher) pairs -- from
        # the minimal set of request params up to variants with optional
        # notification/asns/fqdns/ip_networks params.
        request_params_base = dict(
            org_id=[u'example.com'],
            email=[u'*****@*****.**'],
            actual_name=[u'Śome Ńąmę'],
            submitter_title=[u'CEO'],
            submitter_firstname_and_surname=[u'Marian Examplówski'],
            csr=[u'-----BEGIN CERTIFICATE REQUEST-----\nabc\n-----END CERTIFICATE REQUEST-----'],
        )
        expected_db_obj_attributes_base = dict(
            submitted_on=AnyInstanceOf(datetime.datetime),
            modified_on=AnyInstanceOf(datetime.datetime),
            status=REGISTRATION_REQUEST_STATUS_NEW,
            org_id=u'example.com',
            email=u'*****@*****.**',
            actual_name=u'Śome Ńąmę',
            submitter_title=u'CEO',
            submitter_firstname_and_surname=u'Marian Examplówski',
            csr=u'-----BEGIN CERTIFICATE REQUEST-----\nabc\n-----END CERTIFICATE REQUEST-----',
        )
        yield (
            dict(request_params_base),
            AnyInstanceOfWhoseVarsInclude(models.RegistrationRequest, **dict(
                expected_db_obj_attributes_base,
            )),
        )
        yield (
            dict(
                request_params_base,
                notification_language=[u'EN'],
            ),
            AnyInstanceOfWhoseVarsInclude(models.RegistrationRequest, **dict(
                expected_db_obj_attributes_base,
                email_notification_language=u'EN',
            )),
        )
        yield (
            dict(
                request_params_base,
                notification_emails=[u'foo@bar'],
            ),
            AnyInstanceOfWhoseVarsInclude(models.RegistrationRequest, **dict(
                expected_db_obj_attributes_base,
                email_notification_addresses=[AnyInstanceOfWhoseVarsInclude(
                    models.RegistrationRequestEMailNotificationAddress,
                    email=u'foo@bar'),
                ],
            )),
        )
        yield (
            dict(
                request_params_base,
                notification_emails=[u'spam@ham', u'foo@bar'],
            ),
            # Note: notification e-mail addresses are expected in sorted
            # order (independent of the input order).
            AnyInstanceOfWhoseVarsInclude(models.RegistrationRequest, **dict(
                expected_db_obj_attributes_base,
                email_notification_addresses=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestEMailNotificationAddress,
                        email=u'foo@bar',
                    ),
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestEMailNotificationAddress,
                        email=u'spam@ham',
                    ),
                ],
            )),
        )
        yield (
            dict(
                request_params_base,
                notification_language=[u'EN'],
                notification_emails=[u'foo@bar'],
                asns=[u'42'],
                fqdns=[u'foo.example.org'],
                ip_networks=[u'1.2.3.4/24'],
            ),
            AnyInstanceOfWhoseVarsInclude(models.RegistrationRequest, **dict(
                expected_db_obj_attributes_base,
                email_notification_language=u'EN',
                email_notification_addresses=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestEMailNotificationAddress,
                        email=u'foo@bar',
                    ),
                ],
                asns=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestASN,
                        asn=42,
                    ),
                ],
                fqdns=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestFQDN,
                        fqdn=u'foo.example.org',
                    ),
                ],
                ip_networks=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestIPNetwork,
                        ip_network=u'1.2.3.4/24',
                    ),
                ],
            )),
        )
        yield (
            dict(
                request_params_base,
                notification_language=[u'EN'],
                notification_emails=[u'spam@ham', u'foo@bar'],
                asns=[u'1.1', u'42', u'65537'],   # note: `1.1` and `65537` means the same
                fqdns=[u'foo.example.org', u'baz.ham', u'example.net'],
                ip_networks=[u'10.20.30.40/24', u'192.168.0.3/32'],
            ),
            # Note: here the duplicate ASN spelling is expected to be
            # deduplicated, and collection attributes sorted.
            AnyInstanceOfWhoseVarsInclude(models.RegistrationRequest, **dict(
                expected_db_obj_attributes_base,
                email_notification_language=u'EN',
                email_notification_addresses=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestEMailNotificationAddress,
                        email=u'foo@bar',
                    ),
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestEMailNotificationAddress,
                        email=u'spam@ham',
                    ),
                ],
                asns=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestASN,
                        asn=42,
                    ),
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestASN,
                        asn=65537,
                    ),
                ],
                fqdns=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestFQDN,
                        fqdn=u'baz.ham',
                    ),
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestFQDN,
                        fqdn=u'example.net',
                    ),
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestFQDN,
                        fqdn=u'foo.example.org',
                    ),
                ],
                ip_networks=[
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestIPNetwork,
                        ip_network=u'10.20.30.40/24',
                    ),
                    AnyInstanceOfWhoseVarsInclude(
                        models.RegistrationRequestIPNetwork,
                        ip_network=u'192.168.0.3/32',
                    ),
                ],
            )),
        )

    @paramseq
    def ok_cases(cls):
        # Multiply basic_cases() by formal variations of the request
        # params: str (utf-8-encoded) vs unicode values, values wrapped
        # in surrounding whitespace, and single values unpacked from
        # their one-element lists -- all must be accepted equivalently.
        for str_values in (True, False):
            for whitespace_surrounded_values in (True, False):
                for unpacked_single_values in (True, False):
                    for request_params, expected_db_obj in cls.basic_cases():
                        if whitespace_surrounded_values:
                            request_params = {
                                key: [u' \t {} \n '.format(v) for v in val]
                                for key, val in request_params.iteritems()}
                        if str_values:
                            request_params = {
                                key: [v.encode('utf-8') for v in val]
                                for key, val in request_params.iteritems()}
                        if unpacked_single_values:
                            request_params = {
                                key: (val[0] if len(val) == 1 else val)
                                for key, val in request_params.iteritems()}
                        yield param(
                            request_params=request_params,
                            expected_db_obj=expected_db_obj,
                        ).label('ok:{}{}{}/{}'.format(
                            's' if str_values else '-',
                            'u' if unpacked_single_values else '-',
                            'w' if whitespace_surrounded_values else '-',
                            len(request_params)))

    @foreach(ok_cases)
    def test_ok(self, request_params, expected_db_obj):
        req = self.create_request(N6RegistrationView, **request_params)
        response = req.perform()
        self._assert_response_ok(response)
        self._assert_db_operations_as_expected(expected_db_obj=expected_db_obj)

    # TODO: more cleaning error cases...
    @foreach(
        param(
            with_set={'org_id': u'blabla@not-valid'},
            with_deleted=(),
            expected_exc_type=ParamValueCleaningError,
        ),
        param(
            with_set={},
            with_deleted={'org_id'},
            expected_exc_type=ParamKeyCleaningError,
        ),
    )
    def test_kwargs_cleaning_error(self, with_set, with_deleted, expected_exc_type):
        # Bad value / missing required key -> cleaning error, DB untouched.
        request_params, _ = next(self.basic_cases())
        request_params.update(with_set)
        for key in with_deleted:
            del request_params[key]
        req = self.create_request(N6RegistrationView, **request_params)
        with self.assertRaises(expected_exc_type):
            req.perform()
        self._assert_db_not_touched()

    # TODO: more cleaning error cases...
    @foreach(
        param(
            exc_type_from_add=DataAPIError,
            expected_exc_type=AuthDatabaseAPIClientError,
        ).label('client data error'),
        param(
            exc_type_from_add=ZeroDivisionError,
            expected_exc_type=ZeroDivisionError,
        ).label('internal error'),
    )
    def test_later_error(self, exc_type_from_add, expected_exc_type):
        # Errors from `session.add()`: a DataAPIError is translated to a
        # client error; anything else propagates unchanged.
        self.session_mock.add.side_effect = exc_type_from_add
        valid_request_params, _ = next(self.basic_cases())
        req = self.create_request(N6RegistrationView, **valid_request_params)
        with self.assertRaises(expected_exc_type):
            req.perform()
        self._assert_db_operations_as_expected(expected_exc_type=exc_type_from_add)

    def _assert_response_ok(self, response):
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.body, 'ok')
        self.assertEqual(response.content_type, 'text/plain')
        self.assertEqual(response.charset, 'UTF-8')

    def _assert_db_operations_as_expected(self, expected_db_obj=None,
                                          expected_exc_type=None):
        # The connector is used as a context manager wrapping exactly one
        # `session.add()`; `expected_exc_type` (or None) is what should
        # have been passed to __exit__().
        self.assertEqual(self.auth_db_connector_mock.mock_calls, [
            call.__enter__(),
            call.get_current_session(),
            call.get_current_session().add(ANY),
            call.__exit__(expected_exc_type, ANY, ANY),
        ])
        if expected_db_obj is None:
            self.assertEqual(self.added_to_session, [])
        else:
            self.assertEqual(len(self.added_to_session), 1)
            self.assertEqual(self.added_to_session[0], expected_db_obj)

    def _assert_db_not_touched(self):
        self.assertEqual(self.auth_db_connector_mock.mock_calls, [])
        self.assertEqual(self.added_to_session, [])
class _TestAbuseChSSLBlacklistBase(unittest.TestCase):

    """
    Base test case class for checking the `get_output_data_body()`
    method of AbuseChSSLBlacklistCollector and
    AbuseChSSLBlacklistDyreCollector.

    NOTE(review): subclasses presumably set the `COLLECTOR_CLASS`
    attribute (it is referenced by the test method but not defined
    here) -- confirm against the concrete subclasses.
    """

    # Well-formed RSS documents (one <item> each), as returned by the
    # mocked `_download_retry()`.
    regular_rss = [
        ('<?xml version="1.0" encoding="ISO-8859-1" ?>\n'
         '<rss version="2.0">\n'
         '<channel>\n'
         '<item>\n'
         '<title>5fcb5b418f779a542b7148f2ddea211495787733 (2016-10-17 12:44:04'
         ')</title>\n<link>https://sslbl.abuse.ch/intel/5fcb5b418f779a542b7148f2ddea211'
         '495787733</link>\n<description>SHA1: 5fcb5b418f779a542b7148f2ddea211495787733'
         'e, Common Name: facenoplays.com, Issuer: COMODO RSA'
         'Domain Validation Secure Server CA</description>\n<guid>https://sslb'
         'l.abuse.ch/intel/5fcb5b418f779a542b7148f2ddea211495787733&id=838'
         '4156e3b53194b118b9fe8c9d26709</guid>\n</item>\n</channel>\n</rss>\n'
         ),
        ('<?xml version="1.0" encoding="ISO-8859-1" ?>\n'
         '<rss version="2.0">\n'
         '<channel>\n'
         '<item>\n'
         '<title>6800af01d6a5b83dc3e8c8d649101f7872719fce (2016-10-17 12:44:04'
         ')</title>\n<link>https://sslbl.abuse.ch/intel/6800af01d6a5b83dc3e8c8'
         'd649101f7872719fce</link>\n<description>SHA1: 6800af01d6a5b83dc3e8c8'
         'd649101f7872719fce, Common Name: facenoplays.com, Issuer: COMODO RSA'
         'Domain Validation Secure Server CA</description>\n<guid>https://sslb'
         'l.abuse.ch/intel/6800af01d6a5b83dc3e8c8d649101f7872719fce&id=838'
         '4156e3b53194b118b9fe8c9d26709</guid>\n</item>\n</channel>\n</rss>\n'
         ),
        ('<?xml version="1.0" encoding="ISO-8859-1" ?>\n'
         '<rss version="2.0">\n'
         '<channel>\n'
         '<item>\n'
         '<title>e03e335629b882f1f03f091123511eaa3fc2d6b1 (2016-10-14 11:13:35)<'
         '/title>\n<link>https://sslbl.abuse.ch/intel/e03e335629b882f1f03f0911235'
         '11eaa3fc2d6b1</link>\n<description>SHA1: e03e335629b882f1f03f091123511e'
         'aa3fc2d6b1, Common Name: C=GB, ST=Berkshire, L=Newbury, O=My Company L'
         'td, Issuer: C=GB, ST=Berkshire, L=Newbury, O=My Company Ltd</descripti'
         'on>\n<guid>https://sslbl.abuse.ch/intel/e03e335629b882f1f03f091123511ea'
         'a3fc2d6b1&id=758994d35dd23c61dacd6902c32cab9e</guid>\n</item>\n</cha'
         'nnel>\n</rss>\n'),
        ('<?xml version="1.0" encoding="ISO-8859-1" ?>\n'
         '<rss version="2.0">\n'
         '<channel>\n'
         '<item>\n'
         '<title>dcbe920e3d0cba40be80fba5e23a6b4f9a706dd4 (2016-10-07 04:51:52)<'
         '/title>\n<link>https://sslbl.abuse.ch/intel/dcbe920e3d0cba40be80fba5e23'
         'a6b4f9a706dd4</link>\n<description>SHA1: dcbe920e3d0cba40be80fba5e23a6b'
         '4f9a706dd4, Common Name: C=US, ST=Denial, L=Springfield, O=Dis, Issuer'
         ': C=US, ST=Denial, L=Springfield, O=Dis</description>\n<guid>https://ss'
         'lbl.abuse.ch/intel/dcbe920e3d0cba40be80fba5e23a6b4f9a706dd4&id=aa8'
         '822242d2ed85df15ba6db737add3d</guid>\n</item>\n</channel>\n</rss>\n'
         ),
    ]

    # An RSS document whose <item> lacks a <link> element, so no detail
    # page URL can be extracted from it (used by the 'no_url' case).
    invalid_rss = [
        ('<?xml version="1.0" encoding="ISO-8859-1" ?>\n'
         '<rss version="2.0">\n'
         '<channel>\n'
         '<item>\n'
         '<title>6800af01d6a5b83dc3e8c8d649101f7872719fce (2016-10-17 12:44:04)<'
         '/title>\n<description>SHA1: 6800af01d6a5b83dc3e8c8d649101f7872719fce, C'
         'ommon Name: facenoplays.com, Issuer: COMODO RSA Domain Validation Secu'
         're Server CA</description>\n<guid>https://sslbl.abuse.ch/intel/6800af01'
         'd6a5b83dc3e8c8d649101f7872719fce&id=8384156e3b53194b118b9fe8c9d267'
         '09</guid>\n</item>\n'),
    ]

    # Expected (deserialized) output data bodies, keyed by detail-page
    # URL.  Note: the `subject`/`issuer`/`fingerprint` values match the
    # mocked HTML pages used in the corresponding cases (not the hashes
    # appearing in the RSS titles/links).
    results = [
        {
            "https://sslbl.abuse.ch/intel/5fcb5b418f779a542b7148f2ddea211495787733": {
                "subject": "OU=Domain Control Validated, OU=PositiveSSL, CN=facenoplays.com",
                "issuer": "C=GB, ST=Greater Manchester, L=Salford, O=COMODO CA Limited, "
                          "CN=COMODO RSA Domain Validation Secure Server CA",
                "fingerprint": "6800af01d6a5b83dc3e8c8d649101f7872719fce",
                "name": "Gozi MITM",
                "timestamp": "2016-10-17 12:44:04",
            },
        },
        {
            "https://sslbl.abuse.ch/intel/6800af01d6a5b83dc3e8c8d649101f7872719fce": {
                "subject": "C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost",
                "issuer": "C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost",
                "fingerprint": "5fcb5b418f779a542b7148f2ddea211495787733",
                "name": "ZeuS C&C",
                "timestamp": "2016-10-17 11:52:40",
                "binaries": [
                    [
                        "2016-10-13 16:27:10",
                        "76b609dac79e76fe7b5a78af35c5a2d6",
                        "52.77.110.77",
                        "443",
                    ],
                    [
                        "2016-10-10 17:29:57",
                        "9096210f20753c836378ca7aa18c3d25",
                        "52.77.110.77",
                        "443",
                    ],
                ],
            },
        },
        {
            "https://sslbl.abuse.ch/intel/e03e335629b882f1f03f091123511eaa3fc2d6b1": {
                "subject": "C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost",
                "issuer": "C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost",
                "fingerprint": "e03e335629b882f1f03f091123511eaa3fc2d6b1",
                "name": "ZeuS C&C",
                "timestamp": "2016-10-17 11:52:40",
                "binaries": [
                    [
                        "2016-10-07 19:55:38",
                        "d9e83ed20a652e7629b753e20336f7a4",
                        "52.77.110.77",
                        "443",
                    ],
                ],
            },
        },
    ]

    # HTML detail page *without* an "Associated malware binaries" table.
    detail_page = (
        '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http:/'
        '/www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n<html xmlns="http:'
        '//www.w3.org/1999/xhtml">\n<head>\n'
        '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
        '\n<meta name="robots" content="all" />\n'
        '<meta name="description" content="The SSL Blacklist is a collection of'
        ' SHA1 fingerprints of malicious SSL certificates that are being used b'
        'y specific botnet C&C channels to control infected computers" />\n<'
        'meta name="keywords" content="SSL, blacklist, blocklist, database, fin'
        'gerprint, sha1, suricata, ids, ips, intrusion detection, prevention, s'
        'nort" />\n<link href="/css/layout.css" rel="stylesheet" type="text/css"'
        ' />\n<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico" '
        '/>\n<script type="text/javascript" src="/js/sorttable.js"></script>\n<ti'
        'tle>SSL Blacklist :: Blacklist</title></head>\n<body>\n<div class="MainC'
        'ontainer">\n<div class="Header"></div>\n<div class="navigation"><a href='
        '"/" target="_parent" title="SSL Blacklist Home">Home</a> | <a href="/b'
        'lacklist/" target="_parent" title="SSL Blacklist">SSL Blacklist</a> | '
        '<a href="http://www.abuse.ch/?page_id=4727" target="_blank" title="Con'
        'tact abuse.ch">Contact</a></div>\n<h1>SSL Certificate Information</h1>'
        '\n<table class="tlstable">\n<tr bgcolor="#ffffff"><th>Subject Common Nam'
        'e:</th><td>facenoplays.com</td></tr>\n<tr bgcolor="#D8D8D8"><th>Subject'
        ':</th><td>OU=Domain Control Validated, OU=PositiveSSL, CN=facenoplays.'
        'com</td></tr>\n<tr bgcolor="#ffffff"><th>Issuer Common Name:</th><td>CO'
        'MODO RSA Domain Validation Secure Server CA</td></tr>\n<tr bgcolor="#D8'
        'D8D8"><th>Issuer:</th><td>C=GB, ST=Greater Manchester, L=Salford, O=CO'
        'MODO CA Limited, CN=COMODO RSA Domain Validation Secure Server CA</td>'
        '</tr>\n<tr bgcolor="#ffffff"><th>Fingerprint (SHA1):</th><td>6800af01d6'
        'a5b83dc3e8c8d649101f7872719fce</td></tr>\n<tr bgcolor="red"><th>Status:'
        '</th><td><strong>Blacklisted</strong> (Reason: Gozi MITM, Listing date'
        ': 2016-10-17 12:44:04)</td></tr>\n</table>\n<br /><h2>Associated malware'
        ' binaries</h2>\n<p>This SSL certificate was spotted passively or by usi'
        'ng scanning techniques. Therefore SSLBL is not able to provide any ref'
        'erencing malware binaries.</p>\n<div class="footer">Copyright © 20'
        '16 - sslbl.abuse.ch</div>\n</div>\n</body>\n</html>\n')

    # HTML detail page *with* a two-row binaries table.
    binaries_page = (
        '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http:/'
        '/www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n<html xmlns="http:'
        '//www.w3.org/1999/xhtml">\n<head>\n'
        '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
        '\n<meta name="robots" content="all" />\n'
        '<meta name="description" content="The SSL Blacklist is a collection of'
        ' SHA1 fingerprints of malicious SSL certificates that are being used b'
        'y specific botnet C&C channels to control infected computers" />\n<'
        'meta name="keywords" content="SSL, blacklist, blocklist, database, fin'
        'gerprint, sha1, suricata, ids, ips, intrusion detection, prevention, s'
        'nort" />\n<link href="/css/layout.css" rel="stylesheet" type="text/css"'
        ' />\n<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico" '
        '/>\n<script type="text/javascript" src="/js/sorttable.js"></script>\n<ti'
        'tle>SSL Blacklist :: Blacklist</title></head>\n<body>\n<div class="MainC'
        'ontainer">\n<div class="Header"></div>\n<div class="navigation"><a href='
        '"/" target="_parent" title="SSL Blacklist Home">Home</a> | <a href="/b'
        'lacklist/" target="_parent" title="SSL Blacklist">SSL Blacklist</a> | '
        '<a href="http://www.abuse.ch/?page_id=4727" target="_blank" title="Con'
        'tact abuse.ch">Contact</a></div>\n<h1>SSL Certificate Information</h1>'
        '\n<table class="tlstable">\n<tr bgcolor="#ffffff"><th>Subject Common Nam'
        'e:</th><td>localhost</td></tr>\n<tr bgcolor="#D8D8D8"><th>Subject:</th>'
        '<td>C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost</td>'
        '</tr>\n<tr bgcolor="#ffffff"><th>Issuer Common Name:</th><td>localhost<'
        '/td></tr>\n<tr bgcolor="#D8D8D8"><th>Issuer:</th><td>C=GB, ST=Yorks, L='
        'York, O=MyCompany Ltd., OU=IT, CN=localhost</td></tr>\n<tr bgcolor="#ff'
        'ffff"><th>SSL Version:</th><td>TLSv1</td></tr>\n<tr bgcolor="#D8D8D8"><'
        'th>Fingerprint (SHA1):</th><td>5fcb5b418f779a542b7148f2ddea21149578773'
        '3</td></tr>\n<tr bgcolor="red"><th>Status:</th><td><strong>Blacklisted<'
        '/strong> (Reason: ZeuS C&C, Listing date: 2016-10-17 11:52:40)</td'
        '></tr>\n</table>\n<br /><h2>Associated malware binaries</h2>\n<table clas'
        's="sortable">\n<tr><th>Timestamp (UTC)</th><th>Malware binary (MD5 hash'
        ')</th><th>DstIP</th><th>DstPort</th></tr>\n<tr bgcolor="#D8D8D8">'
        '<td>2016-10-13 16:27:10</td><td>76b609dac79'
        'e76fe7b5a78af35c5a2d6</td><td>52.77.110.77</td><td>443</td></tr>\n'
        '<tr bgcolor="#ffffff"><td>2016-10-10 17:29:57</td><td>9096210f20753c83637'
        '8ca7aa18c3d25</td><td>52.77.110.77</td><td>443</td></tr>'
        '</table>\n<p># of referencing malware binaries: <strong>4</strong>'
        '</p>\n<div class="footer">Copyright © 2016 - sslbl.ab'
        'use.ch</div>\n</div>\n</body>\n</html>\n')

    # HTML detail page containing one binaries-table row *more* than
    # recorded in `states[0]` (used by the 'updated_page' case).
    updated_page = (
        '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http:/'
        '/www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n<html xmlns="http:'
        '//www.w3.org/1999/xhtml">\n<head>\n'
        '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
        '\n<meta name="robots" content="all" />\n'
        '<meta name="description" content="The SSL Blacklist is a collection of'
        ' SHA1 fingerprints of malicious SSL certificates that are being used b'
        'y specific botnet C&C channels to control infected computers" />\n<'
        'meta name="keywords" content="SSL, blacklist, blocklist, database, fin'
        'gerprint, sha1, suricata, ids, ips, intrusion detection, prevention, s'
        'nort" />\n<link href="/css/layout.css" rel="stylesheet" type="text/css"'
        ' />\n<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico" '
        '/>\n<script type="text/javascript" src="/js/sorttable.js"></script>\n<ti'
        'tle>SSL Blacklist :: Blacklist</title></head>\n<body>\n<div class="MainC'
        'ontainer">\n<div class="Header"></div>\n<div class="navigation"><a href='
        '"/" target="_parent" title="SSL Blacklist Home">Home</a> | <a href="/b'
        'lacklist/" target="_parent" title="SSL Blacklist">SSL Blacklist</a> | '
        '<a href="http://www.abuse.ch/?page_id=4727" target="_blank" title="Con'
        'tact abuse.ch">Contact</a></div>\n<h1>SSL Certificate Information</h1>'
        '\n<table class="tlstable">\n<tr bgcolor="#ffffff"><th>Subject Common Nam'
        'e:</th><td>localhost</td></tr>\n<tr bgcolor="#D8D8D8"><th>Subject:</th>'
        '<td>C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost</td>'
        '</tr>\n<tr bgcolor="#ffffff"><th>Issuer Common Name:</th><td>localhost<'
        '/td></tr>\n<tr bgcolor="#D8D8D8"><th>Issuer:</th><td>C=GB, ST=Yorks, L='
        'York, O=MyCompany Ltd., OU=IT, CN=localhost</td></tr>\n<tr bgcolor="#ff'
        'ffff"><th>SSL Version:</th><td>TLSv1</td></tr>\n<tr bgcolor="#D8D8D8"><'
        'th>Fingerprint (SHA1):</th><td>e03e335629b882f1f03f091123511eaa3fc2d6b1'
        '</td></tr>\n<tr bgcolor="red"><th>Status:</th><td><strong>Blacklisted<'
        '/strong> (Reason: ZeuS C&C, Listing date: 2016-10-17 11:52:40)</td'
        '></tr>\n</table>\n<br /><h2>Associated malware binaries</h2>\n<table clas'
        's="sortable">\n<tr><th>Timestamp (UTC)</th><th>Malware binary (MD5 hash'
        ')</th><th>DstIP</th><th>DstPort</th></tr>\n<tr bgcolor="#D8D8D8"><td>20'
        '16-10-13 16:27:10</td><td>76b609dac79e76fe7b5a78af35c5a2d6</td><td>52.'
        '77.110.77</td><td>443</td></tr>\n<tr bgcolor="#ffffff"><td>2016-10-10 1'
        '7:29:57</td><td>9096210f20753c836378ca7aa18c3d25</td><td>52.77.110.77<'
        '/td><td>443</td></tr>\n<tr bgcolor="#D8D8D8"><td>2016-10-07 19:55:38</t'
        'd><td>d9e83ed20a652e7629b753e20336f7a4</td><td>52.77.110.77</td><td>44'
        '3</td></tr>\n</table>\n<p># of referencing malware binaries: <strong>3</'
        'strong></p>\n<div class="footer">Copyright © 2016 - sslbl.abuse.ch'
        '</div>\n</div>\n</body>\n</html>\n')

    # HTML detail page containing no data beyond what is already
    # recorded in `states[1]` (used by the 'not_updated_page' case).
    not_updated_page = (
        '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http:/'
        '/www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n<html xmlns="http:'
        '//www.w3.org/1999/xhtml">\n<head>\n'
        '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
        '\n<meta name="robots" content="all" />\n'
        '<meta name="description" content="The SSL Blacklist is a collection of'
        ' SHA1 fingerprints of malicious SSL certificates that are being used b'
        'y specific botnet C&C channels to control infected computers" />\n<'
        'meta name="keywords" content="SSL, blacklist, blocklist, database, fin'
        'gerprint, sha1, suricata, ids, ips, intrusion detection, prevention, s'
        'nort" />\n<link href="/css/layout.css" rel="stylesheet" type="text/css"'
        ' />\n<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico" '
        '/>\n<script type="text/javascript" src="/js/sorttable.js"></script>\n<ti'
        'tle>SSL Blacklist :: Blacklist</title></head>\n<body>\n<div class="MainC'
        'ontainer">\n<div class="Header"></div>\n<div class="navigation"><a href='
        '"/" target="_parent" title="SSL Blacklist Home">Home</a> | <a href="/b'
        'lacklist/" target="_parent" title="SSL Blacklist">SSL Blacklist</a> | '
        '<a href="http://www.abuse.ch/?page_id=4727" target="_blank" title="Con'
        'tact abuse.ch">Contact</a></div>\n<h1>SSL Certificate Information</h1>'
        '\n<table class="tlstable">\n<tr bgcolor="#ffffff"><th>Subject Common Nam'
        'e:</th><td>C=US, ST=Denial, L=Springfield, O=Dis</td></tr>\n<tr bgcolor'
        '="#D8D8D8"><th>Subject:</th><td>C=US, ST=Denial, L=Springfield, O=Dis<'
        '/td></tr>\n<tr bgcolor="#ffffff"><th>Issuer Common Name:</th><td>C=US, '
        'ST=Denial, L=Springfield, O=Dis</td></tr>\n<tr bgcolor="#D8D8D8"><th>Is'
        'suer:</th><td>C=US, ST=Denial, L=Springfield, O=Dis</td></tr>\n<tr bgco'
        'lor="#ffffff"><th>SSL Version:</th><td>TLS 1.2</td></tr>\n<tr bgcolor="'
        '#D8D8D8"><th>Fingerprint (SHA1):</th><td>dcbe920e3d0cba40be80fba5e23a6'
        'b4f9a706dd4</td></tr>\n<tr bgcolor="red"><th>Status:</th><td><strong>Bl'
        'acklisted</strong> (Reason: TorrentLocker C&C, Listing date: 2016-'
        '10-07 04:51:52)</td></tr>\n</table>\n<br /><h2>Associated malware binari'
        'es</h2>\n<table class="sortable">\n<tr><th>Timestamp (UTC)</th><th>Malwa'
        're binary (MD5 hash)</th><th>DstIP</th><th>DstPort</th></tr>\n<tr bgcol'
        'or="#D8D8D8"><td>2016-10-06 15:32:44</td><td>cedb27c0621a42ca3da0b0a01'
        '2e2ac43</td><td>46.38.52.233</td><td>443</td></tr>\n</table>\n<p># of re'
        'ferencing malware binaries: <strong>4</strong></p>\n<div class="footer"'
        '>Copyright © 2016 - sslbl.abuse.ch</div>\n</div>\n</body>\n</html>'
        '\n')

    # Saved collector states, as returned by the mocked `load_state()`
    # (binaries are stored as tuples here, unlike the lists in
    # `results` which come from JSON deserialization).
    states = [
        {
            "https://sslbl.abuse.ch/intel/e03e335629b882f1f03f091123511eaa3fc2d6b1": {
                "subject": "C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost",
                "issuer": "C=GB, ST=Yorks, L=York, O=MyCompany Ltd., OU=IT, CN=localhost",
                "fingerprint": "e03e335629b882f1f03f091123511eaa3fc2d6b1",
                "name": "ZeuS C&C",
                "timestamp": "2016-10-17 11:52:40",
                "binaries": [
                    (
                        "2016-10-13 16:27:10",
                        "76b609dac79e76fe7b5a78af35c5a2d6",
                        "52.77.110.77",
                        "443",
                    ),
                    (
                        "2016-10-10 17:29:57",
                        "9096210f20753c836378ca7aa18c3d25",
                        "52.77.110.77",
                        "443",
                    ),
                ],
            },
        },
        {
            "https://sslbl.abuse.ch/intel/dcbe920e3d0cba40be80fba5e23a6b4f9a706dd4": {
                "subject": "C=US, ST=Denial, L=Springfield, O=Dis",
                "issuer": "C=US, ST=Denial, L=Springfield, O=Dis",
                "fingerprint": "dcbe920e3d0cba40be80fba5e23a6b4f9a706dd4",
                "name": "TorrentLocker C&C",
                "timestamp": "2016-10-07 04:51:52",
                "binaries": [
                    (
                        "2016-10-06 15:32:44",
                        "cedb27c0621a42ca3da0b0a012e2ac43",
                        "46.38.52.233",
                        "443",
                    ),
                ],
            },
        },
    ]

    params = [
        # 1st case: detail page does not contain binaries table
        param(
            rss=regular_rss[0],
            page=detail_page,
            state=None,
            result=results[0],
        ).label('no_binaries'),
        # 2nd case: detail page with binaries table
        param(
            rss=regular_rss[1],
            page=binaries_page,
            state=None,
            result=results[1],
        ).label('binaries'),
        # 3rd case: invalid RSS, no URL, no new data
        param(
            rss=invalid_rss[0],
            page=None,
            state=None,
            result=None,
        ).label('no_url'),
        # 4th case: detail page contains one more binary record,
        # comparing to data saved during last collector's "run"
        param(
            rss=regular_rss[2],
            page=updated_page,
            state=states[0],
            result=results[2],
        ).label('updated_page'),
        # 5th case: no new items, do not publish
        param(
            rss=regular_rss[3],
            page=not_updated_page,
            state=states[1],
            result=None,
        ).label('not_updated_page')
    ]

    # Minimal config used when instantiating the collector.
    mocked_config = {
        'url': mock.sentinel.dummy_url,
    }

    @foreach(params)
    def test__get_output_data_body(self, rss, page, state, result, label):
        # Exercise `get_output_data_body()` with all network and state
        # I/O mocked out; `label` (the param label) is used below to
        # identify the cases expected to yield no new data.
        with mock.patch('n6.collectors.generic.CollectorWithStateMixin.__init__'), \
                mock.patch.object(self.COLLECTOR_CLASS,
                                  'config',
                                  self.mocked_config,
                                  create=True):
            instance = self.COLLECTOR_CLASS()
            instance._download_retry = mock.Mock(return_value=rss)
            instance._download_retry_external = mock.Mock(return_value=page)
            instance.load_state = mock.Mock(return_value=state)
            if label in ('no_url', 'not_updated_page'):
                # No new data -> the collector signals it by raising.
                with self.assertRaises(NoNewDataException):
                    self.COLLECTOR_CLASS.get_output_data_body(instance)
            else:
                output_data_body = self.COLLECTOR_CLASS.get_output_data_body(
                    instance)
                self.assertDictEqual(loads(output_data_body), result)
class Test__get_amqp_connection_params_dict(unittest.TestCase, TestCaseMixin):

    """Tests of the `get_amqp_connection_params_dict()` helper."""

    def setUp(self):
        # Patch out the real config machinery and pika's
        # ExternalCredentials factory.
        self.ConfigMock = self.patch('n6lib.config.Config')
        self.patch('n6lib.amqp_helpers.pika.credentials.ExternalCredentials',
                   return_value=sen.ExternalCredentials)

    # NOTE: the two stacked @foreach decorators combine their param
    # collections, so `test()` is run for each (section-name variant,
    # config-content variant) combination.
    @foreach([
        # No argument -> the default 'rabbitmq' config section is used.
        param(
            given_args=[],
            expected_rabbitmq_config_section='rabbitmq',
        ),
        # Explicit section name argument.
        param(
            given_args=['particular_section'],
            expected_rabbitmq_config_section='particular_section',
        ),
    ])
    @foreach([
        # SSL disabled -> empty `ssl_options`, no `credentials`.
        param(
            conf_section_content={
                'host': 'debian',
                'port': 5672,
                'ssl': 0,
                'heartbeat_interval': 30,
            },
            expected_result={
                'host': 'debian',
                'port': 5672,
                'ssl': 0,
                'ssl_options': {},
                'heartbeat_interval': 30,
            },
        ),
        # SSL enabled -> cert paths mapped into `ssl_options` and
        # ExternalCredentials used as `credentials`.
        param(
            conf_section_content={
                'host': 'debian',
                'port': 5672,
                'ssl': 1,
                'ssl_ca_certs': '/cert/testca/cacert.pem',
                'ssl_certfile': '/cert/client/cert.pem',
                'ssl_keyfile': '/cert/client/key.pem',
                'heartbeat_interval': 30,
            },
            expected_result={
                'host': 'debian',
                'port': 5672,
                'ssl': 1,
                'ssl_options': {
                    'ca_certs': '/cert/testca/cacert.pem',
                    'certfile': '/cert/client/cert.pem',
                    'keyfile': '/cert/client/key.pem',
                    'cert_reqs': ssl.CERT_REQUIRED,
                },
                'credentials': sen.ExternalCredentials,
                'heartbeat_interval': 30,
            },
        ),
    ])
    def test(self, given_args, conf_section_content,
             expected_rabbitmq_config_section, expected_result):
        self.ConfigMock.section.return_value = ConfigSection(
            '<irrelevant for these tests>', conf_section_content)
        expected_rabbitmq_config_spec = RABBITMQ_CONFIG_SPEC_PATTERN.format(
            rabbitmq_config_section=expected_rabbitmq_config_section)
        result = get_amqp_connection_params_dict(*given_args)
        # The config must have been queried exactly once, with the
        # config spec built for the expected section name.
        self.assertEqual(self.ConfigMock.mock_calls, [
            call.section(expected_rabbitmq_config_spec),
        ])
        self.assertEqual(result, expected_result)
def _get_arguments():
    """Yield one `param` object per access zone under test."""
    for access_zone in ('inside', 'threats', 'search'):
        yield param(access_zone=access_zone)