Example 1
 def test_time_related_components_of_generated_queries(
         self, given_time_constraints_items, expected_query_expr_reprs):
     mock = MagicMock()
     meth = MethodProxy(
         _EventsQueryProcessor,
         mock,
         class_attrs=[
             # for them, the actual members/methods (not mocks) will be used
             # * class constants:
             'queried_model_class',
             'client_asoc_model_class',
             # * methods:
             '_prepare_result_production_tools',
             '_fetch_rows_from_db',
             '_time_comparisons_per_step',
             '_fetch_rows_for_single_step',
             '_build_query_base_for_single_step',
             '_build_actual_query',
         ])
     actual_query_expr_reprs = []
     and_mock = self._make_mock_of_sqlalchemy_and(actual_query_expr_reprs)
     mock._query_base = self._make_mock_of_query_base(
         actual_query_expr_reprs)
     mock._day_step = 1
     mock._opt_limit = None
     mock._time_constraints = (
         given_time_constraints_items['time.min'],
         given_time_constraints_items.get('time.max'),
         given_time_constraints_items.get('time.until'))
     with patch('n6lib.data_backend_api.and_', and_mock), \
          patch('n6lib.data_backend_api.utcnow', return_value=self._UTCNOW):
         list(meth.generate_query_results())
     self.assertEqual(actual_query_expr_reprs, expected_query_expr_reprs)
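Note: every example in this listing relies on the same MethodProxy pattern, in which the method under test runs as real code while a MagicMock stands in for `self`, so all collaborators stay mocked. The real helper comes from the n6 test utilities; the class below is only a minimal sketch of the idea (its name and exact semantics are assumptions, not the actual implementation):

import types


class MethodProxySketch:
    """A toy stand-in illustrating the idea, not the real n6 MethodProxy."""

    def __init__(self, cls, mock, class_attrs=()):
        self._cls = cls
        self._mock = mock
        # Selected real class attributes are exposed on the mock: plain
        # functions are bound to the mock, other attributes (e.g. class
        # constants) are copied as-is.
        if isinstance(class_attrs, str):
            class_attrs = [class_attrs]
        for name in class_attrs:
            attr = getattr(cls, name)
            setattr(mock, name,
                    attr.__get__(mock, cls) if isinstance(attr, types.FunctionType)
                    else attr)

    def __getattr__(self, name):
        # Any method reached through the proxy is the *real* implementation,
        # bound to the mock which stands in for `self`.  (Dunder access such
        # as `meth.__init__(...)` or `meth.__repr__()`, used in some examples,
        # would need extra handling; it is omitted here for brevity.)
        return getattr(self._cls, name).__get__(self._mock, self._cls)

With such a proxy, `meth.generate_query_results()` above executes the real `_EventsQueryProcessor` logic, while every attribute it reads from `self` (e.g. `_query_base`, `_day_step`) resolves to the values configured on the MagicMock.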
Example 2
 def test_time_related_components_of_generated_queries(
         self, given_params, expected_query_expr_reprs):
     mock = MagicMock()
     meth = MethodProxy(
         _QueryProcessor,
         mock,
         class_attrs=[
             # for them, the actual members/methods (not mocks) will be used
             # class constants:
             'queried_model_class',
             'client_relationship',
             'client_asoc_model_class',
             # methods:
             'pop_time_min_max_until',
             'pop_limit',
             'make_time_cmp_generator',
             'query__ordering_by',
             'query__limit',
         ])
     actual_query_expr_reprs = []
     and_mock = self._make_the_and_mock(actual_query_expr_reprs)
     query_mock = self._make_the_query_mock(actual_query_expr_reprs)
     mock.build_query.return_value = query_mock
     with patch('n6lib.data_backend_api.and_', and_mock), \
          patch('n6lib.data_backend_api.utcnow', return_value=self._UTCNOW):
         list(
             meth.generate_query_results(given_params,
                                         item_number_limit=None,
                                         day_step=1))
     self.assertEqual(actual_query_expr_reprs, expected_query_expr_reprs)
Example 3
 def setUp(self):
     self.cleaned_result_dict = {
         'category': 'bots',
         'source': 'hidden.42',
     }
     self.mock = MagicMock(__class__=Anonymizer)
     self.meth = MethodProxy(Anonymizer, self.mock, 'OUTPUT_RK_PATTERN')
Example 4
    def setUp(self):
        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock)

        self.mock.data_spec = N6DataSpec()
        self.mock.auth_api.get_anonymized_source_mapping.return_value = {
            'forward_mapping': self.forward_source_mapping,
        }
        self.mock.auth_api.get_dip_anonymization_disabled_source_ids.return_value = frozenset()
Example 5
class TestSSLUserAuthenticationPolicy(unittest.TestCase):

    def setUp(self):
        self.mock = Mock(__class__=SSLUserAuthenticationPolicy)
        self.meth = MethodProxy(SSLUserAuthenticationPolicy, self.mock, [
            'merge_orgid_userid',
        ])

    def test__unauthenticated_userid__ok(self):
        request = MagicMock()
        request.environ = {'SSL_CLIENT_S_DN_O': 'OrgName1', 'SSL_CLIENT_S_DN_CN': 'UserName1'}
        result = self.meth.unauthenticated_userid(request)
        expected_result = 'OrgName1,UserName1'
        self.assertEqual(result, expected_result)

    def test__unauthenticated_userid__user_id_is_None(self):
        request = MagicMock()
        request.environ = {'SSL_CLIENT_S_DN_O': 'testorgname2', 'SSL_CLIENT_S_DN_CN': None}
        result = self.meth.unauthenticated_userid(request)
        self.assertIsNone(result)

    def test__unauthenticated_userid__org_id_is_None(self):
        request = MagicMock()
        request.environ = {'SSL_CLIENT_S_DN_O': None, 'SSL_CLIENT_S_DN_CN': 'testusername3'}
        result = self.meth.unauthenticated_userid(request)
        self.assertIsNone(result)

    @patch('n6lib.pyramid_commons._pyramid_commons.LOGGER.warning')
    def test__unauthenticated_userid__comma_in_user_id(self, LOGGER_warning_mock):
        request = MagicMock()
        request.environ = {'SSL_CLIENT_S_DN_O': 'orgname4', 'SSL_CLIENT_S_DN_CN': 'user,name4'}
        result = self.meth.unauthenticated_userid(request)
        self.assertIsNone(result)
        self.assertEqual(LOGGER_warning_mock.mock_calls,
                         [call('Comma in user_id %r.', 'user,name4')])

    @patch('n6lib.pyramid_commons._pyramid_commons.LOGGER.warning')
    def test__unauthenticated_userid__comma_in_org_id(self, LOGGER_warning_mock):
        request = MagicMock()
        request.environ = {'SSL_CLIENT_S_DN_O': 'orgname,5', 'SSL_CLIENT_S_DN_CN': 'username5'}
        result = self.meth.unauthenticated_userid(request)
        self.assertIsNone(result)
        self.assertEqual(LOGGER_warning_mock.mock_calls,
                         [call('Comma in org_id %r.', 'orgname,5')])

    def test_other_important_methods_are_from_BaseUserAuthenticationPolicy(self):
        self.assertIs(SSLUserAuthenticationPolicy.get_auth_data,
                      BaseUserAuthenticationPolicy.get_auth_data)
        self.assertIs(SSLUserAuthenticationPolicy.authenticated_userid.__func__,
                      BaseUserAuthenticationPolicy.authenticated_userid.__func__)
        self.assertIs(SSLUserAuthenticationPolicy.effective_principals.__func__,
                      BaseUserAuthenticationPolicy.effective_principals.__func__)
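For orientation, here is a small self-contained sketch (not the actual n6lib code) of the behaviour these tests pin down: the org id and user id come from the client-certificate DN fields of the WSGI environ and are merged with a comma, while a missing value, or a comma embedded in either field, yields None (the real policy also logs a warning in the comma cases):

def unauthenticated_userid_sketch(environ):
    # Hypothetical helper, consistent with the expectations in the tests above.
    org_id = environ.get('SSL_CLIENT_S_DN_O')
    user_id = environ.get('SSL_CLIENT_S_DN_CN')
    if org_id is None or user_id is None:
        return None
    if ',' in org_id or ',' in user_id:
        return None  # (the real code logs "Comma in org_id/user_id ..." here)
    return '{},{}'.format(org_id, user_id)

assert unauthenticated_userid_sketch(
    {'SSL_CLIENT_S_DN_O': 'OrgName1', 'SSL_CLIENT_S_DN_CN': 'UserName1'}) == 'OrgName1,UserName1'
assert unauthenticated_userid_sketch(
    {'SSL_CLIENT_S_DN_O': 'orgname4', 'SSL_CLIENT_S_DN_CN': 'user,name4'}) is None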
Example 6
 def test(self,
          raw_result_dict,
          expected_result,
          filtering_params=None,
          url_normalization_data_cache=None):
     mock = MagicMock()
     meth = MethodProxy(_EventsQueryProcessor, mock)
     mock._filtering_params = (copy.deepcopy(filtering_params)
                               if filtering_params is not None else {})
     mock._url_normalization_data_cache = (url_normalization_data_cache
                                           if url_normalization_data_cache
                                           is not None else {})
     raw_result_dict = copy.deepcopy(raw_result_dict)
     actual_result = meth._preprocess_result_dict(raw_result_dict)
     self.assertEqualIncludingTypes(actual_result, expected_result)
Example 7
    def test__get_redirect_url_if_no_time_min(self, request_query_string,
                                              cleaned_param_dict,
                                              expected_redirect_url):
        mock = MagicMock()
        mock.request.path_url = self.REQUEST_PATH_URL
        mock.request.query_string = request_query_string
        meth = MethodProxy(
            RestAPIViewBase,
            mock,
            class_attrs=['SAFE_ACTIVE_MIN_DELTA', '_get_redirect_url'])

        with patch('n6web.utcnow', return_value=self.UTC_NOW):
            actual_redirect_url = meth.get_redirect_url_if_no_time_min(
                cleaned_param_dict, self.DEFAULT_DELTA)

        self.assertEqual(actual_redirect_url, expected_redirect_url)
Example 8
    def setUp(self):
        self.event_type = 'bl-update'
        self.event_data = {'some...': 'content...', 'id': 'some id...'}
        self.routing_key = self.event_type + '.filtered.*.*'
        self.body = json.dumps(self.event_data)
        self.resource_to_org_ids = {}

        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock, '_process_input')

        self.mock._get_resource_to_org_ids.return_value = self.resource_to_org_ids
        self.mock._get_result_dicts_and_output_body.return_value = (
            sen.raw_result_dict,
            sen.cleaned_result_dict,
            sen.output_body,
        )
Example 9
 def test(self):
     mock = MagicMock()
     meth = MethodProxy(_QueryProcessor, mock)
     params = {
         'name': 'foo-bar',
         'opt.primary': True,
         'time.min': [dt(2015, 1, 4)],
         'time.until': [dt(2015, 1, 5)],
     }
     expected_params = {
         'name': 'foo-bar',
         'time.min': [dt(2015, 1, 4)],
         'time.until': [dt(2015, 1, 5)],
     }
     meth.delete_opt_prefixed_params(params)
     self.assertEqual(params, expected_params)
     self.assertEqual(mock.mock_calls, [])
Example 10
 def setUp(self):
     self.mock = MagicMock(__class__=Anonymizer)
     self.meth = MethodProxy(Anonymizer, self.mock, '_VALID_EVENT_TYPES')
Example 11
 def setUp(self):
     self.mock = Mock(__class__=AMQPThreadedPusher)
     self.mock.DEFAULT_PROP_KWARGS = AMQPThreadedPusher.DEFAULT_PROP_KWARGS
     self.meth = MethodProxy(AMQPThreadedPusher, self.mock)
Example 12
class TestAMQPThreadedPusher__init__repr(unittest.TestCase):
    def setUp(self):
        self.mock = Mock(__class__=AMQPThreadedPusher)
        self.mock.DEFAULT_PROP_KWARGS = AMQPThreadedPusher.DEFAULT_PROP_KWARGS
        self.meth = MethodProxy(AMQPThreadedPusher, self.mock)

    def test__init__using_defaults__no_ssl(self):
        connection_params_dict_mock = RLockedMagicMock()
        connection_params_dict_mock.get.return_value = False
        self.meth.__init__(connection_params_dict=connection_params_dict_mock,
                           exchange='my-exchange')
        # attrs
        self.assertIs(self.mock._connection_params_dict,
                      connection_params_dict_mock)
        self.assertEqual(self.mock._exchange, {'exchange': 'my-exchange'})
        self.assertEqual(self.mock._exchange_name, 'my-exchange')
        self.assertEqual(self.mock._queues_to_declare, [])
        self.assertIsNone(self.mock._serialize)
        self.assertEqual(self.mock._prop_kwargs,
                         AMQPThreadedPusher.DEFAULT_PROP_KWARGS)
        self.assertEqual(self.mock._mandatory, False)
        self.assertIs(self.mock._output_fifo.__class__, queue.Queue)
        self.assertEqual(self.mock._output_fifo.maxsize, 20000)
        self.assertIsNone(self.mock._error_callback)
        # calls
        self.assertEqual(self.mock.mock_calls, [
            call._setup_communication(),
            call._start_publishing(),
        ])
        self.assertEqual(
            connection_params_dict_mock.mock_calls,
            [
                call.get('ssl'),
                ('__contains__', ('client_properties',),
                 {}),  # because `call.__contains__` cannot be used
                call.__setitem__('client_properties', ANY),
            ])
        self.assertIsInstance(
            # 2nd argument passed to __setitem__()
            connection_params_dict_mock.__setitem__.mock_calls[0][-2][1],
            dict)

    def test__init__specifying_all__with_ssl(self):
        connection_params_dict_mock = RLockedMagicMock()
        connection_params_dict_mock.get.return_value = True
        connection_params_dict_mock.__contains__.return_value = True
        self.meth.__init__(connection_params_dict=connection_params_dict_mock,
                           exchange={
                               'exchange': sen.exchange,
                               'foo': sen.foo
                           },
                           queues_to_declare=sen.queues_to_declare,
                           serialize=sen.serialize,
                           prop_kwargs=sen.prop_kwargs,
                           mandatory=sen.mandatory,
                           output_fifo_max_size=12345,
                           error_callback=sen.error_callback)
        # attrs
        self.assertIs(self.mock._connection_params_dict,
                      connection_params_dict_mock)
        self.assertEqual(self.mock._exchange, {
            'exchange': sen.exchange,
            'foo': sen.foo
        })
        self.assertEqual(self.mock._exchange_name, sen.exchange)
        self.assertEqual(self.mock._queues_to_declare, [
            {
                'queue': sen.queues_to_declare,
                'callback': ANY
            },
        ])
        self.assertEqual(self.mock._serialize, sen.serialize)
        self.assertEqual(self.mock._prop_kwargs, sen.prop_kwargs)
        self.assertEqual(self.mock._mandatory, sen.mandatory)
        self.assertIs(self.mock._output_fifo.__class__, queue.Queue)
        self.assertEqual(self.mock._output_fifo.maxsize, 12345)
        self.assertEqual(self.mock._error_callback, sen.error_callback)
        # calls
        self.assertEqual(self.mock.mock_calls, [
            call._setup_communication(),
            call._start_publishing(),
        ])
        self.assertEqual(
            connection_params_dict_mock.mock_calls,
            [
                call.get('ssl'),
                call.setdefault('credentials', ANY),
                ('__contains__', ('client_properties',),
                 {}),  # because `call.__contains__` cannot be used
            ])
        self.assertIsInstance(
            # 2nd argument passed to setdefault()
            connection_params_dict_mock.setdefault.mock_calls[0][-2][1],
            pika.credentials.ExternalCredentials)

    def test__init__specifying_all_and_obtaining_global_conn_params__with_ssl(
            self):
        connection_params_dict_mock = RLockedMagicMock(
            name='connection_params_dict')
        connection_params_dict_mock.get.return_value = True
        connection_params_dict_mock.__contains__.return_value = True
        with rlocked_patch(
                'n6lib.amqp_getters_pushers.get_amqp_connection_params_dict',
                return_value=connection_params_dict_mock,
        ) as get_amqp_conn_params_mock:
            self.meth.__init__(connection_params_dict=None,
                               exchange={
                                   'exchange': sen.exchange,
                                   'bar': sen.bar
                               },
                               queues_to_declare=[
                                   sen.queue1,
                                   {
                                       'blabla': sen.blabla
                                   },
                                   {
                                       'blabla': sen.blabla,
                                       'callback': sen.callback
                                   },
                               ],
                               serialize=sen.serialize,
                               prop_kwargs=sen.prop_kwargs,
                               mandatory=sen.mandatory,
                               output_fifo_max_size=54321,
                               error_callback=sen.error_callback)
        # attrs
        self.assertIs(self.mock._connection_params_dict,
                      connection_params_dict_mock)
        self.assertEqual(self.mock._exchange, {
            'exchange': sen.exchange,
            'bar': sen.bar
        })
        self.assertEqual(self.mock._exchange_name, sen.exchange)
        self.assertEqual(self.mock._queues_to_declare, [
            {
                'queue': sen.queue1,
                'callback': ANY
            },
            {
                'blabla': sen.blabla,
                'callback': ANY
            },
            {
                'blabla': sen.blabla,
                'callback': sen.callback
            },
        ])
        self.assertEqual(self.mock._serialize, sen.serialize)
        self.assertEqual(self.mock._prop_kwargs, sen.prop_kwargs)
        self.assertEqual(self.mock._mandatory, sen.mandatory)
        self.assertIs(self.mock._output_fifo.__class__, queue.Queue)
        self.assertEqual(self.mock._output_fifo.maxsize, 54321)
        self.assertEqual(self.mock._error_callback, sen.error_callback)
        # calls
        self.assertEqual(self.mock.mock_calls, [
            call._setup_communication(),
            call._start_publishing(),
        ])
        self.assertEqual(get_amqp_conn_params_mock.mock_calls, [
            call(),
        ])
        self.assertEqual(
            connection_params_dict_mock.mock_calls,
            [
                call.get('ssl'),
                call.setdefault('credentials', ANY),
                ('__contains__', ('client_properties',),
                 {}),  # because `call.__contains__` cannot be used
            ])
        self.assertIsInstance(
            # 2nd argument passed to setdefault()
            connection_params_dict_mock.setdefault.mock_calls[0][-2][1],
            pika.credentials.ExternalCredentials)

    def test__repr(self):
        string_repr = self.meth.__repr__()
        self.assertIs(type(string_repr), str)
        self.assertRegex(
            string_repr, r'<AMQPThreadedPusher object at 0x[0-9a-f]+ with .*>')
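A note on the expected-call entries written as raw tuples above, e.g. `('__contains__', ('client_properties',), {})`: the usual `call.__contains__(...)` spelling cannot be used, because `__contains__` already exists on the tuple type that `call` objects subclass, so attribute lookup returns the real tuple method instead of building a new call specification (whereas `call.__setitem__(...)` works, since tuples have no `__setitem__`). Recorded calls compare equal to plain `(name, args, kwargs)` tuples, which is what the assertions exploit. A tiny standalone illustration:

from unittest.mock import MagicMock

m = MagicMock()
'client_properties' in m   # records a __contains__ call on the mock
# `call.__contains__('client_properties')` would merely run tuple.__contains__
# on the `call` object itself, so the expectation is spelled as a raw tuple:
assert m.mock_calls == [('__contains__', ('client_properties',), {})]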
Example 13
class TestAnonymizer___publish_output_data(TestCaseMixin, unittest.TestCase):
    def setUp(self):
        self.cleaned_result_dict = {
            'category': 'bots',
            'source': 'hidden.42',
        }
        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock, 'OUTPUT_RK_PATTERN')

    @foreach(
        param(
            resource_to_org_ids={
                'inside': ['o2', 'o3'],
                'threats': ['o3', 'o5', 'o8'],
            },
            expected_publish_output_calls=[
                call(
                    routing_key='inside.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o3'
                    }},
                ),
                call(
                    routing_key='inside.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o2'
                    }},
                ),
                call(
                    routing_key='threats.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o8'
                    }},
                ),
                call(
                    routing_key='threats.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o5'
                    }},
                ),
                call(
                    routing_key='threats.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o3'
                    }},
                ),
            ],
        ).label('for both resources'),
        param(
            resource_to_org_ids={
                'inside': ['o2', 'o3'],
                'threats': [],
            },
            expected_publish_output_calls=[
                call(
                    routing_key='inside.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o3'
                    }},
                ),
                call(
                    routing_key='inside.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o2'
                    }},
                ),
            ],
        ).label('for "inside" only'),
        param(
            resource_to_org_ids={
                'inside': [],
                'threats': ['o3', 'o5', 'o8'],
            },
            expected_publish_output_calls=[
                call(
                    routing_key='threats.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o8'
                    }},
                ),
                call(
                    routing_key='threats.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o5'
                    }},
                ),
                call(
                    routing_key='threats.bots.hidden.42',
                    body=sen.output_body,
                    prop_kwargs={'headers': {
                        'n6-client-id': 'o3'
                    }},
                ),
            ],
        ).label('for "threats" only'),
        param(
            resource_to_org_ids={
                'inside': [],
                'threats': [],
            },
            expected_publish_output_calls=[],
        ).label('for no resources'),
    )
    def test_normal(self, resource_to_org_ids, expected_publish_output_calls):
        with patch('n6.utils.anonymizer.LOGGER') as LOGGER_mock:
            self.meth._publish_output_data(sen.event_type, resource_to_org_ids,
                                           sen.raw_result_dict,
                                           self.cleaned_result_dict,
                                           sen.output_body)

        self.assertEqual(self.mock.publish_output.mock_calls,
                         expected_publish_output_calls)
        self.assertFalse(LOGGER_mock.error.mock_calls)

    def test_error(self):
        resource_to_org_ids = {
            'inside': ['o2', 'o3'],
            'threats': ['o3', 'o5', 'o8'],
        }
        expected_publish_output_calls = [
            call(
                routing_key='inside.bots.hidden.42',
                body=sen.output_body,
                prop_kwargs={'headers': {
                    'n6-client-id': 'o3'
                }},
            ),
            call(
                routing_key='inside.bots.hidden.42',
                body=sen.output_body,
                prop_kwargs={'headers': {
                    'n6-client-id': 'o2'
                }},
            ),
            call(
                routing_key='threats.bots.hidden.42',
                body=sen.output_body,
                prop_kwargs={'headers': {
                    'n6-client-id': 'o8'
                }},
            ),
        ]
        exc_type = ZeroDivisionError  # (just an example exception class)
        self.mock.publish_output.side_effect = [
            None,
            None,
            exc_type,
        ]

        with patch('n6.utils.anonymizer.LOGGER') as LOGGER_mock, \
             self.assertRaises(exc_type):
            self.meth._publish_output_data(sen.event_type, resource_to_org_ids,
                                           sen.raw_result_dict,
                                           self.cleaned_result_dict,
                                           sen.output_body)

        self.assertEqual(self.mock.publish_output.mock_calls,
                         expected_publish_output_calls)
        self.assertEqual(LOGGER_mock.error.mock_calls, [
            call(
                ANY,
                'threats',
                'o8',
                sen.event_type,
                sen.raw_result_dict,
                'threats.bots.hidden.42',
                sen.output_body,
                ("for the resource 'inside' -- "
                 "* skipped for the org ids: none; "
                 "* done for the org ids: 'o3', 'o2';  "
                 "for the resource 'threats' -- "
                 "* skipped for the org ids: 'o3', 'o5', 'o8'; "
                 "* done for the org ids: none"),
            ),
        ])
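The expected call lists above also encode an ordering: resources are handled in a fixed order ('inside', then 'threats') and, within each resource, the org ids are iterated in reverse order of the given list. A hypothetical loop consistent with those expectations (not the actual Anonymizer code; the OUTPUT_RK_PATTERN value below is an assumption) could look like this:

OUTPUT_RK_PATTERN = '{resource}.{category}.{anon_source}'   # assumed pattern

def publish_output_data_sketch(pusher, resource_to_org_ids,
                               cleaned_result_dict, output_body):
    # Illustration only; the real method additionally builds the
    # "skipped/done" summary used in its error log message.
    for resource in ('inside', 'threats'):
        routing_key = OUTPUT_RK_PATTERN.format(
            resource=resource,
            category=cleaned_result_dict['category'],
            anon_source=cleaned_result_dict['source'])
        for org_id in reversed(resource_to_org_ids.get(resource, [])):
            pusher.publish_output(
                routing_key=routing_key,
                body=output_body,
                prop_kwargs={'headers': {'n6-client-id': org_id}})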
Example 14
class TestAnonymizer___get_result_dicts_and_output_body(
        TestCaseMixin, unittest.TestCase):

    forward_source_mapping = {
        'some.source': 'hidden.42',
    }

    event_raw_base = dict(
        id=(32 * '3'),
        rid=(32 * '4'),  # (restricted - to be skipped before *value* cleaning)
        source='some.source',  # (to be anonymized)
        restriction='public',  # (restricted - to be skipped before *value* cleaning)
        confidence='low',
        category='malurl',
        time='2013-07-12 11:30:00',
    )

    cleaned_base = dict(
        id=(32 * '3'),
        source='hidden.42',  # (after anonymization)
        confidence='low',
        category='malurl',
        time=datetime.datetime(2013, 7, 12, 11, 30, 00),
        type=sen.TO_BE_SET,
    )

    def setUp(self):
        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock)

        self.mock.data_spec = N6DataSpec()
        self.mock.auth_api.get_anonymized_source_mapping.return_value = {
            'forward_mapping': self.forward_source_mapping,
        }
        self.mock.auth_api.get_dip_anonymization_disabled_source_ids.return_value = frozenset()

    @foreach(
        param(
            event_type='event',
            event_data=dict(
                event_raw_base,
                client=[],  # (empty `client` -- to be skipped before *any* cleaning)
            ),
            expected_raw=dict(event_raw_base),
            expected_cleaned=dict(
                cleaned_base,
                type='event',  # (event_type value set *after* cleaning)
            ),
        ),
        param(
            event_type='event',
            event_data=dict(
                event_raw_base,
                client=['o1', 'o3', 'o2'],
                address=[],  # (empty `address` -- to be skipped before *any* cleaning)
                dip='192.168.0.1',
                fqdn='www.example.com',
                type='foobar',  # (not a result key -- to be skipped before *any* cleaning)
                blabla='foooo',  # (not a result key -- to be skipped before *any* cleaning)
                until='spamspam',
                min_amplification=4000 * 'foo bar',
                rid='xxxxx',
            ),
            expected_raw=dict(
                event_raw_base,
                client=['o1', 'o3', 'o2'],  # (restricted -- to be skipped before *value* cleaning)
                dip='192.168.0.1',  # (to be anonymized -> as 'adip')
                fqdn='www.example.com',
                until='spamspam',  # (restricted -- to be skipped before *value* cleaning)
                min_amplification=4000 * 'foo bar',  # (restricted [custom]      -- as above)
                rid='xxxxx',  # (restricted [+required]   -- as above)
            ),
            expected_cleaned=dict(
                cleaned_base,
                adip='x.x.0.1',  # ('dip' value after anonymization)
                fqdn='www.example.com',
                type='event',  # (event_type value set *after* cleaning)
            ),
        ),
        param(
            event_type='bl-update',
            event_data=dict(
                event_raw_base,
                **{
                    'client': [],  # (empty `client` -- to be skipped before *any* cleaning)
                    'address': [{'ip': '1.2.3.4', 'cc': 'pl', 'asn': '1.1'}],
                    'adip': 'x.10.20.30',
                    'dip': '192.168.0.1',
                    '_bl-series-no': 42,  # (not a result field -- to be skipped before *any* cleaning)
                    'type': 'barfoo',  # (not a result field -- to be skipped before *any* cleaning)
                }),
            expected_raw=dict(
                event_raw_base,
                **{
                    'address': [{'ip': '1.2.3.4', 'cc': 'pl', 'asn': '1.1'}],
                    'adip': 'x.10.20.30',
                    'dip': '192.168.0.1',  # (to be just omitted -- 'adip' is explicitly specified)
                }),
            expected_cleaned=dict(
                cleaned_base,
                address=[{'ip': '1.2.3.4', 'cc': 'PL', 'asn': 65537}],
                adip='x.10.20.30',  # (just given 'adip')
                type='bl-update',  # (event_type value set *after* cleaning)
            ),
        ),
        # below -- the same two as above but with dip anonymization disabled
        param(
            event_type='event',
            event_data=dict(
                event_raw_base,
                client=['o1', 'o3', 'o2'],
                address=[],  # (empty `address` -- to be skipped before *any* cleaning)
                dip='192.168.0.1',
                fqdn='www.example.com',
                type='foobar',  # (not a result key -- to be skipped before *any* cleaning)
                blabla='foooo',  # (not a result key -- to be skipped before *any* cleaning)
                until='spamspam',
                min_amplification=4000 * 'foo bar',
                rid='xxxxx',
            ),
            expected_raw=dict(
                event_raw_base,
                client=['o1', 'o3', 'o2'],  # (restricted -- to be skipped before *value* cleaning)
                dip='192.168.0.1',  # (to be *not* anonymized [sic])
                fqdn='www.example.com',
                until='spamspam',  # (restricted -- to be skipped before *value* cleaning)
                min_amplification=4000 * 'foo bar',  # (restricted [custom]      -- as above)
                rid='xxxxx',  # (restricted [+required]   -- as above)
            ),
            expected_cleaned=dict(
                cleaned_base,
                dip='192.168.0.1',  # (*not* anonymized [sic])
                fqdn='www.example.com',
                type='event',  # (event_type value set *after* cleaning)
            ),
            dip_anonymization_disabled_source_ids=frozenset(['some.source']),
        ),
        param(
            event_type='bl-update',
            event_data=dict(
                event_raw_base,
                **{
                    'client': [],  # (empty `client` -- to be skipped before *any* cleaning)
                    'address': [{'ip': '1.2.3.4', 'cc': 'pl', 'asn': '1.1'}],
                    'adip': 'x.10.20.30',
                    'dip': '192.168.0.1',
                    '_bl-series-no': 42,  # (not a result field -- to be skipped before *any* cleaning)
                    'type': 'barfoo',  # (not a result field -- to be skipped before *any* cleaning)
                }),
            expected_raw=dict(
                event_raw_base, **{
                    'address': [{'ip': '1.2.3.4', 'cc': 'pl', 'asn': '1.1'}],
                    'adip': 'x.10.20.30',
                    'dip': '192.168.0.1',
                }),
            expected_cleaned=dict(
                cleaned_base,
                address=[{'ip': '1.2.3.4', 'cc': 'PL', 'asn': 65537}],
                adip='x.10.20.30',  # (just given 'adip')
                dip='192.168.0.1',  # (just given 'dip' [sic])
                type='bl-update',  # (event_type value set *after* cleaning)
            ),
            dip_anonymization_disabled_source_ids=frozenset(['some.source']),
        ),
    )
    def test_normal(self,
                    event_type,
                    event_data,
                    expected_raw,
                    expected_cleaned,
                    dip_anonymization_disabled_source_ids=frozenset()):
        expected_auth_api_calls = [call.get_anonymized_source_mapping()]
        if 'dip' in event_data:
            expected_auth_api_calls.append(
                call.get_dip_anonymization_disabled_source_ids())
        self.mock.auth_api.get_dip_anonymization_disabled_source_ids.return_value = (
            dip_anonymization_disabled_source_ids)

        with patch('n6.utils.anonymizer.LOGGER') as LOGGER_mock:
            (raw_result_dict, cleaned_result_dict,
             output_body) = self.meth._get_result_dicts_and_output_body(
                 event_type, event_data, sen.resource_to_org_ids)

        self.assertEqual(raw_result_dict, expected_raw)
        self.assertEqual(cleaned_result_dict, expected_cleaned)
        self.assertEqual(json.loads(output_body),
                         self._get_expected_body_content(expected_cleaned))
        self.assertItemsEqual(self.mock.auth_api.mock_calls,
                              expected_auth_api_calls)
        self.assertFalse(LOGGER_mock.error.mock_calls)

    @staticmethod
    def _get_expected_body_content(expected_cleaned):
        formatted_time = expected_cleaned['time'].isoformat() + 'Z'
        assert formatted_time[10] == 'T' and formatted_time[-1] == 'Z'
        return dict(expected_cleaned, time=formatted_time)

    @foreach(
        param(
            event_data=dict(
                event_raw_base,
                client=['o3', 'o1', 'o2'],
            ),
            without_keys={'id'},
            exc_type=ResultKeyCleaningError,
        ).label('missing key: required and unrestricted'),
        param(
            event_data=dict(
                event_raw_base,
                client=['o3', 'o1', 'o2'],
            ),
            without_keys={'source'},
            exc_type=ResultKeyCleaningError,
        ).label('missing key: required and anonymized'),
        param(
            event_data=dict(
                event_raw_base,
                client=['o3', 'o1', 'o2'],
            ),
            without_keys={'rid'},
            exc_type=ResultKeyCleaningError,
        ).label('missing key: required and restricted'),
        param(
            event_data=dict(
                event_raw_base,
                client=['o3', 'o1', 'o2'],
                id='spam',
            ),
            exc_type=ResultValueCleaningError,
        ).label('illegal value for required and unrestricted key'),
        param(
            event_data=dict(
                event_raw_base,
                client=['o3', 'o1', 'o2'],
                fqdn='foo..bar',
            ),
            exc_type=ResultValueCleaningError,
        ).label('illegal value for optional and unrestricted key'),
        param(
            event_data=dict(
                event_raw_base,
                client=['o3', 'o1', 'o2'],
                dip='spam',
            ),
            exc_type=ResultValueCleaningError,
        ).label('illegal value for optional and anonymized-source key'),
        param(
            event_data=dict(
                event_raw_base,
                client=['o3', 'o1', 'o2'],
                adip='spam',
            ),
            exc_type=ResultValueCleaningError,
        ).label('illegal value for optional and anonymized-target key'),
    )
    def test_error(self, event_data, exc_type, without_keys=()):
        event_type = 'event'
        event_data = event_data.copy()
        for key in without_keys:
            del event_data[key]
        resource_to_org_ids = {'foo': {'bar'}, 'baz': {'spam', 'ham'}}
        with patch('n6.utils.anonymizer.LOGGER') as LOGGER_mock, \
             self.assertRaises(exc_type):
            self.meth._get_result_dicts_and_output_body(
                event_type, event_data, resource_to_org_ids)
        self.assertEqual(len(LOGGER_mock.error.mock_calls), 1)
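The 'dip' to 'adip' expectations above ('192.168.0.1' becoming 'x.x.0.1') amount to masking the two leftmost octets. A hypothetical one-liner consistent with those expected values (not the actual data-spec cleaning code):

def anonymize_dip_sketch(dip):
    # Mask the two leftmost octets, matching the expected params above.
    return '.'.join(['x', 'x'] + dip.split('.')[2:])

assert anonymize_dip_sketch('192.168.0.1') == 'x.x.0.1'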
Example 15
class TestAnonymizer__input_callback(TestCaseMixin, unittest.TestCase):
    def setUp(self):
        self.event_type = 'bl-update'
        self.event_data = {'some...': 'content...', 'id': 'some id...'}
        self.routing_key = self.event_type + '.filtered.*.*'
        self.body = json.dumps(self.event_data)
        self.resource_to_org_ids = {}

        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock, '_process_input')

        self.mock._get_resource_to_org_ids.return_value = self.resource_to_org_ids
        self.mock._get_result_dicts_and_output_body.return_value = (
            sen.raw_result_dict,
            sen.cleaned_result_dict,
            sen.output_body,
        )
        self.force_exit_on_any_remaining_entered_contexts_mock = self.patch(
            'n6.utils.anonymizer.force_exit_on_any_remaining_entered_contexts')

    @foreach(
        param(resource_to_org_ids_items={
            'foo': [sen.o1, sen.o2],
        }),
        param(resource_to_org_ids_items={
            'foo': [sen.o1, sen.o2],
            'bar': [],
        }),
        param(resource_to_org_ids_items={
            'foo': [],
            'bar': [sen.o3, sen.o4, sen.o5],
        }),
        param(resource_to_org_ids_items={
            'foo': [sen.o1, sen.o2],
            'bar': [sen.o3, sen.o4, sen.o5],
        }),
    )
    def test_with_some_org_ids(self, resource_to_org_ids_items):
        self.resource_to_org_ids.update(resource_to_org_ids_items)

        self.meth.input_callback(self.routing_key, self.body, sen.properties)

        self.assertEqual(
            self.force_exit_on_any_remaining_entered_contexts_mock.mock_calls,
            [
                call(self.mock.auth_api),
            ])
        self.assertEqual(self.mock.mock_calls, [
            call.setting_error_event_info(self.event_data),
            call.setting_error_event_info().__enter__(),
            call._check_event_type(self.event_type, self.event_data),
            call.auth_api.__enter__(),
            call._get_resource_to_org_ids(self.event_type, self.event_data),
            call._get_result_dicts_and_output_body(
                self.event_type, self.event_data, self.resource_to_org_ids),
            call._publish_output_data(
                self.event_type, self.resource_to_org_ids, sen.raw_result_dict,
                sen.cleaned_result_dict, sen.output_body),
            call.auth_api.__exit__(None, None, None),
            call.setting_error_event_info().__exit__(None, None, None),
        ])

    @foreach(
        param(resource_to_org_ids_items={}),
        param(resource_to_org_ids_items={
            'foo': [],
        }),
        param(resource_to_org_ids_items={
            'foo': [],
            'bar': [],
        }),
    )
    def test_without_org_ids(self, resource_to_org_ids_items):
        self.resource_to_org_ids.update(resource_to_org_ids_items)

        self.meth.input_callback(self.routing_key, self.body, sen.properties)

        self.assertEqual(
            self.force_exit_on_any_remaining_entered_contexts_mock.mock_calls,
            [
                call(self.mock.auth_api),
            ])
        self.assertEqual(self.mock.mock_calls, [
            call.setting_error_event_info(self.event_data),
            call.setting_error_event_info().__enter__(),
            call._check_event_type(self.event_type, self.event_data),
            call.auth_api.__enter__(),
            call._get_resource_to_org_ids(self.event_type, self.event_data),
            call.auth_api.__exit__(None, None, None),
            call.setting_error_event_info().__exit__(None, None, None),
        ])

    def test_with_some_error(self):
        self.resource_to_org_ids.update({
            'foo': [sen.o1, sen.o2],
            'bar': [sen.o3, sen.o4, sen.o5],
        })
        exc_type = ZeroDivisionError  # (just an example exception class)
        self.mock._get_result_dicts_and_output_body.side_effect = exc_type

        with self.assertRaises(exc_type) as exc_context:
            self.meth.input_callback(self.routing_key, self.body,
                                     sen.properties)

        self.assertEqual(
            self.force_exit_on_any_remaining_entered_contexts_mock.mock_calls,
            [
                call(self.mock.auth_api),
            ])
        self.assertEqual(self.mock.mock_calls, [
            call.setting_error_event_info(self.event_data),
            call.setting_error_event_info().__enter__(),
            call._check_event_type(self.event_type, self.event_data),
            call.auth_api.__enter__(),
            call._get_resource_to_org_ids(self.event_type, self.event_data),
            call._get_result_dicts_and_output_body(
                self.event_type, self.event_data, self.resource_to_org_ids),
            call.auth_api.__exit__(exc_type, exc_context.exception, ANY),
            call.setting_error_event_info().__exit__(
                exc_type, exc_context.exception, ANY),
        ])
Example 16
class Test__n6NormalizedData(unittest.TestCase):

    def setUp(self):
        self.mock = MagicMock()
        self.meth = MethodProxy(n6NormalizedData, self.mock)

    def test_class_attrs(self):
        instrumented_attr_names = {
            name for name, obj in vars(n6NormalizedData).items()
            if isinstance(obj, sqlalchemy.orm.attributes.InstrumentedAttribute)}
        column_names_to_sql_reprs = {
            str(name): str(self._get_sql_repr(obj))
            for name, obj in n6NormalizedData._n6columns.items()}
        self.assertEqual(
            n6NormalizedData.__tablename__,
            'event')
        self.assertEqual(
            instrumented_attr_names, {
                'clients',

                'address',
                'ip',
                'asn',
                'cc',
                ###'ipv6',
                ###'rdns',
                ###'dir',

                'category',
                'confidence',
                'count',
                'custom',
                'dip',
                'dport',
                ###'email',
                'expires',
                'fqdn',
                ###'iban',
                'id',
                ###'injects',
                'md5',
                'modified',
                'name',
                'origin',
                ###'phone',
                'proto',
                ###'registrar',
                'replaces',
                'restriction',
                'rid',
                'sha1',
                'sha256',
                'source',
                'sport',
                'status',
                'target',
                'time',
                'until',
                'url',
                ###'url_pattern',
                ###'username',
                ###'x509fp_sha1',
            })
        self.assertEqual(
            instrumented_attr_names - {'clients'},
            set(n6NormalizedData._n6columns))
        self.assertEqual(
            column_names_to_sql_reprs, {
                'address': 'address TEXT',  # note: in actual db it is MEDIUMTEXT
                'ip': 'ip INTEGER UNSIGNED NOT NULL',
                'asn': 'asn INTEGER UNSIGNED',
                'cc': 'cc VARCHAR(2)',
                ###'ipv6': '',
                ###'rdns': '',
                ###'dir': '',

                'category': (
                    "category ENUM('amplifier','bots','backdoor','cnc',"
                    "'deface','dns-query','dos-attacker','dos-victim','flow',"
                    "'flow-anomaly','fraud','leak','malurl','malware-action','other','phish',"
                    "'proxy','sandbox-url','scam','scanning','server-exploit','spam',"
                    "'spam-url','tor','vulnerable','webinject') NOT NULL"),
                'confidence': "confidence ENUM('low','medium','high') NOT NULL",
                'count': 'count SMALLINT',
                'custom': 'custom TEXT',  # note: in actual db it is MEDIUMTEXT
                'dip': 'dip INTEGER UNSIGNED',
                'dport': 'dport INTEGER',
                ###'email': '',
                'expires': 'expires DATETIME',
                'fqdn': 'fqdn VARCHAR(255)',
                ###'iban': '',
                'id': 'id BINARY(16) NOT NULL',
                ###'injects': '',
                'md5': 'md5 BINARY(16)',
                'modified': 'modified DATETIME',
                'name': 'name VARCHAR(255)',
                'origin': (
                    "origin ENUM('c2','dropzone','proxy','p2p-crawler',"
                    "'p2p-drone','sinkhole','sandbox','honeypot',"
                    "'darknet','av','ids','waf')"),
                ###'phone': '',
                'proto': "proto ENUM('tcp','udp','icmp')",
                ###'registrar': '',
                'replaces': 'replaces BINARY(16)',
                'restriction': "restriction ENUM('public','need-to-know','internal') NOT NULL",
                'rid': 'rid BINARY(16) NOT NULL',
                'sha1': 'sha1 BINARY(20)',
                'sha256': 'sha256 BINARY(32)',
                'source': 'source VARCHAR(32) NOT NULL',
                'sport': 'sport INTEGER',
                'status': "status ENUM('active','delisted','expired','replaced')",
                'target': 'target VARCHAR(100)',
                'time': 'time DATETIME NOT NULL',
                'until': 'until DATETIME',
                'url': 'url VARCHAR(2048)',
                ###'url_pattern': '',
                ###'username': '',
                ###'x509fp_sha1': '',
            })

    def _get_sql_repr(self, col):
        type_name = (
            str(col.type) if not isinstance(col.type, sqlalchemy.types.Enum)
            else 'ENUM({0})'.format(','.join(
                    "'{0}'".format(e) for e in col.type.enums)))
        r = '{0} {1}'.format(col.name, type_name)
        if isinstance(col.type, IPAddress):
            self.assertTrue(col.type.impl.mapping['mysql'].unsigned)
            r += ' UNSIGNED'
        self.assertIsInstance(col.nullable, bool)
        if not col.nullable:
            r += ' NOT NULL'
        return r

    def test_init_and_attrs_1(self):
        obj = self.obj = n6NormalizedData(
            id=sen.event_id,
            ip=sen.some_ip_addr,
            dport=sen.some_port_number,
            time='2014-04-01 01:07:42+02:00',
        )
        self.assertEqual(obj.id, sen.event_id)
        self.assertEqual(obj.ip, sen.some_ip_addr)
        self.assertEqual(obj.dport, sen.some_port_number)
        self.assertEqual(
            obj.time,
            datetime.datetime(2014, 3, 31, 23, 7, 42))

        for name in n6NormalizedData._n6columns:
            if name in ('id', 'ip', 'dport', 'time'):
                continue
            val = getattr(obj, name)
            self.assertIsNone(val)

        self.assertIsInstance(
            obj.clients,
            sqlalchemy.orm.collections.InstrumentedList)
        self.assertEqual(obj.clients, [])
        self.client1 = MagicMock()
        self.client1.client = 'c1'
        self.client2 = MagicMock()
        self.client2.client = 'c2'
        obj.clients.append(self.client2)
        obj.clients.append(self.client1)
        self.assertEqual(obj.clients, [self.client2, self.client1])

    def test_init_and_attrs_2(self):
        obj = self.obj = n6NormalizedData(
            time='2014-04-01 01:07:42+02:00',
            expires='2015-04-01 01:07:43+02:00',
            until='2015-04-01 01:07:43+02:00',
        )
        self.assertIsNone(obj.id)
        self.assertEqual(obj.ip, '0.0.0.0')  # "no IP" placeholder
        self.assertEqual(
            obj.time,
            datetime.datetime(2014, 3, 31, 23, 7, 42))
        self.assertEqual(
            obj.expires,
            datetime.datetime(2015, 3, 31, 23, 7, 43))
        ### THIS IS A PROBLEM -- TO BE SOLVED IN #3113:
        self.assertEqual(
            obj.until,
            '2015-04-01 01:07:43+02:00')

    def test__key_query(self):
        self.mock.some_key.in_.return_value = sen.result
        act_result = self.meth.key_query('some_key', sen.value)
        self.assertIs(act_result, sen.result)
        self.mock.some_key.in_.assert_called_once_with(sen.value)

    @foreach(
        param(
            key='url.sub',
            mapped_to='url',
            result=sen.or_result),
        param(
            key='fqdn.sub',
            mapped_to='fqdn',
            result=sen.or_result),
        param(
            key='fqdn.illegal',
            exc_type=KeyError),
        param(
            key='illegal',
            exc_type=KeyError),
    )
    @patch('n6lib.db_events.or_', return_value=sen.or_result)
    def test__like_query(self, or_mock, key, mapped_to=None,
                         result=None, exc_type=None, **kwargs):
        value = [
            u'val',
            u'ążź',  # (ticket #8043 - `UnicodeEncodeError: 'ascii' codec can't encode...`)
        ]
        if exc_type is None:
            assert result is not None
            getattr(self.mock, mapped_to).like.side_effect = [sen.term1, sen.term2]
            act_result = self.meth.like_query(key, value)
            self.assertIs(act_result, result)
            or_mock.assert_called_once_with(sen.term1, sen.term2)
            self.assertEqual(self.mock.mock_calls, [
                getattr(call, mapped_to).like(u'%val%'),
                getattr(call, mapped_to).like(u'%ążź%'),
            ])
        else:
            with self.assertRaises(exc_type):
                self.meth.like_query(key, value)

    @foreach(
        param(
            value=[('10.20.30.41', 24), ('10.20.30.41', 32)],
            min_max_ips=[(169090560, 169090815), (169090601, 169090601)],
            result=sen.or_result),
        param(
            value=[('10.20.30.41', 24)],
            min_max_ips=[(169090560, 169090815)],
            result=sen.or_result),
        param(
            value=[('10.20.30.441', 24), ('10.20.30.41', 32)],
            exc_type=socket.error),
        param(
            value=[('10.20.30.441', 24)],
            exc_type=socket.error),
        param(
            value=[None],
            exc_type=TypeError),
        param(
            value=('10.20.30.41', 24),
            exc_type=ValueError),
        param(
            value=None,
            exc_type=TypeError),
    )
    @patch('n6lib.db_events.and_', return_value=sen.and_result)
    @patch('n6lib.db_events.or_', return_value=sen.or_result)
    def test__ip_net_query(self, or_mock, and_mock, value=None, min_max_ips=None,
                           result=None, exc_type=None, **kwargs):
        key = MagicMock()
        key.__ne__.side_effect = (lambda k: k != 'ip.net')
        if exc_type is None:
            assert result is not None
            self.mock.ip.__ge__.side_effect = (lambda min_ip: (sen.term_ge, min_ip))
            self.mock.ip.__le__.side_effect = (lambda max_ip: (sen.term_le, max_ip))
            act_result = self.meth.ip_net_query(key, value)
            self.assertIs(act_result, result)
            or_mock.assert_called_once_with(*(len(value) * [sen.and_result]))
            self.assertEqual(
                and_mock.mock_calls,
                [call(
                    (sen.term_ge, min_ip),
                    (sen.term_le, max_ip))
                 for min_ip, max_ip in min_max_ips])
        else:
            with self.assertRaises(exc_type):
                self.meth.ip_net_query(key, value)
        # the only operation on the key was a single inequality test (against 'ip.net')
        key.__ne__.assert_called_once_with('ip.net')

    @foreach(
        param(key='active.min', cmp_meth_name='__ge__'),
        param(key='active.max', cmp_meth_name='__le__'),
        param(key='active.until', cmp_meth_name='__lt__'),
        param(key='active.illegal', exc_type=AssertionError),
        param(key='illegal', exc_type=AssertionError),
    )
    @patch('n6lib.db_events.null', return_value=sen.Null)
    @patch('n6lib.db_events.or_', return_value=sen.or_result)
    @patch('n6lib.db_events.and_', return_value=sen.and_result)
    def test__active_bl_query(self, and_mock, or_mock, null_mock,
                              key, cmp_meth_name=None, exc_type=None,
                              **kwargs):
        value = [sen.val]
        if exc_type is None:
            self.mock.expires.is_.return_value = sen.expires_is_result
            self.mock.expires.isnot.return_value = sen.expires_isnot_result
            getattr(self.mock.expires, cmp_meth_name).return_value = sen.expires_cmp_result
            getattr(self.mock.time, cmp_meth_name).return_value = sen.time_cmp_result
            act_result = self.meth.active_bl_query(key, value)
            self.assertIs(act_result, sen.or_result)
            if key == 'active.min':
                assert cmp_meth_name == '__ge__'
                or_mock.assert_called_once_with(sen.expires_cmp_result, sen.time_cmp_result)
                self.assertEqual(self.mock.expires.is_.mock_calls, [])
                self.assertEqual(self.mock.expires.isnot.mock_calls, [])
            else:
                assert (
                    (key == 'active.max' and cmp_meth_name == '__le__') or
                    (key == 'active.until' and cmp_meth_name == '__lt__'))
                or_mock.assert_called_once_with(sen.and_result, sen.and_result)
                self.assertEqual(and_mock.mock_calls, [
                    call(sen.expires_isnot_result, sen.expires_cmp_result),
                    call(sen.expires_is_result, sen.time_cmp_result),
                ])
                self.mock.expires.is_.assert_called_once_with(sen.Null)
                self.mock.expires.isnot.assert_called_once_with(sen.Null)
            getattr(self.mock.expires, cmp_meth_name).assert_called_once_with(sen.val)
            getattr(self.mock.time, cmp_meth_name).assert_called_once_with(sen.val)
        else:
            with self.assertRaises(exc_type):
                self.meth.active_bl_query(key, value)

    @foreach(
        param('modified.min', cmp_meth_name='__ge__'),
        param('modified.max', cmp_meth_name='__le__'),
        param('modified.until', cmp_meth_name='__lt__'),
        param('modified.illegal', exc_type=AssertionError),
        param('illegal', exc_type=AssertionError),
    )
    def test__modified_query(self, key, cmp_meth_name=None, exc_type=None):
        value = [sen.val]
        if exc_type is None:
            getattr(self.mock.modified, cmp_meth_name).return_value = sen.result
            act_result = self.meth.modified_query(key, value)
            self.assertIs(act_result, sen.result)
            getattr(self.mock.modified, cmp_meth_name).assert_called_once_with(sen.val)
        else:
            with self.assertRaises(exc_type):
                self.meth.modified_query(key, value)

    def test__to_raw_result_dict__1(self):
        self.test_init_and_attrs_1()
        d = self.obj.to_raw_result_dict()
        self.assertEqual(d, {
            'id': sen.event_id,
            'ip': sen.some_ip_addr,
            'dport': sen.some_port_number,
            'time': datetime.datetime(2014, 3, 31, 23, 7, 42),
            'client': ['c1', 'c2'],
        })

    def test__to_raw_result_dict__2(self):
        self.test_init_and_attrs_2()
        d = self.obj.to_raw_result_dict()
        self.assertEqual(d, {
            # note that ip='0.0.0.0' has been removed
            'time': datetime.datetime(2014, 3, 31, 23, 7, 42),
            'expires': datetime.datetime(2015, 3, 31, 23, 7, 43),
            ### THIS IS A PROBLEM -- TO BE SOLVED IN #3113:
            'until': '2015-04-01 01:07:43+02:00',
        })
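The `min_max_ips` values used in `test__ip_net_query` above (e.g. 169090560 and 169090815 for '10.20.30.41' with prefix length 24) are just the integer bounds of the corresponding IPv4 network; they can be reproduced with the standard ipaddress module (the helper name below is only for illustration):

import ipaddress

def ip_net_to_min_max(ip, prefix_len):
    # Integer network/broadcast bounds of the network containing `ip`.
    net = ipaddress.ip_network('{}/{}'.format(ip, prefix_len), strict=False)
    return int(net.network_address), int(net.broadcast_address)

assert ip_net_to_min_max('10.20.30.41', 24) == (169090560, 169090815)
assert ip_net_to_min_max('10.20.30.41', 32) == (169090601, 169090601)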
Example 17
class TestBaseUserAuthenticationPolicy(unittest.TestCase):
    def setUp(self):
        self.mock = Mock(__class__=BaseUserAuthenticationPolicy)
        self.meth = MethodProxy(BaseUserAuthenticationPolicy, self.mock, [
            'merge_orgid_userid',
        ])

    def test__get_auth_data__ok(self):
        request = MagicMock()
        request.unauthenticated_userid = 'org_id,user_id'
        request.registry.auth_api.authenticate.return_value = sen.auth_data
        expected_result = sen.auth_data
        result = BaseUserAuthenticationPolicy.get_auth_data(request)
        self.assertEqual(request.registry.auth_api.authenticate.mock_calls,
                         [call('org_id', 'user_id')])
        self.assertEqual(result, expected_result)

    def test__get_auth_data__unauthenticated_userid_is_None(self):
        request = MagicMock()
        request.unauthenticated_userid = None
        result = BaseUserAuthenticationPolicy.get_auth_data(request)
        self.assertIsNone(result)

    @patch('n6lib.pyramid_commons._pyramid_commons.LOGGER.warning')
    def test__get_auth_data__authenticate_raises_exception(
            self, LOGGER_warning_mock):
        request = MagicMock()
        request.unauthenticated_userid = 'org_id,user_id'
        request.registry.auth_api.authenticate.side_effect = AuthAPIUnauthenticatedError
        result = BaseUserAuthenticationPolicy.get_auth_data(request)
        self.assertIsNone(result)
        self.assertEqual(LOGGER_warning_mock.mock_calls, [
            call(
                'could not authenticate for '
                'organization id %r + user id '
                '%r', 'org_id', 'user_id')
        ])
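
Taken together, the three `get_auth_data` tests describe its contract: the unauthenticated user id (an 'org_id,user_id' string) is split, handed to `auth_api.authenticate()`, and an authentication failure is logged as a warning and turned into `None`. A rough, self-contained sketch of a function with that behaviour (hypothetical stand-ins for the real n6lib names):

import logging

LOGGER = logging.getLogger(__name__)

class AuthAPIUnauthenticatedError(Exception):
    """Stand-in for the real n6lib exception of the same name."""

def get_auth_data_sketch(request):
    # None when there is no unauthenticated userid; auth data on success;
    # None (plus a warning) when authentication fails.
    unauthenticated_userid = request.unauthenticated_userid
    if unauthenticated_userid is None:
        return None
    org_id, user_id = unauthenticated_userid.split(',')
    try:
        return request.registry.auth_api.authenticate(org_id, user_id)
    except AuthAPIUnauthenticatedError:
        LOGGER.warning('could not authenticate for organization id %r '
                       '+ user id %r', org_id, user_id)
        return None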

    def test__authenticated_userid__is_ok(self):
        request = MagicMock()
        request.auth_data = {'org_id': 'ORGid', 'user_id': 'USERid'}
        result = self.meth.authenticated_userid(request)
        expected_value = 'ORGid,USERid'
        self.assertEqual(result, expected_value)

    def test__authenticated_userid__is_None(self):
        request = MagicMock()
        request.auth_data = None
        result = self.meth.authenticated_userid(request)
        self.assertIsNone(result)

    def test__effective_principals__when_authenticated(self):
        self._patch_super_for_effective_principals()
        request = MagicMock()
        request.auth_data = {'org_id': 'ORGid', 'user_id': 'USERid'}
        result = self.meth.effective_principals(request)
        self.assertIn(Everyone, result)
        self.assertIn(Authenticated, result)
        self.assertIn('ORGid,USERid', result)

    def test__effective_principals__when_not_authenticated(self):
        self._patch_super_for_effective_principals()
        request = MagicMock()
        request.auth_data = None
        result = self.meth.effective_principals(request)
        self.assertIn(Everyone, result)
        self.assertNotIn(Authenticated, result)

    def _patch_super_for_effective_principals(self):
        cls = BaseUserAuthenticationPolicy
        super_effective_principals = super(cls,
                                           cls).effective_principals.__func__
        patcher = patch('n6lib.pyramid_commons._pyramid_commons.super',
                        create=True)
        super_mock = patcher.start()
        self.addCleanup(patcher.stop)
        super_mock().effective_principals.side_effect = (
            lambda request: super_effective_principals(sen.self, request))
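
For completeness, here is a rough sketch of the behaviour that the `authenticated_userid` and `effective_principals` tests above assert (hypothetical helper functions, not the policy's actual methods); the org id and user id are merged into a single 'ORGid,USERid' principal:

from pyramid.security import Authenticated, Everyone

def authenticated_userid_sketch(request):
    # None when there is no auth data, 'ORGid,USERid' otherwise.
    if request.auth_data is None:
        return None
    return '{org_id},{user_id}'.format(**request.auth_data)

def effective_principals_sketch(request):
    # The superclass contributes Everyone (and Authenticated for
    # authenticated requests); the merged org/user id is appended here.
    principals = [Everyone]
    userid = authenticated_userid_sketch(request)
    if userid is not None:
        principals += [Authenticated, userid]
    return principals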
Ejemplo n.º 18
0
class TestAnonymizer___check_event_type(TestCaseMixin, unittest.TestCase):
    def setUp(self):
        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock, '_VALID_EVENT_TYPES')

    @foreach(
        param(
            event_type='event',
            event_data={
                'some_key': sen.some_value,
            },
        ).label('no type in event data'),
        param(
            event_type='event',
            event_data={
                'type': 'event',
                'some_key': sen.some_value,
            },
        ).label('type "event" in event data'),
        param(
            event_type='bl-update',
            event_data={
                'type': 'bl-update',
                'some_key': sen.some_value,
            },
        ).label('another type in event data'),
    )
    def test_matching_and_valid(self, event_type, event_data):
        assert (event_type == event_data.get('type', 'event')
                and event_type in TYPE_ENUMS)  # (test case self-test)

        self.meth._check_event_type(event_type, event_data)

        # the _check_event_type() method is called outside the AuthAPI
        # context (outside its `with` statement) -- so we want to ensure
        # that no AuthAPI methods are called:
        self.assertEqual(self.mock.auth_api.mock_calls, [])

    @foreach(
        param(
            event_type='event',
            event_data={
                'type': 'bl-update',
                'some_key': sen.some_value,
            },
        ).label('type "event" does not match another one'),
        param(
            event_type='bl-update',
            event_data={
                'type': 'event',
                'some_key': sen.some_value,
            },
        ).label('another type does not match "event"'),
    )
    def test_not_matching(self, event_type, event_data):
        assert (event_type != event_data.get('type', 'event')
                and event_type in TYPE_ENUMS)  # (test case self-test)

        with self.assertRaises(ValueError):
            self.meth._check_event_type(event_type, event_data)

        # the _check_event_type() method is called outside the AuthAPI
        # context (outside its `with` statement) -- so we want to ensure
        # that no AuthAPI methods are called:
        self.assertEqual(self.mock.auth_api.mock_calls, [])

    def test_matching_but_not_valid(self):
        event_type = 'illegal'
        event_data = {
            'type': event_type,
            'some_key': sen.some_value,
        }
        assert event_type not in TYPE_ENUMS  # (test case self-test)

        with self.assertRaises(ValueError):
            self.meth._check_event_type(event_type, event_data)

        # the _check_event_type() method is called outside the AuthAPI
        # context (outside its `with` statement) -- so we want to ensure
        # that no AuthAPI methods are called:
        self.assertEqual(self.mock.auth_api.mock_calls, [])
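
The three test methods above pin down the contract of `_check_event_type()`: the given event type must match the (implicit or explicit) `type` item of the event data, must belong to the set of valid event types, and the check must not touch the AuthAPI. A minimal sketch of such a check (hypothetical names; `VALID_EVENT_TYPES` stands in for the real `_VALID_EVENT_TYPES` constant):

VALID_EVENT_TYPES = frozenset(['event', 'bl-update'])  # illustrative subset

def check_event_type_sketch(event_type, event_data):
    # A plain-data check: no AuthAPI involvement, ValueError on any mismatch.
    declared_type = event_data.get('type', 'event')
    if event_type != declared_type:
        raise ValueError('event type {!r} does not match {!r}'.format(
            event_type, declared_type))
    if event_type not in VALID_EVENT_TYPES:
        raise ValueError('illegal event type: {!r}'.format(event_type))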
Ejemplo n.º 19
0
 def setUp(self):
     self.mock = Mock(__class__=BaseParser, allow_empty_results=False)
     self.meth = MethodProxy(BaseParser, self.mock)
Ejemplo n.º 20
0
class TestBaseParser(unittest.TestCase):
    def setUp(self):
        self.mock = Mock(__class__=BaseParser, allow_empty_results=False)
        self.meth = MethodProxy(BaseParser, self.mock)

    def _asserts_of_proper__new__instance_adjustment(self, instance):
        # QueuedBase.__new__() ensures that:
        self.assertIsNot(instance.input_queue, BaseParser.input_queue)

    def _asserts_of_proper_preinit_hook_instance_adjustment(
            self, instance, binding_key):
        # for classes with `default_binding_key` defined,
        # BaseParser.preinit_hook() ensures that:
        self.assertEqual(
            instance.input_queue, {
                'exchange': 'raw',
                'exchange_type': 'topic',
                'queue_name': binding_key,
                'binding_keys': [binding_key],
            })
        self.assertEqual(BaseParser.input_queue, {
            'exchange': 'raw',
            'exchange_type': 'topic',
        })
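
The helper above spells out what `BaseParser.preinit_hook()` is expected to do for concrete parser classes: starting from the class-level `input_queue` (exchange 'raw' of type 'topic'), it derives per-instance `queue_name` and `binding_keys` items from `default_binding_key`, leaving the class-level dict untouched. A simplified, hypothetical sketch of that adjustment:

def add_parser_queue_items(input_queue, default_binding_key):
    # Returns a per-instance copy; the original (class-level) dict is not
    # modified -- which is exactly what the assertions above verify.
    adjusted = dict(input_queue)
    adjusted['queue_name'] = default_binding_key
    adjusted['binding_keys'] = [default_binding_key]
    return adjusted

# add_parser_queue_items({'exchange': 'raw', 'exchange_type': 'topic'}, 'foo.bar')
# -> {'exchange': 'raw', 'exchange_type': 'topic',
#     'queue_name': 'foo.bar', 'binding_keys': ['foo.bar']}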

    def _basic_init_related_asserts(self, instance, subclass, super_mock,
                                    super_cls_mock, expected_config,
                                    expected_config_full):
        # assert that an instance of the proper type has been returned
        self.assertIsInstance(instance, subclass)
        # assert that super() was used properly
        super_mock.assert_called_once_with(BaseParser, instance)
        super_cls_mock.__init__.assert_called_once_with(a=sentinel.a,
                                                        bb=sentinel.bb)
        # assert that configuration stuff has been obtained properly
        self.assertEqual(instance.config, expected_config)
        self.assertIsInstance(instance.config, ConfigSection)
        self.assertEqual(instance.config_full, expected_config_full)
        self.assertIsInstance(instance.config_full, Config)

    def test_basics(self):
        self.assertTrue(issubclass(BaseParser, QueuedBase))
        self.assertTrue(hasattr(BaseParser, 'default_binding_key'))
        self.assertTrue(hasattr(BaseParser, 'config_spec_pattern'))
        self.assertTrue(hasattr(BaseParser, 'constant_items'))
        self.assertTrue(hasattr(BaseParser, 'record_dict_class'))
        self.assertTrue(hasattr(BaseParser, 'event_type'))

    def test_config_spec_pattern(self):
        config_spec = BaseParser.config_spec_pattern.format(
            parser_class_name='example_foo')
        config_spec_parsed = parse_config_spec(config_spec)
        prefetch_count_opt_spec = config_spec_parsed.get_opt_spec(
            'example_foo.prefetch_count')
        self.assertEqual(prefetch_count_opt_spec.name, 'prefetch_count')
        self.assertEqual(prefetch_count_opt_spec.converter_spec, 'int')

    def test_initialization_without_default_binding_key(self):
        class SomeParser(BaseParser):
            pass  # no `default_binding_key` defined => it's an abstract class

        with self.assertRaises(NotImplementedError):
            SomeParser()

        unready_instance = SomeParser.__new__(SomeParser)
        self._asserts_of_proper__new__instance_adjustment(unready_instance)
        # for classes without `default_binding_key`,
        # the `queue_name` and `binding_keys` items are *not* added...
        self.assertEqual(unready_instance.input_queue, BaseParser.input_queue)
        self.assertEqual(BaseParser.input_queue, {
            'exchange': 'raw',
            'exchange_type': 'topic',
        })

    @foreach(
        param(
            mocked_conf_from_files={},
            expected_config=ConfigSection('SomeParser', {'prefetch_count': 1}),
            expected_config_full=Config.make(
                {'SomeParser': {'prefetch_count': 1}}),
        ),
        param(
            mocked_conf_from_files={
                'SomeParser': {'prefetch_count': '42'},
                'another_section': {'another_opt': '123.456'},
            },
            expected_config=ConfigSection('SomeParser', {'prefetch_count': 42}),
            expected_config_full=Config.make(
                {'SomeParser': {'prefetch_count': 42}}),
        ),
        param(
            custom_config_spec_pattern=concat_reducing_indent(
                BaseParser.config_spec_pattern,
                '''
                    some_opt = [-3, null] :: json
                    [another_section]
                    another_opt :: float
                    yet_another_opt = Foo Bar Spam Ham
                ''',
            ),
            mocked_conf_from_files={
                'SomeParser': {'prefetch_count': '42'},
                'another_section': {'another_opt': '123.456'},
            },
            expected_config=ConfigSection('SomeParser', {
                'prefetch_count': 42,
                'some_opt': [-3, None],
            }),
            expected_config_full=Config.make({
                'SomeParser': {
                    'prefetch_count': 42,
                    'some_opt': [-3, None],
                },
                'another_section': {
                    'another_opt': 123.456,
                    'yet_another_opt': 'Foo Bar Spam Ham',
                },
            }),
        ),
    )
    @foreach(
        param(binding_key='foo.bar'),
        param(binding_key='foo.bar.33'),
    )
    def test_initialization_with_default_binding_key(
            self,
            binding_key,
            mocked_conf_from_files,
            expected_config,
            expected_config_full,
            custom_config_spec_pattern=None):
        class SomeParser(BaseParser):
            default_binding_key = binding_key  # => it's a concrete class

        if custom_config_spec_pattern is not None:
            SomeParser.config_spec_pattern = custom_config_spec_pattern

        unready_instance = SomeParser.__new__(SomeParser)
        self._asserts_of_proper__new__instance_adjustment(unready_instance)
        self._asserts_of_proper_preinit_hook_instance_adjustment(
            unready_instance, binding_key)

        super_cls_mock = SimpleNamespace(__init__=Mock())
        with patch_always('n6.parsers.generic.super',
                          return_value=super_cls_mock) as super_mock, \
             patch('n6.parsers.generic.Config._load_n6_config_files',
                   return_value=mocked_conf_from_files):
            # instantiation
            instance = SomeParser(a=sentinel.a, bb=sentinel.bb)
            self._asserts_of_proper__new__instance_adjustment(instance)
            self._asserts_of_proper_preinit_hook_instance_adjustment(
                instance, binding_key)
            self._basic_init_related_asserts(instance, SomeParser, super_mock,
                                             super_cls_mock, expected_config,
                                             expected_config_full)

    def test__make_binding_keys(self):
        self.mock.default_binding_key = 'fooo.barr'
        binding_keys = self.meth.make_binding_keys()
        self.assertEqual(binding_keys, ['fooo.barr'])
        self.assertEqual(self.mock.mock_calls, [])

    def test__make_binding_keys_with_raw_format_version_tag(self):
        self.mock.default_binding_key = 'fooo.barr.33'
        binding_keys = self.meth.make_binding_keys()
        self.assertEqual(binding_keys, ['fooo.barr.33'])
        self.assertEqual(self.mock.mock_calls, [])

    def test__get_script_init_kwargs(self):
        self.assertIsInstance(
            vars(BaseParser)['get_script_init_kwargs'], classmethod)
        init_kwargs = BaseParser.get_script_init_kwargs.__func__(self.mock)
        self.assertEqual(init_kwargs, {})
        self.assertEqual(self.mock.mock_calls, [])

    def test__run_handling__interrupted(self):
        self.mock.configure_mock(**{'run.side_effect': KeyboardInterrupt})
        self.meth.run_handling()
        self.mock.run.assert_called_once_with()
        self.mock.stop.assert_called_once_with()

    def test__run_handling__not_interrupted(self):
        self.meth.run_handling()
        self.mock.run.assert_called_once_with()
        self.assertEqual(self.mock.stop.mock_calls, [])
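
The two `run_handling` tests encode a simple control flow: `stop()` is called only when `run()` is interrupted by `KeyboardInterrupt`. A sketch of that pattern (not the actual method body):

def run_handling_sketch(component):
    # `component` is anything exposing run() and stop().
    try:
        component.run()
    except KeyboardInterrupt:
        component.stop()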

    @patch('n6.parsers.generic.FilePagedSequence')
    def test__input_callback(self, FilePagedSequence_mock):
        FilePagedSequence_mock.return_value = MagicMock()
        FilePagedSequence_mock.return_value.__enter__.return_value = sentinel.working_seq
        data = MagicMock(**{'get.return_value': sentinel.rid})
        self.mock.configure_mock(**{
            '_fix_body.return_value': sentinel.body,
            'prepare_data.return_value': data,
            'setting_error_event_info': MagicMock(),
            'get_output_rk.return_value': sentinel.output_rk,
            'get_output_bodies.return_value': [sentinel.output_body1,
                                               sentinel.output_body2],
        })
        self.meth.input_callback(sentinel.routing_key, sentinel.body,
                                 sentinel.properties)
        self.assertEqual(self.mock.mock_calls, [
            call._fix_body(sentinel.body),
            call.prepare_data(sentinel.routing_key, sentinel.body,
                              sentinel.properties),
            call.prepare_data().get('properties.message_id'),
            call.setting_error_event_info(sentinel.rid),
            call.setting_error_event_info().__enter__(),
            call.get_output_rk(data),
            call.get_output_bodies(data, sentinel.working_seq),
            call.publish_output(routing_key=sentinel.output_rk,
                                body=sentinel.output_body1),
            call.publish_output(routing_key=sentinel.output_rk,
                                body=sentinel.output_body2),
            call.setting_error_event_info().__exit__(None, None, None),
        ])
        self.assertEqual(FilePagedSequence_mock.mock_calls, [
            call(page_size=1000),
            call().__enter__(),
            call().__exit__(None, None, None),
        ])

    def test__prepare_data(self):
        data = self.meth.prepare_data(routing_key='ham.spam',
                                      body=sentinel.body,
                                      properties=SimpleNamespace(
                                          foo=sentinel.foo,
                                          bar=sentinel.bar,
                                          timestamp=1389348840,
                                          headers={'a': sentinel.a}))
        self.assertEqual(
            data, {
                'a': sentinel.a,
                'properties.foo': sentinel.foo,
                'properties.bar': sentinel.bar,
                'source': 'ham.spam',
                'properties.timestamp': '2014-01-10 10:14:00',
                'raw_format_version_tag': None,
                'raw': sentinel.body,
            })

    def test__prepare_data__rk__with_raw_format_version_tag(self):
        data = self.meth.prepare_data(routing_key='ham.spam.33',
                                      body=sentinel.body,
                                      properties=SimpleNamespace(
                                          foo=sentinel.foo,
                                          bar=sentinel.bar,
                                          timestamp=1389348840,
                                          headers={'a': sentinel.a}))
        self.assertEqual(
            data, {
                'a': sentinel.a,
                'properties.foo': sentinel.foo,
                'properties.bar': sentinel.bar,
                'source': 'ham.spam',
                'properties.timestamp': '2014-01-10 10:14:00',
                'raw_format_version_tag': '33',
                'raw': sentinel.body,
            })
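
The two `prepare_data` tests above show how the routing key and the AMQP properties are flattened: the first two dot-separated segments of the routing key form `source`, an optional third segment becomes `raw_format_version_tag`, and the numeric `timestamp` property is rendered as a UTC '%Y-%m-%d %H:%M:%S' string. A sketch of just the routing-key and timestamp handling (hypothetical helpers, not the real method):

import datetime

def split_routing_key(routing_key):
    # 'ham.spam'    -> ('ham.spam', None)
    # 'ham.spam.33' -> ('ham.spam', '33')
    segments = routing_key.split('.')
    source = '.'.join(segments[:2])
    raw_format_version_tag = segments[2] if len(segments) > 2 else None
    return source, raw_format_version_tag

def format_timestamp(timestamp):
    # 1389348840 -> '2014-01-10 10:14:00' (UTC)
    return datetime.datetime.utcfromtimestamp(timestamp).strftime(
        '%Y-%m-%d %H:%M:%S')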

    def test__get_output_rk(self):
        self.mock.configure_mock(**{
            'event_type': 'foobar',
        })
        data = {'source': 'ham.spam'}
        output_rk = self.meth.get_output_rk(data)
        self.assertEqual(output_rk, 'foobar.parsed.ham.spam')

    def test__get_output_bodies(self):
        parsed = [
            MagicMock(**{
                '__class__': RecordDict,
                'used_as_context_manager': True,
                'get_ready_json.return_value': getattr(
                    sentinel, 'output_body{}'.format(i)),
            })
            for i in (1, 2)
        ]
        self.mock.configure_mock(**{
            'parse.return_value': parsed,
            'get_output_message_id.side_effect': [
                sentinel.msg_A,
                sentinel.msg_B,
            ],
            'setting_error_event_info': MagicMock(),
            'postprocess_parsed.side_effect': (
                lambda data, parsed, total, item_no: parsed),
        })
        seq_mock = FilePagedSequence._instance_mock()
        output_bodies = self.meth.get_output_bodies(sentinel.data, seq_mock)
        self.assertIs(output_bodies, seq_mock)
        self.assertEqual(seq_mock._list, [
            sentinel.output_body1,
            sentinel.output_body2,
        ])
        self.assertEqual(parsed[0].mock_calls, [
            call.__setitem__('id', sentinel.msg_A),
            call.get_ready_json(),
        ])
        self.assertEqual(parsed[1].mock_calls, [
            call.__setitem__('id', sentinel.msg_B),
            call.get_ready_json(),
        ])
        self.assertEqual(self.mock.mock_calls, [
            call.parse(sentinel.data),
            call.get_output_message_id(parsed[0]),
            call.delete_too_long_address(parsed[0]),
            call.get_output_message_id(parsed[1]),
            call.delete_too_long_address(parsed[1]),
            call.setting_error_event_info(parsed[0]),
            call.setting_error_event_info().__enter__(),
            call.postprocess_parsed(sentinel.data, parsed[0], 2, item_no=1),
            call.setting_error_event_info().__exit__(None, None, None),
            call.setting_error_event_info(parsed[1]),
            call.setting_error_event_info().__enter__(),
            call.postprocess_parsed(sentinel.data, parsed[1], 2, item_no=2),
            call.setting_error_event_info().__exit__(None, None, None),
        ])

    def test__get_output_bodies__record_dict_not_used_as_context_manager(self):
        parsed = [
            MagicMock(**{
                '__class__': RecordDict,
                'used_as_context_manager': False
            }) for i in (1, 2)
        ]
        self.mock.configure_mock(**{'parse.return_value': parsed})
        with self.assertRaises(AssertionError):
            self.meth.get_output_bodies(sentinel.data,
                                        FilePagedSequence._instance_mock())
        self.assertEqual(self.mock.method_calls, [
            call.parse(sentinel.data),
        ])

    def test__get_output_bodies__parse_yielded_no_items(self):
        self.mock.configure_mock(**{'parse.return_value': iter([])})
        with self.assertRaises(ValueError):
            self.meth.get_output_bodies(sentinel.data,
                                        FilePagedSequence._instance_mock())
        self.assertEqual(self.mock.method_calls, [
            call.parse(sentinel.data),
        ])

    def test__get_output_bodies__parse_yielded_no_items__allow_empty_results(
            self):
        self.mock.configure_mock(**{
            'parse.return_value': iter([]),
            'allow_empty_results': True
        })
        seq_mock = FilePagedSequence._instance_mock()
        output_bodies = self.meth.get_output_bodies(sentinel.data, seq_mock)
        self.assertIs(output_bodies, seq_mock)
        self.assertEqual(seq_mock._list, [])  # just empty
        self.assertEqual(self.mock.mock_calls, [
            call.parse(sentinel.data),
        ])

    def test__delete_too_long_address__address_is_ok(self):
        parsed = RecordDict()
        parsed['address'] = [{'ip': i + 1} for i in xrange(MAX_IPS_IN_ADDRESS)]
        expected = RecordDict()
        expected['address'] = [{
            'ip': i + 1
        } for i in xrange(MAX_IPS_IN_ADDRESS)]
        self.meth.delete_too_long_address(parsed)
        self.assertEqual(parsed, expected)

    def test__delete_too_long_address__address_is_too_long(self):
        ips = MAX_IPS_IN_ADDRESS + 1
        parsed = RecordDict()
        parsed['id'] = '0123456789abcdef0123456789abcdef'
        parsed['address'] = [{'ip': i + 1} for i in xrange(ips)]
        expected = RecordDict()
        expected['id'] = '0123456789abcdef0123456789abcdef'
        self.meth.delete_too_long_address(parsed)
        self.assertEqual(parsed, expected)

    def test__delete_too_long_address__address_is_empty(self):
        parsed = RecordDict()
        parsed.update({'source': 'foo.bar'})
        expected = RecordDict()
        expected.update({'source': 'foo.bar'})
        self.meth.delete_too_long_address(parsed)
        self.assertEqual(parsed, expected)
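
These three tests describe `delete_too_long_address()` as a simple guard: an `address` list longer than `MAX_IPS_IN_ADDRESS` is dropped altogether (rather than truncated), while shorter or absent lists are left untouched. A minimal sketch under those assumptions (the limit value below is a placeholder; the tests import the real constant):

MAX_IPS_IN_ADDRESS = 63  # placeholder value, for illustration only

def delete_too_long_address_sketch(parsed):
    address = parsed.get('address')
    if address is not None and len(address) > MAX_IPS_IN_ADDRESS:
        # the whole item is removed, not truncated
        del parsed['address']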

    def test__get_output_message_id(self):
        inputs_and_resultant_hash_bases = [
            # basics
            ({'source': 'foo.bar'},
             'source,foo.bar'),
            ({u'source': u'foo.bar'},
             'source,foo.bar'),
            # proper sorting of multiple values
            ({'key1': 2, 'key2': ['value2', 'value3', 'value1']},
             'key1,2\nkey2,value1,value2,value3'),
            # ...and of keys + proper encoding of unicode keys/values
            ({u'key2': [u'value3', u'value1', u'value2'], u'key1': 2L},
             'key1,2\nkey2,value1,value2,value3'),
            # ...as well as proper int/long normalization/representation
            ({u'key2': [30, 10, 20L],
              u'key1': 9000111222333444555666777888999000L},
             'key1,9000111222333444555666777888999000\nkey2,10,20,30'),
            # non-ascii values
            ({'target': 'zażółć', u'client': [u'jaźń', u'gęślą']},
             'client,gęślą,jaźń\ntarget,zażółć'),
            ({u'target': u'zażółć', 'client': ['jaźń', 'gęślą']},
             'client,gęślą,jaźń\ntarget,zażółć'),
            # subdicts
            ({'dip': u'3.3.3.3',
              u'address': [{'ip': '255.255.255.0'}, {'ip': '127.0.0.1'}]},
             "address,{'ip': '127.0.0.1'},{'ip': '255.255.255.0'}\ndip,3.3.3.3"),
            # non-ascii subdict keys/values
            ({u'key2': [{'ką2': 'vą2'}, {'ką1': 'vą1'}],
              'key1': {'ką': 'vą'}},
             "key1,{'k\\xc4\\x85': 'v\\xc4\\x85'}\n" +
             "key2,{'k\\xc4\\x851': 'v\\xc4\\x851'},{'k\\xc4\\x852': 'v\\xc4\\x852'}"),
            # proper encoding of unicode keys/values + proper sorting of whole subdicts
            ({'key1': {u'ką': u'vą'},
              'key2': [{u'ką2': 'vą2'}, {'ką1': u'vą1'}]},
             "key1,{'k\\xc4\\x85': 'v\\xc4\\x85'}\n" +
             "key2,{'k\\xc4\\x851': 'v\\xc4\\x851'},{'k\\xc4\\x852': 'v\\xc4\\x852'}"),
            # ...as well as proper int/long normalization/representation
            ({'key1': {u'k': 2L}, 'key2': [{'k2': 2L}, {u'k1': 1}]},
             "key1,{'k': 2}\nkey2,{'k1': 1},{'k2': 2}"),
            ({u'key2': [{'k2': 2}, {'k1': 1}], 'key1': {'k': 3}},
             "key1,{'k': 3}\nkey2,{'k1': 1},{'k2': 2}"),
            ({u'key2': [{'k2': 2L}, {'k1': 1L}],
              'key1': {'k': 9000111222333444555666777888999000L}},
             "key1,{'k': 9000111222333444555666777888999000}\nkey2,{'k1': 1},{'k2': 2}"),
            # proper sorting of multiple items in subdicts
            ({'key1': {'c': 2, u'a': 3L, u'b': 1L},
              'key2': [{'c': 2, u'a': 3L, u'b': 1L},
                       {'d': 3, u'a': 2L, u'b': 1L}]},
             "key1,{'a': 3, 'b': 1, 'c': 2}\n" +
             "key2,{'a': 2, 'b': 1, 'd': 3},{'a': 3, 'b': 1, 'c': 2}"),
        ]

        class _RecordDict(RecordDict):
            adjust_key1 = adjust_key2 = None
            optional_keys = RecordDict.optional_keys | {'key1', 'key2'}

        parser = BaseParser.__new__(BaseParser)
        for input_dict, expected_base in inputs_and_resultant_hash_bases:
            record_dict = _RecordDict(input_dict)
            expected_result = hashlib.md5(expected_base).hexdigest()
            result = parser.get_output_message_id(record_dict)
            self.assertIsInstance(result, str)
            self.assertEqual(result, expected_result)
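
The input/expected pairs above boil down to a deterministic recipe: keys are sorted, each value (or each element of a multi-value list) is normalized to a string and sorted, every key yields one 'key,value1,value2,...' line, and the MD5 hex digest of the newline-joined lines is the output message id. A deliberately simplified sketch covering only the plain string/int cases (no unicode encoding, long normalization or subdicts), with hypothetical names:

import hashlib

def simplified_output_message_id(record_dict):
    lines = []
    for key in sorted(record_dict):
        value = record_dict[key]
        values = value if isinstance(value, (list, tuple)) else [value]
        lines.append(','.join([key] + sorted(str(v) for v in values)))
    hash_base = '\n'.join(lines)
    return hashlib.md5(hash_base.encode('utf-8')).hexdigest()

# simplified_output_message_id({'key1': 2, 'key2': ['value2', 'value3', 'value1']})
# == hashlib.md5(b'key1,2\nkey2,value1,value2,value3').hexdigest()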

    def test__get_output_message_id__errors(self):
        inputs_and_exc_classes = [
            # bad subdict key type
            ({'key1': {32: 2}}, TypeError),
            ({'key1': [{32: 2}]}, TypeError),
            # bad subdict value type
            ({'key1': {'k': 2.3}}, TypeError),
            ({'key1': [{'k': 2.3}]}, TypeError),
            ({'key1': {'k': {'k': 2}}}, TypeError),    # nesting is illegal
            ({'key1': [{'k': {'k': 2}}]}, TypeError),  # nesting is illegal
            # bad value type
            ({'key1': 2.3}, TypeError),
            ({'key1': [2.3]}, TypeError),
            ({'key1': [[2]]}, TypeError),              # nesting is illegal
        ]

        class _RecordDict(RecordDict):
            adjust_key1 = adjust_key2 = None
            optional_keys = RecordDict.optional_keys | {'key1', 'key2'}

        parser = BaseParser.__new__(BaseParser)
        for input_dict, exc_class in inputs_and_exc_classes:
            record_dict = _RecordDict(input_dict)
            with self.assertRaises(exc_class):
                parser.get_output_message_id(record_dict)

    def test__postprocess_parsed__without__do_not_resolve_fqdn_to_ip(self):
        data = {}
        parsed = RecordDict()
        self.meth.postprocess_parsed(data, parsed, 1, item_no=1)
        self.assertEqual(parsed, {})

    def test__postprocess_parsed__with__do_not_resolve_fqdn_to_ip__False(self):
        data = {'_do_not_resolve_fqdn_to_ip': False}
        parsed = RecordDict()
        self.meth.postprocess_parsed(data, parsed, 1, item_no=1)
        self.assertEqual(parsed, {})

    def test__postprocess_parsed__with__do_not_resolve_fqdn_to_ip__True(self):
        data = {'_do_not_resolve_fqdn_to_ip': True}
        parsed = RecordDict()
        self.meth.postprocess_parsed(data, parsed, 1, item_no=1)
        self.assertEqual(parsed, {'_do_not_resolve_fqdn_to_ip': True})
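
The three `postprocess_parsed` tests focus on one detail: the internal `_do_not_resolve_fqdn_to_ip` flag is propagated from the input data to the parsed record dict only when it is true. A sketch of just that piece of behaviour (hypothetical helper):

def propagate_no_fqdn_resolution_flag(data, parsed):
    # copied only when present *and* true -- a False value is ignored
    if data.get('_do_not_resolve_fqdn_to_ip'):
        parsed['_do_not_resolve_fqdn_to_ip'] = True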
Ejemplo n.º 21
0
 def setUp(self):
     self.mock = Mock(__class__=SSLUserAuthenticationPolicy)
     self.meth = MethodProxy(SSLUserAuthenticationPolicy, self.mock, [
         'merge_orgid_userid',
     ])
Ejemplo n.º 22
0
class TestAnonymizer___get_resource_to_org_ids(TestCaseMixin,
                                               unittest.TestCase):
    def setUp(self):
        self.event_type = 'bl-update'

        def YES_predicate(record):
            self.assertIsInstance(record, RecordFacadeForPredicates)
            return True

        def NO_predicate(record):
            self.assertIsInstance(record, RecordFacadeForPredicates)
            return False

        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock)

        self.mock.data_spec = N6DataSpec()
        self.mock.auth_api.get_source_ids_to_subs_to_stream_api_access_infos.return_value = \
            self.s_to_s_to_saai = {
                'src.empty': {},
                'src.some-1': {
                    sen.something_1: (
                        YES_predicate,
                        {
                            'inside': set(),
                            'threats': set(),
                            'search': set(),
                        }
                    ),
                    sen.something_2: (
                        YES_predicate,
                        {
                            'inside': {'o4'},
                            'threats': set(),
                            'search': {'o1', 'o2', 'o3', 'o4', 'o5', 'o6'},
                        }
                    ),
                    sen.something_3: (
                        NO_predicate,
                        {
                            'inside': {'o2'},
                            'threats': {'o3'},
                            'search': set(),
                        }
                    ),
                    sen.something_4: (
                        NO_predicate,
                        {
                            'inside': {'o1', 'o3', 'o9'},
                            'threats': {'o3', 'o5', 'o6'},
                            'search': {'o3', 'o4', 'o5', 'o6'},
                        }
                    ),
                },
                'src.some-2': {
                    sen.something_5: (
                        YES_predicate,
                        {
                            'inside': {'o1', 'o3', 'o9'},
                            'threats': {'o3', 'o5', 'o6'},
                            'search': {'o3', 'o4', 'o5', 'o6'},
                        }
                    ),
                    sen.something_6: (
                        YES_predicate,
                        {
                            'inside': {'o2'},
                            'threats': {'o2'},
                            'search': set(),
                        }
                    ),
                    sen.something_7: (
                        YES_predicate,
                        {
                            'inside': set(),
                            'threats': {'o8'},
                            'search': set(),
                        }
                    ),
                    sen.something_8: (
                        YES_predicate,
                        {
                            'inside': set(),
                            'threats': set(),
                            'search': set(),
                        }
                    ),
                    sen.something_9: (
                        NO_predicate,
                        {
                            'inside': {'o1', 'o5', 'o4', 'o9'},
                            'threats': {'o3', 'o4', 'o5', 'o9'},
                            'search': {'o1', 'o2', 'o3', 'o4'},
                        }
                    ),
                },
            }

    @foreach(
        param(
            event_data=dict(
                source='src.not-found',
                client=['o5', 'o1', 'o3', 'o2'],
            ),
            expected_result=dict(
                inside=[],
                threats=[],
            ),
        ).label('no such source'),
        param(
            event_data=dict(
                source='src.empty',
                client=['o5', 'o1', 'o3', 'o2'],
            ),
            expected_result=dict(
                inside=[],
                threats=[],
            ),
        ).label('no subsources'),
        param(
            event_data=dict(
                source='src.some-1',
                client=['o5', 'o1', 'o3', 'o2'],
            ),
            expected_result=dict(
                inside=[],
                threats=[],
            ),
        ).label('no matching subsources/organizations'),
        param(
            event_data=dict(
                source='src.some-2',
                client=['o5', 'o1', 'o3', 'o2'],
            ),
            expected_result=dict(
                inside=['o1', 'o2', 'o3'],
                threats=['o2', 'o3', 'o5', 'o6', 'o8'],
            ),
        ).label('some matching subsources and organizations (1)'),
        param(
            event_data=dict(
                source='src.some-2',
                client=['o2', 'o4', 'o9'],
            ),
            expected_result=dict(
                inside=['o2', 'o9'],
                threats=['o2', 'o3', 'o5', 'o6', 'o8'],
            ),
        ).label('some matching subsources and organizations (2)'),
        param(
            event_data=dict(
                source='src.some-2',
                client=['o4'],
            ),
            expected_result=dict(
                inside=[],
                threats=['o2', 'o3', 'o5', 'o6', 'o8'],
            ),
        ).label('some matching subsources and organizations (only "threats")'),
    )
    def test_normal(self, event_data, expected_result):
        expected_mock_calls = [
            call.auth_api.get_source_ids_to_subs_to_stream_api_access_infos(),
        ]

        with patch('n6.utils.anonymizer.LOGGER') as LOGGER_mock:
            result = self.meth._get_resource_to_org_ids(
                self.event_type, event_data)

        self.assertEqual(result, expected_result)
        self.assertEqual(self.mock.mock_calls, expected_mock_calls)
        self.assertFalse(LOGGER_mock.error.mock_calls)

    def test_error(self):
        event_data = dict(
            source='src.some-2',
            client=['o5', 'o1', 'o3', 'o2'],
        )
        res_to_org_ids = {
            'inside': set(),
            'threats': {'o8'},
            'search': set(),
        }
        exc_type = ZeroDivisionError  # (just an example exception class)

        def raise_exc(rec):
            raise exc_type('blablabla')

        self.s_to_s_to_saai['src.some-2'][
            sen.something_7] = raise_exc, res_to_org_ids

        with patch('n6.utils.anonymizer.LOGGER') as LOGGER_mock, \
             self.assertRaises(exc_type):
            self.meth._get_resource_to_org_ids(self.event_type, event_data)

        self.assertEqual(len(LOGGER_mock.error.mock_calls), 1)
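
Working backwards from the fixture and the expected results, `_get_resource_to_org_ids()` appears to: take the subsources of the event's source, keep only those whose predicate accepts the event, union their 'inside' and 'threats' org id sets (ignoring 'search'), restrict 'inside' to the organizations listed in the event's `client` field, and return sorted lists. A hedged, self-contained sketch of that aggregation (hypothetical function and argument names):

def resource_to_org_ids_sketch(source_to_subsource_infos, event_data, record):
    # `source_to_subsource_infos` mirrors the fixture above:
    # {source_id: {subsource: (predicate,
    #                          {'inside': set(), 'threats': set(), 'search': set()})}}
    inside = set()
    threats = set()
    subsource_infos = source_to_subsource_infos.get(event_data['source'], {})
    for predicate, res_to_org_ids in subsource_infos.values():
        if predicate(record):
            inside |= res_to_org_ids['inside']
            threats |= res_to_org_ids['threats']
            # the 'search' resource is not relevant here
    client_org_ids = set(event_data.get('client', []))
    return {
        'inside': sorted(inside & client_org_ids),
        'threats': sorted(threats),
    }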
Ejemplo n.º 23
0
 def setUp(self):
     self.mock = MagicMock()
     self.meth = MethodProxy(n6NormalizedData, self.mock)
Ejemplo n.º 24
0
    def setUp(self):
        self.event_type = 'bl-update'

        def YES_predicate(record):
            self.assertIsInstance(record, RecordFacadeForPredicates)
            return True

        def NO_predicate(record):
            self.assertIsInstance(record, RecordFacadeForPredicates)
            return False

        self.mock = MagicMock(__class__=Anonymizer)
        self.meth = MethodProxy(Anonymizer, self.mock)

        self.mock.data_spec = N6DataSpec()
        self.mock.auth_api.get_source_ids_to_subs_to_stream_api_access_infos.return_value = \
            self.s_to_s_to_saai = {
                'src.empty': {},
                'src.some-1': {
                    sen.something_1: (
                        YES_predicate,
                        {
                            'inside': set(),
                            'threats': set(),
                            'search': set(),
                        }
                    ),
                    sen.something_2: (
                        YES_predicate,
                        {
                            'inside': {'o4'},
                            'threats': set(),
                            'search': {'o1', 'o2', 'o3', 'o4', 'o5', 'o6'},
                        }
                    ),
                    sen.something_3: (
                        NO_predicate,
                        {
                            'inside': {'o2'},
                            'threats': {'o3'},
                            'search': set(),
                        }
                    ),
                    sen.something_4: (
                        NO_predicate,
                        {
                            'inside': {'o1', 'o3', 'o9'},
                            'threats': {'o3', 'o5', 'o6'},
                            'search': {'o3', 'o4', 'o5', 'o6'},
                        }
                    ),
                },
                'src.some-2': {
                    sen.something_5: (
                        YES_predicate,
                        {
                            'inside': {'o1', 'o3', 'o9'},
                            'threats': {'o3', 'o5', 'o6'},
                            'search': {'o3', 'o4', 'o5', 'o6'},
                        }
                    ),
                    sen.something_6: (
                        YES_predicate,
                        {
                            'inside': {'o2'},
                            'threats': {'o2'},
                            'search': set(),
                        }
                    ),
                    sen.something_7: (
                        YES_predicate,
                        {
                            'inside': set(),
                            'threats': {'o8'},
                            'search': set(),
                        }
                    ),
                    sen.something_8: (
                        YES_predicate,
                        {
                            'inside': set(),
                            'threats': set(),
                            'search': set(),
                        }
                    ),
                    sen.something_9: (
                        NO_predicate,
                        {
                            'inside': {'o1', 'o5', 'o4', 'o9'},
                            'threats': {'o3', 'o4', 'o5', 'o9'},
                            'search': {'o1', 'o2', 'o3', 'o4'},
                        }
                    ),
                },
            }