def test_get_iterator(self):
        """_get_iterator must accept a sequence, a callable, or an iterable."""
        config = DotDict()
        config.logger = self.logger
        config.quit_on_empty_queue = False

        # a plain sequence comes back unchanged
        manager = TaskManager(config, job_source_iterator=range(1))
        eq_(manager._get_iterator(), [0])

        # a callable is invoked to produce the job stream
        def five_jobs(self):
            for value in range(5):
                yield value

        manager = TaskManager(config, job_source_iterator=five_jobs)
        eq_(list(manager._get_iterator()), [0, 1, 2, 3, 4])

        # any object implementing __iter__ also works
        class KeysOf(object):
            def __init__(self, config):
                self.config = config

            def __iter__(self):
                for key in self.config:
                    yield key

        manager = TaskManager(config, job_source_iterator=KeysOf(config))
        eq_(list(manager._get_iterator()), list(config.keys()))
    def test_blocking_start(self):
        """blocking_start must loop until the quit flag is raised."""
        config = DotDict()
        config.logger = self.logger
        config.idle_delay = 1
        config.quit_on_empty_queue = False

        class QuitAfterThreeSleeps(TaskManager):
            # count the idle sleeps and flag quit on the third one
            def _responsive_sleep(self, seconds, wait_log_interval=0,
                                  wait_reason=''):
                try:
                    if self.count >= 2:
                        self.quit = True
                    self.count += 1
                except AttributeError:
                    # first call: the counter does not exist yet
                    self.count = 0

        tm = QuitAfterThreeSleeps(config, task_func=Mock())
        waiting_func = Mock()

        tm.blocking_start(waiting_func=waiting_func)

        eq_(tm.task_func.call_count, 10)
        eq_(waiting_func.call_count, 0)
    def test_doing_work_with_two_workers_and_generator(self):
        """Two worker threads should drain a ten-item generator source."""
        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 2
        config.maximum_queue_size = 2
        collected = []

        def insert_into_list(anItem):
            collected.append(anItem)

        job_source = (((item,), {}) for item in xrange(10))
        ttm = ThreadedTaskManager(
            config,
            task_func=insert_into_list,
            job_source_iterator=job_source,
        )
        try:
            ttm.start()
            # give the workers a moment to run -- timing sensitive
            time.sleep(0.2)
            ok_(
                len(ttm.thread_list) == 2,
                "expected 2 threads, but found %d" % len(ttm.thread_list)
            )
            ok_(
                len(collected) == 10,
                'expected to do 10 inserts, '
                'but %d were done instead' % len(collected)
            )
            # two workers finish out of order, so compare sorted
            ok_(
                sorted(collected) == range(10),
                'expected %s, but got %s' % (range(10), sorted(collected))
            )
        except Exception:
            # we got threads to join
            ttm.wait_for_completion()
            raise
    def test_blocking_start(self):
        """blocking_start runs task_func repeatedly until self.quit is set.

        _responsive_sleep is overridden so the manager raises its own quit
        flag after a few idle sleeps; by then task_func is expected to have
        run 10 times and waiting_func must never have been invoked.
        """
        config = DotDict()
        config.logger = self.logger
        config.idle_delay = 1
        config.quit_on_empty_queue = False

        class MyTaskManager(TaskManager):
            def _responsive_sleep(self,
                                  seconds,
                                  wait_log_interval=0,
                                  wait_reason=''):
                try:
                    if self.count >= 2:
                        self.quit = True
                    self.count += 1
                except AttributeError:
                    # first call: self.count does not exist yet
                    self.count = 0

        tm = MyTaskManager(config, task_func=Mock())

        waiting_func = Mock()

        tm.blocking_start(waiting_func=waiting_func)

        # 10 presumably reflects the size of the default job source -- TODO confirm
        eq_(tm.task_func.call_count, 10)
        eq_(waiting_func.call_count, 0)
    def test_doing_work_with_one_worker(self):
        """A single worker should process the default job source in order."""
        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 1
        config.maximum_queue_size = 1
        collected = []

        def insert_into_list(anItem):
            collected.append(anItem)

        ttm = ThreadedTaskManager(config, task_func=insert_into_list)
        try:
            ttm.start()
            # give the worker a moment to run -- timing sensitive
            time.sleep(0.2)
            ok_(
                len(collected) == 10,
                'expected to do 10 inserts, '
                'but %d were done instead' % len(collected)
            )
            # one worker preserves ordering, so compare directly
            ok_(
                collected == range(10),
                'expected %s, but got %s' % (range(10), collected)
            )
            ttm.stop()
        except Exception:
            # we got threads to join
            ttm.wait_for_completion()
            raise
    def test_doing_work_with_two_workers_and_generator(self):
        """Two worker threads drain a ten-item generator job source."""
        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 2
        config.maximum_queue_size = 2
        my_list = []

        def insert_into_list(anItem):
            my_list.append(anItem)

        ttm = ThreadedTaskManager(config,
                                  task_func=insert_into_list,
                                  job_source_iterator=(((x,), {}) for x in
                                                       xrange(10))
                                 )
        try:
            ttm.start()
            # assumes 0.2s is enough for both workers to finish -- timing sensitive
            time.sleep(0.2)
            ok_(len(ttm.thread_list) == 2,
                            "expected 2 threads, but found %d"
                              % len(ttm.thread_list))
            ok_(len(my_list) == 10,
                            'expected to do 10 inserts, '
                              'but %d were done instead' % len(my_list))
            # two workers finish out of order, hence sorted()
            ok_(sorted(my_list) == range(10),
                            'expected %s, but got %s' % (range(10),
                                                         sorted(my_list)))
        except Exception:
            # we got threads to join
            ttm.wait_for_completion()
            raise
    def test_doing_work_with_one_worker(self):
        """A single worker processes the default job source in order."""
        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 1
        config.maximum_queue_size = 1
        my_list = []

        def insert_into_list(anItem):
            my_list.append(anItem)

        ttm = ThreadedTaskManager(config,
                                  task_func=insert_into_list
                                 )
        try:
            ttm.start()
            # assumes 0.2s is enough for the worker to finish -- timing sensitive
            time.sleep(0.2)
            ok_(len(my_list) == 10,
                            'expected to do 10 inserts, '
                               'but %d were done instead' % len(my_list))
            # one worker preserves order, so no sorting is needed
            ok_(my_list == range(10),
                            'expected %s, but got %s' % (range(10), my_list))
            ttm.stop()
        except Exception:
            # we got threads to join
            ttm.wait_for_completion()
            raise
    def setup_mocked_s3_storage(
        self,
        executor=TransactionExecutor,
        executor_for_gets=TransactionExecutor,
        storage_class='BotoS3CrashStorage',
        host='',
        port=0,
        resource_class=S3ConnectionContext,
        **extra
    ):
        """Return a resource_class connection with its network layer mocked.

        Entries in ``extra`` override the default configuration values.
        """
        config = DotDict({
            'resource_class': resource_class,
            'logger': mock.Mock(),
            'host': host,
            'port': port,
            'access_key': 'this is the access key',
            'secret_access_key': 'secrets',
            'bucket_name': 'silliness',
            'keybuilder_class': KeyBuilderBase,
            'prefix': 'dev',
            'calling_format': mock.Mock()
        })
        config.update(extra)

        connection = resource_class(config)
        # stub out everything that would touch the network
        connection._connect_to_endpoint = mock.Mock()
        connection._mocked_connection = (
            connection._connect_to_endpoint.return_value
        )
        connection._calling_format.return_value = mock.Mock()
        connection._CreateError = mock.Mock()
        connection.ResponseError = mock.Mock()
        connection._open = mock.MagicMock()
        return connection
    def setup_mocked_s3_storage(
        self,
        executor=TransactionExecutor,
        executor_for_gets=TransactionExecutor,
        storage_class='BotoS3CrashStorage',
        host='',
        port=0,
        resource_class=S3ConnectionContext,
        **extra
    ):
        """Build a resource_class connection with its network layer mocked.

        Keyword arguments in ``extra`` override the default config entries.
        The returned connection never touches the network: endpoint
        creation, error classes, and ``_open`` are all replaced by mocks.
        """
        config = DotDict({
            'resource_class': resource_class,
            'logger': mock.Mock(),
            'host': host,
            'port': port,
            'access_key': 'this is the access key',
            'secret_access_key': 'secrets',
            'bucket_name': 'silliness',
            'keybuilder_class': KeyBuilderBase,
            'prefix': 'dev',
            'calling_format': mock.Mock()
        })
        config.update(extra)
        s3_conn = resource_class(config)
        # stub out everything that would reach the network
        s3_conn._connect_to_endpoint = mock.Mock()
        s3_conn._mocked_connection = s3_conn._connect_to_endpoint.return_value
        s3_conn._calling_format.return_value = mock.Mock()
        s3_conn._CreateError = mock.Mock()
        s3_conn.ResponseError = mock.Mock()
        s3_conn._open = mock.MagicMock()

        return s3_conn
 def test_executor_identity(self):
     """executor_identity is the pid joined to the thread name."""
     config = DotDict()
     config.logger = self.logger
     manager = TaskManager(config, job_source_iterator=range(1))
     manager._pid = 666
     eq_(manager.executor_identity(), '666-MainThread')
    def test_executor_identity(self):
        """executor_identity formats as '<pid>-<thread name>'."""
        config = DotDict()
        config.logger = self.logger
        manager = TaskManager(config, job_source_iterator=range(1))
        manager._pid = 666
        eq_(manager.executor_identity(), '666-MainThread')
    def test_constuctor1(self):
        """A bare TaskManager keeps its config and falls back to defaults.

        Fixed to compare the quit flag with ``is False`` (identity against
        the singleton, per PEP 8) instead of ``== False``, matching the
        other copy of this test in the file.
        """
        config = DotDict()
        config.logger = self.logger
        config.quit_on_empty_queue = False

        tm = TaskManager(config)
        ok_(tm.config == config)
        ok_(tm.logger == self.logger)
        ok_(tm.task_func == default_task_func)
        ok_(tm.quit is False)
    def test_constuctor1(self):
        """A bare TaskManager exposes its config and default attributes."""
        config = DotDict()
        config.logger = self.logger
        config.quit_on_empty_queue = False

        manager = TaskManager(config)
        ok_(manager.config == config)
        ok_(manager.logger == self.logger)
        ok_(manager.task_func == default_task_func)
        # quit starts as the False singleton
        ok_(manager.quit is False)
    def test_transaction_ack_crash(self):
        """_transaction_ack_crash acks on the channel with the delivery tag."""
        config = self._setup_config()
        connection = Mock()
        crash_id = 'some-crash-id'
        ack_token = DotDict()
        ack_token.delivery_tag = 1

        storage = RabbitMQCrashStorage(config)
        storage._transaction_ack_crash(connection, crash_id, ack_token)

        connection.channel.basic_ack.assert_called_once_with(delivery_tag=1)
Example #15
0
    def test_transaction_ack_crash(self):
        """_transaction_ack_crash must basic_ack using the token's tag."""
        config = self._setup_config()
        connection = Mock()
        ack_token = DotDict()
        ack_token.delivery_tag = 1
        crash_id = 'some-crash-id'

        crash_store = RabbitMQCrashStorage(config)
        crash_store._transaction_ack_crash(connection, crash_id, ack_token)

        # the ack must carry the delivery tag from the ack token
        connection.channel.basic_ack.assert_called_once_with(delivery_tag=1)
    def _fake_processed_crash(self):
        """Build a nested DotDict fixture for redaction tests."""
        crash = DotDict()
        # these keys survive redaction
        crash.a = DotDict()
        crash.a.b = DotDict()
        crash.a.b.c = 11
        crash.sensitive = DotDict()
        crash.sensitive.x = 2
        crash.not_url = 'not a url'
        return crash
 def _setup_config(self):
     """Return a mocked configuration suitable for RabbitMQCrashStorage.

     The transaction executor and the rabbitmq class are mocks, so tests
     can assert on calls without a broker; throttle=100 means no sampling.
     """
     config = DotDict()
     config.transaction_executor_class = Mock()
     # three zero-length delays: retries happen instantly in tests
     config.backoff_delays = (0, 0, 0)
     config.logger = Mock()
     config.rabbitmq_class = MagicMock()
     config.routing_key = 'socorro.normal'
     config.filter_on_legacy_processing = True
     config.redactor_class = Redactor
     config.forbidden_keys = Redactor.required_config.forbidden_keys.default
     config.throttle = 100
     return config
 def test_constuctor1(self):
     """ThreadedTaskManager defaults mirror the plain TaskManager's."""
     config = DotDict()
     config.logger = self.logger
     config.number_of_threads = 1
     config.maximum_queue_size = 1
     manager = ThreadedTaskManager(config)
     try:
         ok_(manager.config == config)
         ok_(manager.logger == self.logger)
         ok_(manager.task_func == default_task_func)
         ok_(manager.quit is False)
     finally:
         # the constructor spawned worker threads; always tear them down
         manager._kill_worker_threads()
Example #19
0
    def test_save_raw_crash_normal(self):
        """save_raw_crash must honor the legacy_processing filter."""
        config = self._setup_config()
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(
            raw_crash=DotDict(),
            dumps=DotDict(),
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)
        config.logger.reset_mock()

        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict,
            crash_id='crash_id'
        )
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction,
            'crash_id'
        )
        crash_store.transaction.reset_mock()

        # test for save rejection because of "legacy_processing"
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict,
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)
    def test_blocking_start_with_quit_on_empty(self):
        """With quit_on_empty_queue set, blocking_start returns on its own."""
        config = DotDict()
        config.logger = self.logger
        config.idle_delay = 1
        config.quit_on_empty_queue = True

        manager = TaskManager(config, task_func=Mock())
        waiting_func = Mock()

        manager.blocking_start(waiting_func=waiting_func)

        eq_(manager.task_func.call_count, 10)
        eq_(waiting_func.call_count, 0)
Example #21
0
    def test_save_raw_crash_no_legacy(self):
        """With filter_on_legacy_processing off, every crash is saved.

        All three cases -- missing flag, flag 0, flag 5 -- must reach the
        transaction, unlike the filtered variant of this test.
        """
        config = self._setup_config()
        config.filter_on_legacy_processing = False
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(raw_crash=DotDict(),
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        config.logger.reset_mock()

        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict,
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        crash_store.transaction.reset_mock()

        # test for save without regard to "legacy_processing" value
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict,
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
 def test_constuctor1(self):
     """ThreadedTaskManager keeps its config and default task_func.

     Fixed to compare the quit flag with ``is False`` (identity against
     the singleton, per PEP 8) instead of ``== False``, matching the
     other copy of this test in the file.
     """
     config = DotDict()
     config.logger = self.logger
     config.number_of_threads = 1
     config.maximum_queue_size = 1
     ttm = ThreadedTaskManager(config)
     try:
         ok_(ttm.config == config)
         ok_(ttm.logger == self.logger)
         ok_(ttm.task_func == default_task_func)
         ok_(ttm.quit is False)
     finally:
         # we got threads to join
         ttm._kill_worker_threads()
    def test_bogus_source_iter_and_worker(self):
        """The app should run its own _create_iter through transform().

        A FetchTransformSaveApp subclass with mocked source/destination and
        a five-item iterator is expected to call transform exactly five
        times, once per generated job.
        """
        class TestFTSAppClass(FetchTransformSaveApp):
            def __init__(self, config):
                super(TestFTSAppClass, self).__init__(config)
                self.the_list = []

            def _setup_source_and_destination(self):
                # neither storage is exercised; mocks are sufficient
                self.source = Mock()
                self.destination = Mock()
                pass

            def _create_iter(self):
                # five jobs of the form ((x,), {})
                for x in xrange(5):
                    yield ((x, ), {})

            def transform(self, anItem):
                self.the_list.append(anItem)

        logger = SilentFakeLogger()
        config = DotDict({
            'logger':
            logger,
            'number_of_threads':
            2,
            'maximum_queue_size':
            2,
            'number_of_submissions':
            'all',
            'source':
            DotDict({'crashstorage_class': None}),
            'destination':
            DotDict({'crashstorage_class': None}),
            'producer_consumer':
            DotDict({
                'producer_consumer_class': TaskManager,
                'logger': logger,
                'number_of_threads': 1,
                'maximum_queue_size': 1
            })
        })

        fts_app = TestFTSAppClass(config)
        fts_app.main()
        ok_(
            len(fts_app.the_list) == 5, 'expected to do 5 inserts, '
            'but %d were done instead' % len(fts_app.the_list))
        # completion order is not guaranteed, hence sorted()
        ok_(
            sorted(fts_app.the_list) == range(5),
            'expected %s, but got %s' % (range(5), sorted(fts_app.the_list)))
        class FakeStorageSource(object):
            """In-memory stand-in for a crash storage source.

            Serves four canned raw crashes keyed by ooid.
            """
            def __init__(self, config, quit_check_callback):
                self.store = DotDict({'1234': DotDict({'ooid': '1234',
                                                       'Product': 'FireSquid',
                                                       'Version': '1.0'}),
                                      '1235': DotDict({'ooid': '1235',
                                                       'Product': 'ThunderRat',
                                                       'Version': '1.0'}),
                                      '1236': DotDict({'ooid': '1236',
                                                       'Product': 'Caminimal',
                                                       'Version': '1.0'}),
                                      '1237': DotDict({'ooid': '1237',
                                                       'Product': 'Fennicky',
                                                       'Version': '1.0'}),
                                     })

            def get_raw_crash(self, ooid):
                return self.store[ooid]

            def get_raw_dump(self, ooid):
                # every crash shares the same fake dump payload
                return 'this is a fake dump'

            def new_ooids(self):
                for k in self.store.keys():
                    yield k
Example #25
0
    def test_constructor(self):
        """Connection wires up the channel and declares the three queues.

        BUG FIX: the original called ``channel.called_once_with()`` -- not a
        Mock assertion at all; it silently created a child mock and checked
        nothing.  Replaced with the real ``assert_called_once_with()``.
        """
        faked_connection_object = Mock()
        config = DotDict()
        conn = Connection(
            config,
            faked_connection_object
        )
        ok_(conn.config is config)
        ok_(conn.connection is faked_connection_object)
        faked_connection_object.channel.assert_called_once_with()

        eq_(
            faked_connection_object.channel.return_value
                .queue_declare.call_count,
            3
        )
        expected_queue_declare_call_args = [
            call(queue='socorro.normal', durable=True),
            call(queue='socorro.priority', durable=True),
            call(queue='socorro.reprocessing', durable=True),
        ]
        eq_(
            faked_connection_object.channel.return_value.queue_declare.call_args_list,
            expected_queue_declare_call_args
        )
        class FakeStorageSource(object):
            """In-memory crash source with close() accounting.

            Serves four canned raw crashes keyed by ooid and counts how
            many times close() is called so tests can assert on shutdown.
            """
            def __init__(self, config, quit_check_callback):
                self.store = DotDict({'1234': DotDict({'ooid': '1234',
                                                       'Product': 'FireSquid',
                                                       'Version': '1.0'}),
                                      '1235': DotDict({'ooid': '1235',
                                                       'Product': 'ThunderRat',
                                                       'Version': '1.0'}),
                                      '1236': DotDict({'ooid': '1236',
                                                       'Product': 'Caminimal',
                                                       'Version': '1.0'}),
                                      '1237': DotDict({'ooid': '1237',
                                                       'Product': 'Fennicky',
                                                       'Version': '1.0'}),
                                     })
                self.number_of_close_calls = 0

            def close(self):
                # record the call instead of closing anything real
                self.number_of_close_calls += 1

            def get_raw_crash(self, ooid):
                return self.store[ooid]

            def get_raw_dumps(self, ooid):
                return {'upload_file_minidump': 'this is a fake dump',
                        'flash1': 'broken flash dump'}

            def new_crashes(self):
                for k in self.store.keys():
                    yield k
Example #27
0
 def test_close(self):
     """Connection.close delegates to the wrapped connection object."""
     faked_connection_object = Mock()
     config = DotDict()
     wrapper = Connection(config, faked_connection_object)
     wrapper.close()
     faked_connection_object.close.assert_called_once_with()
    def setup_mocked_s3_storage(
            self,
            executor=TransactionExecutor,
            executor_for_gets=TransactionExecutor,
            storage_class='BotoS3CrashStorage',
            host='',
            port=0):
        """Create a crash storage instance whose S3 connection is mocked.

        BUG FIX: an unrecognized ``storage_class`` used to fall through
        both branches and crash later with a NameError on ``s3``; it now
        raises ValueError up front.  Also removed a commented-out dead
        line (``s3_conn.ResponseError = mock.Mock()``).
        """
        config = DotDict({
            'source': {
                'dump_field': 'dump'
            },
            'transaction_executor_class': executor,
            'transaction_executor_class_for_get': executor_for_gets,
            'resource_class': S3ConnectionContext,
            'keybuilder_class': KeyBuilderBase,
            'backoff_delays': [0, 0, 0],
            'redactor_class': Redactor,
            'forbidden_keys': Redactor.required_config.forbidden_keys.default,
            'logger': mock.Mock(),
            'host': host,
            'port': port,
            'access_key': 'this is the access key',
            'secret_access_key': 'secrets',
            'temporary_file_system_storage_path': self.TEMPDIR,
            'dump_file_suffix': '.dump',
            'bucket_name': 'mozilla-support-reason',
            'prefix': 'dev',
            'calling_format': mock.Mock()
        })
        if storage_class == 'BotoS3CrashStorage':
            config.bucket_name = 'crash_storage'
            s3 = BotoS3CrashStorage(config)
        elif storage_class == 'SupportReasonAPIStorage':
            s3 = SupportReasonAPIStorage(config)
        else:
            raise ValueError('unknown storage_class: %r' % storage_class)
        # stub out everything that would reach the network
        s3_conn = s3.connection_source
        s3_conn._connect_to_endpoint = mock.Mock()
        s3_conn._mocked_connection = s3_conn._connect_to_endpoint.return_value
        s3_conn._calling_format.return_value = mock.Mock()
        s3_conn._CreateError = mock.Mock()
        s3_conn._open = mock.MagicMock()

        return s3
    def test_new_crash_duplicate_discovered(self):
        """ Tests queue with standard queue items only

        A crash id already present in the acknowledgement token cache must
        be suppressed by new_crashes() but still acknowledged on the
        channel.
        """
        config = self._setup_config()
        config.transaction_executor_class = TransactionExecutor
        crash_store = RabbitMQCrashStorage(config)
        crash_store.rabbitmq.config.standard_queue_name = 'socorro.normal'
        crash_store.rabbitmq.config.reprocessing_queue_name = \
            'socorro.reprocessing'
        crash_store.rabbitmq.config.priority_queue_name = 'socorro.priority'

        faked_methodframe = DotDict()
        faked_methodframe.delivery_tag = 'delivery_tag'
        # popped from the end: empty, then the normal crash, then empty
        test_queue = [
            (None, None, None),
            (faked_methodframe, '1', 'normal_crash_id'),
            (None, None, None),
        ]

        # NOTE(review): raising StopIteration from a callback consumed
        # inside a generator stops iterating only pre-PEP 479 (Python 2 /
        # early 3); under Python 3.7+ it would become a RuntimeError.
        def basic_get(queue='socorro.priority'):
            if len(test_queue) == 0:
                raise StopIteration
            return test_queue.pop()

        crash_store.rabbitmq.return_value.__enter__.return_value  \
            .channel.basic_get = MagicMock(side_effect=basic_get)

        transaction_connection = crash_store.transaction.db_conn_context_source \
            .return_value.__enter__.return_value

        # load the cache as if this crash had already been seen
        crash_store.acknowledgement_token_cache['normal_crash_id'] = \
            faked_methodframe

        for result in crash_store.new_crashes():
            # new crash should be suppressed
            eq_(None, result)

        # we should ack the new crash even though we did use it for processing
        transaction_connection.channel.basic_ack \
            .assert_called_with(
                delivery_tag=faked_methodframe.delivery_tag
            )
    def test_save_raw_crash_no_legacy(self):
        """With the legacy filter disabled, every crash reaches the queue."""
        config = self._setup_config()
        config.filter_on_legacy_processing = False
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(raw_crash=DotDict(), dumps=DotDict(),
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        config.logger.reset_mock()

        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(raw_crash=raw_crash, dumps=DotDict,
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        crash_store.transaction.reset_mock()

        # test for save without regard to "legacy_processing" value
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(raw_crash=raw_crash, dumps=DotDict,
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
Example #31
0
    def test_new_crash_duplicate_discovered(self):
        """ Tests queue with standard queue items only

        A crash id already cached as acknowledged must be suppressed by
        new_crashes() yet still acked on the channel.
        """
        config = self._setup_config()
        config.transaction_executor_class = TransactionExecutor
        crash_store = RabbitMQCrashStorage(config)
        crash_store.rabbitmq.config.standard_queue_name = 'socorro.normal'
        crash_store.rabbitmq.config.reprocessing_queue_name = \
            'socorro.reprocessing'
        crash_store.rabbitmq.config.priority_queue_name = 'socorro.priority'

        faked_methodframe = DotDict()
        faked_methodframe.delivery_tag = 'delivery_tag'
        # popped from the end: empty, then the normal crash, then empty
        test_queue = [
            (None, None, None),
            (faked_methodframe, '1', 'normal_crash_id'),
            (None, None, None),
        ]

        # NOTE(review): StopIteration as a loop terminator relies on
        # pre-PEP 479 behavior (Python 2); it would be a RuntimeError on 3.7+
        def basic_get(queue='socorro.priority'):
            if len(test_queue) == 0:
                raise StopIteration
            return test_queue.pop()

        crash_store.rabbitmq.return_value.__enter__.return_value  \
            .channel.basic_get = MagicMock(side_effect=basic_get)

        transaction_connection = crash_store.transaction.db_conn_context_source \
            .return_value.__enter__.return_value

        # load the cache as if this crash had already been seen
        crash_store.acknowledgement_token_cache['normal_crash_id'] = \
            faked_methodframe

        for result in crash_store.new_crashes():
            # new crash should be suppressed
            eq_(None, result)

        # we should ack the new crash even though we did use it for processing
        transaction_connection.channel.basic_ack \
            .assert_called_with(
                delivery_tag=faked_methodframe.delivery_tag
            )
    def test_blocking_start_with_quit_on_empty(self):
        """blocking_start exits by itself once the queue is exhausted."""
        config = DotDict()
        config.logger = self.logger
        config.idle_delay = 1
        config.quit_on_empty_queue = True

        manager = TaskManager(config, task_func=Mock())
        waiting_func = Mock()

        manager.blocking_start(waiting_func=waiting_func)

        eq_(manager.task_func.call_count, 10)
        eq_(waiting_func.call_count, 0)
 def _get_raw_crash_from_form(self):
     """Create the raw_crash and the dumps mapping from the POST form.

     Returns a (raw_crash, dumps) pair.  Form fields become raw_crash
     entries (with NUL characters stripped), uploaded files go into the
     dumps mapping with a checksum recorded under dump_checksums.
     """
     dumps = MemoryDumpsMapping()
     raw_crash = DotDict()
     raw_crash.dump_checksums = DotDict()
     for name, value in self._form_as_mapping().iteritems():
         # field names may also contain NUL characters
         name = self._no_x00_character(name)
         if isinstance(value, basestring):
             # plain text field; a client-supplied "dump_checksums" field
             # is ignored so it cannot clobber our computed checksums
             if name != "dump_checksums":
                 raw_crash[name] = self._no_x00_character(value)
         elif hasattr(value, 'file') and hasattr(value, 'value'):
             # an uploaded file: keep the bytes and record its checksum
             dumps[name] = value.value
             raw_crash.dump_checksums[name] = \
                 self.checksum_method(value.value).hexdigest()
         elif isinstance(value, int):
             raw_crash[name] = value
         else:
             # fallback: presumably another cgi field-like object -- TODO confirm
             raw_crash[name] = value.value
     return raw_crash, dumps
 def _get_raw_crash_from_form(self):
     """Build the (raw_crash, dumps) pair from the POST form contents."""
     dumps = MemoryDumpsMapping()
     raw_crash = DotDict()
     raw_crash.dump_checksums = DotDict()
     for field_name, field_value in self._form_as_mapping().iteritems():
         field_name = self._no_x00_character(field_name)
         if isinstance(field_value, basestring):
             # plain text field; never accept a client-sent "dump_checksums"
             if field_name != "dump_checksums":
                 raw_crash[field_name] = self._no_x00_character(field_value)
         elif hasattr(field_value, 'file') and hasattr(field_value, 'value'):
             # uploaded file: keep the bytes and record their checksum
             dumps[field_name] = field_value.value
             checksum = self.checksum_method(field_value.value)
             raw_crash.dump_checksums[field_name] = checksum.hexdigest()
         elif isinstance(field_value, int):
             raw_crash[field_name] = field_value
         else:
             raw_crash[field_name] = field_value.value
     return raw_crash, dumps
    def test_blocking_start_with_quit_on_empty(self):
        """Threaded variant: blocking_start drains the queue, then returns."""
        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 2
        config.maximum_queue_size = 2
        config.quit_on_empty_queue = True

        manager = ThreadedTaskManager(config, task_func=Mock())
        waiting_func = Mock()

        manager.blocking_start(waiting_func=waiting_func)

        eq_(manager.task_func.call_count, 10)
 def test_start1(self):
     """start() spawns the queuing thread plus one worker; stop() halts them.

     BUG FIX: the except clause joined the threads but swallowed the
     exception, so a failed ok_() could never fail this test.  It now
     re-raises, matching the sibling tests in this file.  Also compares
     isAlive() to False with ``is`` rather than ``==``.
     """
     config = DotDict()
     config.logger = self.logger
     config.number_of_threads = 1
     config.maximum_queue_size = 1
     ttm = ThreadedTaskManager(config)
     try:
         ttm.start()
         time.sleep(0.2)
         ok_(ttm.queuing_thread.isAlive(),
                         "the queing thread is not running")
         ok_(len(ttm.thread_list) == 1,
                         "where's the worker thread?")
         ok_(ttm.thread_list[0].isAlive(),
                         "the worker thread is stillborn")
         ttm.stop()
         ok_(ttm.queuing_thread.isAlive() is False,
                         "the queuing thread did not stop")
     except Exception:
         # we got threads to join
         ttm.wait_for_completion()
         raise
 def test_start1(self):
     """start() spawns the queuing thread plus one worker; stop() halts them."""
     config = DotDict()
     config.logger = self.logger
     config.number_of_threads = 1
     config.maximum_queue_size = 1
     ttm = ThreadedTaskManager(config)
     try:
         ttm.start()
         # assumes 0.2s is enough for the threads to spin up -- timing sensitive
         time.sleep(0.2)
         ok_(ttm.queuing_thread.isAlive(),
                         "the queing thread is not running")
         ok_(len(ttm.thread_list) == 1,
                         "where's the worker thread?")
         ok_(ttm.thread_list[0].isAlive(),
                         "the worker thread is stillborn")
         ttm.stop()
         ok_(ttm.queuing_thread.isAlive() is False,
                         "the queuing thread did not stop")
     except Exception:
         # NOTE(review): unlike sibling tests, the exception is not
         # re-raised here, so assertion failures above are silently
         # swallowed after the threads are joined -- verify intent
         # we got threads to join
         ttm.wait_for_completion()
 def __init__(self, config, quit_check_callback):
     """Seed the fake store with four canned raw crashes, keyed by ooid."""
     seed = (
         ('1234', 'FireSquid'),
         ('1235', 'ThunderRat'),
         ('1236', 'Caminimal'),
         ('1237', 'Fennicky'),
     )
     self.store = DotDict()
     for ooid, product in seed:
         self.store[ooid] = DotDict({
             'ooid': ooid,
             'Product': product,
             'Version': '1.0',
         })
     # counted so tests can assert that close() was invoked
     self.number_of_close_calls = 0
    def test_get_iterator(self):
        """_get_iterator must accept a sequence, a callable, or an iterable."""
        config = DotDict()
        config.logger = self.logger
        config.quit_on_empty_queue = False

        # a plain sequence is handed back as-is (Python 2 range -> list)
        tm = TaskManager(
            config,
            job_source_iterator=range(1),
        )
        eq_(tm._get_iterator(), [0])

        # a callable is invoked to produce the iterator
        def an_iter(self):
            for i in range(5):
                yield i

        tm = TaskManager(
            config,
            job_source_iterator=an_iter,
        )
        eq_(
            [x for x in tm._get_iterator()],
            [0, 1, 2, 3, 4]
        )

        # an arbitrary iterable object is iterated directly
        class X(object):
            def __init__(self, config):
                self.config = config

            def __iter__(self):
                for key in self.config:
                    yield key

        tm = TaskManager(
            config,
            job_source_iterator=X(config)
        )
        eq_(
            [x for x in tm._get_iterator()],
            [y for y in config.keys()]
        )
 def __init__(self, config, quit_check_callback):
     """Seed the fake crash source with four canned raw crashes."""
     products = (
         ('1234', 'FireSquid'),
         ('1235', 'ThunderRat'),
         ('1236', 'Caminimal'),
         ('1237', 'Fennicky'),
     )
     store = {}
     for ooid, product in products:
         store[ooid] = DotDict({
             'ooid': ooid,
             'Product': product,
             'Version': '1.0',
         })
     self.store = DotDict(store)
    def test_save_raw_crash_normal(self):
        """save_raw_crash must queue a crash only when legacy_processing == 0."""
        config = self._setup_config()
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(
            raw_crash=DotDict(),
            dumps=DotDict(),
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)
        config.logger.reset_mock()

        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),  # was the DotDict class itself - pass an instance
            crash_id='crash_id'
        )
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction,
            'crash_id'
        )
        crash_store.transaction.reset_mock()

        # test for save rejection because of "legacy_processing"
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)
    def test_no_source(self):
        """main() must raise TypeError when no source class is configured."""
        class FakeStorageDestination(object):
            """Destination stub that records saved crashes in memory."""

            def __init__(self, config, quit_check_callback):
                self.store = DotDict()
                self.dumps = DotDict()

            def save_raw_crash(self, raw_crash, dump, crash_id):
                self.store[crash_id] = raw_crash
                self.dumps[crash_id] = dump

        logger = SilentFakeLogger()
        config = DotDict({
            'logger': logger,
            'number_of_threads': 2,
            'maximum_queue_size': 2,
            'number_of_submissions': 'forever',
            'source': DotDict({'crashstorage_class': None}),
            'destination': DotDict({'crashstorage_class': FakeStorageDestination}),
            'producer_consumer': DotDict({
                'producer_consumer_class': ThreadedTaskManager,
                'logger': logger,
                'number_of_threads': 1,
                'maximum_queue_size': 1
            }),
        })

        fts_app = FetchTransformSaveApp(config)

        # a None source class makes the app blow up when main() wires it in
        assert_raises(TypeError, fts_app.main)
        class FakeStorageSource(object):
            """Crash-source stub: four canned crashes, two fake dumps each."""

            def __init__(self, config, quit_check_callback):
                self.store = DotDict({
                    '1234': DotDict({'ooid': '1234',
                                     'Product': 'FireSquid',
                                     'Version': '1.0'}),
                    '1235': DotDict({'ooid': '1235',
                                     'Product': 'ThunderRat',
                                     'Version': '1.0'}),
                    '1236': DotDict({'ooid': '1236',
                                     'Product': 'Caminimal',
                                     'Version': '1.0'}),
                    '1237': DotDict({'ooid': '1237',
                                     'Product': 'Fennicky',
                                     'Version': '1.0'}),
                })
                # counted so tests can assert that close() happened
                self.number_of_close_calls = 0

            def close(self):
                self.number_of_close_calls += 1

            def get_raw_crash(self, ooid):
                return self.store[ooid]

            def get_raw_dumps(self, ooid):
                # the same pair of fake dumps regardless of ooid
                return {
                    'upload_file_minidump': 'this is a fake dump',
                    'flash1': 'broken flash dump'
                }

            def new_crashes(self):
                for k in self.store.keys():
                    yield k
        class FakeStorageSource(object):
            """Crash-source stub exposing the older single-dump interface."""

            def __init__(self, config, quit_check_callback):
                # four canned crashes keyed by ooid
                crashes = (
                    ('1234', 'FireSquid'),
                    ('1235', 'ThunderRat'),
                    ('1236', 'Caminimal'),
                    ('1237', 'Fennicky'),
                )
                self.store = DotDict()
                for ooid, product in crashes:
                    self.store[ooid] = DotDict({
                        'ooid': ooid,
                        'Product': product,
                        'Version': '1.0',
                    })

            def get_raw_crash(self, ooid):
                return self.store[ooid]

            def get_raw_dump(self, ooid):
                # identical fake dump for every ooid
                return 'this is a fake dump'

            def new_ooids(self):
                for k in self.store.keys():
                    yield k
    def test_task_raises_unexpected_exception(self):
        """A task that raises is skipped; all other queued jobs still run."""
        global count
        count = 0

        def new_iter():
            for x in xrange(10):
                yield (x,)

        my_list = []

        def insert_into_list(anItem):
            # blow up on the 4th call so item 3 is never appended
            global count
            count += 1
            if count == 4:
                raise Exception('Unexpected')
            my_list.append(anItem)

        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 1
        config.maximum_queue_size = 1
        config.job_source_iterator = new_iter
        config.task_func = insert_into_list
        ttm = ThreadedTaskManagerWithConfigSetup(config)
        try:
            ttm.start()
            time.sleep(0.2)
            # item 3 is dropped by the raising task; the rest must survive
            expected_items = [0, 1, 2, 4, 5, 6, 7, 8, 9]
            ok_(len(ttm.thread_list) == 1,
                            "expected 1 threads, but found %d"
                              % len(ttm.thread_list))
            # the message now reports the same list the assertion checks
            ok_(sorted(my_list) == expected_items,
                            'expected %s, but got %s'
                              % (expected_items, sorted(my_list)))
            ok_(len(my_list) == 9,
                            'expected to do 9 inserts, '
                              'but %d were done instead' % len(my_list))
        except Exception:
            # we got threads to join
            ttm.wait_for_completion()
            raise
    def test_blocking_start_with_quit_on_empty(self):
        """blocking_start() must return on its own when the queue drains.

        With quit_on_empty_queue set, the manager stops after consuming
        its job source; the mocked task function must have been invoked
        10 times (once per queued job).
        """
        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 2
        config.maximum_queue_size = 2
        config.quit_on_empty_queue = True  # stop once the queue is empty

        tm = ThreadedTaskManager(
            config,
            task_func=Mock()
        )

        waiting_func = Mock()

        tm.blocking_start(waiting_func=waiting_func)

        # one task_func call per job supplied by the manager's job source
        eq_(
            tm.task_func.call_count,
            10
        )
    def test_task_raises_unexpected_exception(self):
        """A task that raises is skipped; all other queued jobs still run."""
        global count
        count = 0

        def new_iter():
            for x in xrange(10):
                yield (x,)

        my_list = []

        def insert_into_list(anItem):
            # blow up on the 4th call so item 3 is never appended
            global count
            count += 1
            if count == 4:
                raise Exception('Unexpected')
            my_list.append(anItem)

        config = DotDict()
        config.logger = self.logger
        config.number_of_threads = 1
        config.maximum_queue_size = 1
        config.job_source_iterator = new_iter
        config.task_func = insert_into_list
        ttm = ThreadedTaskManagerWithConfigSetup(config)
        try:
            ttm.start()
            time.sleep(0.2)
            # item 3 is dropped by the raising task; the rest must survive
            expected_items = [0, 1, 2, 4, 5, 6, 7, 8, 9]
            ok_(len(ttm.thread_list) == 1,
                            "expected 1 threads, but found %d"
                              % len(ttm.thread_list))
            # the message now reports the same list the assertion checks
            ok_(sorted(my_list) == expected_items,
                            'expected %s, but got %s'
                              % (expected_items, sorted(my_list)))
            ok_(len(my_list) == 9,
                            'expected to do 9 inserts, '
                              'but %d were done instead' % len(my_list))
        except Exception:
            # we got threads to join
            ttm.wait_for_completion()
            raise
 def _setup_config(self):
     """Return a minimal config whose crashstorage class is the fake store."""
     config = DotDict()
     config.crashstorage_class = FakeCrashStore
     return config
 def __init__(self, config, quit_check_callback):
     """Create empty in-memory containers for saved crashes and dumps."""
     self.dumps = DotDict()
     self.store = DotDict()
    def test_no_destination(self):
        """main() must raise TypeError when no destination class is configured."""
        class FakeStorageSource(object):
            """Source stub holding four canned crashes."""

            def __init__(self, config, quit_check_callback):
                self.store = DotDict({
                    '1234': DotDict({'ooid': '1234',
                                     'Product': 'FireSquid',
                                     'Version': '1.0'}),
                    '1235': DotDict({'ooid': '1235',
                                     'Product': 'ThunderRat',
                                     'Version': '1.0'}),
                    '1236': DotDict({'ooid': '1236',
                                     'Product': 'Caminimal',
                                     'Version': '1.0'}),
                    '1237': DotDict({'ooid': '1237',
                                     'Product': 'Fennicky',
                                     'Version': '1.0'}),
                })

            def get_raw_crash(self, ooid):
                return self.store[ooid]

            def get_raw_dump(self, ooid):
                return 'this is a fake dump'

            def new_ooids(self):
                for k in self.store.keys():
                    yield k

        logger = SilentFakeLogger()
        config = DotDict({
            'logger': logger,
            'number_of_threads': 2,
            'maximum_queue_size': 2,
            'number_of_submissions': 'forever',
            'source': DotDict({'crashstorage_class': FakeStorageSource}),
            'destination': DotDict({'crashstorage_class': None}),
            'producer_consumer': DotDict({
                'producer_consumer_class': ThreadedTaskManager,
                'logger': logger,
                'number_of_threads': 1,
                'maximum_queue_size': 1
            }),
        })

        fts_app = FetchTransformSaveApp(config)

        # a None destination class makes the app blow up when main() wires it in
        assert_raises(TypeError, fts_app.main)
    def _setup_config(self):
        """Return a DotDict describing a local RabbitMQ connection.

        Credentials are placeholders; executor_identity always reports
        'MainThread'.
        """
        config = DotDict()
        config.host = 'localhost'
        config.virtual_host = '/'
        config.port = '5672'
        config.rabbitmq_user = '******'
        config.rabbitmq_password = '******'
        config.standard_queue_name = 'dwight'
        config.priority_queue_name = 'wilma'
        config.reprocessing_queue_name = 'betty'
        config.rabbitmq_connection_wrapper_class = Connection

        config.executor_identity = lambda: 'MainThread'

        return config
def testLegacyThrottler():
    """Exercise LegacyThrottler condition matching in both phases.

    Phase 1: a throttle condition may be a compiled regex, a plain string
    (tested by equality), a callable predicate, or a bare boolean; the
    first matching condition determines ACCEPT/DEFER/DISCARD plus the
    throttle percentage.  Phase 2: with never_discard set, an unmatched
    crash is DEFERred, and a None percentage produces IGNORE.

    NOTE: throttle() results are (state, percentage) tuples, so their
    assertion messages use %s; the original %d raised TypeError whenever
    one of those assertions actually failed.
    """

    # phase 1 tests

    config = DotDict()
    config.throttle_conditions = [ ('alpha', re.compile('ALPHA'), 100),
                                   ('beta',  'BETA', 100),
                                   ('gamma', lambda x: x == 'GAMMA', 100),
                                   ('delta', True, 100),
                                   (None, True, 0)
                                  ]
    config.minimal_version_for_understanding_refusal = {
      'product1': '3.5',
      'product2': '4.0'
    }
    config.never_discard = False
    config.logger = mock.Mock()
    thr = LegacyThrottler(config)
    expected = 5
    actual = len(thr.processed_throttle_conditions)
    assert expected == actual, \
      "expected thr.preprocessThrottleConditions to have length %d, but got " \
      "%d instead" % (expected, actual)

    # version 3.0 is below product1's 3.5 minimum: refusal not understood
    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.0',
                          'alpha':'ALPHA',
                          })
    expected = False
    actual = thr.understands_refusal(raw_crash)
    assert expected == actual, \
      "understand refusal expected %d, but got %d instead" % (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'alpha':'ALPHA',
                          })
    expected = True
    actual = thr.understands_refusal(raw_crash)
    assert expected == actual, \
      "understand refusal expected %d, but got %d instead" % (expected, actual)

    expected = (ACCEPT, 100)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "regexp throttle expected %s, but got %s instead" % (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.4',
                          'alpha':'not correct',
                          })
    expected = (DEFER, 0)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "regexp throttle expected %s, but got %s instead" % (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'alpha':'not correct',
                          })
    expected = (DISCARD, 0)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "regexp throttle expected %s, but got %s instead" % (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'beta':'BETA',
                          })
    expected = (ACCEPT, 100)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "string equality throttle expected %s, but got %s instead" % \
      (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'beta':'not BETA',
                          })
    expected = (DISCARD, 0)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "string equality throttle expected %s, but got %s instead" % \
      (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'gamma':'GAMMA',
                          })
    expected = (ACCEPT, 100)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "string equality throttle expected %s, but got %s instead" % \
      (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'gamma':'not GAMMA',
                          })
    expected = (DISCARD, 0)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "string equality throttle expected %s, but got %s instead" % \
      (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'delta':"value doesn't matter",
                          })
    expected = (ACCEPT, 100)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "string equality throttle expected %s, but got %s instead" % \
      (expected, actual)

    # phase 2 tests

    config = DotDict()
    config.throttle_conditions = [
      ('*', lambda x: 'alpha' in x, None),
      ('*', lambda x: x['beta'] == 'BETA', 100),
    ]
    config.minimal_version_for_understanding_refusal = {
      'product1': '3.5',
      'product2': '4.0'
    }
    config.never_discard = True
    config.logger = mock.Mock()
    thr = LegacyThrottler(config)
    expected = 2
    actual = len(thr.processed_throttle_conditions)
    assert expected == actual, \
      "expected thr.preprocessThrottleConditions to have length %d, but got " \
      "%d instead" % (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'beta': 'ugh',
                          'alpha':"value doesn't matter",
                          })
    expected = (IGNORE, None)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "IGNORE expected %s, but got %s instead" % \
      (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'beta': 'ugh',
                          'delta':"value doesn't matter",
                          })
    expected = (DEFER, 0)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "DEFER expected %s, but got %s instead" % \
      (expected, actual)

    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'beta': 'BETA',
                          'alpha':"value doesn't matter",
                          })
    expected = (IGNORE, None)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "IGNORE expected %s, but got %s instead" % \
      (expected, actual)
    raw_crash = DotDict({ 'ProductName':'product1',
                          'Version':'3.6',
                          'beta': 'BETA',
                          'delta':"value doesn't matter",
                          })
    expected = (ACCEPT, 100)
    actual = thr.throttle(raw_crash)
    assert expected == actual, \
      "ACCEPT expected %s, but got %s instead" % \
      (expected, actual)
    def test_bogus_source_and_destination(self):
        """Run the app end to end over a finite source.

        All four crashes must be copied from the fake source to the fake
        destination, dumps included, and each storage system must be
        closed exactly once.
        """
        class NonInfiniteFTSAppClass(FetchTransformSaveApp):
            # finite version of the app's iterator so main() terminates
            def _basic_iterator(self):
                for x in self.source.new_crashes():
                    yield ((x, ), {})

        class FakeStorageSource(object):
            # in-memory source holding four canned crashes keyed by ooid
            def __init__(self, config, quit_check_callback):
                self.store = DotDict({
                    '1234':
                    DotDict({
                        'ooid': '1234',
                        'Product': 'FireSquid',
                        'Version': '1.0'
                    }),
                    '1235':
                    DotDict({
                        'ooid': '1235',
                        'Product': 'ThunderRat',
                        'Version': '1.0'
                    }),
                    '1236':
                    DotDict({
                        'ooid': '1236',
                        'Product': 'Caminimal',
                        'Version': '1.0'
                    }),
                    '1237':
                    DotDict({
                        'ooid': '1237',
                        'Product': 'Fennicky',
                        'Version': '1.0'
                    }),
                })
                # counted so the test can verify shutdown happened
                self.number_of_close_calls = 0

            def get_raw_crash(self, ooid):
                return self.store[ooid]

            def get_raw_dumps(self, ooid):
                # the same single fake dump for every ooid
                return {'upload_file_minidump': 'this is a fake dump'}

            def new_crashes(self):
                for k in self.store.keys():
                    yield k

            def close(self):
                self.number_of_close_calls += 1

        class FakeStorageDestination(object):
            # records everything saved so the test can inspect it afterwards
            def __init__(self, config, quit_check_callback):
                self.store = DotDict()
                self.dumps = DotDict()
                self.number_of_close_calls = 0

            def save_raw_crash(self, raw_crash, dump, crash_id):
                self.store[crash_id] = raw_crash
                self.dumps[crash_id] = dump

            def close(self):
                self.number_of_close_calls += 1

        logger = SilentFakeLogger()
        config = DotDict({
            'logger':
            logger,
            'number_of_threads':
            2,
            'maximum_queue_size':
            2,
            'number_of_submissions':
            'all',
            'source':
            DotDict({'crashstorage_class': FakeStorageSource}),
            'destination':
            DotDict({'crashstorage_class': FakeStorageDestination}),
            'producer_consumer':
            DotDict({
                'producer_consumer_class': ThreadedTaskManager,
                'logger': logger,
                'number_of_threads': 1,
                'maximum_queue_size': 1
            })
        })

        fts_app = NonInfiniteFTSAppClass(config)
        fts_app.main()

        source = fts_app.source
        destination = fts_app.destination

        # every crash in the source must have landed in the destination
        eq_(source.store, destination.store)
        eq_(len(destination.dumps), 4)
        eq_(destination.dumps['1237'], source.get_raw_dumps('1237'))
        # ensure that each storage system had its close called
        eq_(source.number_of_close_calls, 1)
        eq_(destination.number_of_close_calls, 1)
Exemple #54
0
    def test_save_raw_crash_normal_throttle(self, randint_mock):
        """With throttle=50, draws of 49 and 50 save; 51 does not.

        legacy_processing must also be 0 for a save to be queued.
        """
        # scripted "random" values, consumed one per save attempt
        random_ints = [100, 49, 50, 51, 1, 100]

        def side_effect(*args, **kwargs):
            return random_ints.pop(0)

        randint_mock.side_effect = side_effect

        config = self._setup_config()
        config.throttle = 50
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash #0: 100
        crash_store.save_raw_crash(raw_crash=DotDict(),
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        ok_(not crash_store.transaction.called)
        config.logger.reset_mock()

        # test for normal save #1: 49
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),  # instance, not the class
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        crash_store.transaction.reset_mock()

        # test for normal save #2: 50
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        crash_store.transaction.reset_mock()

        # test for throttled (rejected) save #3: 51
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        ok_(not crash_store.transaction.called)
        crash_store.transaction.reset_mock()

        # test for save rejection because of "legacy_processing" #4: 1
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        ok_(not crash_store.transaction.called)

        # test for save rejection because of "legacy_processing" #5: 100
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        ok_(not crash_store.transaction.called)
Exemple #55
0
    def _setup_config(self):
        """Build a RabbitMQ connection config pointing at a local broker."""
        config = DotDict({
            'host': 'localhost',
            'virtual_host': '/',
            'port': '5672',
            'rabbitmq_user': '******',
            'rabbitmq_password': '******',
            'standard_queue_name': 'dwight',
            'priority_queue_name': 'wilma',
            'reprocessing_queue_name': 'betty',
            'rabbitmq_connection_wrapper_class': Connection,
        })

        # identity reported to the transaction machinery
        config.executor_identity = lambda: 'MainThread'

        return config
Exemple #56
0
 def _setup_config(self):
     """Build a fully-mocked config for RabbitMQCrashStorage tests."""
     config = DotDict()
     config.logger = Mock()
     config.transaction_executor_class = Mock()
     config.rabbitmq_class = MagicMock()
     config.backoff_delays = (0, 0, 0)  # no waiting between retries
     config.routing_key = 'socorro.normal'
     config.filter_on_legacy_processing = True
     config.redactor_class = Redactor
     config.forbidden_keys = Redactor.required_config.forbidden_keys.default
     config.throttle = 100
     return config
    def test_save_raw_crash_normal_throttle(self, randint_mock):
        """With throttle=50, draws of 49 and 50 save; 51 does not.

        legacy_processing must also be 0 for a save to be queued.
        """
        # scripted "random" values, consumed one per save attempt
        random_ints = [100, 49, 50, 51, 1, 100]

        def side_effect(*args, **kwargs):
            return random_ints.pop(0)

        randint_mock.side_effect = side_effect

        config = self._setup_config()
        config.throttle = 50
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash #0: 100
        crash_store.save_raw_crash(
            raw_crash=DotDict(),
            dumps=DotDict(),
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)
        config.logger.reset_mock()

        # test for normal save #1: 49
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),  # was the DotDict class itself - pass an instance
            crash_id='crash_id'
        )
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction,
            'crash_id'
        )
        crash_store.transaction.reset_mock()

        # test for normal save #2: 50
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction,
            'crash_id'
        )
        crash_store.transaction.reset_mock()

        # test for throttled (rejected) save #3: 51
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)
        crash_store.transaction.reset_mock()

        # test for save rejection because of "legacy_processing" #4: 1
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)

        # test for save rejection because of "legacy_processing" #5: 100
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        ok_(not crash_store.transaction.called)