Example #1
    def test_action_case_4(self):
        """nothing in the first dump; sentinel but no secondary in the
        upload_file_minidump_flash2 dump"""
        pc = DotDict()
        pc.dump = DotDict()
        pijd = copy.deepcopy(cannonical_json_dump)
        pc.dump.json_dump = pijd
        f2jd = copy.deepcopy(cannonical_json_dump)
        pc.upload_file_minidump_flash2 = DotDict()
        pc.upload_file_minidump_flash2.json_dump = f2jd
        pc.upload_file_minidump_flash2.json_dump['crashing_thread']['frames'][2] \
            ['function'] = 'NtUserSetWindowPos'

        fake_processor = create_basic_fake_processor()

        rc = DotDict()
        rd = {}
        rule = SetWindowPos()
        action_result = rule.action(rc, rd, pc, fake_processor)

        ok_(action_result)
        ok_('classifications' in pc)
        ok_('skunk_works' in pc.classifications)
        eq_(pc.classifications.skunk_works.classification,
            'NtUserSetWindowPos | other')
Example #2
    def test_action_case_2(self):
        """sentinel exists in stack, plus one secondary"""
        pc = DotDict()
        pc.process_type = 'plugin'
        pijd = copy.deepcopy(cannonical_json_dump)
        pc.json_dump = pijd
        pc.json_dump['crashing_thread']['frames'][2]['function'] = \
            'NtUserSetWindowPos'
        pc.json_dump['crashing_thread']['frames'][4]['function'] = \
            'F_1378698112'
        f2jd = copy.deepcopy(cannonical_json_dump)
        pc.upload_file_minidump_flash2 = DotDict()
        pc.upload_file_minidump_flash2.json_dump = f2jd

        fake_processor = create_basic_fake_processor()

        rc = DotDict()
        rd = {}
        rule = SetWindowPos()
        action_result = rule.action(rc, rd, pc, fake_processor)

        ok_(action_result)
        ok_('classifications' in pc)
        ok_('skunk_works' in pc.classifications)
        eq_(pc.classifications.skunk_works.classification,
            'NtUserSetWindowPos | F_1378698112')
Example #3
    def test_put_with_data(self, logging_info):
        # what the middleware app does is that it creates a class based on
        # another and sets an attribute called `cls`
        class MadeUp(middleware_app.ImplementationWrapper):
            cls = AuxImplementation4
            all_services = {}

        config = DotDict(
            logger=logging,
            web_server=DotDict(
                ip_address='127.0.0.1',
                port='88888'
            )
        )

        server = CherryPy(config, (
            ('/aux/(.*)', MadeUp),
        ))

        testapp = TestApp(server._wsgi_func)
        response = testapp.put('/aux/', params={'add': 1})
        eq_(response.status, 200)
        eq_(json.loads(response.body), {'age': 101})

        logging_info.assert_called_with('Running AuxImplementation4')
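
The comment in the test above ("it creates a class based on another and sets an attribute called `cls`") describes a small pattern: the wrapper class is parameterized by a cls class attribute rather than by constructor arguments. A minimal, dependency-free sketch of that idea follows; all names below are made up for illustration and are not the real middleware_app API.

# Illustrative sketch only; not the real middleware_app.ImplementationWrapper.
class WrapperSketch(object):
    cls = None  # subclasses pin the implementation class here

    def __init__(self):
        # instantiate whatever implementation the subclass selected
        self.implementation = self.cls()

    def run(self):
        return self.implementation.get_age()


class FakeImplementation(object):
    def get_age(self):
        return {'age': 100}


class MadeUpWrapper(WrapperSketch):
    cls = FakeImplementation


assert MadeUpWrapper().run() == {'age': 100}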
Example #4
    def __init__(self, config, quit_check_callback):
        self.store = DotDict({
            '1234': DotDict({
                'ooid': '1234',
                'Product': 'FireSquid',
                'Version': '1.0'
            }),
            '1235': DotDict({
                'ooid': '1235',
                'Product': 'ThunderRat',
                'Version': '1.0'
            }),
            '1236': DotDict({
                'ooid': '1236',
                'Product': 'Caminimal',
                'Version': '1.0'
            }),
            '1237': DotDict({
                'ooid': '1237',
                'Product': 'Fennicky',
                'Version': '1.0'
            }),
        })
        self.number_of_close_calls = 0
Example #5
    def _add_classification(self,
                            processed_crash,
                            classification,
                            classification_data,
                            logger=None):
        """This method adds a 'support' classification to a processed
        crash.

        parameters:
            processed_crash - a reference to the processed crash to which the
                              classification is to be added.
            classification - a string that is the classification.
            classification_data - a string of extra data that goes along with
                                  a classification.
            logger - an optional logger; when provided, the classification is
                     logged at debug level.
        """
        if 'classifications' not in processed_crash:
            processed_crash['classifications'] = DotDict()
        processed_crash['classifications']['support'] = DotDict({
            'classification': classification,
            'classification_data': classification_data,
            'classification_version': self.version()
        })
        if logger:
            logger.debug('Support classification: %s', classification)
        return True
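
For reference, a successful call to _add_classification leaves the processed crash with a nested structure shaped like the sketch below (plain dicts shown for clarity; the classification and data values are illustrative, borrowed from the tests elsewhere on this page).

processed_crash = {
    'classifications': {
        'support': {
            'classification': 'update-firefox-latest-version',
            'classification_data': 'some extra data',
            'classification_version': '0.0',
        }
    }
}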
Example #6
    def test_basic_get_with_parsed_query_string(self, logging_info):
        # what the middleware app does is that it creates a class based on
        # another and sets an attribute called `cls`
        class MadeUp(middleware_app.ImplementationWrapper):
            cls = AuxImplementation5
            all_services = {}

        config = DotDict(
            logger=logging,
            web_server=DotDict(
                ip_address='127.0.0.1',
                port='88888'
            )
        )
        server = CherryPy(config, (
            ('/aux/(.*)', MadeUp),
        ))

        testapp = TestApp(server._wsgi_func)
        response = testapp.get(
            '/aux/',
            {'foo': 'bar', 'names': ['peter', 'anders']},
        )
        eq_(response.status, 200)
        eq_(json.loads(response.body),
                         {'foo': 'bar',
                          'names': ['peter', 'anders']})

        logging_info.assert_called_with('Running AuxImplementation5')
Example #7
    def test_predicate(self):
        jd = copy.deepcopy(cannonical_json_dump)
        processed_crash = DotDict()
        processed_crash.json_dump = jd
        raw_crash = DotDict()
        raw_crash.ProductName = 'Firefox'
        raw_crash.Version = '16'
        raw_dumps = {}

        fake_processor = create_basic_fake_processor()
        fake_processor.config.firefox_out_of_date_version = '17'

        classifier = OutOfDateClassifier()
        ok_(
            classifier._predicate(raw_crash, raw_dumps, processed_crash,
                                  fake_processor))

        raw_crash.Version = '19'
        ok_(not classifier._predicate(raw_crash, raw_dumps, processed_crash,
                                      fake_processor))

        raw_crash.Version = '12'
        raw_crash.ProductName = 'NotFireFox'
        ok_(not classifier._predicate(raw_crash, raw_dumps, processed_crash,
                                      fake_processor))
Example #8
    def test_basic_get(self, logging_info):
        # what the middleware app does is that it creates a class based on
        # another and sets an attribute called `cls`
        class MadeUp(middleware_app.ImplementationWrapper):
            cls = AuxImplementation1
            all_services = {}

        config = DotDict(
            logger=logging,
            web_server=DotDict(
                ip_address='127.0.0.1',
                port='88888'
            )
        )
        server = CherryPy(config, (
            ('/aux/(.*)', MadeUp),
        ))

        testapp = TestApp(server._wsgi_func)
        response = testapp.get('/aux/')
        eq_(response.status, 200)
        eq_(json.loads(response.body), {'age': 100})

        logging_info.assert_called_with('Running AuxImplementation1')

        response = testapp.get('/xxxjunkxxx', expect_errors=True)
        eq_(response.status, 404)
Example #9
    def test_no_source(self):

        class FakeStorageDestination(object):

            def __init__(self, config, quit_check_callback):
                self.store = DotDict()
                self.dumps = DotDict()

            def save_raw_crash(self, raw_crash, dump, crash_id):
                self.store[crash_id] = raw_crash
                self.dumps[crash_id] = dump

        logger = SilentFakeLogger()
        config = DotDict({
            'logger': logger,
            'number_of_threads': 2,
            'maximum_queue_size': 2,
            'number_of_submissions': "all",
            'source': DotDict({'crashstorage_class': None}),
            'destination': DotDict({'crashstorage_class': FakeStorageDestination}),
            'producer_consumer': DotDict({
                'producer_consumer_class': ThreadedTaskManager,
                'logger': logger,
                'number_of_threads': 1,
                'maximum_queue_size': 1
            })
        })

        fts_app = CrashMoverApp(config)

        assert_raises(TypeError, fts_app.main)
Example #10
    def test_action(self):
        jd = copy.deepcopy(cannonical_json_dump)
        processed_crash = DotDict()
        processed_crash.json_dump = jd
        raw_crash = DotDict()
        raw_crash.ProductName = 'Firefox'
        raw_crash.Version = '16'
        raw_dumps = {}

        fake_processor = create_basic_fake_processor()

        classifier = OutOfDateClassifier()
        classifier.out_of_date_threshold = ('17', )

        processed_crash.json_dump['system_info']['os'] = 'Mac OS X'
        processed_crash.json_dump['system_info']['os_ver'] = '10.1'
        processed_crash.json_dump['system_info']['cpu_arch'] = 'ppc'
        ok_(
            classifier._action(raw_crash, raw_dumps, processed_crash,
                               fake_processor))
        eq_(processed_crash.classifications.support.classification,
            'firefox-no-longer-works-mac-os-10-4-or-powerpc')
        processed_crash.json_dump['system_info']['os'] = 'Windows NT'
        processed_crash.json_dump['system_info']['os_ver'] = \
            '5.1.2600 Service Pack 3'
        ok_(
            classifier._action(raw_crash, raw_dumps, processed_crash,
                               fake_processor))
        eq_(processed_crash.classifications.support.classification,
            'update-firefox-latest-version')
Example #11
    def test_save_raw_crash_normal(self):
        config = self._setup_config()
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(raw_crash=DotDict(),
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        ok_(not crash_store.transaction.called)
        config.logger.reset_mock()

        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        crash_store.transaction.reset_mock()

        # test for save rejection because of "legacy_processing"
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        ok_(not crash_store.transaction.called)
Example #12
    def test_save_raw_crash_no_legacy(self):
        config = self._setup_config()
        config.filter_on_legacy_processing = False
        crash_store = RabbitMQCrashStorage(config)

        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(raw_crash=DotDict(),
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        config.logger.reset_mock()

        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
        crash_store.transaction.reset_mock()

        # test for save without regard to "legacy_processing" value
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(raw_crash=raw_crash,
                                   dumps=DotDict(),
                                   crash_id='crash_id')
        crash_store.transaction.assert_called_with(
            crash_store._save_raw_crash_transaction, 'crash_id')
Example #13
    def test_basic_get(self, logging_info):

        config_ = DotDict(
            logger=logging,
            web_server=DotDict(
                ip_address='127.0.0.1',
                port='88888'
            )
        )

        # what the middleware app does is that it creates a class based on
        # another and sets an attribute called `cls`
        class MadeUp(middleware_app.MeasuringImplementationWrapper):
            cls = AuxImplementation1
            all_services = {}
            config = config_

        server = CherryPy(config_, (
            ('/aux/(.*)', MadeUp),
        ))

        testapp = TestApp(server._wsgi_func)
        response = testapp.get('/aux/', params={'add': 1})
        eq_(response.status, 200)
        for call in logging_info.call_args_list:
            # mock calls are funny
            args = call[0]
            arg = args[0]
            if re.findall(r'measuringmiddleware:[\d\.]+\t/aux/\t\?add=1', arg):
                break
        else:
            raise AssertionError('call never found')
Example #14
    def test_action(self):
        rc = DotDict()
        rd = {}
        pc = DotDict()
        processor = None

        skunk_rule = SkunkClassificationRule()
        ok_(skunk_rule.action(rc, rd, pc, processor))
Example #15
def create_basic_fake_processor():
    fake_processor = DotDict()
    fake_processor.c_signature_tool = c_signature_tool
    fake_processor.config = DotDict()
    # need help figuring out failures? switch to FakeLogger and read stdout
    fake_processor.config.logger = SilentFakeLogger()
    #fake_processor.config.logger = FakeLogger()
    return fake_processor
Example #16
    def test_action(self):
        rc = DotDict()
        rd = {}
        pc = DotDict()
        processor = None

        support_rule = SupportClassificationRule()
        ok_(support_rule.action(rc, rd, pc, processor))
Example #17
    def test_add_classification_to_processed_crash(self):
        pc = DotDict()
        pc.classifications = DotDict()

        support_rule = SupportClassificationRule()
        support_rule._add_classification(pc, 'stupid', 'extra stuff')
        ok_('classifications' in pc)
        ok_('support' in pc.classifications)
        eq_('stupid', pc.classifications.support.classification)
        eq_('extra stuff', pc.classifications.support.classification_data)
        eq_('0.0', pc.classifications.support.classification_version)
Example #18
    def _fake_processed_crash(self):
        d = DotDict()
        # these keys survive redaction
        d.a = DotDict()
        d.a.b = DotDict()
        d.a.b.c = 11
        d.sensitive = DotDict()
        d.sensitive.x = 2
        d.not_url = 'not a url'

        return d
Example #19
    def test_predicate(self):
        rc = DotDict()
        rd = {}
        pc = DotDict()
        pc.classifications = DotDict()
        processor = None

        support_rule = SupportClassificationRule()
        ok_(support_rule.predicate(rc, rd, pc, processor))

        pc.classifications.support = DotDict()
        ok_(support_rule.predicate(rc, rd, pc, processor))
Example #20
    def test_no_destination(self):
        class FakeStorageSource(object):
            def __init__(self, config, quit_check_callback):
                self.store = DotDict({'1234': DotDict({'ooid': '1234',
                                                       'Product': 'FireSquid',
                                                       'Version': '1.0'}),
                                      '1235': DotDict({'ooid': '1235',
                                                       'Product': 'ThunderRat',
                                                       'Version': '1.0'}),
                                      '1236': DotDict({'ooid': '1236',
                                                       'Product': 'Caminimal',
                                                       'Version': '1.0'}),
                                      '1237': DotDict({'ooid': '1237',
                                                       'Product': 'Fennicky',
                                                       'Version': '1.0'}),
                                     })

            def get_raw_crash(self, ooid):
                return self.store[ooid]

            def get_raw_dumps(self, ooid):
                return {'upload_file_minidump': 'this is a fake dump',
                        'flash1': 'broken flash dump'}

            def new_ooids(self):
                for k in self.store.keys():
                    yield k



        logger = SilentFakeLogger()
        config = DotDict({
            'logger': logger,
            'number_of_threads': 2,
            'maximum_queue_size': 2,
            'number_of_submissions': "all",
            'source': DotDict({'crashstorage_class': FakeStorageSource}),
            'destination': DotDict({'crashstorage_class': None}),
            'producer_consumer': DotDict({
                'producer_consumer_class': ThreadedTaskManager,
                'logger': logger,
                'number_of_threads': 1,
                'maximum_queue_size': 1
            })
        })

        fts_app = CrashMoverApp(config)

        assert_raises(TypeError, fts_app.main)
Example #21
    def test_action_fail(self):
        jd = copy.deepcopy(cannonical_json_dump)
        pc = DotDict()
        pc.json_dump = jd

        fake_processor = create_basic_fake_processor()

        rc = DotDict()
        rd = {}
        rule = BitguardClassifier()
        action_result = rule.action(rc, rd, pc, fake_processor)

        ok_(not action_result)
        ok_('classifications' not in pc)
Example #22
    def test_bogus_source_iter_and_worker(self):
        class TestFTSAppClass(FetchTransformSaveApp):
            def __init__(self, config):
                super(TestFTSAppClass, self).__init__(config)
                self.the_list = []

            def _setup_source_and_destination(self):
                self.source = Mock()
                self.destination = Mock()
                pass

            def _create_iter(self):
                for x in xrange(5):
                    yield ((x, ), {})

            def transform(self, anItem):
                self.the_list.append(anItem)

        logger = SilentFakeLogger()
        config = DotDict({
            'logger': logger,
            'number_of_threads': 2,
            'maximum_queue_size': 2,
            'number_of_submissions': 'all',
            'source': DotDict({'crashstorage_class': None}),
            'destination': DotDict({'crashstorage_class': None}),
            'producer_consumer': DotDict({
                'producer_consumer_class': TaskManager,
                'logger': logger,
                'number_of_threads': 1,
                'maximum_queue_size': 1
            })
        })

        fts_app = TestFTSAppClass(config)
        fts_app.main()
        ok_(
            len(fts_app.the_list) == 5, 'expected to do 5 inserts, '
            'but %d were done instead' % len(fts_app.the_list))
        ok_(
            sorted(fts_app.the_list) == range(5),
            'expected %s, but got %s' % (range(5), sorted(fts_app.the_list)))
Example #23
    def test_predicate(self):
        rc = DotDict()
        rd = {}
        pc = DotDict()
        pc.classifications = DotDict()
        processor = None

        skunk_rule = SkunkClassificationRule()
        ok_(skunk_rule.predicate(rc, rd, pc, processor))

        pc.classifications.skunk_works = DotDict()
        ok_(skunk_rule.predicate(rc, rd, pc, processor))

        pc.classifications.skunk_works.classification = 'stupid'
        ok_(not skunk_rule.predicate(rc, rd, pc, processor))
Example #24
    def _get_model(overrides=None):
        config_values = {
            'base_url': 'http://crashanalysis.com',
            'save_root': '',
            'save_download': False,
            'save_seconds': 1000,
        }
        if overrides:
            config_values.update(overrides)
        cls = correlations.CorrelationsSignatures
        config = DotDict()
        config.logger = mock.Mock()
        config.http = DotDict()
        config.http.correlations = DotDict(config_values)
        return cls(config=config)
Example #25
    def test_get_iterator(self):
        config = DotDict()
        config.logger = self.logger
        config.quit_on_empty_queue = False

        tm = TaskManager(
            config,
            job_source_iterator=range(1),
        )
        eq_(tm._get_iterator(), [0])

        def an_iter(self):
            for i in range(5):
                yield i

        tm = TaskManager(
            config,
            job_source_iterator=an_iter,
        )
        eq_([x for x in tm._get_iterator()], [0, 1, 2, 3, 4])

        class X(object):
            def __init__(self, config):
                self.config = config

            def __iter__(self):
                for key in self.config:
                    yield key

        tm = TaskManager(config, job_source_iterator=X(config))
        eq_([x for x in tm._get_iterator()], [y for y in config.keys()])
Example #26
class SubmitterFileSystemWalkerSource(CrashStorageBase):
    """This is a crashstorage derivative that can walk an arbitrary file
    system path looking for crashes.  The new_crashes generator yields
    pathnames rather than crash_ids - so it is not compatible with other
    instances of the CrashStorageSystem."""
    required_config = Namespace()
    required_config.add_option(
        'search_root',
        doc="a filesystem location to begin a search for raw crash/dump sets",
        short_form='s',
        default=None)
    required_config.add_option('dump_suffix',
                               doc="the standard file extension for dumps",
                               default='.dump')
    required_config.add_option('dump_field',
                               doc="the default name for the main dump",
                               default='upload_file_minidump')

    #--------------------------------------------------------------------------
    def __init__(self, config, quit_check_callback=None):
        if isinstance(quit_check_callback, basestring):
            # this class is being used as a 'new_crash_source' and the name
            # of the app has been passed - we can ignore it
            quit_check_callback = None
        super(SubmitterFileSystemWalkerSource,
              self).__init__(config, quit_check_callback)

    #--------------------------------------------------------------------------
    def get_raw_crash(self, (prefix, path_tuple)):
        """the default implementation of fetching a raw_crash
        parameters:
           path_tuple - a tuple of paths. the first element is the raw_crash
                        pathname"""
        with open(path_tuple[0]) as raw_crash_fp:
            return DotDict(json.load(raw_crash_fp))
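
The (prefix, path_tuple) argument mirrors what the walker's new_crashes generator yields: a crash-id prefix plus a tuple of file paths whose first element is the raw-crash JSON file. Below is a minimal standard-library sketch of that read path, with a throwaway temp file standing in for a real crash; the file contents and the prefix value are made up for illustration.

import json
import tempfile

# write a tiny fake raw crash to disk
with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as fp:
    json.dump({'ProductName': 'Firefox', 'Version': '16'}, fp)
    raw_crash_path = fp.name

# read it back the way get_raw_crash does: open the first path in the
# tuple and parse it as JSON
prefix, path_tuple = '0bba929f', (raw_crash_path,)
with open(path_tuple[0]) as raw_crash_fp:
    raw_crash = json.load(raw_crash_fp)

assert raw_crash['ProductName'] == 'Firefox'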
Example #27
    def test_stuff_missing(self):
        config = self.get_basic_config()

        raw_crash = copy.copy(canonical_standard_raw_crash)

        raw_dumps = {}
        system_info = copy.copy(
            canonical_processed_crash['json_dump']['system_info']
        )
        del system_info['cpu_count']
        processed_crash = DotDict()
        processed_crash.json_dump = {
            'system_info': system_info
        }

        processor_meta = self.get_basic_processor_meta()

        rule = CPUInfoRule(config)

        # the call to be tested
        rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)

        eq_(
            processed_crash.cpu_info,
            "GenuineIntel family 6 model 42 stepping 7"
        )
        eq_(processed_crash.cpu_name, 'x86')

        # raw crash should be unchanged
        eq_(raw_crash, canonical_standard_raw_crash)
Example #28
    def _execute_external_process(self, command_line, processor_meta):
        stackwalker_output, return_code = super(
            BreakpadStackwalkerRule2015,
            self)._execute_external_process(command_line, processor_meta)

        if not isinstance(stackwalker_output, Mapping):
            processor_meta.processor_notes.append(
                "MDSW produced unexpected output: %s..." %
                str(stackwalker_output)[:10])
            stackwalker_output = {}

        stackwalker_data = DotDict()
        stackwalker_data.json_dump = stackwalker_output
        stackwalker_data.mdsw_return_code = return_code

        stackwalker_data.mdsw_status_string = stackwalker_output.get(
            'status', 'unknown error')
        stackwalker_data.success = stackwalker_data.mdsw_status_string == 'OK'

        if return_code == 124:
            processor_meta.processor_notes.append(
                "MDSW terminated with SIGKILL due to timeout")
        elif return_code != 0 or not stackwalker_data.success:
            processor_meta.processor_notes.append(
                "MDSW failed on '%s': %s" %
                (command_line, stackwalker_data.mdsw_status_string))

        return stackwalker_data, return_code
Example #29
    def setup_mocked_s3_storage(self,
                                executor=TransactionExecutor,
                                executor_for_gets=TransactionExecutor,
                                storage_class='BotoS3CrashStorage',
                                host='',
                                port=0,
                                resource_class=S3ConnectionContext,
                                **extra):
        config = DotDict({
            'resource_class': resource_class,
            'logger': mock.Mock(),
            'host': host,
            'port': port,
            'access_key': 'this is the access key',
            'secret_access_key': 'secrets',
            'bucket_name': 'silliness',
            'keybuilder_class': KeyBuilderBase,
            'prefix': 'dev',
            'calling_format': mock.Mock()
        })
        config.update(extra)
        s3_conn = resource_class(config)
        s3_conn._connect_to_endpoint = mock.Mock()
        s3_conn._mocked_connection = s3_conn._connect_to_endpoint.return_value
        s3_conn._calling_format.return_value = mock.Mock()
        s3_conn._CreateError = mock.Mock()
        s3_conn.ResponseError = mock.Mock()
        s3_conn._open = mock.MagicMock()

        return s3_conn
Example #30
    def test_blocking_start(self):
        config = DotDict()
        config.logger = self.logger
        config.idle_delay = 1
        config.quit_on_empty_queue = False

        class MyTaskManager(TaskManager):
            def _responsive_sleep(self,
                                  seconds,
                                  wait_log_interval=0,
                                  wait_reason=''):
                try:
                    if self.count >= 2:
                        self.quit = True
                    self.count += 1
                except AttributeError:
                    self.count = 0

        tm = MyTaskManager(config, task_func=Mock())

        waiting_func = Mock()

        tm.blocking_start(waiting_func=waiting_func)

        eq_(tm.task_func.call_count, 10)
        eq_(waiting_func.call_count, 0)