def test_basic_hbase_usage(self):
    """Open, reopen, and name connections through HappyBaseConnectionContext.

    Patches ``happybase.Connection`` so that every connection the context
    hands out is the same fake; the fake's ``close_counter`` is then used
    to verify that each ``with hb_context()`` block closes its connection
    on exit and that a final ``hb_context.close()`` closes nothing further.
    """
    local_config = DotDict({
        'hbase_host': 'host',
        'database_name': 'name',
        'hbase_port': 9090,
        'hbase_timeout': 9000,
        'logger': SilentFakeLogger(),
    })
    a_fake_hbase_connection = FakeHB_Connection(local_config)
    with mock.patch.object(
        happybase,
        'Connection',
        mock.Mock(return_value=a_fake_hbase_connection)
    ):
        hb_context = connection_context.HappyBaseConnectionContext(
            local_config
        )
        # open a connection
        with hb_context() as conn:
            pass
        # leaving the with block closed the connection
        self.assertEqual(a_fake_hbase_connection.close_counter, 1)
        # open another connection again
        with hb_context() as conn:
            pass
        self.assertEqual(a_fake_hbase_connection.close_counter, 2)
        # get a named connection
        with hb_context('fred') as conn:
            pass
        self.assertEqual(a_fake_hbase_connection.close_counter, 3)
        # close all connections
        hb_context.close()
        # no connection was held open, so the counter must not move
        self.assertEqual(a_fake_hbase_connection.close_counter, 3)
def test_basic_hbase_usage(self):
    """Exercise HBaseConnectionContext: one close per context exit.

    Patches ``connection_context.HBaseConnection`` so the context always
    receives the same fake connection, then verifies via the fake's
    ``close_counter`` that each ``with`` exit closes the connection and
    that ``hb_context.close()`` closes nothing extra afterwards.
    """
    local_config = DotDict({
        'hbase_host': 'host',
        'database_name': 'name',
        'hbase_port': 9090,
        'hbase_timeout': 9000,
        'number_of_retries': 2,
        'logger': SilentFakeLogger(),
        'executor_identity': lambda: 'dwight'  # bogus thread id
    })
    a_fake_hbase_connection = FakeHB_Connection(local_config)
    with mock.patch.object(
        connection_context,
        'HBaseConnection',
        mock.Mock(return_value=a_fake_hbase_connection)
    ):
        hb_context = connection_context.HBaseConnectionContext(
            local_config
        )
        # open a connection
        with hb_context() as conn:
            pass
        eq_(a_fake_hbase_connection.close_counter, 1)
        # open another connection again
        with hb_context() as conn:
            pass
        eq_(a_fake_hbase_connection.close_counter, 2)
        # get a named connection
        with hb_context('fred') as conn:
            pass
        eq_(a_fake_hbase_connection.close_counter, 3)
        # close all connections
        hb_context.close()
        # nothing was left open, so the count must be unchanged
        eq_(a_fake_hbase_connection.close_counter, 3)
def testNewEntryPermissions(self):
    """Verify JsonDumpStorage.newEntry applies the configured permissions.

    Creates a new entry with explicit directory (0707) and dump (0500)
    permissions, then checks that the json and dump files carry the dump
    permissions and that every directory on both the date and name paths
    carries the directory permissions.
    """
    dirPermissions=0707
    dumpPermissions=0500
    sfl = SilentFakeLogger()
    j = JDS.JsonDumpStorage(root=self.testDir,dirPermissions=dirPermissions,dumpPermissions=dumpPermissions,logger=sfl)
    u = str(socorro_uuid.uuid1())
    f1, f2 = j.newEntry(u)
    f1.close()
    f2.close()
    # the json file must have the dump-file permissions
    jpath = j.getJson(u)
    gotPermissions = stat.S_IMODE(os.stat(jpath)[0])
    assert stat.S_IMODE(os.stat(jpath)[0]) == dumpPermissions, "%s: Expected %o, got %o" % (jpath, dumpPermissions, gotPermissions)
    # ... and so must the dump file
    dpath = j.getDump(u)
    gotPermissions = stat.S_IMODE(os.stat(dpath)[0])
    assert stat.S_IMODE(os.stat(dpath)[0]) == dumpPermissions, "%s: Expected %o, got %o" % (dpath, dumpPermissions, gotPermissions)
    # resolve the symlinked date path and the real name path so the
    # directory permissions can be checked along both routes
    udir = os.path.split(dpath)[0]
    datePath = os.path.abspath(os.path.join(udir,os.readlink(os.path.splitext(dpath)[0])))
    namePath = os.path.abspath(os.path.splitext(dpath)[0])
    topPath = os.path.abspath(self.testDir)
    dailies = os.listdir(topPath)
    def assertPermVisitor(p):
        # every visited directory must carry dirPermissions
        gotPerm = stat.S_IMODE(os.stat(p)[0])
        assert dirPermissions == gotPerm, "%s: Expected %0o, got %0o"%(p,dirPermissions,gotPerm)
    for d in dailies:
        # visitPath quietly ignores a file as the leaf
        socorro_fs.visitPath(os.path.join(topPath,d),datePath,assertPermVisitor)
        socorro_fs.visitPath(os.path.join(topPath,d),namePath,assertPermVisitor)
def setUp(self):
    """Build an HBaseCrashStorage wired to a mocked connection context."""
    self.context = mock.MagicMock()
    # entering the context manager yields the mock itself
    self.context.__enter__.return_value = self.context
    config = DotDict()
    config.hbase_host = 'host'
    config.database_name = 'name'
    config.hbase_port = 9090
    config.hbase_timeout = 9000
    config.number_of_retries = 2
    config.logger = SilentFakeLogger()
    config.hbase_connection_context_class = mock.Mock(
        return_value=self.context
    )
    config.transaction_executor_class = TransactionExecutor
    config.new_crash_limit = 10 ** 6
    config.redactor_class = Redactor
    config.forbidden_keys = Redactor.required_config.forbidden_keys.default
    self.storage = HBaseCrashStorage(config)
def test_no_source(self):
    """main() must raise TypeError when the source class is configured as None."""
    class FakeStorageDestination(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict()
            self.dumps = DotDict()

        def save_raw_crash(self, raw_crash, dump, crash_id):
            self.store[crash_id] = raw_crash
            self.dumps[crash_id] = dump

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        # no source class - this is what the test is about
        'source': DotDict({'crashstorage_class': None}),
        'destination': DotDict({'crashstorage_class': FakeStorageDestination}),
        'producer_consumer': DotDict({
            'producer_consumer_class': ThreadedTaskManager,
            'logger': logger,
            'number_of_threads': 1,
            'maximum_queue_size': 1
        })
    })
    fts_app = FetchTransformSaveApp(config)
    # instantiating None as the source fails with a TypeError
    self.assertRaises(TypeError, fts_app.main)
def _get_mocked_config(self):
    """Attach a minimal mocked configuration (database + logger) to self."""
    database_namespace = DotDict()
    database_namespace.database_class = mock.Mock()
    database_namespace.database_transaction_executor_class = mock.Mock()
    mocked_config = DotDict()
    mocked_config.database = database_namespace
    mocked_config.logger = SilentFakeLogger()
    self.config = mocked_config
def test_bogus_source_iter_and_worker(self):
    """Override the iterator and transform to check that main() drives them.

    A subclass yields the integers 0..4 and records what transform
    receives; after main() the recorded list must contain exactly those
    five values.
    """
    class TestFTSAppClass(FetchTransformSaveApp):
        def __init__(self, config):
            super(TestFTSAppClass, self).__init__(config)
            self.the_list = []

        def _setup_source_and_destination(self):
            # no real source/destination needed for this test
            pass

        def source_iterator(self):
            for x in xrange(5):
                # (args, kwargs) form expected by the task manager
                yield ((x, ), {})

        def transform(self, anItem):
            self.the_list.append(anItem)

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'source': DotDict({'crashstorage': None}),
        'destination': DotDict({'crashstorage': None})
    })
    fts_app = TestFTSAppClass(config)
    fts_app.main()
    self.assertTrue(
        len(fts_app.the_list) == 5,
        'expected to do 5 inserts, '
        'but %d were done instead' % len(fts_app.the_list))
    # sorted because the worker threads may deliver out of order
    self.assertTrue(
        sorted(fts_app.the_list) == range(5),
        'expected %s, but got %s' % (range(5), sorted(fts_app.the_list)))
def test_source_iterator(self):
    """Check source_iterator's behavior over an exhausting-then-refilling source.

    The fake source yields nothing on its first pass (simulating an
    exhausted source), then 999 crash ids followed by two Nones.  The
    iterator is expected to emit None when the source is empty and wrap
    real values as ((value,), {}).
    """
    class FakeStorageSource(object):

        def __init__(self):
            self.first = True

        def new_crashes(self):
            if self.first:
                # act exhausted on the very first pass
                self.first = False
            else:
                for k in range(999):
                    yield k
                for k in range(2):
                    yield None

    class FakeStorageDestination(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict()
            self.dumps = DotDict()

        def save_raw_crash(self, raw_crash, dump, crash_id):
            self.store[crash_id] = raw_crash
            self.dumps[crash_id] = dump

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'source': DotDict({'crashstorage_class': FakeStorageSource}),
        'destination': DotDict({'crashstorage_class': FakeStorageDestination}),
        'producer_consumer': DotDict({'producer_consumer_class': ThreadedTaskManager,
                                      'logger': logger,
                                      'number_of_threads': 1,
                                      'maximum_queue_size': 1}
                                     )
    })
    fts_app = FetchTransformSaveApp(config)
    fts_app.source = FakeStorageSource()
    fts_app.destination = FakeStorageDestination
    error_detected = False
    for x, y in zip(xrange(1002), (a for a in fts_app.source_iterator())):
        if x == 0:
            # first pass was empty, so a None must be emitted
            self.assertTrue(y is None)
        elif x < 1000:
            # values 0..998 arrive wrapped as ((value,), {})
            if x - 1 != y[0][0] and not error_detected:
                error_detected = True
                self.assertEqual(x, y,
                                 'iterator fails on iteration %d' % x)
        else:
            # trailing Nones must pass through unwrapped
            if y is not None and not error_detected:
                error_detected = True
                # deliberately unsatisfiable: flags the failure with a message
                self.assertTrue(x is None,
                                'iterator fails on iteration %d' % x)
def create_basic_fake_processor():
    """Return a DotDict that stands in for a processor in unit tests.

    The stub carries the shared c_signature_tool and a config whose
    logger swallows all output.
    """
    processor_stub = DotDict()
    processor_stub.c_signature_tool = c_signature_tool
    processor_stub.config = DotDict()
    # need help figuring out failures?  swap the silent logger below for
    # FakeLogger() and read stdout
    processor_stub.config.logger = SilentFakeLogger()
    return processor_stub
def test_no_destination(self):
    """main() must raise TypeError when the destination class is None."""
    class FakeStorageSource(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict({'1234': DotDict({'ooid': '1234',
                                                   'Product': 'FireSquid',
                                                   'Version': '1.0'}),
                                  '1235': DotDict({'ooid': '1235',
                                                   'Product': 'ThunderRat',
                                                   'Version': '1.0'}),
                                  '1236': DotDict({'ooid': '1236',
                                                   'Product': 'Caminimal',
                                                   'Version': '1.0'}),
                                  '1237': DotDict({'ooid': '1237',
                                                   'Product': 'Fennicky',
                                                   'Version': '1.0'}),
                                  })

        def get_raw_crash(self, ooid):
            return self.store[ooid]

        def get_raw_dump(self, ooid):
            return 'this is a fake dump'

        def new_ooids(self):
            for k in self.store.keys():
                yield k

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'number_of_submissions': 'forever',
        'source': DotDict({'crashstorage_class': FakeStorageSource}),
        # no destination class - this is what the test is about
        'destination': DotDict({'crashstorage_class': None}),
        'producer_consumer': DotDict({'producer_consumer_class': ThreadedTaskManager,
                                      'logger': logger,
                                      'number_of_threads': 1,
                                      'maximum_queue_size': 1}
                                     )
    })
    fts_app = FetchTransformSaveApp(config)
    # instantiating None as the destination fails with a TypeError
    with pytest.raises(TypeError):
        fts_app.main()
def get_standard_config_manager(
    more_definitions=None,
    overrides=None,
):
    """Build a ConfigurationManager preloaded with the standard test options.

    :param more_definitions: an extra Namespace, or a Sequence of them, to
        merge in after the standard definitions
    :param overrides: an extra values source, or a Sequence of them, applied
        ahead of the local environment source
    :returns: a configured ConfigurationManager ready for use in tests
    """
    # MOCKED CONFIG DONE HERE
    required_config = Namespace()
    required_config.add_option(
        'logger',
        default=SilentFakeLogger(),
        doc='a logger',
    )
    required_config.add_option(
        'executor_identity',
        default=Mock()
    )
    if isinstance(more_definitions, Sequence):
        definitions = [required_config]
        definitions.extend(more_definitions)
    elif more_definitions is not None:
        definitions = [required_config, more_definitions]
    else:
        definitions = [required_config]
    local_overrides = [
        environment,
    ]
    if isinstance(overrides, Sequence):
        overrides.extend(local_overrides)
    elif overrides is not None:
        # BUG FIX: the previous code did
        #     overrides = [overrides].extend(local_overrides)
        # but list.extend() returns None, so a single non-Sequence
        # override silently became a values_source_list of None.
        # Concatenate instead (same as the sibling helper in this suite).
        overrides = [overrides] + local_overrides
    else:
        overrides = local_overrides
    config_manager = ConfigurationManager(
        definitions,
        values_source_list=overrides,
        app_name='test-crontabber',
        app_description=__doc__,
        argv_source=[]
    )
    # very useful debug
    #import contextlib
    #import sys
    #@contextlib.contextmanager
    #def stdout_opener():
    #yield sys.stdout
    #config_manager.write_conf('conf', stdout_opener)
    return config_manager
def test_no_destination(self):
    """CrashMoverApp.main must raise TypeError when the destination is None."""
    class FakeStorageSource(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict({'1234': DotDict({'ooid': '1234',
                                                   'Product': 'FireFloozy',
                                                   'Version': '1.0'}),
                                  '1235': DotDict({'ooid': '1235',
                                                   'Product': 'ThunderRat',
                                                   'Version': '1.0'}),
                                  '1236': DotDict({'ooid': '1236',
                                                   'Product': 'Caminimal',
                                                   'Version': '1.0'}),
                                  '1237': DotDict({'ooid': '1237',
                                                   'Product': 'Fennicky',
                                                   'Version': '1.0'}),
                                  })

        def get_raw_crash(self, ooid):
            return self.store[ooid]

        def get_raw_dumps(self, ooid):
            # multiple named dumps per crash
            return {'upload_file_minidump': 'this is a fake dump',
                    'flash1': 'broken flash dump'}

        def new_ooids(self):
            for k in self.store.keys():
                yield k

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'source': DotDict({'crashstorage_class': FakeStorageSource}),
        # no destination class - this is what the test is about
        'destination': DotDict({'crashstorage_class': None}),
        'producer_consumer': DotDict({'producer_consumer_class': ThreadedTaskManager,
                                      'logger': logger,
                                      'number_of_threads': 1,
                                      'maximum_queue_size': 1}
                                     )
    })
    fts_app = CrashMoverApp(config)
    self.assertRaises(TypeError, fts_app.main)
def test_bogus_source_iter_and_worker(self):
    """Override the iterator and transform to prove CrashMoverApp drives them.

    A subclass yields 0..4 from _basic_iterator and records what
    transform receives; after main() exactly those five values must have
    been collected.
    """
    class TestCrashMoverClass(CrashMoverApp):
        def __init__(self, config):
            super(TestCrashMoverClass, self).__init__(config)
            self.the_list = []

        def _setup_source_and_destination(self):
            # real storage classes are irrelevant for this test
            self.source = Mock()
            self.destination = Mock()
            pass

        def _basic_iterator(self):
            for x in xrange(5):
                # (args, kwargs) form expected by the task manager
                yield ((x, ), {})

        def transform(self, anItem):
            self.the_list.append(anItem)

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'number_of_submissions': "all",
        'source': DotDict({'crashstorage_class': None}),
        'destination': DotDict({'crashstorage_class': None}),
        'producer_consumer': DotDict({
            'producer_consumer_class': ThreadedTaskManager,
            'logger': logger,
            'number_of_threads': 1,
            'maximum_queue_size': 1
        })
    })
    fts_app = TestCrashMoverClass(config)
    fts_app.main()
    ok_(
        len(fts_app.the_list) == 5,
        'expected to do 5 inserts, '
        'but %d were done instead' % len(fts_app.the_list))
    # sorted because worker threads may finish out of order
    ok_(
        sorted(fts_app.the_list) == range(5),
        'expected %s, but got %s' % (range(5), sorted(fts_app.the_list)))
def test_bogus_source_iter_and_worker(self):
    """Same driver check as the threaded variant, but with the plain TaskManager."""
    class TestFTSAppClass(FetchTransformSaveApp):
        def __init__(self, config):
            super(TestFTSAppClass, self).__init__(config)
            self.the_list = []

        def _setup_source_and_destination(self):
            # real storage classes are irrelevant for this test
            self.source = Mock()
            self.destination = Mock()
            pass

        def _create_iter(self):
            for x in range(5):
                # (args, kwargs) form expected by the task manager
                yield ((x, ), {})

        def transform(self, anItem):
            self.the_list.append(anItem)

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'number_of_submissions': 'all',
        'source': DotDict({'crashstorage_class': None}),
        'destination': DotDict({'crashstorage_class': None}),
        'producer_consumer': DotDict({
            'producer_consumer_class': TaskManager,
            'logger': logger,
            'number_of_threads': 1,
            'maximum_queue_size': 1
        })
    })
    fts_app = TestFTSAppClass(config)
    fts_app.main()
    assert len(fts_app.the_list) == 5
    assert sorted(fts_app.the_list) == range(5)
def test_hbase_usage_with_transaction(self):
    """TransactionExecutor over a pooled HBase context.

    A successful transaction must commit without closing the pooled
    connection; a failing transaction must evict the connection from the
    pool so that a later ``hb_context.close()`` has nothing to close.
    """
    local_config = DotDict({
        'hbase_host': 'host',
        'database_name': 'name',
        'hbase_port': 9090,
        'hbase_timeout': 9000,
        'number_of_retries': 2,
        'logger': SilentFakeLogger(),
        'executor_identity': lambda: 'dwight'  # bogus thread id
    })
    a_fake_hbase_connection = FakeHB_Connection(local_config)
    with mock.patch.object(
        connection_context,
        'HBaseConnection',
        mock.Mock(return_value=a_fake_hbase_connection)
    ):
        hb_context = connection_context.HBasePooledConnectionContext(
            local_config
        )

        def all_ok(connection, dummy):
            eq_(dummy, 'hello')
            return True

        transaction = TransactionExecutor(local_config, hb_context)
        result = transaction(all_ok, 'hello')
        ok_(result)
        # pooled connections survive a successful transaction
        eq_(a_fake_hbase_connection.close_counter, 0)
        eq_(a_fake_hbase_connection.rollback_counter, 0)
        eq_(a_fake_hbase_connection.commit_counter, 1)

        def bad_deal(connection, dummy):
            raise KeyError('fred')

        assert_raises(KeyError, transaction, bad_deal, 'hello')
        # at this point, the underlying connection has been deleted from
        # the pool, because it was considered to be a bad connection.
        eq_(a_fake_hbase_connection.close_counter, 0)
        eq_(a_fake_hbase_connection.commit_counter, 1)
        hb_context.close()
        # because the connection was previously deleted from the pool,
        # no connection gets closed at this point.
        eq_(a_fake_hbase_connection.close_counter, 0)
def test_basic_hbase_usage(self, mocked_hbcl):
    """Pooled context: connections are created per name and reused, not closed.

    Tracks the factory's call_count to show that repeated anonymous use
    reuses one pooled connection, a named use creates a second, and only
    ``hb_context.close()`` finally closes the pooled connections.
    """
    local_config = DotDict({
        'hbase_host': 'host',
        'database_name': 'name',
        'hbase_port': 9090,
        'hbase_timeout': 9000,
        'number_of_retries': 2,
        'logger': SilentFakeLogger(),
    })
    a_fake_hbase_connection = FakeHB_Connection()
    mocked_hbcl.HBaseConnectionForCrashReports = \
        mock.Mock(return_value=a_fake_hbase_connection)
    hb_context = HBaseConnectionContextPooled(local_config,
                                              local_config)
    # NOTE(review): the constructor apparently opens and closes a probe
    # connection - hence one call and one close before any use
    self.assertEqual(mocked_hbcl.HBaseConnectionForCrashReports.call_count,
                     1)
    self.assertEqual(a_fake_hbase_connection.close_counter,
                     1)
    # open a connection
    with hb_context() as conn:
        self.assertEqual(
            mocked_hbcl.HBaseConnectionForCrashReports.call_count,
            2)
        self.assertEqual(a_fake_hbase_connection.close_counter,
                         1)
    # get that same connection again
    with hb_context() as conn:
        self.assertEqual(
            mocked_hbcl.HBaseConnectionForCrashReports.call_count,
            2)
        self.assertEqual(a_fake_hbase_connection.close_counter,
                         1)
    # get a named connection
    with hb_context('fred') as conn:
        self.assertEqual(
            mocked_hbcl.HBaseConnectionForCrashReports.call_count,
            3)
        self.assertEqual(a_fake_hbase_connection.close_counter,
                         1)
    # one anonymous + one named connection now pooled
    self.assertEqual(len(hb_context.pool),
                     2)
    # get that original same connection again
    with hb_context() as conn:
        self.assertEqual(
            mocked_hbcl.HBaseConnectionForCrashReports.call_count,
            3)
        self.assertEqual(a_fake_hbase_connection.close_counter,
                         1)
    # close all connections
    hb_context.close()
    self.assertEqual(a_fake_hbase_connection.close_counter,
                     3)
def test_hbase_usage_with_transaction(self, mocked_hbcl):
    """TransactionExecutor over the pooled mocked-hbcl context.

    A successful transaction commits without rolling back; a failing one
    rolls back.  Closing the context finally closes the pooled
    connection.
    """
    local_config = DotDict({
        'hbase_host': 'host',
        'database_name': 'name',
        'hbase_port': 9090,
        'hbase_timeout': 9000,
        'number_of_retries': 2,
        'logger': SilentFakeLogger(),
    })
    a_fake_hbase_connection = FakeHB_Connection()
    mocked_hbcl.HBaseConnectionForCrashReports = \
        mock.Mock(return_value=a_fake_hbase_connection)
    hb_context = HBaseConnectionContextPooled(local_config,
                                              local_config)

    def all_ok(connection, dummy):
        self.assertEqual(dummy, 'hello')
        return True

    transaction = TransactionExecutor(local_config, hb_context)
    result = transaction(all_ok, 'hello')
    self.assertTrue(result)
    self.assertEqual(mocked_hbcl.HBaseConnectionForCrashReports.call_count,
                     2)
    self.assertEqual(a_fake_hbase_connection.close_counter,
                     1)
    self.assertEqual(a_fake_hbase_connection.rollback_counter,
                     0)
    self.assertEqual(a_fake_hbase_connection.commit_counter,
                     1)

    def bad_deal(connection, dummy):
        raise KeyError('fred')

    self.assertRaises(KeyError, transaction, bad_deal, 'hello')
    # the failure triggered a rollback but no extra connection or close
    self.assertEqual(mocked_hbcl.HBaseConnectionForCrashReports.call_count,
                     2)
    self.assertEqual(a_fake_hbase_connection.close_counter,
                     1)
    self.assertEqual(a_fake_hbase_connection.rollback_counter,
                     1)
    self.assertEqual(a_fake_hbase_connection.commit_counter,
                     1)
    hb_context.close()
    self.assertEqual(a_fake_hbase_connection.close_counter,
                     2)
def test_hbase_usage_with_transaction(self):
    """TransactionExecutor over the non-pooled HBase context.

    Each transaction closes its connection on completion - whether it
    committed or raised - so the close counter advances once per
    transaction and ``hb_context.close()`` adds nothing.
    """
    local_config = DotDict({
        'hbase_host': 'host',
        'database_name': 'name',
        'hbase_port': 9090,
        'hbase_timeout': 9000,
        'number_of_retries': 2,
        'logger': SilentFakeLogger(),
        'executor_identity': lambda: 'dwight'  # bogus thread id
    })
    a_fake_hbase_connection = FakeHB_Connection(local_config)
    with mock.patch.object(
        connection_context,
        'HBaseConnection',
        mock.Mock(return_value=a_fake_hbase_connection)
    ):
        hb_context = connection_context.HBaseConnectionContext(
            local_config
        )

        def all_ok(connection, dummy):
            eq_(dummy, 'hello')
            return True

        transaction = TransactionExecutor(local_config, hb_context)
        result = transaction(all_ok, 'hello')
        ok_(result)
        eq_(a_fake_hbase_connection.close_counter, 1)
        eq_(a_fake_hbase_connection.rollback_counter, 0)
        eq_(a_fake_hbase_connection.commit_counter, 1)

        def bad_deal(connection, dummy):
            raise KeyError('fred')

        assert_raises(KeyError, transaction, bad_deal, 'hello')
        # the failed transaction still closed its connection
        eq_(a_fake_hbase_connection.close_counter, 2)
        eq_(a_fake_hbase_connection.commit_counter, 1)
        hb_context.close()
        # nothing left open, so the count is unchanged
        eq_(a_fake_hbase_connection.close_counter, 2)
def testCopyFromPermissions(self):
    """Verify JsonDumpStorage.copyFrom applies the configured permissions.

    Copies a json/dump pair into storage with explicit directory (0777)
    and dump (0755) permissions, then checks that both files carry the
    dump permissions and every directory on the date and name paths
    carries the directory permissions.
    """
    dirPermissions=0777
    dumpPermissions=0755
    sfl = SilentFakeLogger()
    j = JDS.JsonDumpStorage(root=self.testDir,dirPermissions=dirPermissions,dumpPermissions=dumpPermissions,logger=sfl)
    os.makedirs(self.testMoveFrom)
    u = str(socorro_uuid.uuid1())
    # create empty source files to copy from
    jopath = os.path.join(self.testMoveFrom,u+j.jsonSuffix)
    dopath = os.path.join(self.testMoveFrom,u+j.dumpSuffix)
    fj = open(jopath,'w')
    fd = open(dopath,'w')
    fj.close()
    fd.close()
    j.copyFrom(u,jopath,dopath,'w', DT.datetime(2008,8,8,8,8, tzinfo=UTC),createLinks = True)
    # both stored files must carry the dump-file permissions
    jpath = j.getJson(u)
    gotPermissions = stat.S_IMODE(os.stat(jpath)[0])
    assert dumpPermissions == stat.S_IMODE(os.stat(jpath)[0]), "%s: Expected %o, got %o" % (jpath, dumpPermissions, gotPermissions)
    dpath = j.getDump(u)
    gotPermissions = stat.S_IMODE(os.stat(dpath)[0])
    assert dumpPermissions == stat.S_IMODE(os.stat(dpath)[0]), "%s: Expected %o, got %o" % (dpath, dumpPermissions, gotPermissions)
    # resolve the symlinked date path and the real name path for the
    # directory permission walk
    udir = os.path.split(dpath)[0]
    datePath = os.path.abspath(os.path.join(udir,os.readlink(os.path.splitext(dpath)[0])))
    namePath = os.path.abspath(os.path.splitext(dpath)[0])
    topPath = os.path.abspath(self.testDir)
    dailies = os.listdir(topPath)
    def assertPermVisitor(p):
        # every visited directory must carry dirPermissions
        gotPerm = stat.S_IMODE(os.stat(p)[0])
        assert dirPermissions == gotPerm, "%s: Expected %0o, got %0o"%(p,dirPermissions,gotPerm)
    for d in dailies:
        # visitPath quietly ignores a file as the leaf
        socorro_fs.visitPath(os.path.join(topPath,d),datePath,assertPermVisitor)
        socorro_fs.visitPath(os.path.join(topPath,d),namePath,assertPermVisitor)
def test_hbase_usage_with_transaction(self):
    """TransactionExecutor over the mocked happybase connection pool.

    Pooled connections are never closed by the transaction machinery in
    this setup: success commits, failure leaves the counters alone, and
    ``hb_context.close()`` closes nothing.
    """
    local_config = DotDict({
        'hbase_host': 'host',
        'database_name': 'name',
        'hbase_port': 9090,
        'hbase_timeout': 9000,
        'number_of_retries': 2,
        'logger': SilentFakeLogger(),
    })
    a_fake_hbase_connection = FakeHB_Connection2(local_config)
    a_fake_hbase_pool = mock.MagicMock()
    # the pool hands out our fake connection
    a_fake_hbase_pool.connection = a_fake_hbase_connection
    with mock.patch.object(
        happybase,
        'ConnectionPool',
        mock.Mock(return_value=a_fake_hbase_pool)
    ):
        hb_context = HappyBasePooledConnectionContextMock(local_config)

        def all_ok(connection, dummy):
            self.assertEqual(dummy, 'hello')
            return True

        transaction = TransactionExecutor(local_config, hb_context)
        result = transaction(all_ok, 'hello')
        self.assertTrue(result)
        self.assertEqual(a_fake_hbase_connection.close_counter, 0)
        self.assertEqual(a_fake_hbase_connection.rollback_counter, 0)
        self.assertEqual(a_fake_hbase_connection.commit_counter, 1)

        def bad_deal(connection, dummy):
            raise KeyError('fred')

        self.assertRaises(KeyError, transaction, bad_deal, 'hello')
        self.assertEqual(a_fake_hbase_connection.close_counter, 0)
        self.assertEqual(a_fake_hbase_connection.commit_counter, 1)
        hb_context.close()
        self.assertEqual(a_fake_hbase_connection.close_counter, 0)
def get_standard_config_manager(
    more_definitions=None,
    service_classes=None,
    overrides=None,
):
    """Build a ConfigurationManager preloaded with the standard test options.

    :param more_definitions: an extra Namespace, or a Sequence of them, to
        merge in after the standard definitions
    :param service_classes: a service class, or a Sequence of them, each
        registered under ``services.<ClassName>.cls``
    :param overrides: an extra values source, or a Sequence of them,
        applied ahead of the local environment source.  NOTE(review): when
        a Sequence is passed, it is extended in place, mutating the
        caller's list - confirm callers don't reuse it.
    :returns: a configured ConfigurationManager ready for use in tests
    """
    # MOCKED CONFIG DONE HERE
    required_config = Namespace()
    required_config.add_option(
        'logger',
        default=SilentFakeLogger(),
        doc='a logger',
    )
    required_config.add_option('executor_identity', default=Mock())
    if service_classes:
        required_config.namespace('services')
        if not isinstance(service_classes, Sequence):
            service_classes = (service_classes, )
        for service_class in service_classes:
            # config for the services being tested
            service_name = service_class.__name__.split('.')[-1]
            required_config.services.namespace(service_name)
            # adding the service as if it had been put in via the
            # classes_in_namespaces converter defined in the dataservice
            # package. Configman will pull the services additional
            # requirements
            required_config.services[service_name].add_option(
                'cls',
                default=service_class,
                from_string_converter=class_converter)
    if isinstance(more_definitions, Sequence):
        definitions = [required_config]
        definitions.extend(more_definitions)
    elif more_definitions is not None:
        definitions = [required_config, more_definitions]
    else:
        definitions = [required_config]
    local_overrides = [
        environment,
    ]
    if isinstance(overrides, Sequence):
        overrides.extend(local_overrides)
    elif overrides is not None:
        overrides = [overrides] + local_overrides
    else:
        overrides = local_overrides
    config_manager = ConfigurationManager(definitions,
                                          values_source_list=overrides,
                                          app_name='ES tests',
                                          app_description=__doc__,
                                          argv_source=[])
    # very useful debug
    #import contextlib
    #import sys
    #@contextlib.contextmanager
    #def stdout_opener():
    #yield sys.stdout
    #config_manager.write_conf('conf', stdout_opener)
    return config_manager
def test_bogus_source_and_destination(self):
    """End-to-end run: every crash in the fake source lands in the destination.

    A finite iterator subclass drains a four-crash fake source into a
    fake destination; afterwards both stores must be identical and all
    four dumps must have been delivered.
    """
    class NonInfiniteFTSAppClass(FetchTransformSaveApp):
        def source_iterator(self):
            # iterate the source exactly once instead of forever
            for x in self.source.new_crashes():
                yield ((x, ), {})

    class FakeStorageSource(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict({
                '1234': DotDict({
                    'ooid': '1234',
                    'Product': 'FireFloozy',
                    'Version': '1.0'
                }),
                '1235': DotDict({
                    'ooid': '1235',
                    'Product': 'ThunderRat',
                    'Version': '1.0'
                }),
                '1236': DotDict({
                    'ooid': '1236',
                    'Product': 'Caminimal',
                    'Version': '1.0'
                }),
                '1237': DotDict({
                    'ooid': '1237',
                    'Product': 'Fennicky',
                    'Version': '1.0'
                }),
            })

        def get_raw_crash(self, ooid):
            return self.store[ooid]

        def get_dump(self, ooid):
            return 'this is a fake dump'

        def new_crashes(self):
            for k in self.store.keys():
                yield k

    class FakeStorageDestination(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict()
            self.dumps = DotDict()

        def save_raw_crash(self, raw_crash, dump, crash_id):
            self.store[crash_id] = raw_crash
            self.dumps[crash_id] = dump

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'source': DotDict({'crashstorage_class': FakeStorageSource}),
        'destination': DotDict({'crashstorage_class': FakeStorageDestination}),
        'producer_consumer': DotDict({
            'producer_consumer_class': ThreadedTaskManager,
            'logger': logger,
            'number_of_threads': 1,
            'maximum_queue_size': 1
        })
    })
    fts_app = NonInfiniteFTSAppClass(config)
    fts_app.main()
    source = fts_app.source
    destination = fts_app.destination
    # everything that was in the source must now be in the destination
    self.assertEqual(source.store, destination.store)
    self.assertEqual(len(destination.dumps), 4)
    self.assertEqual(destination.dumps['1237'], source.get_dump('1237'))
def setUp(self):
    """Run the base class setUp, then attach a no-op logger for the tests."""
    super(TestFileSystemRawCrashStorage, self).setUp()
    self.logger = SilentFakeLogger()
def test_bogus_source_and_destination(self):
    """End-to-end CrashMoverApp run over fake source/destination storage.

    A finite iterator subclass drains a four-crash fake source into a
    fake destination; afterwards both stores must be identical, all four
    multi-dump payloads delivered, and close() called exactly once on
    each storage object.
    """
    class NonInfiniteFTSAppClass(CrashMoverApp):
        def _basic_iterator(self):
            # iterate the source exactly once instead of forever
            for x in self.source.new_crashes():
                yield ((x, ), {})

    class FakeStorageSource(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict({
                '1234': DotDict({
                    'ooid': '1234',
                    'Product': 'FireSquid',
                    'Version': '1.0'
                }),
                '1235': DotDict({
                    'ooid': '1235',
                    'Product': 'ThunderRat',
                    'Version': '1.0'
                }),
                '1236': DotDict({
                    'ooid': '1236',
                    'Product': 'Caminimal',
                    'Version': '1.0'
                }),
                '1237': DotDict({
                    'ooid': '1237',
                    'Product': 'Fennicky',
                    'Version': '1.0'
                }),
            })
            self.number_of_close_calls = 0
            # BUG FIX: a dead local ``def close():`` that shadowed nothing
            # and was never called used to live here; the real close() is
            # the method below.

        def get_raw_crash(self, ooid):
            return self.store[ooid]

        def get_raw_dumps(self, ooid):
            # multiple named dumps per crash
            return {
                'upload_file_minidump': 'this is a fake dump',
                'flash1': 'broken flash dump'
            }

        def new_crashes(self):
            for k in self.store.keys():
                yield k

        def close(self):
            self.number_of_close_calls += 1

    class FakeStorageDestination(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict()
            self.dumps = DotDict()
            self.number_of_close_calls = 0

        def save_raw_crash(self, raw_crash, dumps, crash_id):
            self.store[crash_id] = raw_crash
            self.dumps[crash_id] = dumps

        def close(self):
            self.number_of_close_calls += 1

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'number_of_submissions': "all",
        'source': DotDict({'crashstorage_class': FakeStorageSource}),
        'destination': DotDict({'crashstorage_class': FakeStorageDestination}),
        'producer_consumer': DotDict({
            'producer_consumer_class': ThreadedTaskManager,
            'logger': logger,
            'number_of_threads': 1,
            'maximum_queue_size': 1
        })
    })
    fts_app = NonInfiniteFTSAppClass(config)
    fts_app.main()
    source = fts_app.source
    destination = fts_app.destination
    eq_(source.store, destination.store)
    eq_(len(destination.dumps), 4)
    eq_(destination.dumps['1237'], source.get_raw_dumps('1237'))
    # ensure that each storage system had its close called
    eq_(source.number_of_close_calls, 1)
    eq_(destination.number_of_close_calls, 1)
def setUp(self):
    """Run the base class setUp, then attach a no-op logger for the tests."""
    super(TestStatsd, self).setUp()
    self.logger = SilentFakeLogger()
def setUp(self):
    """Run the base class setUp, then attach a no-op logger for the tests."""
    super(TestTaskManager, self).setUp()
    self.logger = SilentFakeLogger()
def setUp(self):
    """Attach a no-op logger for the tests in this class."""
    # the redundant trailing `pass` statement was removed
    self.logger = SilentFakeLogger()
def test_source_iterator(self):
    """source_iterator must accept both bare values and (args, kwargs) items.

    The fake source alternates between yielding a bare value and the
    ((value,), {'finished_func': ...}) form; the test counts how many
    items lacked a finished_func and checks that the mock finished_func
    was invoked exactly once for every item that carried it.
    """
    faked_finished_func = Mock()

    class FakeStorageSource(object):

        def __init__(self):
            self.first = True

        def new_crashes(self):
            if self.first:
                # make the iterator act as if exhausted on the very
                # first try
                self.first = False
            else:
                for k in range(999):
                    # ensure that both forms (a single value or the
                    # (args, kwargs) form are accepted.)
                    if k % 4:
                        yield k
                    else:
                        yield ((k, ), {
                            "finished_func": faked_finished_func
                        })
                for k in range(2):
                    yield None

    class FakeStorageDestination(object):

        def __init__(self, config, quit_check_callback):
            self.store = DotDict()
            self.dumps = DotDict()

        def save_raw_crash(self, raw_crash, dump, crash_id):
            self.store[crash_id] = raw_crash
            self.dumps[crash_id] = dump

    logger = SilentFakeLogger()
    config = DotDict({
        'logger': logger,
        'number_of_threads': 2,
        'maximum_queue_size': 2,
        'number_of_submissions': 'forever',
        'source': DotDict({'crashstorage_class': FakeStorageSource}),
        'destination': DotDict({'crashstorage_class': FakeStorageDestination}),
        'producer_consumer': DotDict({
            'producer_consumer_class': ThreadedTaskManager,
            'logger': logger,
            'number_of_threads': 1,
            'maximum_queue_size': 1
        })
    })
    fts_app = FetchTransformSaveApp(config)
    fts_app.source = FakeStorageSource()
    fts_app.destination = FakeStorageDestination
    error_detected = False
    no_finished_function_counter = 0
    for x, y in zip(range(1002), (a for a in fts_app.source_iterator())):
        if x == 0:
            # the iterator is exhausted on the 1st try and should have
            # yielded a None before starting over
            assert y is None
        elif x < 1000:
            if x - 1 != y[0][0] and not error_detected:
                error_detected = True
                assert x == y, \
                    'iterator fails on iteration %d: %s' % (x, y)
            # invoke that finished func to ensure that we've got the
            # right object
            try:
                y[1]['finished_func']()
            except KeyError:
                # bare values have no finished_func - count them
                no_finished_function_counter += 1
        else:
            if y is not None and not error_detected:
                error_detected = True
                # deliberately unsatisfiable: flags the failure with a message
                assert x is None, \
                    'iterator fails on iteration %d: %s' % (x, y)
    assert faked_finished_func.call_count == \
        (999 - no_finished_function_counter)