def test_is_alive(self):
    """The data driver reports alive against a reachable backend."""
    oslo_cache.register_config(self.conf)
    cache = oslo_cache.get_cache(self.conf)

    data_driver = driver.DataDriver(
        self.conf, cache,
        mongodb.ControlDriver(self.conf, cache))

    self.assertTrue(data_driver.is_alive())
def setUp(self):
    """Populate a two-pool catalogue, the second pool in a flavor group."""
    super(PoolCatalogTest, self).setUp()

    oslo_cache.register_config(self.conf)
    cache = oslo_cache.get_cache(self.conf)
    control = utils.load_storage_driver(self.conf, cache,
                                        control_mode=True)

    self.pools_ctrl = control.pools_controller
    self.flavors_ctrl = control.flavors_controller
    self.catalogue_ctrl = control.catalogue_controller

    # NOTE(cpp-cabrera): populate catalogue
    self.pool = str(uuid.uuid1())
    self.pool2 = str(uuid.uuid1())
    self.pool_group = "pool-group"
    self.queue = str(uuid.uuid1())
    self.flavor = str(uuid.uuid1())
    self.project = str(uuid.uuid1())

    # FIXME(therve): duplicating the mongo URL by swapping the host
    # alias is horrible; we need a nicer way to manage duplication.
    for host, alias in (("localhost", "127.0.0.1"),
                        ("127.0.0.1", "localhost")):
        if host in self.mongodb_url:
            other_url = self.mongodb_url.replace(host, alias)
            break
    else:
        self.skipTest("Can't build a dummy mongo URL.")

    self.pools_ctrl.create(self.pool, 100, self.mongodb_url)
    self.pools_ctrl.create(self.pool2, 100, other_url,
                           group=self.pool_group)
    self.catalogue_ctrl.insert(self.project, self.queue, self.pool)
    self.catalog = pooling.Catalog(self.conf, cache, control)
    self.flavors_ctrl.create(self.flavor, self.pool_group,
                             project=self.project)
def test_db_instance(self):
    """The data driver's connection is a StrictRedis instance."""
    oslo_cache.register_config(self.conf)
    cache = oslo_cache.get_cache(self.conf)

    control = driver.ControlDriver(self.conf, cache)
    data = driver.DataDriver(self.conf, cache, control)

    self.assertIsInstance(data.connection, redis.StrictRedis)
def test_db_instance(self):
    """Constructing the data driver yields a StrictRedis connection."""
    oslo_cache.register_config(self.conf)
    cache = oslo_cache.get_cache(self.conf)

    redis_driver = driver.DataDriver(
        self.conf, cache,
        driver.ControlDriver(self.conf, cache))

    self.assertIsInstance(redis_driver.connection, redis.StrictRedis)
def test_is_alive(self):
    """is_alive() returns True when the backend responds."""
    oslo_cache.register_config(self.conf)
    cache = oslo_cache.get_cache(self.conf)

    control = mongodb.ControlDriver(self.conf, cache)
    data_driver = driver.DataDriver(self.conf, cache, control)

    self.assertTrue(data_driver.is_alive())
def cache(self):
    """Register and return the oslo.cache region for this proxy.

    :raises InvalidDriver: if the cache backend fails to initialize
    """
    LOG.debug(u'Loading proxy cache driver')

    try:
        oslo_cache.register_config(self.conf)
        region = oslo_cache.get_cache(self.conf)
    except RuntimeError as exc:
        # Surface backend setup failures as a driver-level error.
        LOG.exception(exc)
        raise errors.InvalidDriver(exc)

    return region
def test_cached(self):
    """@caches stores results; the paired @purges call invalidates them."""
    conf = cfg.ConfigOpts()
    oslo_cache.register_config(conf)
    conf.cache.backend = 'dogpile.cache.memory'
    conf.cache.enabled = True
    cache = oslo_cache.get_cache(conf)

    sample_project = {
        u'name': u'Cats Abound',
        u'bits': b'\x80\x81\x82\x83\x84',
        b'key': u'Value. \x80',
    }

    def create_key(user, project=None):
        return user + ':' + str(project)

    class TestClass(object):

        def __init__(self, cache):
            self._cache = cache
            self.project_gets = 0
            self.project_dels = 0

        @decorators.caches(create_key, 60)
        def get_project(self, user, project=None):
            self.project_gets += 1
            return sample_project

        @get_project.purges
        def del_project(self, user, project=None):
            self.project_dels += 1

    instance = TestClass(cache)
    args = ('23', 'cats')

    project = instance.get_project(*args)
    self.assertEqual(sample_project, project)
    self.assertEqual(1, instance.project_gets)

    # Should be in the cache now.
    # NOTE: msgpack >= 1.0 removed the ``encoding`` keyword; ``raw=False``
    # is the supported equivalent (str-typed entries decode to unicode,
    # bin-typed entries stay bytes).
    project = msgpack.unpackb(cache.get(create_key(*args)), raw=False)
    self.assertEqual(sample_project, project)

    # Should read from the cache this time (counter will not
    # be incremented).
    project = instance.get_project(*args)
    self.assertEqual(sample_project, project)
    self.assertEqual(1, instance.project_gets)

    # Use kwargs this time
    instance.del_project('23', project='cats')
    self.assertEqual(1, instance.project_dels)

    # Should be a cache miss since we purged (above)
    project = instance.get_project(*args)
    self.assertEqual(2, instance.project_gets)
def test_db_instance(self):
    """Every message database name begins with the configured prefix."""
    self.config(unreliable=True)
    cache = oslo_cache.get_cache(self.conf)

    control = mongodb.ControlDriver(self.conf, cache)
    data = mongodb.DataDriver(self.conf, cache, control)

    prefix = data.mongodb_conf.database
    for database in data.message_databases:
        self.assertThat(database.name, matchers.StartsWith(prefix))
def test_replicaset_or_mongos_needed(self):
    """A standalone mongod (no replica set, not mongos) is rejected."""
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('pymongo.MongoClient.nodes') as fake_nodes:
        fake_nodes.__get__ = mock.Mock(return_value=[])

        with mock.patch('pymongo.MongoClient.is_mongos') as fake_mongos:
            fake_mongos.__get__ = mock.Mock(return_value=False)

            self.assertRaises(RuntimeError,
                              mongodb.DataDriver, self.conf, cache,
                              mongodb.ControlDriver(self.conf, cache))
def test_using_mongos(self):
    """Driver construction succeeds when connected through mongos."""
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('pymongo.MongoClient.is_mongos') as fake_mongos:
        fake_mongos.__get__ = mock.Mock(return_value=True)

        with mock.patch('pymongo.MongoClient.write_concern') as fake_wc:
            # w=2 satisfies the driver's durability requirement.
            fake_wc.__get__ = mock.Mock(
                return_value=pymongo.WriteConcern(w=2))

            mongodb.DataDriver(self.conf, cache,
                               mongodb.ControlDriver(self.conf, cache))
def test_using_replset(self):
    """Driver construction succeeds against a multi-node replica set."""
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('pymongo.MongoClient.nodes') as fake_nodes:
        fake_nodes.__get__ = mock.Mock(return_value=['node1', 'node2'])

        with mock.patch('pymongo.MongoClient.write_concern') as fake_wc:
            # w=2 satisfies the driver's durability requirement.
            fake_wc.__get__ = mock.Mock(
                return_value=pymongo.WriteConcern(w=2))

            mongodb.DataDriver(self.conf, cache,
                               mongodb.ControlDriver(self.conf, cache))
def test_write_concern_is_set(self):
    """Message databases get w='majority' with journaling disabled."""
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('pymongo.MongoClient.is_mongos') as fake_mongos:
        fake_mongos.__get__ = mock.Mock(return_value=True)
        self.config(unreliable=True)

        # NOTE: local renamed from ``driver`` to avoid shadowing the
        # module of the same name.
        data = mongodb.DataDriver(
            self.conf, cache, mongodb.ControlDriver(self.conf, cache))
        data.server_version = (2, 6)

        for database in data.message_databases:
            concern = database.write_concern
            self.assertEqual('majority', concern.document['w'])
            self.assertFalse(concern.document['j'])
def test_version_match(self):
    """DataDriver rejects old mongod versions and accepts new ones."""
    self.config(unreliable=True)
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('pymongo.MongoClient.server_info') as server_info:
        # Too old: construction must fail.
        server_info.return_value = {'version': '2.1'}
        self.assertRaises(RuntimeError,
                          mongodb.DataDriver, self.conf, cache,
                          mongodb.ControlDriver(self.conf, cache))

        # New enough: construction must succeed.
        server_info.return_value = {'version': '2.11'}
        try:
            mongodb.DataDriver(self.conf, cache,
                               mongodb.ControlDriver(self.conf, cache))
        except RuntimeError:
            self.fail('version match failed')
def test_version_match(self):
    """DataDriver rejects old redis-server versions, accepts new ones."""
    oslo_cache.register_config(self.conf)
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('redis.StrictRedis.info') as server_info:
        # Too old: construction must fail.
        server_info.return_value = {'redis_version': '2.4.6'}
        self.assertRaises(RuntimeError,
                          driver.DataDriver, self.conf, cache,
                          driver.ControlDriver(self.conf, cache))

        # New enough: construction must succeed.
        server_info.return_value = {'redis_version': '2.11'}
        try:
            driver.DataDriver(self.conf, cache,
                              driver.ControlDriver(self.conf, cache))
        except RuntimeError:
            self.fail('version match failed')
def test_write_concern_is_set(self):
    """Each message database's write concern is majority, non-journaled."""
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('pymongo.MongoClient.is_mongos') as fake_mongos:
        fake_mongos.__get__ = mock.Mock(return_value=True)
        self.config(unreliable=True)

        control = mongodb.ControlDriver(self.conf, cache)
        data = mongodb.DataDriver(self.conf, cache, control)
        data.server_version = (2, 6)

        for database in data.message_databases:
            document = database.write_concern.document
            self.assertEqual('majority', document['w'])
            self.assertFalse(document['j'])
def test_write_concern_check_works(self):
    """The write-concern check refuses w=1 and allows w=2."""
    cache = oslo_cache.get_cache(self.conf)

    with mock.patch('pymongo.MongoClient.is_mongos') as fake_mongos:
        fake_mongos.__get__ = mock.Mock(return_value=True)

        with mock.patch('pymongo.MongoClient.write_concern') as fake_wc:
            # w=1 offers no replication guarantee; must be rejected.
            fake_wc.__get__ = mock.Mock(
                return_value=pymongo.WriteConcern(w=1))
            self.assertRaises(RuntimeError,
                              mongodb.DataDriver, self.conf, cache,
                              mongodb.ControlDriver(self.conf, cache))

            # w=2 satisfies the check; construction succeeds.
            fake_wc.__get__ = mock.Mock(
                return_value=pymongo.WriteConcern(w=2))
            mongodb.DataDriver(self.conf, cache,
                               mongodb.ControlDriver(self.conf, cache))
def test_cached_with_cond(self):
    """Values failing the cache condition must never be stored."""
    conf = cfg.ConfigOpts()
    oslo_cache.register_config(conf)
    conf.cache.backend = 'dogpile.cache.memory'
    conf.cache.enabled = True
    cache = oslo_cache.get_cache(conf)

    class TestClass(object):

        def __init__(self, cache):
            self._cache = cache
            self.user_gets = 0

        @decorators.caches(lambda x: x, 60, lambda v: v != 'kgriffs')
        def get_user(self, name):
            self.user_gets += 1
            return name

    instance = TestClass(cache)

    name = 'malini'

    user = instance.get_user(name)
    self.assertEqual(name, user)
    self.assertEqual(1, instance.user_gets)

    # Should be in the cache now.
    # NOTE: msgpack >= 1.0 removed the ``encoding`` keyword; ``raw=False``
    # is the supported replacement and decodes str-typed entries to
    # unicode just as ``encoding='utf-8'`` did.
    user = msgpack.unpackb(cache.get(name), raw=False)
    self.assertEqual(name, user)

    # Should read from the cache this time (counter will not
    # be incremented).
    user = instance.get_user(name)
    self.assertEqual(name, user)
    self.assertEqual(1, instance.user_gets)

    # Won't go into the cache because of cond
    name = 'kgriffs'
    for i in range(3):
        user = instance.get_user(name)

        self.assertEqual(cache.get(name), core.NO_VALUE)
        self.assertEqual(name, user)
        self.assertEqual(2 + i, instance.user_gets)
def setUp(self):
    """Create two pools (one grouped), a catalogue entry, and a flavor."""
    super(PoolCatalogTest, self).setUp()

    oslo_cache.register_config(self.conf)
    cache = oslo_cache.get_cache(self.conf)
    control = utils.load_storage_driver(self.conf, cache,
                                        control_mode=True)

    self.pools_ctrl = control.pools_controller
    self.flavors_ctrl = control.flavors_controller
    self.catalogue_ctrl = control.catalogue_controller

    # NOTE(cpp-cabrera): populate catalogue
    self.pool = str(uuid.uuid1())
    self.pool2 = str(uuid.uuid1())
    self.pool_group = 'pool-group'
    self.queue = str(uuid.uuid1())
    self.flavor = str(uuid.uuid1())
    self.project = str(uuid.uuid1())

    # FIXME(therve): swapping host aliases to fake a second distinct
    # mongo URL is horrible; duplication needs nicer management.
    if 'localhost' in self.mongodb_url:
        mirror_url = self.mongodb_url.replace('localhost', '127.0.0.1')
    elif '127.0.0.1' in self.mongodb_url:
        mirror_url = self.mongodb_url.replace('127.0.0.1', 'localhost')
    else:
        self.skipTest("Can't build a dummy mongo URL.")

    self.pools_ctrl.create(self.pool, 100, self.mongodb_url)
    self.pools_ctrl.create(self.pool2, 100, mirror_url,
                           group=self.pool_group)
    self.catalogue_ctrl.insert(self.project, self.queue, self.pool)
    self.catalog = pooling.Catalog(self.conf, cache, control)
    self.flavors_ctrl.create(self.flavor, self.pool_group,
                             project=self.project)