Example #1
    def test_db_instance(self):
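        """Verify the Redis data driver exposes a StrictRedis connection."""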
        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)
        redis_driver = driver.DataDriver(
            self.conf, cache, driver.ControlDriver(self.conf, cache))

        self.assertIsInstance(redis_driver.connection, redis.StrictRedis)
Example #2
    def setUp(self):
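        """Create the pools, flavor and catalogue entry used by the tests."""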
        super(PoolCatalogTest, self).setUp()

        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)
        control = utils.load_storage_driver(self.conf, cache, control_mode=True)

        self.pools_ctrl = control.pools_controller
        self.flavors_ctrl = control.flavors_controller
        self.catalogue_ctrl = control.catalogue_controller

        # NOTE(cpp-cabrera): populate catalogue
        self.pool = str(uuid.uuid1())
        self.pool2 = str(uuid.uuid1())
        self.pool_group = "pool-group"
        self.queue = str(uuid.uuid1())
        self.flavor = str(uuid.uuid1())
        self.project = str(uuid.uuid1())

        # FIXME(therve) This is horrible, we need to manage duplication in a
        # nicer way
        if "localhost" in self.mongodb_url:
            other_url = self.mongodb_url.replace("localhost", "127.0.0.1")
        elif "127.0.0.1" in self.mongodb_url:
            other_url = self.mongodb_url.replace("127.0.0.1", "localhost")
        else:
            self.skipTest("Can't build a dummy mongo URL.")

        self.pools_ctrl.create(self.pool, 100, self.mongodb_url)
        self.pools_ctrl.create(self.pool2, 100, other_url, group=self.pool_group)
        self.catalogue_ctrl.insert(self.project, self.queue, self.pool)
        self.catalog = pooling.Catalog(self.conf, cache, control)
        self.flavors_ctrl.create(self.flavor, self.pool_group, project=self.project)
Example #3
    def test_is_alive(self):
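        """Verify the data driver's is_alive() health check passes."""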
        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)
        swift_driver = driver.DataDriver(
            self.conf, cache, mongodb.ControlDriver(self.conf, cache))

        self.assertTrue(swift_driver.is_alive())
Example #4
 def cache(self):
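     """Load the proxy cache driver, raising InvalidDriver on failure."""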
     LOG.debug(u'Loading proxy cache driver')
     try:
         oslo_cache.register_config(self.conf)
         return oslo_cache.get_cache(self.conf)
     except RuntimeError as exc:
         LOG.exception(exc)
         raise errors.InvalidDriver(exc)
Example #5
    def test_db_instance(self):
        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)
        redis_driver = driver.DataDriver(self.conf, cache,
                                         driver.ControlDriver
                                         (self.conf, cache))

        self.assertIsInstance(redis_driver.connection, redis.StrictRedis)
Example #6
    def test_is_alive(self):
        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)
        swift_driver = driver.DataDriver(self.conf, cache,
                                         mongodb.ControlDriver
                                         (self.conf, cache))

        self.assertTrue(swift_driver.is_alive())
Example #7
    def test_cached(self):
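        """Exercise the caching decorator: fill, hit, then purge."""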
        conf = cfg.ConfigOpts()
        oslo_cache.register_config(conf)
        conf.cache.backend = 'dogpile.cache.memory'
        conf.cache.enabled = True
        cache = oslo_cache.get_cache(conf)

        sample_project = {
            u'name': u'Cats Abound',
            u'bits': b'\x80\x81\x82\x83\x84',
            b'key': u'Value. \x80',
        }

        def create_key(user, project=None):
            return user + ':' + str(project)

        class TestClass(object):

            def __init__(self, cache):
                self._cache = cache
                self.project_gets = 0
                self.project_dels = 0

            @decorators.caches(create_key, 60)
            def get_project(self, user, project=None):
                self.project_gets += 1
                return sample_project

            @get_project.purges
            def del_project(self, user, project=None):
                self.project_dels += 1

        instance = TestClass(cache)

        args = ('23', 'cats')

        project = instance.get_project(*args)
        self.assertEqual(sample_project, project)
        self.assertEqual(1, instance.project_gets)

        # Should be in the cache now.
        project = msgpack.unpackb(cache.get(create_key(*args)),
                                  encoding='utf-8')
        self.assertEqual(sample_project, project)

        # Should read from the cache this time (counter will not
        # be incremented).
        project = instance.get_project(*args)
        self.assertEqual(sample_project, project)
        self.assertEqual(1, instance.project_gets)

        # Use kwargs this time
        instance.del_project('23', project='cats')
        self.assertEqual(1, instance.project_dels)

        # Should be a cache miss since we purged (above)
        project = instance.get_project(*args)
        self.assertEqual(2, instance.project_gets)
Example #8
    def test_version_match(self):
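        """Ensure the driver enforces the Redis server version check."""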
        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)

        with mock.patch('redis.StrictRedis.info') as info:
            info.return_value = {'redis_version': '2.4.6'}
            self.assertRaises(RuntimeError, driver.DataDriver, self.conf,
                              cache, driver.ControlDriver(self.conf, cache))

            info.return_value = {'redis_version': '2.11'}

            try:
                driver.DataDriver(self.conf, cache,
                                  driver.ControlDriver(self.conf, cache))
            except RuntimeError:
                self.fail('version match failed')
Example #9
    def test_version_match(self):
        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)

        with mock.patch('redis.StrictRedis.info') as info:
            info.return_value = {'redis_version': '2.4.6'}
            self.assertRaises(RuntimeError, driver.DataDriver,
                              self.conf, cache,
                              driver.ControlDriver(self.conf, cache))

            info.return_value = {'redis_version': '2.11'}

            try:
                driver.DataDriver(self.conf, cache,
                                  driver.ControlDriver(self.conf, cache))
            except RuntimeError:
                self.fail('version match failed')
Example #10
    def test_cached_with_cond(self):
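        """Verify values failing the cache condition are never cached."""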
        conf = cfg.ConfigOpts()
        oslo_cache.register_config(conf)
        conf.cache.backend = 'dogpile.cache.memory'
        conf.cache.enabled = True
        cache = oslo_cache.get_cache(conf)

        class TestClass(object):

            def __init__(self, cache):
                self._cache = cache
                self.user_gets = 0

            @decorators.caches(lambda x: x, 60, lambda v: v != 'kgriffs')
            def get_user(self, name):
                self.user_gets += 1
                return name

        instance = TestClass(cache)

        name = 'malini'

        user = instance.get_user(name)
        self.assertEqual(name, user)
        self.assertEqual(1, instance.user_gets)

        # Should be in the cache now.
        user = msgpack.unpackb(cache.get(name), encoding='utf-8')
        self.assertEqual(name, user)

        # Should read from the cache this time (counter will not
        # be incremented).
        user = instance.get_user(name)
        self.assertEqual(name, user)
        self.assertEqual(1, instance.user_gets)

        # Won't go into the cache because of cond
        name = 'kgriffs'
        for i in range(3):
            user = instance.get_user(name)

            self.assertEqual(cache.get(name), core.NO_VALUE)

            self.assertEqual(name, user)
            self.assertEqual(2 + i, instance.user_gets)
Example #11
    def setUp(self):
        super(PoolCatalogTest, self).setUp()

        oslo_cache.register_config(self.conf)
        cache = oslo_cache.get_cache(self.conf)
        control = utils.load_storage_driver(self.conf,
                                            cache,
                                            control_mode=True)

        self.pools_ctrl = control.pools_controller
        self.flavors_ctrl = control.flavors_controller
        self.catalogue_ctrl = control.catalogue_controller

        # NOTE(cpp-cabrera): populate catalogue
        self.pool = str(uuid.uuid1())
        self.pool2 = str(uuid.uuid1())
        self.pool_group = 'pool-group'
        self.queue = str(uuid.uuid1())
        self.flavor = str(uuid.uuid1())
        self.project = str(uuid.uuid1())

        # FIXME(therve) This is horrible, we need to manage duplication in a
        # nicer way
        if 'localhost' in self.mongodb_url:
            other_url = self.mongodb_url.replace('localhost', '127.0.0.1')
        elif '127.0.0.1' in self.mongodb_url:
            other_url = self.mongodb_url.replace('127.0.0.1', 'localhost')
        else:
            self.skipTest("Can't build a dummy mongo URL.")

        self.pools_ctrl.create(self.pool, 100, self.mongodb_url)
        self.pools_ctrl.create(self.pool2,
                               100,
                               other_url,
                               group=self.pool_group)
        self.catalogue_ctrl.insert(self.project, self.queue, self.pool)
        self.catalog = pooling.Catalog(self.conf, cache, control)
        self.flavors_ctrl.create(self.flavor,
                                 self.pool_group,
                                 project=self.project)
Example #12
    def setUp(self):
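        """Register general and cache options; disable unreliable mode."""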
        super(MongodbDriverTest, self).setUp()

        self.conf.register_opts(configs._GENERAL_OPTIONS)
        self.config(unreliable=False)
        oslo_cache.register_config(self.conf)
Example #13
    def setUp(self):
        super(MongodbDriverTest, self).setUp()

        self.conf.register_opts(default.ALL_OPTS)
        self.config(unreliable=False)
        oslo_cache.register_config(self.conf)