Example #1
from redis import StrictRedis

# AbstractDataSource, BaseItem, logger, the validator classes and the
# REDIS_DATASOURCE_* constants come from this project's own modules (not shown).

class RedisDataSource(AbstractDataSource):

    _r = None
    def __init__(self, config):
        if self._validateConfig(config):
            self._r = StrictRedis(host=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_HOST],
                                  port=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_PORT],
                                  db=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_DB])
            logger.debug("Obtained internal redis handler: " + str(self._r))
        else:
            raise ValueError("Error validating config")


    def update(self, item):
        self.store(item)

    def store(self, item):
        self._r.set(item.getHash(), item.getValue())

    def get(self, item):
        return self._r.get(item.getHash())

    def exists(self, item):
        return self.get(item) is not None

    def all(self):
        result = []
        # Obtain all keys (KEYS is O(N) and blocks the server;
        # scan_iter() is the safer choice on large databases)
        keys = self._r.keys()

        # For each key, fetch the value and wrap it in a BaseItem
        for k in keys:
            value = self._r.get(k)
            result.append(BaseItem({"origin": "redis"}, value))
        return result

    def _validateConfig(self, config):
        # Ensure the top-level redis section is present
        validator = MultipleConfigValidator(
                        {VALIDATORS_LIST: [ContainsKeyConfigValidator({KEY_VALUE: REDIS_DATASOURCE_CONFIG})]})
        if not validator.validate(config):
            raise ValueError("Config validation error: does not contain " + REDIS_DATASOURCE_CONFIG)

        # Validate the redis datasource config itself
        validator = MultipleConfigValidator(
                        {VALIDATORS_LIST: [ContainsKeysConfigValidator({KEYS_LIST: [REDIS_DATASOURCE_CONFIG_DB,
                                                                                    REDIS_DATASOURCE_CONFIG_HOST,
                                                                                    REDIS_DATASOURCE_CONFIG_PORT]})]})
        if not validator.validate(config[REDIS_DATASOURCE_CONFIG]):
            raise ValueError("Config validation error: config not complete")

        return True


    def delete(self, item):
        self._r.delete(item.getHash())
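
A minimal usage sketch for the class above. The literal config keys and the DemoItem class are assumptions standing in for the project's REDIS_DATASOURCE_* constants and its BaseItem API:

# Hypothetical wiring; assumes the project's constants resolve to these strings.
config = {
    "redis": {                # REDIS_DATASOURCE_CONFIG (assumed value)
        "host": "localhost",  # REDIS_DATASOURCE_CONFIG_HOST
        "port": 6379,         # REDIS_DATASOURCE_CONFIG_PORT
        "db": 0,              # REDIS_DATASOURCE_CONFIG_DB
    }
}

class DemoItem(object):
    """Stand-in for BaseItem; only the two methods the data source calls."""
    def getHash(self):
        return "demo:key"
    def getValue(self):
        return "demo-value"

source = RedisDataSource(config)
source.store(DemoItem())
print(source.exists(DemoItem()))  # True once stored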
Example #2
    # Assumes module-level imports of functools, redis.StrictRedis, and the
    # ensure_timeline_scheduled script wrapper under test.
    def test_ensure_timeline_scheduled_script(self):
        client = StrictRedis(db=9)

        timeline = 'timeline'
        timestamp = 100.0

        waiting_set_size = functools.partial(client.zcard, 'waiting')
        ready_set_size = functools.partial(client.zcard, 'ready')

        timeline_score_in_waiting_set = functools.partial(client.zscore, 'waiting', timeline)
        timeline_score_in_ready_set = functools.partial(client.zscore, 'ready', timeline)

        keys = ('waiting', 'ready', 'last-processed')

        # The first addition should cause the timeline to be added to the ready set.
        with self.assertChanges(ready_set_size, before=0, after=1), \
                self.assertChanges(timeline_score_in_ready_set, before=None, after=timestamp):
            assert ensure_timeline_scheduled(keys, (timeline, timestamp, 1, 10), client) == 1

        # Adding it again with a timestamp in the future should not change the schedule time.
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertDoesNotChange(ready_set_size), \
                self.assertDoesNotChange(timeline_score_in_ready_set):
            assert ensure_timeline_scheduled(keys, (timeline, timestamp + 50, 1, 10), client) is None

        # Move the timeline from the ready set to the waiting set.
        client.zrem('ready', timeline)
        client.zadd('waiting', timestamp, timeline)  # redis-py 2.x signature: zadd(name, score, member)
        client.set('last-processed', timestamp)

        increment = 1
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp + increment):
            assert ensure_timeline_scheduled(keys, (timeline, timestamp, increment, 10), client) is None

        # Make sure the schedule respects the maximum value.
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp + 1, after=timestamp):
            assert ensure_timeline_scheduled(keys, (timeline, timestamp, increment, 0), client) is None

        # Test to ensure a missing last processed timestamp can be handled
        # correctly (chooses minimum of schedule value and record timestamp.)
        client.zadd('waiting', timestamp, timeline)
        client.delete('last-processed')
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertDoesNotChange(timeline_score_in_waiting_set):
            assert ensure_timeline_scheduled(keys, (timeline, timestamp + 100, increment, 10), client) is None

        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp - 100):
            assert ensure_timeline_scheduled(keys, (timeline, timestamp - 100, increment, 10), client) is None
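
ensure_timeline_scheduled behaves like a redis-py Script wrapper around a Lua program (it is called positionally with keys, args, and a client, which matches Script.__call__); the actual Lua source is not reproduced here. For reference, a toy script registered the redis-py way:

from redis import StrictRedis

client = StrictRedis(db=9)

# Toy stand-in script: unconditionally schedules the member. The real
# scheduling logic is a larger Lua program and is not shown here.
demo = client.register_script(
    "return redis.call('ZADD', KEYS[1], ARGV[2], ARGV[1])")

# redis-py Script objects take keys, args, and an optional client:
demo(keys=['ready'], args=['timeline', 100.0], client=client)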
Example #3
class RedisDataStorage(DataStorage):
    def __init__(self, host, port, level):
        DataStorage.__init__(self, level)
        self._storage = StrictRedis(host=host, port=port)

    def get_key(self, pid):
        return "{}_{}".format(self.storage_level(), pid)

    def get_data(self, pid, default=None):
        result = self._storage.get(name=self.get_key(pid))
        if result is None:
            result = default
        return result

    def store_data(self, pid, data):
        self._storage.set(name=self.get_key(pid), value=data)

    def delete_data(self, pid):
        self._storage.delete(self.get_key(pid))

    def exists(self, pid):
        return self._storage.exists(self.get_key(pid))
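
A short usage sketch, assuming DataStorage's storage_level() simply echoes the level passed to the constructor:

storage = RedisDataStorage(host='localhost', port=6379, level='job')
storage.store_data(42, b'payload')          # stored under key "job_42"
print(storage.get_data(42))                 # b'payload' (bytes unless decode_responses is set)
storage.delete_data(42)
print(storage.get_data(42, default=b''))    # missing key falls back to the default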
Example #4
    # Python 2 code: uses zope.interface's implements() and str-based hashing below.
    class RedisSessionObject():
        implements(ISession)

        def __init__(self, request):
            self._options = _options
            self.rd = None
            self._master_rd = False
            self.request = request
            self._data = None
            self.id = None
            self._new_session = True
            self._changed = False

            cookie = self.request.headers.get('Cookie')
            if cookie is None:
                self.__create_id()
            else:
                c = SimpleCookie()
                c.load(cookie)
                session_cookie = c.get(self._options['_cookie_name'])
                if session_cookie is None:
                    #new session!
                    self.__create_id()
                else:
                    self.id = session_cookie.value
                    self._new_session = False

            def session_callback(request, response):
                exception = getattr(request, 'exception', None)
                commit = self._changed
                increase_expire_mod = _options['_increase_expire_mod']
                if increase_expire_mod > 0:
                    rnd = round(random.random() * 1000000)
                    mod = rnd % increase_expire_mod
                    if not mod:
                        # Periodically force a save so the session expiry gets refreshed
                        commit = True

                if exception is None and commit:
                    self.__save()
                    cookie = SimpleCookie()
                    _cname = self._options['_cookie_name']
                    cookie[_cname] = self.id
                    domain = self._options.get('cookie_domain')
                    cookie[_cname]['path'] = _options['_path']
                    if domain is not None:
                        cookie[_cname]['domain'] = domain
                    if self._options['_secure']:
                        cookie[_cname]['secure'] = True
                    header = cookie[_cname].output(header='')
                    response.headerlist.append(('Set-Cookie', header))

            request.add_response_callback(session_callback)

        # private methods
        def __init_rd(self, master=False):
            if self.rd is None:
                if master:
                    self.rd = StrictRedis(host=_redis_servers[0][0],
                                          port=_redis_servers[0][1],
                                          db=_redis_servers[0][2])
                    self._master_rd = True
                else:
                    server = random.choice(_redis_servers)
                    self.rd = StrictRedis(host=server[0],
                                          port=server[1],
                                          db=server[2])
                    self._master_rd = False
            elif master and not self._master_rd:
                self.rd = StrictRedis(host=_redis_servers[0][0],
                                      port=_redis_servers[0][1],
                                      db=_redis_servers[0][2])
                self._master_rd = True

        def __key(self):
            return 'rd:ses:%s' % self.id

        def __load(self):
            if self._data is None:
                self.__init_rd()
                data = self.rd.get(self.__key())
                if data is not None:
                    self._data = msgpack.unpackb(data,
                                                 use_list=False,
                                                 encoding='utf-8')
                else:
                    self._data = {}

        def __save(self):
            if self._data is not None and len(self._data):
                self.__init_rd(master=True)
                self.rd.setex(self.__key(), self._options['_expire'],
                              msgpack.packb(self._data, encoding='utf-8'))

        def __create_id(self):
            self.id = hashlib.sha1(
                hashlib.sha1("%f%s%f%s" % (time.time(), id({}),
                                           random.random(),
                                           getpid())).hexdigest()).hexdigest()

        def init_with_id(self, session_id):
            """
            Init the session with custom id. the session data is no loaded immediately but loaded only when data is accessed
            :param session_id:
            :return: self
            """
            self.id = session_id
            self._data = None
            return self

        def set_expire(self, expire):
            self._options['_expire'] = expire

        # ISession API
        def save(self):
            self._changed = True

        def invalidate(self):
            self.__init_rd(master=True)
            self.rd.delete(self.__key())
            # TODO: delete cookie

        def changed(self):
            self._changed = True

        def flash(self, msg, queue='', allow_duplicate=True):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if not allow_duplicate:
                if msg not in q:
                    q.append(msg)
            else:
                q.append(msg)
            self[key] = q

        def pop_flash(self, queue=''):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if len(q):
                e = q.pop()
                self[key] = q
                return e
            return None

        def peek_flash(self, queue=''):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if len(q):
                e = q[0]
                return e
            return None

        def new_csrf_token(self):
            token = os.urandom(20).encode('hex')
            self['_csrft_'] = token
            return token

        def get_csrf_token(self):
            token = self.get('_csrft_', None)
            if token is None:
                token = self.new_csrf_token()
            return token

        # mapping methods
        def __getitem__(self, key):
            self.__load()
            return self._data[key]

        def get(self, key, default=None):
            self.__load()
            return self._data.get(key, default)

        def __delitem__(self, key):
            self.__load()
            del self._data[key]
            self._changed = True

        def __setitem__(self, key, value):
            self.__load()
            self._data[key] = value
            self._changed = True

        def keys(self):
            self.__load()
            return self._data.keys()

        def values(self):
            self.__load()
            return self._data.values()

        def items(self):
            self.__load()
            return self._data.items()

        def iterkeys(self):
            self.__load()
            return iter(self._data.keys())

        def itervalues(self):
            self.__load()
            return iter(self._data.values())

        def iteritems(self):
            self.__load()
            return iter(self._data.items())

        def clear(self):
            self.__load()
            self._data = {}
            self._changed = True

        def update(self, d):
            # NOTE: copies session data *into* d (the reverse of dict.update);
            # multi_set() below is what merges a dict into the session.
            self.__load()
            for k in self._data.keys():
                d[k] = self._data[k]

        def multi_set(self, d):
            self.__load()
            for k in d.keys():
                self._data[k] = d[k]
            self._changed = True

        def setdefault(self, key, default=None):
            """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D"""
            self.__load()
            if key in self._data:
                return self._data[key]
            else:
                self._data[key] = default
                self._changed = True
                return default

        def pop(self, k, *args):
            """remove specified key and return the corresponding value
            ``*args`` may contain a single default value, or may not be supplied.
            If key is not found, default is returned if given, otherwise
            ``KeyError`` is raised"""
            self.__load()
            self._changed = True
            return self._data.pop(k, *args)

        def popitem(self):
            """remove and return some (key, value) pair as a
            2-tuple; but raise ``KeyError`` if mapping is empty"""
            self.__load()
            self._changed = True
            return self._data.popitem()

        def __len__(self):
            self.__load()
            return len(self._data)

        def __iter__(self):
            return self.iterkeys()

        def __contains__(self, key):
            self.__load()
            return key in self._data

        @property
        def new(self):
            return self._new_session
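
RedisSessionObject closes over _options and _redis_servers, so it must be defined inside a factory function. A minimal sketch of what that enclosing factory could look like (every name and default below is an assumption, not the original code):

def make_redis_session_factory(redis_servers,
                               cookie_name='session_id',
                               expire=3600,
                               path='/',
                               secure=False,
                               increase_expire_mod=0):
    # redis_servers: [(host, port, db), ...] with the master at index 0
    _redis_servers = redis_servers
    _options = {
        '_cookie_name': cookie_name,
        '_expire': expire,
        '_path': path,
        '_secure': secure,
        '_increase_expire_mod': increase_expire_mod,
    }

    class RedisSessionObject(object):
        # ... body exactly as shown above ...
        pass

    return RedisSessionObject

# Pyramid wiring: config.set_session_factory(make_redis_session_factory(servers))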
Example #5
    class RedisSessionObject():
        implements(ISession)

        def __init__(self, request):
            self._options = _options
            self.rd = None
            self._master_rd = False
            self.request = request
            self._data = None
            self.id = None
            self._new_session = True
            self._changed = False

            cookie = self.request.headers.get('Cookie')
            if cookie is None:
                self.__create_id()
            else:
                c = SimpleCookie()
                c.load(cookie)
                session_cookie = c.get(self._options['_cookie_name'])
                if session_cookie is None:
                    #new session!
                    self.__create_id()
                else:
                    self.id = session_cookie.value
                    self._new_session = False

            def session_callback(request, response):
                exception = getattr(request, 'exception', None)
                commit = self._changed
                increase_expire_mod = _options['_increase_expire_mod']
                if increase_expire_mod > 0:
                    rnd = round(random.random() * 1000000)
                    mod = rnd % increase_expire_mod
                    if not mod:
                        # Periodically force a save so the session expiry gets refreshed
                        commit = True

                if exception is None and commit:
                    self.__save()
                    cookie = SimpleCookie()
                    _cname = self._options['_cookie_name']
                    cookie[_cname] = self.id
                    domain = self._options.get('cookie_domain')
                    cookie[_cname]['path'] = _options['_path']
                    if domain is not None:
                        cookie[_cname]['domain'] = domain
                    if self._options['_secure']:
                        cookie[_cname]['secure'] = True
                    header = cookie[_cname].output(header='')
                    response.headerlist.append(('Set-Cookie', header))

            request.add_response_callback(session_callback)

        # private methods
        def __init_rd(self, master=False):
            if self.rd is None:
                if master:
                    self.rd = StrictRedis(host=_redis_servers[0][0], port=_redis_servers[0][1], db=_redis_servers[0][2])
                    self._master_rd = True
                else:
                    server = random.choice(_redis_servers)
                    self.rd = StrictRedis(host=server[0], port=server[1], db=server[2])
                    self._master_rd = False
            elif master and not self._master_rd:
                self.rd = StrictRedis(host=_redis_servers[0][0], port=_redis_servers[0][1], db=_redis_servers[0][2])
                self._master_rd = True

        def __key(self):
            return 'rd:ses:%s' % self.id

        def __load(self):
            if self._data is None:
                self.__init_rd()
                data = self.rd.get(self.__key())
                if data is not None:
                    self._data = msgpack.unpackb(data, use_list=True, encoding='utf-8')
                else:
                    self._data = {}

        def __save(self):
            if self._data is not None and len(self._data):
                self.__init_rd(master=True)
                self.rd.setex(self.__key(), self._options['_expire'], msgpack.packb(self._data, encoding='utf-8'))

        def __create_id(self):
            self.id = hashlib.sha1(hashlib.sha1("%f%s%f%s" % (time.time(), id({}), random.random(), getpid())).hexdigest(), ).hexdigest()

        def init_with_id(self, session_id):
            """
            Init the session with custom id. the session data is no loaded immediately but loaded only when data is accessed
            :param session_id:
            :return: self
            """
            self.id = session_id
            self._data = None
            return self

        def set_expire(self, expire):
            self._options['_expire'] = expire

        # ISession API
        def save(self):
            self._changed = True

        def invalidate(self):
            self.__init_rd(master=True)
            self.rd.delete(self.__key())
            # TODO: delete cookie

        def changed(self):
            self._changed = True

        def flash(self, msg, queue='', allow_duplicate=True):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if not allow_duplicate:
                if msg not in q:
                    q.append(msg)
            else:
                q.append(msg)
            self[key] = q

        def pop_flash(self, queue=''):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if len(q):
                e = q.pop()
                self[key] = q
                return e
            return None

        def peek_flash(self, queue=''):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if len(q):
                e = q[0]
                return e
            return None

        def new_csrf_token(self):
            token = os.urandom(20).encode('hex')
            self['_csrft_'] = token
            return token

        def get_csrf_token(self):
            token = self.get('_csrft_', None)
            if token is None:
                token = self.new_csrf_token()
            return token

        # mapping methods
        def __getitem__(self, key):
            self.__load()
            return self._data[key]

        def get(self, key, default=None):
            self.__load()
            return self._data.get(key, default)

        def __delitem__(self, key):
            self.__load()
            del self._data[key]
            self._changed = True

        def __setitem__(self, key, value):
            self.__load()
            self._data[key] = value
            self._changed = True

        def keys(self):
            self.__load()
            return self._data.keys()

        def values(self):
            self.__load()
            return self._data.values()

        def items(self):
            self.__load()
            return self._data.items()

        def iterkeys(self):
            self.__load()
            return iter(self._data.keys())

        def itervalues(self):
            self.__load()
            return iter(self._data.values())

        def iteritems(self):
            self.__load()
            return iter(self._data.items())

        def clear(self):
            self.__load()
            self._data = {}
            self._changed = True

        def update(self, d):
            # NOTE: copies session data *into* d (the reverse of dict.update);
            # multi_set() below is what merges a dict into the session.
            self.__load()
            for k in self._data.keys():
                d[k] = self._data[k]

        def multi_set(self, d):
            self.__load()
            for k in d.keys():
                self._data[k] = d[k]
            self._changed = True

        def setdefault(self, key, default=None):
            """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D"""
            self.__load()
            if key in self._data:
                return self._data[key]
            self._data[key] = default
            self._changed = True
            return default

        def pop(self, k, *args):
            """remove specified key and return the corresponding value
            ``*args`` may contain a single default value, or may not be supplied.
            If key is not found, default is returned if given, otherwise
            ``KeyError`` is raised"""
            self.__load()
            self._changed = True
            return self._data.pop(k, *args)

        def popitem(self):
            """remove and return some (key, value) pair as a
            2-tuple; but raise ``KeyError`` if mapping is empty"""
            self.__load()
            self._changed = True
            return self._data.popitem()

        def __len__(self):
            self.__load()
            return len(self._data)


        def __iter__(self):
            return self.iterkeys()

        def __contains__(self, key):
            self.__load()
            return key in self._data

        @property
        def new(self):
            return self._new_session
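
The two session variants differ in one msgpack flag: Example #4 unpacks with use_list=False (sequences come back as tuples), this one with use_list=True (mutable lists). A quick round trip shows the difference; note the encoding= keyword used above belongs to older msgpack releases and was removed in msgpack 1.0:

import msgpack

packed = msgpack.packb({'tags': ['a', 'b']})
print(msgpack.unpackb(packed, use_list=True))   # {'tags': ['a', 'b']}
print(msgpack.unpackb(packed, use_list=False))  # {'tags': ('a', 'b')}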
Example #6
class RedisStore():
    def __init__(self, dispatcher, db_host, db_port, db_num, db_pw):
        self.dispatcher = dispatcher
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.encoder = JSONEncoder()
        self.decoder = JSONDecoder()
        self.class_map = {}
        self.object_map = {}
    
    def create_object(self, dbo, update_rev=False):
        self.save_object(dbo, update_rev)
        dbo.on_loaded()
            
    def save_object(self, dbo, update_rev=False, autosave=False):
        if update_rev:
            dbo.dbo_rev = getattr(dbo, "dbo_rev", 0) + 1
        json_obj = self.build_json(dbo)
        key = dbo.dbo_key
        self.redis.set(key, self.encoder.encode(json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatcher.dispatch("db_log{0}".format("_auto" if autosave else ""), "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo
    
    def build_json(self, dbo):
        dbo.before_save()
        json_obj = {}
        if dbo.__class__ != dbo.dbo_base_class:
            json_obj["class_name"] = dbo.__module__ + "." + dbo.__class__.__name__
        for field_name in dbo.dbo_fields:
            json_obj[field_name] = getattr(dbo, field_name, None)
        for dbo_col in dbo.dbo_collections:
            coll_list = list()
            for child_dbo in getattr(dbo, dbo_col.field_name):
                if dbo_col.key_type:
                    coll_list.append(child_dbo.dbo_id)
                else:
                    coll_list.append(self.build_json(child_dbo))
            json_obj[dbo_col.field_name] = coll_list
        for dbo_ref in dbo.dbo_refs:
            ref = getattr(dbo, dbo_ref.field_name, None)
            if ref:
                json_obj[dbo_ref.field_name] = ref.dbo_id   
        return json_obj
    
    def cache_object(self, dbo):
        self.object_map[dbo.dbo_key] = dbo
    
    def load_cached(self, key):
        return self.object_map.get(key)
    
    def evict(self, dbo):
        try:
            del self.object_map[dbo.dbo_key]
        except KeyError:
            self.dispatcher.dispatch("db_log", "Failed to evict " + dbo.dbo_key + " from db cache")
                
    def load_by_key(self, key_type, key, base_class=None):
        dbo_key = key_type + ":" + key
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.decoder.decode(json_str)
        dbo = self.load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo
        
    def load_class(self, json_obj, base_class):
        class_path = json_obj.get("class_name")
        if not class_path:
            return base_class
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        self.class_map[class_path] = clazz
        return clazz 
    
    def load_object(self, dbo_class, key):
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)
    
    def load_json(self, dbo, json_obj):
        for field_name in dbo.dbo_fields:
            try:
                setattr(dbo, field_name, json_obj[field_name])
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing field " + field_name)
        for dbo_col in dbo.dbo_collections:
            coll = getattr(dbo, dbo_col.field_name, [])
            try:
                for child_json in json_obj[dbo_col.field_name]:
                    if dbo_col.key_type:
                        child_dbo = self.load_by_key(dbo_col.key_type, child_json, dbo_col.base_class)
                    else:
                        child_dbo = self.load_class(child_json, dbo_col.base_class)()
                        self.load_json(child_dbo, child_json)
                    coll.append(child_dbo)
            except AttributeError:
                self.dispatcher.dispatch("db_log", "{0} json failed to load for coll {1} in {2}".format(child_json, dbo_col.field_name, dbo.dbo_id))
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing collection " + dbo_col.field_name)
        
        for dbo_ref in dbo.dbo_refs:
            try:
                ref_key = json_obj[dbo_ref.field_name]
                ref_obj = self.load_by_key(dbo_ref.key_type, ref_key, dbo_ref.base_class)
                setattr(dbo, dbo_ref.field_name, ref_obj)    
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing ref " + dbo_ref.field_name)
        dbo.on_loaded()    
        return True
                    
    def delete_object(self, dbo):
        key = dbo.dbo_key
        self.redis.delete(key)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, key)
        for dbo_col in dbo.dbo_collections:
            if dbo_col.key_type:
                coll = getattr(dbo, dbo_col.field_name, set())
                for child_dbo in coll:
                    self.delete_object(child_dbo)
        self.dispatcher.dispatch("db_log", "object deleted: " + key)
        if self.object_map.get(dbo.dbo_key):
            del self.object_map[dbo.dbo_key]
        return True
        
    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)
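
load_class above resolves a dotted path with the classic __import__(module, globals(), locals(), [class_name]) idiom. The modern equivalent with importlib, as a sketch (behavior matches for ordinary dotted paths):

import importlib

def load_class(class_path):
    # Split "pkg.module.ClassName" into module and attribute parts
    module_name, _, class_name = class_path.rpartition('.')
    return getattr(importlib.import_module(module_name), class_name)

print(load_class('collections.OrderedDict'))  # <class 'collections.OrderedDict'>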
Example #7
class RedisStore:
    def __init__(self, db_host, db_port, db_num, db_pw):
        self.pool = ConnectionPool(max_connections=2,
                                   db=db_num,
                                   host=db_host,
                                   port=db_port,
                                   password=db_pw,
                                   decode_responses=True)
        self.redis = StrictRedis(connection_pool=self.pool)
        self.redis.ping()
        self._object_map = WeakValueDictionary()

    def create_object(self, dbo_class, dbo_dict, update_timestamp=True):
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type',
                                          dbo_class))
        if not dbo_class:
            return
        try:
            dbo_id = dbo_dict['dbo_id']
        except KeyError:
            dbo_id, dbo_dict = dbo_dict, {}
        if dbo_id is None or dbo_id == '':
            log.warn("create_object called with empty dbo_id")
            return
        dbo_id = str(dbo_id).lower()
        if self.object_exists(dbo_class.dbo_key_type, dbo_id):
            raise ObjectExistsError(dbo_id)
        dbo = dbo_class()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        dbo.db_created()
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, dbo.dbo_id)
        self.save_object(dbo, update_timestamp)
        return dbo

    def load_object(self, dbo_key, key_type=None, silent=False):
        if key_type:
            try:
                key_type = key_type.dbo_key_type
            except AttributeError:
                pass
            try:
                dbo_key, dbo_id = ':'.join((key_type, dbo_key)), dbo_key
            except TypeError:
                if not silent:
                    log.exception("Invalid dbo_key passed to load_object",
                                  stack_info=True)
                return
        else:
            key_type, _, dbo_id = dbo_key.partition(':')
        cached_dbo = self._object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            if not silent:
                log.warn("Failed to find {} in database", dbo_key)
            return
        return self._json_to_obj(json_str, key_type, dbo_id)

    def save_object(self, dbo, update_timestamp=False, autosave=False):
        if update_timestamp:
            dbo.dbo_ts = int(time.time())
        if dbo.dbo_indexes:
            self._update_indexes(dbo)
        self._clear_old_refs(dbo)
        save_root, new_refs = dbo.to_db_value()
        self.redis.set(dbo.dbo_key, json_encode(save_root))
        if new_refs:
            self._set_new_refs(dbo, new_refs)
        log.debug("db object {} {}saved", dbo.dbo_key,
                  "auto" if autosave else "")
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def update_object(self, dbo, dbo_dict):
        dbo.hydrate(dbo_dict)
        return self.save_object(dbo, True)

    def delete_object(self, dbo):
        key = dbo.dbo_key
        dbo.db_deleted()
        self.delete_key(key)
        self._clear_old_refs(dbo)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, dbo.dbo_id)
        for children_type in dbo.dbo_children_types:
            self.delete_object_set(
                get_dbo_class(children_type),
                "{}_{}s:{}".format(dbo.dbo_key_type, children_type,
                                   dbo.dbo_id))
        for ix_name in dbo.dbo_indexes:
            ix_value = getattr(dbo, ix_name, None)
            if ix_value is not None and ix_value != '':
                self.delete_index('ix:{}:{}'.format(dbo.dbo_key_type, ix_name),
                                  ix_value)
        log.debug("object deleted: {}", key)
        self.evict_object(dbo)

    def load_cached(self, dbo_key):
        return self._object_map.get(dbo_key)

    def object_exists(self, obj_type, obj_id):
        return self.redis.exists('{}:{}'.format(obj_type, obj_id))

    def load_object_set(self, dbo_class, set_key=None):
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type',
                                          dbo_class))
        key_type = dbo_class.dbo_key_type
        if not set_key:
            set_key = dbo_class.dbo_set_key
        results = set()
        keys = deque()
        pipeline = self.redis.pipeline()
        for key in self.fetch_set_keys(set_key):
            dbo_key = ':'.join([key_type, key])
            try:
                results.add(self._object_map[dbo_key])
            except KeyError:
                keys.append(key)
                pipeline.get(dbo_key)
        for dbo_id, json_str in zip(keys, pipeline.execute()):
            if json_str:
                obj = self._json_to_obj(json_str, key_type, dbo_id)
                if obj:
                    results.add(obj)
                continue
            log.warn("Removing missing object from set {}", set_key)
            self.delete_set_key(set_key, dbo_id)
        return results

    def delete_object_set(self, dbo_class, set_key=None):
        if not set_key:
            set_key = dbo_class.dbo_set_key
        for dbo in self.load_object_set(dbo_class, set_key):
            self.delete_object(dbo)
        self.delete_key(set_key)

    def reload_object(self, dbo_key):
        dbo = self._object_map.get(dbo_key)
        if dbo:
            json_str = self.redis.get(dbo_key)
            if not json_str:
                log.warn("Failed to find {} in database for reload", dbo_key)
                return None
            return self.update_object(dbo, json_decode(json_str))
        return self.load_object(dbo_key)

    def evict_object(self, dbo):
        self._object_map.pop(dbo.dbo_key, None)

    def load_value(self, key, default=None):
        json = self.redis.get(key)
        if json:
            return json_decode(json)
        return default

    def save_value(self, key, value):
        self.redis.set(key, json_encode(value))

    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)

    def add_set_key(self, set_key, *values):
        self.redis.sadd(set_key, *values)

    def delete_set_key(self, set_key, value):
        self.redis.srem(set_key, value)

    def set_key_exists(self, set_key, value):
        return self.redis.sismember(set_key, value)

    def db_counter(self, counter_id, inc=1):
        return self.redis.incr("counter:{}".format(counter_id), inc)

    def delete_key(self, key):
        self.redis.delete(key)

    def set_index(self, index_name, key, value):
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        return self.redis.hget(index_name, key)

    def get_full_index(self, index_name):
        return self.redis.hgetall(index_name)

    def delete_index(self, index_name, key):
        return self.redis.hdel(index_name, key)

    def get_all_hash(self, index_name):
        return {
            key: json_decode(value)
            for key, value in self.redis.hgetall(index_name).items()
        }

    def get_hash_keys(self, hash_id):
        return self.redis.hkeys(hash_id)

    def set_db_hash(self, hash_id, hash_key, value):
        return self.redis.hset(hash_id, hash_key, json_encode(value))

    def get_db_hash(self, hash_id, hash_key):
        return json_decode(self.redis.hget(hash_id, hash_key))

    def remove_db_hash(self, hash_id, hash_key):
        self.redis.hdel(hash_id, hash_key)

    def get_all_db_hash(self, hash_id):
        return [
            json_decode(value)
            for value in self.redis.hgetall(hash_id).values()
        ]

    def get_db_list(self, list_id, start=0, end=-1):
        return [
            json_decode(value)
            for value in self.redis.lrange(list_id, start, end)
        ]

    def add_db_list(self, list_id, value):
        self.redis.lpush(list_id, json_encode(value))

    def trim_db_list(self, list_id, start, end):
        return self.redis.ltrim(list_id, start, end)

    def dbo_holders(self, dbo_key, degrees=0):
        all_keys = set()

        def find(find_key, degree):
            holder_keys = self.fetch_set_keys('{}:holders'.format(find_key))
            for new_key in holder_keys:
                if new_key != dbo_key and new_key not in all_keys:
                    all_keys.add(new_key)
                    if degree < degrees:
                        find(new_key, degree + 1)

        find(dbo_key, 0)
        return all_keys

    def _json_to_obj(self, json_str, key_type, dbo_id):
        dbo_dict = json_decode(json_str)
        dbo = get_mixed_type(key_type, dbo_dict.get('mixins'))()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def _update_indexes(self, dbo):
        try:
            old_dbo = json_decode(self.redis.get(dbo.dbo_key))
        except TypeError:
            old_dbo = None

        for ix_name in dbo.dbo_indexes:
            new_val = getattr(dbo, ix_name, None)
            old_val = old_dbo.get(ix_name) if old_dbo else None
            if old_val == new_val:
                continue
            ix_key = 'ix:{}:{}'.format(dbo.dbo_key_type, ix_name)
            if old_val is not None:
                self.delete_index(ix_key, old_val)
            if new_val is not None and new_val != '':
                if self.get_index(ix_key, new_val):
                    raise NonUniqueError(ix_key, new_val)
                self.set_index(ix_key, new_val, dbo.dbo_id)

    def _clear_old_refs(self, dbo):
        dbo_key = dbo.dbo_key
        ref_key = '{}:refs'.format(dbo_key)
        for ref_id in self.fetch_set_keys(ref_key):
            holder_key = '{}:holders'.format(ref_id)
            self.delete_set_key(holder_key, dbo_key)
        self.delete_key(ref_key)

    def _set_new_refs(self, dbo, new_refs):
        dbo_key = dbo.dbo_key
        self.add_set_key("{}:refs".format(dbo_key), *new_refs)
        for ref_id in new_refs:
            holder_key = '{}:holders'.format(ref_id)
            self.add_set_key(holder_key, dbo_key)
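
load_object_set batches its cache misses through a redis-py pipeline, so all the GETs for a set travel in a single round trip. The pattern in isolation, as a sketch:

from redis import StrictRedis

r = StrictRedis(decode_responses=True)
keys = ['obj:1', 'obj:2', 'obj:3']

pipe = r.pipeline()
for key in keys:
    pipe.get(key)          # queued locally, nothing sent yet
values = pipe.execute()    # one round trip; results come back in queue order

for key, value in zip(keys, values):
    print(key, value)      # value is None for keys that do not exist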
Example #8
class RedisStore():
    def __init__(self, db_host, db_port, db_num, db_pw):
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.class_map = {}
        self.object_map = {}

    def create_object(self, dbo, update_rev=False):
        self.save_object(dbo, update_rev)
        dbo.on_loaded()

    def save_object(self, dbo, update_rev=False, autosave=False):
        if update_rev:
            rev = getattr(dbo, "dbo_rev", None)
            dbo.dbo_rev = 1 if not rev else rev + 1
        dbo.before_save()
        key = dbo.dbo_key
        self.redis.set(key, self.json_encode(dbo.save_json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatch("db_log{0}".format("_auto" if autosave else ""), "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo

    def load_cached(self, key):
        return self.object_map.get(key)

    def evict_object(self, dbo):
        try:
            del self.object_map[dbo.dbo_key]
        except:
            debug("Failed to evict " + dbo.dbo_key + " from db cache", self)

    @logged
    def load_by_key(self, key_type, key, base_class=None):
        dbo_key = unicode('{0}:{1}'.format(key_type, key))
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.json_decode(json_str)
        dbo = self._load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo

    def object_exists(self, obj_type, obj_id):
        key = unicode('{0}:{1}'.format(obj_type, obj_id))
        return key in self.redis.keys(key)

    def load_object(self, dbo_class, key):
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)

    def update_object(self, dbo, json_obj):
        self.load_json(dbo, json_obj)
        self.save_object(dbo, True)

    def delete_object(self, dbo):
        key = dbo.dbo_key
        self.redis.delete(key)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, key)
        for dbo_col in dbo.dbo_collections:
            if dbo_col.key_type:
                coll = getattr(dbo, dbo_col.field_name, set())
                for child_dbo in coll:
                    self.delete_object(child_dbo)
        debug("object deleted: " + key, self)
        if self.object_map.get(dbo.dbo_key):
            del self.object_map[dbo.dbo_key]
        return True

    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)

    def set_index(self, index_name, key, value):
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        return self.redis.hget(index_name, key)

    def delete_index(self, index_name, key):
        return self.redis.hdel(index_name, key)

    def _load_class(self, json_obj, base_class):
        class_path = json_obj.get("class_name")
        if not class_path:
            return self.cls_registry(base_class)
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        clazz = self.cls_registry(clazz)
        self.class_map[class_path] = clazz
        return clazz

    def load_json(self, dbo, json_obj):
        for field_name in dbo.dbo_fields:
            try:
                setattr(dbo, field_name, json_obj[field_name])
            except KeyError:
                pass
        for dbo_col in dbo.dbo_collections:
            coll = getattr(dbo, dbo_col.field_name, [])
            try:
                for child_json in json_obj[dbo_col.field_name]:
                    try:
                        if dbo_col.key_type:
                            child_dbo = self.load_by_key(dbo_col.key_type, child_json, dbo_col.base_class)
                        else:
                            child_dbo = self._load_class(child_json, dbo_col.base_class)()
                            self.load_json(child_dbo, child_json)
                        coll.append(child_dbo)
                    except AttributeError:
                        warn("{0} json failed to load for coll {1} in {2}".format(child_json, dbo_col.field_name, unicode(dbo.dbo_id)), self)
            except KeyError:
                if dbo.dbo_key_type:
                    trace("db: Object " + unicode(dbo.dbo_debug_key) + " json missing collection " + dbo_col.field_name, self)

        for dbo_ref in dbo.dbo_refs:
            try:
                ref_key = json_obj[dbo_ref.field_name]
                ref_obj = self.load_by_key(dbo_ref.key_type, ref_key, dbo_ref.base_class)
                setattr(dbo, dbo_ref.field_name, ref_obj)
            except KeyError:
                if dbo.dbo_key_type:
                    trace("db: Object " + unicode(dbo.dbo_debug_key) + " json missing ref " + dbo_ref.field_name, self)
        dbo.on_loaded()
        return True
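
object_exists in this revision checks membership via KEYS, which scans the entire keyspace on every call; Examples #7 and #9 replace it with the O(1) EXISTS command. The cheaper form, sketched as a drop-in method:

    def object_exists(self, obj_type, obj_id):
        # EXISTS is O(1); redis-py returns a count, truthy when the key is present
        return bool(self.redis.exists('{0}:{1}'.format(obj_type, obj_id)))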
Example #9
class RedisStore():
    def __init__(self, db_host, db_port, db_num, db_pw):
        self.pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw,
                                   decode_responses=True)
        self.redis = StrictRedis(connection_pool=self.pool)
        self.redis.ping()
        self._object_map = WeakValueDictionary()

    def create_object(self, dbo_class, dbo_dict, update_timestamp=True):
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type', dbo_class))
        if not dbo_class:
            return
        try:
            dbo_id = dbo_dict['dbo_id']
        except KeyError:
            dbo_id, dbo_dict = dbo_dict, {}
        if dbo_id is None or dbo_id == '':
            warn("create_object called with empty dbo_id")
            return
        dbo_id = str(dbo_id).lower()
        if self.object_exists(dbo_class.dbo_key_type, dbo_id):
            raise ObjectExistsError(dbo_id)
        dbo = dbo_class()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        dbo.db_created()
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, dbo.dbo_id)
        self.save_object(dbo, update_timestamp)
        return dbo

    def save_object(self, dbo, update_timestamp=False, autosave=False):
        if update_timestamp:
            dbo.dbo_ts = int(time.time())
        if dbo.dbo_indexes:
            self._update_indexes(dbo)
        self._clear_old_refs(dbo)
        save_root, new_refs = dbo.to_db_value()
        self.redis.set(dbo.dbo_key, json_encode(save_root))
        if new_refs:
            self._set_new_refs(dbo, new_refs)
        debug("db object {} {}saved", dbo.dbo_key, "auto" if autosave else "")
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def save_raw(self, key, raw):
        self.redis.set(key, json_encode(raw))

    def load_raw(self, key, default=None):
        json = self.redis.get(key)
        if json:
            return json_decode(json)
        return default

    def load_cached(self, dbo_key):
        return self._object_map.get(dbo_key)

    def load_object(self, dbo_key, key_type=None, silent=False):
        if key_type:
            try:
                key_type = key_type.dbo_key_type
            except AttributeError:
                pass
            try:
                dbo_key, dbo_id = ':'.join([key_type, dbo_key]), dbo_key
            except TypeError:
                if not silent:
                    exception("Invalid dbo_key passed to load_object", stack_info=True)
                return
        else:
            key_type, _, dbo_id = dbo_key.partition(':')
        cached_dbo = self._object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            if not silent:
                warn("Failed to find {} in database", dbo_key)
            return
        return self.load_from_json(json_str, key_type, dbo_id)

    def load_from_json(self, json_str, key_type, dbo_id):
        dbo_dict = json_decode(json_str)
        dbo = get_mixed_type(key_type, dbo_dict.get('mixins'))()
        dbo.dbo_id = dbo_id
        self._object_map[dbo.dbo_key] = dbo
        dbo.hydrate(dbo_dict)
        return dbo

    def object_exists(self, obj_type, obj_id):
        return self.redis.exists('{}:{}'.format(obj_type, obj_id))

    def load_object_set(self, dbo_class, set_key=None):
        key_type = dbo_class.dbo_key_type
        if not set_key:
            set_key = dbo_class.dbo_set_key
        results = set()
        keys = deque()
        pipeline = self.redis.pipeline()
        for key in self.fetch_set_keys(set_key):
            dbo_key = ':'.join([key_type, key])
            try:
                results.add(self._object_map[dbo_key])
            except KeyError:
                keys.append(key)
                pipeline.get(dbo_key)
        for dbo_id, json_str in zip(keys, pipeline.execute()):
            if json_str:
                obj = self.load_from_json(json_str, key_type, dbo_id)
                if obj:
                    results.add(obj)
                continue
            warn("Removing missing object from set {}", set_key)
            self.delete_set_key(set_key, dbo_id)
        return results

    def delete_object_set(self, dbo_class, set_key=None):
        if not set_key:
            set_key = dbo_class.dbo_set_key
        for dbo in self.load_object_set(dbo_class, set_key):
            self.delete_object(dbo)
        self.delete_key(set_key)

    def update_object(self, dbo, dbo_dict):
        dbo.hydrate(dbo_dict)
        return self.save_object(dbo, True)

    def delete_object(self, dbo):
        key = dbo.dbo_key
        dbo.db_deleted()
        self.delete_key(key)
        self._clear_old_refs(dbo)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, dbo.dbo_id)
        for children_type in dbo.dbo_children_types:
            self.delete_object_set(get_dbo_class(children_type),
                                   "{}_{}s:{}".format(dbo.dbo_key_type, children_type, dbo.dbo_id))
        for ix_name in dbo.dbo_indexes:
            ix_value = getattr(dbo, ix_name, None)
            if ix_value is not None and ix_value != '':
                self.delete_index('ix:{}:{}'.format(dbo.dbo_key_type, ix_name), ix_value)
        debug("object deleted: {}", key)
        self.evict_object(dbo)

    def reload_object(self, dbo_key):
        dbo = self._object_map.get(dbo_key)
        if dbo:
            json_str = self.redis.get(dbo_key)
            if not json_str:
                warn("Failed to find {} in database for reload", dbo_key)
                return None
            return self.update_object(dbo, json_decode(json_str))
        return self.load_object(dbo_key)

    def evict_object(self, dbo):
        self._object_map.pop(dbo.dbo_key, None)

    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)

    def add_set_key(self, set_key, *values):
        self.redis.sadd(set_key, *values)

    def delete_set_key(self, set_key, value):
        self.redis.srem(set_key, value)

    def set_key_exists(self, set_key, value):
        return self.redis.sismember(set_key, value)

    def db_counter(self, counter_id, inc=1):
        return self.redis.incr("counter:{}".format(counter_id), inc)

    def delete_key(self, key):
        self.redis.delete(key)

    def set_index(self, index_name, key, value):
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        return self.redis.hget(index_name, key)

    def get_full_index(self, index_name):
        return self.redis.hgetall(index_name)

    def delete_index(self, index_name, key):
        return self.redis.hdel(index_name, key)

    def get_all_hash(self, index_name):
        return {key: json_decode(value) for key, value in self.redis.hgetall(index_name).items()}

    def set_db_hash(self, hash_id, hash_key, value):
        return self.redis.hset(hash_id, hash_key, json_encode(value))

    def get_db_hash(self, hash_id, hash_key):
        return json_decode(self.redis.hget(hash_id, hash_key))

    def remove_db_hash(self, hash_id, hash_key):
        self.redis.hdel(hash_id, hash_key)

    def get_all_db_hash(self, hash_id):
        return [json_decode(value) for value in self.redis.hgetall(hash_id).values()]

    def get_db_list(self, list_id, start=0, end=-1):
        return [json_decode(value) for value in self.redis.lrange(list_id, start, end)]

    def add_db_list(self, list_id, value):
        self.redis.lpush(list_id, json_encode(value))

    def trim_db_list(self, list_id, start, end):
        return self.redis.ltrim(list_id, start, end)

    def _update_indexes(self, dbo):
        try:
            old_dbo = json_decode(self.redis.get(dbo.dbo_key))
        except TypeError:
            old_dbo = None

        for ix_name in dbo.dbo_indexes:
            new_val = getattr(dbo, ix_name, None)
            old_val = old_dbo.get(ix_name) if old_dbo else None
            if old_val == new_val:
                continue
            ix_key = 'ix:{}:{}'.format(dbo.dbo_key_type, ix_name)
            if old_val is not None:
                self.delete_index(ix_key, old_val)
            if new_val is not None and new_val != '':
                if self.get_index(ix_key, new_val):
                    raise NonUniqueError(ix_key, new_val)
                self.set_index(ix_key, new_val, dbo.dbo_id)

    def _clear_old_refs(self, dbo):
        dbo_key = dbo.dbo_key
        ref_key = '{}:refs'.format(dbo_key)
        for ref_id in self.fetch_set_keys(ref_key):
            holder_key = '{}:holders'.format(ref_id)
            self.delete_set_key(holder_key, dbo_key)
        self.delete_key(ref_key)

    def _set_new_refs(self, dbo, new_refs):
        dbo_key = dbo.dbo_key
        self.add_set_key("{}:refs".format(dbo_key), *new_refs)
        for ref_id in new_refs:
            holder_key = '{}:holders'.format(ref_id)
            self.add_set_key(holder_key, dbo_key)
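
Unlike the plain dict caches in Examples #6 and #8, this revision's _object_map is a WeakValueDictionary: an object silently drops out of the cache once nothing else references it, so the cache can never keep dead objects alive. A minimal demonstration:

import gc
from weakref import WeakValueDictionary

class Obj(object):
    pass

cache = WeakValueDictionary()
o = Obj()
cache['area:1'] = o
print('area:1' in cache)   # True while the strong reference `o` exists

del o
gc.collect()               # immediate in CPython; collect() helps other runtimes
print('area:1' in cache)   # False: the entry vanished with its last strong reference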