# Esempio n. 1 (Italian: "Example no. 1")
# 0
def test_is_rate_limited_script():
    """Exercise the rate-limiting Lua script against a live Redis (db 9)."""
    now = int(time.time())

    client = StrictRedis(db=9)
    # Start from a clean slate: leftover 'foo'/'bar' counters from a previous
    # run (TTLs of 60/120 s) would break the first assertion.
    client.flushdb()

    # The item should not be rate limited by either key.
    # NOTE: map() returns a lazy iterator on Python 3, so comparing it to a
    # list is always False -- materialize with list() first.
    assert list(map(
        bool,
        is_rate_limited(client, ('foo', 'bar'),
                        (1, now + 60, 2, now + 120)))) == [False, False]

    # The item should be rate limited by the first key (1).
    assert list(map(
        bool,
        is_rate_limited(client, ('foo', 'bar'),
                        (1, now + 60, 2, now + 120)))) == [True, False]

    # The item should still be rate limited by the first key (1), but *not*
    # rate limited by the second key (2) even though this is the third time
    # we've checked the quotas. This ensures items that are rejected by a lower
    # quota don't affect unrelated items that share a parent quota.
    assert list(map(
        bool,
        is_rate_limited(client, ('foo', 'bar'),
                        (1, now + 60, 2, now + 120)))) == [True, False]

    # redis-py returns bytes on Python 3 (and str on Python 2, where
    # b'1' == '1' still holds), so compare against bytes literals.
    assert client.get('foo') == b'1'
    assert 59 <= client.ttl('foo') <= 60

    assert client.get('bar') == b'1'
    assert 119 <= client.ttl('bar') <= 120
# Esempio n. 2 (Italian: "Example no. 2")
# 0
class RedisDataSource(AbstractDataSource):
    """Data source backed by a Redis server.

    Items are stored under ``item.getHash()`` with ``item.getValue()`` as the
    payload.  The constructor validates *config* before opening a connection.
    """

    _r = None  # StrictRedis handle, set once the config validates

    def __init__(self, config):
        """Validate *config* and open the Redis connection.

        Raises:
            ValueError: when the redis section or any of its host/port/db
                keys is missing.  (Previously ``BaseException`` was raised,
                which is reserved for interpreter-level exits; ``ValueError``
                is still caught by any existing ``except BaseException``.)
        """
        if self._validateConfig(config):
            self._r = StrictRedis(host=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_HOST],
                                  port=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_PORT],
                                  db=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_DB])
            # Lazy %s formatting: the message is only built when DEBUG is on.
            logger.debug("Obtained internal redis handler%s", self._r)
        else:
            # Defensive: _validateConfig() raises on failure rather than
            # returning False, so this branch is unreachable in practice.
            raise ValueError("Error validating config ")

    def update(self, item):
        """Overwrite the stored value for *item* (alias for store())."""
        self.store(item)

    def store(self, item):
        """Persist *item*'s value under its hash key."""
        self._r.set(item.getHash(), item.getValue())

    def get(self, item):
        """Return the stored value for *item*, or None when absent."""
        return self._r.get(item.getHash())

    def exists(self, item):
        """True when a value is stored for *item*."""
        return self.get(item) is not None

    def all(self):
        """Return every stored value wrapped in a BaseItem.

        Uses a single MGET instead of one GET per key, avoiding N round
        trips to the server for N keys.
        """
        keys = self._r.keys()
        if not keys:
            return []
        return [BaseItem({"origin": "redis"}, value)
                for value in self._r.mget(keys)]

    def _validateConfig(self, config):
        """Return True when *config* is complete; raise ValueError otherwise.

        Checks that the top-level redis section exists and that it contains
        the db/host/port keys.
        """
        validator = MultipleConfigValidator(
                        {VALIDATORS_LIST: [ContainsKeyConfigValidator({KEY_VALUE: REDIS_DATASOURCE_CONFIG})]})
        if not validator.validate(config):
            raise ValueError("Config validation error : does not contain " + REDIS_DATASOURCE_CONFIG)

        # Validate redis datasource config
        validator = MultipleConfigValidator(
                        {VALIDATORS_LIST: [ContainsKeysConfigValidator({KEYS_LIST: [REDIS_DATASOURCE_CONFIG_DB,
                                                                                    REDIS_DATASOURCE_CONFIG_HOST,
                                                                                    REDIS_DATASOURCE_CONFIG_PORT]})]})
        if not validator.validate(config[REDIS_DATASOURCE_CONFIG]):
            raise ValueError("Config validation error : config not complete ")

        return True

    def delete(self, item):
        """Remove the value stored for *item* (no-op when absent)."""
        self._r.delete(item.getHash())
# Esempio n. 3 (Italian: "Example no. 3")
# 0
class Record(object):
    """Drains JSON events from the Redis list 'alerts' and folds each one
    into a per-flow aggregate record keyed by
    ``src_ip_src_port_dest_ip_dest_port``.
    """

    def __init__(self, host='127.0.0.1', port=6379):
        # BUG FIX: host/port were previously ignored and the client always
        # connected to localhost:6379 regardless of the arguments.
        self.r = StrictRedis(host=host, port=port)

    def run(self):
        """Poll forever, merging each popped event into its flow record."""
        while True:
            value = self.r.rpop('alerts')
            if not value:
                # Queue drained -- back off briefly instead of busy-waiting.
                sleep(1)
                continue
            obj = json.loads(value)
            keyredis = (obj['src_ip'] + '_' + str(obj['src_port']) + '_' +
                        obj['dest_ip'] + '_' + str(obj['dest_port']))
            entry = self.r.get(keyredis)
            restruct = json.loads(entry) if entry else {}
            # Ensure all three aggregate buckets exist before appending.
            restruct.setdefault('http', [])
            restruct.setdefault('alerts', [])
            restruct.setdefault('files', [])
            if 'alert' in obj:
                restruct['alerts'].append(obj['alert']['signature'])
            if 'fileinfo' in obj:
                restruct['files'].append(obj['fileinfo'])
            if 'http' in obj:
                restruct['http'].append(obj['http'])
            # restruct always holds the three bucket keys at this point, so
            # the original `len(restruct) > 0` guard was always true --
            # persist unconditionally.
            self.r.set(keyredis, json.dumps(restruct))
# Esempio n. 4 (Italian: "Example no. 4")
# 0
def test_is_rate_limited_script():
    """Exercise the rate-limiting Lua script against a live Redis (db 9)."""
    now = int(time.time())

    client = StrictRedis(db=9)
    # Clean slate: leftover 'foo'/'bar' counters from an earlier run (their
    # TTLs are 60/120 s) would make the first assertion fail on a re-run.
    client.flushdb()

    # The item should not be rate limited by either key.
    # map() is lazy on Python 3; materialize with list() before comparing.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == [False, False]

    # The item should be rate limited by the first key (1).
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == [True, False]

    # The item should still be rate limited by the first key (1), but *not*
    # rate limited by the second key (2) even though this is the third time
    # we've checked the quotas. This ensures items that are rejected by a lower
    # quota don't affect unrelated items that share a parent quota.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == [True, False]

    # redis-py returns bytes under Python 3 (b'1' == '1' also holds on 2).
    assert client.get('foo') == b'1'
    assert 59 <= client.ttl('foo') <= 60

    assert client.get('bar') == b'1'
    assert 119 <= client.ttl('bar') <= 120
# Esempio n. 5 (Italian: "Example no. 5")
# 0
class RedisDataStorage(DataStorage):
    """DataStorage implementation that persists values in a Redis server.

    Keys are namespaced as '<storage_level>_<pid>'.
    """

    def __init__(self, host, port, level):
        DataStorage.__init__(self, level)
        self._storage = StrictRedis(host=host, port=port)

    def get_key(self, pid):
        """Build the namespaced Redis key for *pid*."""
        return "{}_{}".format(self.storage_level(), pid)

    def get_data(self, pid, default=None):
        """Fetch the value stored for *pid*; return *default* on a miss."""
        stored = self._storage.get(name=self.get_key(pid))
        return default if stored is None else stored

    def store_data(self, pid, data):
        """Write *data* under the key for *pid*."""
        self._storage.set(name=self.get_key(pid), value=data)

    def delete_data(self, pid):
        """Drop the entry for *pid* (no-op when absent)."""
        self._storage.delete(self.get_key(pid))

    def exists(self, pid):
        """True when an entry for *pid* is present."""
        return self._storage.exists(self.get_key(pid))
class RedisCache(CacheBase):
    """CacheBase backend keeping password hashes and group sets in Redis.

    Entries live under '<prefix><user>-pass' / '<prefix><user>-groups' and
    expire after ``self.expire`` seconds.
    """

    def __init__(self, config, section):
        from redis.client import StrictRedis
        server = config.get(section, 'redis-server')
        port = config.getint(section, 'redis-port')
        db = config.getint(section, 'redis-db')
        self.conn = StrictRedis(server, port, db, decode_responses=True)

    def check_password(self, user, password):
        """Check the given user and password.

        Returns None on cache miss, True if password matches, False if not.
        """
        cached = self.conn.get(self.prefix('%s-pass' % user))
        if cached is None:
            return None
        # The cached value doubles as the salt for re-hashing the candidate.
        return cached == self.hash(password, cached)

    def set_password(self, user, password):
        """Cache the salted hash of *password* for *user* with expiry."""
        key = self.prefix('%s-pass' % user)
        self.conn.set(key, self.hash(password, None), ex=self.expire)

    def in_groups(self, user, groups):
        """None on cache miss, else whether *user* belongs to any of *groups*."""
        key = self.prefix('%s-groups' % user)
        if not self.conn.exists(key):
            return None
        cached_groups = self.conn.smembers(key)
        return not cached_groups.isdisjoint(groups)

    def set_groups(self, user, groups):
        """Cache *user*'s group membership, refreshing the expiry."""
        key = self.prefix('%s-groups' % user)
        pipe = self.conn.pipeline()
        pipe.sadd(key, *groups).expire(key, self.expire)
        pipe.execute()
class RedisCache(CacheBase):
    # Redis-backed credential cache: salted password hashes live under
    # '<prefix><user>-pass' and group-membership sets under
    # '<prefix><user>-groups', both expiring after self.expire seconds.
    # NOTE(review): this duplicates the RedisCache definition above -- if
    # both are in the same module, this one silently shadows the first.

    def __init__(self, config, section):
        # Local import keeps the redis dependency optional for other backends.
        from redis.client import StrictRedis
        self.conn = StrictRedis(config.get(section, 'redis-server'),
                                config.getint(section, 'redis-port'),
                                config.getint(section, 'redis-db'),
                                decode_responses=True)

    def check_password(self, user, password):
        """Check the given user and password.

        Returns None on cache miss, True if password matches, False if not.
        """
        cached = self.conn.get(self.prefix('%s-pass' % user))
        if cached is None:
            return cached
        else:
            # The cached hash doubles as the salt for re-hashing `password`.
            return cached == self.hash(password, cached)

    def set_password(self, user, password):
        """Cache the salted hash of *password* for *user* with expiry."""
        self.conn.set(self.prefix('%s-pass' % user),
                      self.hash(password, None),
                      ex=self.expire)

    def in_groups(self, user, groups):
        """Return None on cache miss, else whether *user* is in any of *groups*."""
        key = self.prefix('%s-groups' % user)
        if not self.conn.exists(key):
            return None

        return not self.conn.smembers(key).isdisjoint(groups)

    def set_groups(self, user, groups):
        """Cache *user*'s groups and refresh the expiry atomically (pipeline)."""
        key = self.prefix('%s-groups' % user)
        pipe = self.conn.pipeline()
        pipe.sadd(key, *groups).expire(key, self.expire)
        pipe.execute()
# Esempio n. 8 (Italian: "Example no. 8")
# 0
class DistanceCalculator(object):
    # Computes great-circle distances from a client address to all mirrors,
    # caching the sorted result in Redis for ten minutes.
    _geoip4 = None
    _geoip6 = None

    def __init__(self):
        # Load the GeoIP databases into class attributes since they each need 20+ MB in memory
        if not self.__class__._geoip4:
            self.__class__._geoip4 = GeoIP(Config.GEOIP_PATH_V4, MEMORY_CACHE)
        if not self.__class__._geoip6:
            self.__class__._geoip6 = GeoIP(Config.GEOIP_PATH_V6, MEMORY_CACHE)
        self.redis = StrictRedis(Config.REDIS['HOST'], Config.REDIS['PORT'],
                                 Config.REDIS['DB'])

    @staticmethod
    def _haversine(lon1, lat1, lon2, lat2):
        """
        Calculate the great circle distance between two points
        on the earth (specified in decimal degrees)
        """
        # convert decimal degrees to radians
        lon1, lat1, lon2, lat2 = map(lambda v: radians(float(v)),
                                     [lon1, lat1, lon2, lat2])
        # haversine formula
        dlon = lon2 - lon1
        dlat = lat2 - lat1
        a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
        c = 2 * asin(sqrt(a))
        km = 6367 * c  # convert to km
        return km

    def get_mirror_distances(self, address):
        """Return an OrderedDict of mirror name -> distance (km), nearest
        first.  Results are cached in a Redis sorted set keyed by address
        and the last mirror-list update."""
        last_update = self.redis.get(Config.KEY_LAST_UPDATE)
        key = Config.KEY_MIRROR.format(address, last_update)
        distances = OrderedDict(self.redis.zrange(key, 0, -1, withscores=True))
        if not distances:
            # Strip the IPv4-mapped-IPv6 prefix so v4 addresses hit the v4 db.
            if address.startswith("::ffff:"):
                address = address.replace("::ffff:", "")
            try:
                if ":" in address:
                    record = self._geoip6.record_by_addr(address)
                else:
                    record = self._geoip4.record_by_addr(address)
            except socket.error:
                raise GeoIPLookupError()
            if not record:
                raise GeoIPLookupError()
            lat = record['latitude']
            lon = record['longitude']

            # age__lt=3601: only consider mirrors synced within the last hour.
            distances = OrderedDict(
                sorted(((mirror.name,
                         self._haversine(lon, lat, mirror.lon, mirror.lat))
                        for mirror in Mirror.objects.filter(age__lt=3601)),
                       key=itemgetter(1)))
            if distances:
                # NOTE(review): zadd(key, **mapping) is the redis-py < 3.0
                # signature; redis-py >= 3.0 requires zadd(key, mapping).
                self.redis.zadd(key, **distances)
                self.redis.expire(key, 60 * 10)  # 10 min
        return distances

    def get_nearest_mirror(self, address):
        """Return the nearest mirror's name, or the configured fallback when
        geo lookup fails or no mirrors qualify."""
        try:
            distances = self.get_mirror_distances(address)
            if distances:
                # NOTE(review): iteritems() is Python 2 only; on Python 3
                # this would need items()/iter().
                return next(distances.iteritems())[0]
            return Config.FALLBACK_MIRROR
        except GeoIPLookupError:
            return Config.FALLBACK_MIRROR
# Esempio n. 9 (Italian: "Example no. 9")
# 0
    class RedisSessionObject():
        """Pyramid ISession implementation backed by Redis.

        Session payloads are msgpack-encoded under 'rd:ses:<id>'.  Reads may
        go to a random server from the enclosing ``_redis_servers`` list;
        writes are always routed to the first (master) server.  The session
        cookie is (re)emitted from a response callback when data changed.
        """
        implements(ISession)

        def __init__(self, request):
            self._options = _options
            self.rd = None               # lazily-created redis client
            self._master_rd = False      # whether self.rd points at the master
            self.request = request
            self._data = None            # None = not yet loaded from redis
            self.id = None
            self._new_session = True
            self._changed = False

            # Recover the session id from the cookie, or mint a fresh one.
            cookie = self.request.headers.get('Cookie')
            if cookie is None:
                self.__create_id()
            else:
                c = SimpleCookie()
                c.load(cookie)
                session_cookie = c.get(self._options['_cookie_name'])
                if session_cookie is None:
                    #new session!
                    self.__create_id()
                else:
                    self.id = session_cookie.value
                    self._new_session = False

            def session_callback(request, response):
                # Persist and re-emit the cookie only on successful responses
                # where something changed (or on a random 1/N request when
                # _increase_expire_mod is set, to slide the expiry window).
                exception = getattr(request, 'exception', None)
                commit = self._changed
                increase_expire_mod = _options['_increase_expire_mod']
                if increase_expire_mod > 0:
                    rnd = round(random.random() * 1000000)
                    mod = rnd % increase_expire_mod
                    if not mod:
                        commit = True

                if exception is None and commit:
                    self.__save()
                    cookie = SimpleCookie()
                    _cname = self._options['_cookie_name']
                    cookie[_cname] = self.id
                    domain = self._options.get('cookie_domain')
                    cookie[_cname]['path'] = _options['_path']
                    if domain is not None:
                        cookie[_cname]['domain'] = domain
                    if self._options['_secure']:
                        cookie[_cname]['secure'] = True
                    header = cookie[_cname].output(header='')
                    response.headerlist.append(('Set-Cookie', header))

            request.add_response_callback(session_callback)

        # private methods
        def __init_rd(self, master=False):
            """Bind self.rd lazily; upgrade to the master server on demand.

            Reads may use any configured server; writes (master=True) always
            use _redis_servers[0].
            """
            if self.rd is None:
                if master:
                    self.rd = StrictRedis(host=_redis_servers[0][0],
                                          port=_redis_servers[0][1],
                                          db=_redis_servers[0][2])
                    self._master_rd = True
                else:
                    server = random.choice(_redis_servers)
                    self.rd = StrictRedis(host=server[0],
                                          port=server[1],
                                          db=server[2])
                    self._master_rd = False
            elif master and not self._master_rd:
                self.rd = StrictRedis(host=_redis_servers[0][0],
                                      port=_redis_servers[0][1],
                                      db=_redis_servers[0][2])
                self._master_rd = True

        def __key(self):
            """Redis key for this session's payload."""
            return 'rd:ses:%s' % self.id

        def __load(self):
            """Populate self._data from redis on first access (idempotent)."""
            if self._data is None:
                self.__init_rd()
                data = self.rd.get(self.__key())
                if data is not None:
                    self._data = msgpack.unpackb(data,
                                                 use_list=False,
                                                 encoding='utf-8')
                else:
                    self._data = {}

        def __save(self):
            """Write the session to the master with the configured expiry.

            Empty sessions are deliberately not persisted.
            """
            if self._data is not None and len(self._data):
                self.__init_rd(master=True)
                self.rd.setex(self.__key(), self._options['_expire'],
                              msgpack.packb(self._data, encoding='utf-8'))

        def __create_id(self):
            # Entropy sources: time, object identity, PRNG, pid.
            # NOTE(review): not cryptographically strong -- consider
            # os.urandom/secrets for session ids.
            self.id = hashlib.sha1(
                hashlib.sha1("%f%s%f%s" %
                             (time.time(), id({}), random.random(),
                              getpid())).hexdigest(), ).hexdigest()

        def init_with_id(self, session_id):
            """
            Init the session with custom id. the session data is no loaded immediately but loaded only when data is accessed
            :param session_id:
            :return: self
            """
            self.id = session_id
            self._data = None
            return self

        def set_expire(self, expire):
            """Override the session TTL (seconds) used on the next save."""
            self._options['_expire'] = expire

        # ISession API
        def save(self):
            self._changed = True

        def invalidate(self):
            self.__init_rd(master=True)
            self.rd.delete(self.__key())
            #todo: delete cookie

        def changed(self):
            self._changed = True

        def flash(self, msg, queue='', allow_duplicate=True):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if not allow_duplicate:
                if msg not in q:
                    q.append(msg)
            else:
                q.append(msg)
            self[key] = q

        def pop_flash(self, queue=''):
            # NOTE(review): pops from the END of the queue while peek_flash
            # looks at the FRONT -- confirm this LIFO/FIFO mismatch is
            # intended before relying on ordering.
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if len(q):
                e = q.pop()
                self[key] = q
                return e
            return None

        def peek_flash(self, queue=''):
            self.__load()
            key = '_flsh:%s_' % queue
            q = self.get(key, [])
            if len(q):
                e = q[0]
                return e
            return None

        def new_csrf_token(self):
            token = os.urandom(20).encode('hex')
            self['_csrft_'] = token
            return token

        def get_csrf_token(self):
            token = self.get('_csrft_', None)
            if token is None:
                token = self.new_csrf_token()
            return token

        # mapping methods
        def __getitem__(self, key):
            self.__load()
            return self._data[key]

        def get(self, key, default=None):
            self.__load()
            return self._data.get(key, default)

        def __delitem__(self, key):
            self.__load()
            del self._data[key]
            self._changed = True

        def __setitem__(self, key, value):
            self.__load()
            self._data[key] = value
            self._changed = True

        def keys(self):
            self.__load()
            return self._data.keys()

        def values(self):
            self.__load()
            return self._data.values()

        def items(self):
            self.__load()
            return self._data.items()

        def iterkeys(self):
            self.__load()
            return iter(self._data.keys())

        def itervalues(self):
            self.__load()
            return iter(self._data.values())

        def iteritems(self):
            self.__load()
            return iter(self._data.items())

        def clear(self):
            self.__load()
            self._data = {}
            self._changed = True

        def update(self, d):
            # NOTE(review): copies session data INTO *d*, the reverse of the
            # usual mapping update(); multi_set() below does the conventional
            # direction.  Kept as-is since callers may depend on it.
            self.__load()
            for k in self._data.keys():
                d[k] = self._data[k]

        def multi_set(self, d):
            """Copy every key/value from *d* into the session and mark dirty."""
            self.__load()
            for k in d.keys():
                self._data[k] = d[k]
            self._changed = True

        def setdefault(self, key, default=None):
            """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D"""
            self.__load()
            if key in self._data:
                return self._data[key]
            else:
                self._data[key] = default
                self._changed = True
                return default

        def pop(self, k, *args):
            """remove specified key and return the corresponding value
            ``*args`` may contain a single default value, or may not be supplied.
            If key is not found, default is returned if given, otherwise
            ``KeyError`` is raised"""
            # BUG FIX: previously an unimplemented stub that always returned
            # None, violating the contract documented above.
            self.__load()
            if k in self._data:
                value = self._data.pop(k)
                self._changed = True
                return value
            if args:
                return args[0]
            raise KeyError(k)

        def popitem(self):
            """remove and return some (key, value) pair as a
            2-tuple; but raise ``KeyError`` if mapping is empty"""
            # BUG FIX: previously an unimplemented stub that always returned
            # None.  dict.popitem() raises KeyError on empty, as documented.
            self.__load()
            item = self._data.popitem()
            self._changed = True
            return item

        def __len__(self):
            self.__load()
            return len(self._data)

        def __iter__(self):
            return self.iterkeys()

        def __contains__(self, key):
            self.__load()
            return key in self._data

        @property
        def new(self):
            return self._new_session
# Esempio n. 10 (Italian: "Example no. 10")
# 0
class MainWindow(QtWidgets.QMainWindow):
    # "Redis Explorer": a tree of redis keys (grouped by the text before the
    # first ':') on the left, a pretty-printed JSON view of the selected
    # key's value on the right.

    def __init__(self):
        super().__init__()
        self.setWindowTitle("Redis Explorer")
        # Open at half the available desktop size.
        self.resize(QtWidgets.QDesktopWidget().availableGeometry(self).size() * 0.5)
        self.tree = QtWidgets.QTreeWidget()
        self.label = QtWidgets.QTextEdit()
        font = self.label.font()
        font.setPointSize(12)
        self.label.setFont(font)
        self.tree.setColumnCount(2)
        self.tree.header().setSectionResizeMode(0, QtWidgets.QHeaderView.ResizeToContents)
        self.tree.setHeaderHidden(True)
        # Left: key tree; right: scrollable JSON viewer, split 50/50.
        splitter = QtWidgets.QSplitter()
        splitter.addWidget(self.tree)
        scroll_area = QtWidgets.QScrollArea()
        scroll_area.setWidgetResizable(True)
        scroll_area.setWidget(self.label)
        splitter.addWidget(scroll_area)
        splitter.setSizes([1, 1])
        self.setCentralWidget(splitter)
        # Toolbar: zoom in / zoom out / refresh.
        toolbar = self.addToolBar("")
        toolbar.setMovable(False)
        toolbar.setIconSize(QtCore.QSize(32, 32))
        toolbar.addAction(QtGui.QIcon("resources/list-add.png"), "").triggered.connect(lambda: self.plus_font(1))
        toolbar.addAction(QtGui.QIcon("resources/list-remove.png"), "").triggered.connect(lambda: self.plus_font(-1))
        toolbar.addAction(QtGui.QIcon("resources/view-refresh.png"), "").triggered.connect(lambda: self.refresh())
        # Default connection: localhost:6379, db 0.
        self.redis = StrictRedis()

        def item_clicked(item: QtWidgets.QTreeWidgetItem):
            # Top-level items are just prefix groups, not real keys.
            if item.parent() is None:
                return
            value = self.redis.get(item.text(0))
            value = value.decode()
            # Assumes the stored value is JSON -- a non-JSON value would
            # raise here (uncaught).
            text = json.dumps(json.loads(value), ensure_ascii=False, indent=4)
            # Poor-man's syntax highlighting via HTML.
            text = text.replace("  ", "&nbsp;").replace("\n", "<br/>").replace(":", "<font color=red>:</font>")
            text = re.sub(r'"(.*?)(?<!\\)"', r'<font color=green>"\g<1>"</font>', text)
            self.label.setHtml(text)

        self.tree.itemClicked.connect(item_clicked)

    def plus_font(self, number: int):
        """Grow (or shrink, for negative *number*) the viewer font size."""
        font = self.label.font()
        font.setPointSize(font.pointSize() + number)
        self.label.setFont(font)

    def refresh(self):
        """Re-read all keys from redis and rebuild the grouped tree."""
        self.tree.clear()
        self.label.setPlainText("")
        keys = sorted(self.redis.keys("*"))
        keys = [x.decode() for x in keys]
        # Group keys by the text before the first ':'.
        dct = dict()
        for key in keys:
            parts = key.split(":")
            prefix = parts[0]
            if prefix not in dct:
                dct[prefix] = [key]
            else:
                dct[prefix].append(key)
        for prefix, keys in dct.items():
            item = QtWidgets.QTreeWidgetItem([prefix])
            for key in keys:
                item.addChild(QtWidgets.QTreeWidgetItem([key]))
            self.tree.addTopLevelItem(item)

    def showEvent(self, event: QtGui.QShowEvent):
        # Populate the tree as soon as the window first appears.
        super().showEvent(event)
        self.refresh()
# Esempio n. 11 (Italian: "Example no. 11")
# 0
class DistanceCalculator(object):
    """Computes great-circle distances from a client address to all mirrors,
    caching the sorted result in Redis for ten minutes."""

    _geoip4 = None
    _geoip6 = None

    def __init__(self):
        # Load the GeoIP databases into class attributes since they each need 20+ MB in memory
        cls = self.__class__
        if not cls._geoip4:
            cls._geoip4 = GeoIP(Config.GEOIP_PATH_V4, MEMORY_CACHE)
        if not cls._geoip6:
            cls._geoip6 = GeoIP(Config.GEOIP_PATH_V6, MEMORY_CACHE)
        self.redis = StrictRedis(Config.REDIS['HOST'], Config.REDIS['PORT'], Config.REDIS['DB'])

    @staticmethod
    def _haversine(lon1, lat1, lon2, lat2):
        """
        Calculate the great circle distance between two points
        on the earth (specified in decimal degrees)
        """
        rlon1, rlat1, rlon2, rlat2 = (radians(float(deg))
                                      for deg in (lon1, lat1, lon2, lat2))
        a = (sin((rlat2 - rlat1) / 2) ** 2
             + cos(rlat1) * cos(rlat2) * sin((rlon2 - rlon1) / 2) ** 2)
        # 6367 km: approximate earth radius
        return 6367 * 2 * asin(sqrt(a))

    def get_mirror_distances(self, address):
        """Return an OrderedDict of mirror name -> distance in km, nearest
        first; results are cached in a Redis sorted set for 10 minutes."""
        last_update = self.redis.get(Config.KEY_LAST_UPDATE)
        cache_key = Config.KEY_MIRROR.format(address, last_update)
        cached = OrderedDict(self.redis.zrange(cache_key, 0, -1, withscores=True))
        if cached:
            return cached
        # Strip the IPv4-mapped-IPv6 prefix so v4 addresses hit the v4 db.
        if address.startswith("::ffff:"):
            address = address.replace("::ffff:", "")
        geodb = self._geoip6 if ":" in address else self._geoip4
        try:
            record = geodb.record_by_addr(address)
        except socket.error:
            raise GeoIPLookupError()
        if not record:
            raise GeoIPLookupError()
        lat = record['latitude']
        lon = record['longitude']
        # Only mirrors synced within the last hour qualify.
        pairs = [(mirror.name, self._haversine(lon, lat, mirror.lon, mirror.lat))
                 for mirror in Mirror.objects.filter(age__lt=3601)]
        distances = OrderedDict(sorted(pairs, key=itemgetter(1)))
        if distances:
            self.redis.zadd(cache_key, **distances)
            self.redis.expire(cache_key, 60 * 10)  # 10 min
        return distances

    def get_nearest_mirror(self, address):
        """Return the nearest mirror's name, or the configured fallback when
        the lookup fails or yields no candidates."""
        try:
            distances = self.get_mirror_distances(address)
        except GeoIPLookupError:
            return Config.FALLBACK_MIRROR
        if not distances:
            return Config.FALLBACK_MIRROR
        return next(distances.iteritems())[0]
from redis.client import StrictRedis

if __name__ == '__main__':
    # Smoke-test a local redis: write three keys, read them (plus a missing
    # one) back, and list all keys.  decode_responses=True yields str values.
    strict_redis = StrictRedis(host='127.0.0.1',
                               port=6379,
                               db=0,
                               decode_responses=True)

    for name, value in (('aa', '111'), ('bb', '222'), ('cc', '333')):
        strict_redis.set(name, value)

    # 'dd' was never set, so its read returns None.
    aa, bb, cc, dd = (strict_redis.get(k) for k in ('aa', 'bb', 'cc', 'dd'))

    print(aa, bb, cc)

    keys = strict_redis.keys()
    print(keys, type(keys))
# Esempio n. 13 (Italian: "Example no. 13")
# 0
class RedisStore():
    # JSON-over-Redis object store.  Objects live under '<key_type>:<id>',
    # membership sets under each class's dbo_set_key, unique indexes under
    # 'ix:<key_type>:<field>', and reverse-reference bookkeeping under
    # '<key>:refs' / '<ref>:holders'.  A WeakValueDictionary caches live
    # instances without pinning them in memory.

    def __init__(self, db_host, db_port, db_num, db_pw):
        self.pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw,
                                   decode_responses=True)
        self.redis = StrictRedis(connection_pool=self.pool)
        # Fail fast if the server is unreachable or the password is wrong.
        self.redis.ping()
        self._object_map = WeakValueDictionary()

    def create_object(self, dbo_class, dbo_dict, update_timestamp=True):
        """Create, hydrate, and persist a new object; return it.

        *dbo_dict* may be a dict, or a bare id (then an empty dict is used).
        Returns None on an unknown class or empty id; raises
        ObjectExistsError when the id is already taken.
        """
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type', dbo_class))
        if not dbo_class:
            return
        try:
            dbo_id = dbo_dict['dbo_id']
        except KeyError:
            # A non-dict argument is treated as the id itself.
            dbo_id, dbo_dict = dbo_dict, {}
        if dbo_id is None or dbo_id == '':
            warn("create_object called with empty dbo_id")
            return
        # Ids are case-insensitive: always stored lowercase.
        dbo_id = str(dbo_id).lower()
        if self.object_exists(dbo_class.dbo_key_type, dbo_id):
            raise ObjectExistsError(dbo_id)
        dbo = dbo_class()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        dbo.db_created()
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, dbo.dbo_id)
        self.save_object(dbo, update_timestamp)
        return dbo

    def save_object(self, dbo, update_timestamp=False, autosave=False):
        """Serialize *dbo* to redis, refresh indexes and reference sets."""
        if update_timestamp:
            dbo.dbo_ts = int(time.time())
        if dbo.dbo_indexes:
            self._update_indexes(dbo)
        # References are rebuilt from scratch on every save.
        self._clear_old_refs(dbo)
        save_root, new_refs = dbo.to_db_value()
        self.redis.set(dbo.dbo_key, json_encode(save_root))
        if new_refs:
            self._set_new_refs(dbo, new_refs)
        debug("db object {} {}saved", dbo.dbo_key, "auto" if autosave else "")
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def save_raw(self, key, raw):
        """Store an arbitrary JSON-encodable value under *key*."""
        self.redis.set(key, json_encode(raw))

    def load_raw(self, key, default=None):
        """Load a raw JSON value; *default* when the key is absent."""
        json = self.redis.get(key)
        if json:
            return json_decode(json)
        return default

    def load_cached(self, dbo_key):
        """Return the cached live instance for *dbo_key*, or None."""
        return self._object_map.get(dbo_key)

    def load_object(self, dbo_key, key_type=None, silent=False):
        """Load an object by key (or by id + *key_type*); None when missing.

        Checks the in-memory cache before hitting redis.
        """
        if key_type:
            try:
                key_type = key_type.dbo_key_type
            except AttributeError:
                pass
            try:
                # Here dbo_key was passed as a bare id; build the full key.
                dbo_key, dbo_id = ':'.join([key_type, dbo_key]), dbo_key
            except TypeError:
                if not silent:
                    exception("Invalid dbo_key passed to load_object", stack_info=True)
                return
        else:
            key_type, _, dbo_id = dbo_key.partition(':')
        cached_dbo = self._object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            if not silent:
                warn("Failed to find {} in database", dbo_key)
            return
        return self.load_from_json(json_str, key_type, dbo_id)

    def load_from_json(self, json_str, key_type, dbo_id):
        """Instantiate and hydrate an object from its JSON representation.

        The instance is cached before hydrate() so that circular references
        resolve to this same instance.
        """
        dbo_dict = json_decode(json_str)
        dbo = get_mixed_type(key_type, dbo_dict.get('mixins'))()
        dbo.dbo_id = dbo_id
        self._object_map[dbo.dbo_key] = dbo
        dbo.hydrate(dbo_dict)
        return dbo

    def object_exists(self, obj_type, obj_id):
        """True when a '<obj_type>:<obj_id>' key is present in redis."""
        return self.redis.exists('{}:{}'.format(obj_type, obj_id))

    def load_object_set(self, dbo_class, set_key=None):
        """Load every member of a class's object set; return a set().

        Cached instances are reused; the rest are fetched in one pipeline.
        Ids whose backing object is gone are pruned from the set key.
        """
        key_type = dbo_class.dbo_key_type
        if not set_key:
            set_key = dbo_class.dbo_set_key
        results = set()
        keys = deque()
        pipeline = self.redis.pipeline()
        for key in self.fetch_set_keys(set_key):
            dbo_key = ':'.join([key_type, key])
            try:
                results.add(self._object_map[dbo_key])
            except KeyError:
                keys.append(key)
                pipeline.get(dbo_key)
        # keys and pipeline results are appended in lockstep above.
        for dbo_id, json_str in zip(keys, pipeline.execute()):
            if json_str:
                obj = self.load_from_json(json_str, key_type, dbo_id)
                if obj:
                    results.add(obj)
                continue
            warn("Removing missing object from set {}", set_key)
            self.delete_set_key(set_key, dbo_id)
        return results

    def delete_object_set(self, dbo_class, set_key=None):
        """Delete every object in the set, then the set key itself."""
        if not set_key:
            set_key = dbo_class.dbo_set_key
        for dbo in self.load_object_set(dbo_class, set_key):
            self.delete_object(dbo)
        self.delete_key(set_key)

    def update_object(self, dbo, dbo_dict):
        """Re-hydrate *dbo* from *dbo_dict* and save with a new timestamp."""
        dbo.hydrate(dbo_dict)
        return self.save_object(dbo, True)

    def delete_object(self, dbo):
        """Remove *dbo* plus its set memberships, children, indexes, refs."""
        key = dbo.dbo_key
        dbo.db_deleted()
        self.delete_key(key)
        self._clear_old_refs(dbo)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, dbo.dbo_id)
        # Child collections live under '<parent_type>_<child_type>s:<id>'.
        for children_type in dbo.dbo_children_types:
            self.delete_object_set(get_dbo_class(children_type),
                                   "{}_{}s:{}".format(dbo.dbo_key_type, children_type, dbo.dbo_id))
        for ix_name in dbo.dbo_indexes:
            ix_value = getattr(dbo, ix_name, None)
            if ix_value is not None and ix_value != '':
                self.delete_index('ix:{}:{}'.format(dbo.dbo_key_type, ix_name), ix_value)
        debug("object deleted: {}", key)
        self.evict_object(dbo)

    def reload_object(self, dbo_key):
        """Refresh a cached object from redis; plain load when not cached."""
        dbo = self._object_map.get(dbo_key)
        if dbo:
            json_str = self.redis.get(dbo_key)
            if not json_str:
                warn("Failed to find {} in database for reload", dbo_key)
                return None
            return self.update_object(dbo, json_decode(json_str))
        return self.load_object(dbo_key)

    def evict_object(self, dbo):
        """Drop *dbo* from the in-memory cache (no redis change)."""
        self._object_map.pop(dbo.dbo_key, None)

    # --- thin wrappers over redis primitives ---

    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)

    def add_set_key(self, set_key, *values):
        self.redis.sadd(set_key, *values)

    def delete_set_key(self, set_key, value):
        self.redis.srem(set_key, value)

    def set_key_exists(self, set_key, value):
        return self.redis.sismember(set_key, value)

    def db_counter(self, counter_id, inc=1):
        """Atomically increment and return a named counter."""
        return self.redis.incr("counter:{}".format(counter_id), inc)

    def delete_key(self, key):
        self.redis.delete(key)

    def set_index(self, index_name, key, value):
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        return self.redis.hget(index_name, key)

    def get_full_index(self, index_name):
        return self.redis.hgetall(index_name)

    def delete_index(self, index_name, key):
        return self.redis.hdel(index_name, key)

    def get_all_hash(self, index_name):
        """Return a whole hash with every value JSON-decoded."""
        return {key: json_decode(value) for key, value in self.redis.hgetall(index_name).items()}

    def set_db_hash(self, hash_id, hash_key, value):
        return self.redis.hset(hash_id, hash_key, json_encode(value))

    def get_db_hash(self, hash_id, hash_key):
        return json_decode(self.redis.hget(hash_id, hash_key))

    def remove_db_hash(self, hash_id, hash_key):
        self.redis.hdel(hash_id, hash_key)

    def get_all_db_hash(self, hash_id):
        return [json_decode(value) for value in self.redis.hgetall(hash_id).values()]

    def get_db_list(self, list_id, start=0, end=-1):
        return [json_decode(value) for value in self.redis.lrange(list_id, start, end)]

    def add_db_list(self, list_id, value):
        self.redis.lpush(list_id, json_encode(value))

    def trim_db_list(self, list_id, start, end):
        return self.redis.ltrim(list_id, start, end)

    def _update_indexes(self, dbo):
        """Move unique-index entries from the old saved values to the new.

        Raises NonUniqueError when another object already owns the new value.
        """
        try:
            # TypeError from json_decode(None) means no previous version.
            old_dbo = json_decode(self.redis.get(dbo.dbo_key))
        except TypeError:
            old_dbo = None

        for ix_name in dbo.dbo_indexes:
            new_val = getattr(dbo, ix_name, None)
            old_val = old_dbo.get(ix_name) if old_dbo else None
            if old_val == new_val:
                continue
            ix_key = 'ix:{}:{}'.format(dbo.dbo_key_type, ix_name)
            if old_val is not None:
                self.delete_index(ix_key, old_val)
            if new_val is not None and new_val != '':
                if self.get_index(ix_key, new_val):
                    raise NonUniqueError(ix_key, new_val)
                self.set_index(ix_key, new_val, dbo.dbo_id)

    def _clear_old_refs(self, dbo):
        """Remove this object from every holders set it appears in, then
        drop its own refs set."""
        dbo_key = dbo.dbo_key
        ref_key = '{}:refs'.format(dbo_key)
        for ref_id in self.fetch_set_keys(ref_key):
            holder_key = '{}:holders'.format(ref_id)
            self.delete_set_key(holder_key, dbo_key)
        self.delete_key(ref_key)

    def _set_new_refs(self, dbo, new_refs):
        """Record the objects *dbo* references and the reverse holders."""
        dbo_key = dbo.dbo_key
        self.add_set_key("{}:refs".format(dbo_key), *new_refs)
        for ref_id in new_refs:
            holder_key = '{}:holders'.format(ref_id)
            self.add_set_key(holder_key, dbo_key)
# Esempio n. 14 (Italian: "Example no. 14")
# 0
class RedisStore():
    """Redis-backed persistence layer for DBO (database) objects.

    Objects are serialized to JSON under their ``dbo_key``; an in-memory
    identity map (``object_map``) guarantees at most one live instance per
    key, and ``class_map`` caches dynamically imported classes.
    """

    def __init__(self, dispatcher, db_host, db_port, db_num, db_pw):
        self.dispatcher = dispatcher
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.encoder = JSONEncoder()
        self.decoder = JSONDecoder()
        self.class_map = {}   # import path -> class object (import cache)
        self.object_map = {}  # dbo_key -> live instance (identity map)

    def create_object(self, dbo, update_rev=False):
        """Persist a brand-new object and fire its on_loaded hook.

        Fix: ``update_rev`` was accepted but silently ignored; it is now
        forwarded to save_object so a revision bump can be requested.
        """
        self.save_object(dbo, update_rev)
        dbo.on_loaded()

    def save_object(self, dbo, update_rev=False, autosave=False):
        """Serialize dbo to redis, maintain its member set, and cache it.

        :param update_rev: bump ``dbo_rev`` before saving.
        :param autosave: route the log event to the "db_log_auto" channel.
        """
        if update_rev:
            dbo.dbo_rev = getattr(dbo, "dbo_rev", 0) + 1
        json_obj = self.build_json(dbo)
        key = dbo.dbo_key
        self.redis.set(key, self.encoder.encode(json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatcher.dispatch("db_log{0}".format("_auto" if autosave else ""), "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo

    def build_json(self, dbo):
        """Build a JSON-serializable dict for dbo (fields, collections, refs).

        Children with a key type are stored by id only; keyless children are
        embedded recursively. The concrete class path is recorded when it
        differs from the declared base class.
        """
        dbo.before_save()
        json_obj = {}
        if dbo.__class__ != dbo.dbo_base_class:
            json_obj["class_name"] = dbo.__module__ + "." + dbo.__class__.__name__
        for field_name in dbo.dbo_fields:
            json_obj[field_name] = getattr(dbo, field_name, None)
        for dbo_col in dbo.dbo_collections:
            coll_list = list()
            for child_dbo in getattr(dbo, dbo_col.field_name):
                if dbo_col.key_type:
                    coll_list.append(child_dbo.dbo_id)
                else:
                    coll_list.append(self.build_json(child_dbo))
            json_obj[dbo_col.field_name] = coll_list
        for dbo_ref in dbo.dbo_refs:
            ref = getattr(dbo, dbo_ref.field_name, None)
            if ref:
                json_obj[dbo_ref.field_name] = ref.dbo_id
        return json_obj

    def cache_object(self, dbo):
        """Put dbo into the identity map.

        Fix: the previous implementation performed a bare dictionary lookup
        (``self.object_map[dbo.dbo_key]``) instead of an assignment, so the
        object was never cached and a KeyError escaped for unknown keys.
        """
        self.object_map[dbo.dbo_key] = dbo

    def load_cached(self, key):
        """Return the cached object for key, or None if not cached."""
        return self.object_map.get(key)

    def evict(self, dbo):
        """Drop dbo from the identity map, logging (never raising) a miss."""
        try:
            del self.object_map[dbo.dbo_key]
        except KeyError:  # narrowed from a bare except: only a cache miss is expected
            self.dispatcher.dispatch("db_log", "Failed to evict " + dbo.dbo_key + " from db cache")

    def load_by_key(self, key_type, key, base_class=None):
        """Load ``<key_type>:<key>``, preferring the cache; None if absent."""
        dbo_key = key_type + ":" + key
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.decoder.decode(json_str)
        dbo = self.load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            # Cache before hydration, presumably so cyclic references resolve
            # to this instance -- confirm against callers.
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo

    def load_class(self, json_obj, base_class):
        """Resolve the class to instantiate for json_obj.

        Uses the recorded "class_name" path when present (importing and
        caching the class), otherwise falls back to base_class.
        """
        class_path = json_obj.get("class_name")
        if not class_path:
            return base_class
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        self.class_map[class_path] = clazz
        return clazz

    def load_object(self, dbo_class, key):
        """Load an object of dbo_class by bare id."""
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)

    def load_json(self, dbo, json_obj):
        """Hydrate dbo in place from json_obj (fields, collections, refs).

        Missing pieces are logged through the dispatcher rather than raised;
        fires dbo.on_loaded() and always returns True.
        """
        for field_name in dbo.dbo_fields:
            try:
                setattr(dbo, field_name, json_obj[field_name])
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing field " + field_name)
        for dbo_col in dbo.dbo_collections:
            coll = getattr(dbo, dbo_col.field_name, [])
            try:
                for child_json in json_obj[dbo_col.field_name]:
                    if dbo_col.key_type:
                        child_dbo = self.load_by_key(dbo_col.key_type, child_json, dbo_col.base_class)
                    else:
                        child_dbo = self.load_class(child_json, dbo_col.base_class)()
                        self.load_json(child_dbo, child_json)
                    coll.append(child_dbo)
            except AttributeError:
                self.dispatcher.dispatch("db_log", "{0} json failed to load for coll {1} in {2}".format(child_json, dbo_col.field_name, dbo.dbo_id))
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing collection " + dbo_col.field_name)

        for dbo_ref in dbo.dbo_refs:
            try:
                ref_key = json_obj[dbo_ref.field_name]
                ref_obj = self.load_by_key(dbo_ref.key_type, ref_key, dbo_ref.base_class)
                setattr(dbo, dbo_ref.field_name, ref_obj)
            except Exception:
                # Deliberately broad (was a bare except): a failed ref load is
                # logged as missing, never fatal.
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing ref " + dbo_ref.field_name)
        dbo.on_loaded()
        return True

    def delete_object(self, dbo):
        """Delete dbo (and its keyed child collections) from redis and cache."""
        key = dbo.dbo_key
        self.redis.delete(key)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, key)
        for dbo_col in dbo.dbo_collections:
            if dbo_col.key_type:
                # Keyed children live under their own keys; remove recursively.
                coll = getattr(dbo, dbo_col.field_name, set())
                for child_dbo in coll:
                    self.delete_object(child_dbo)
        self.dispatcher.dispatch("db_log", "object deleted: " + key)
        if self.object_map.get(dbo.dbo_key):
            del self.object_map[dbo.dbo_key]
        return True

    def fetch_set_keys(self, set_key):
        """Return the members of a redis set."""
        return self.redis.smembers(set_key)
Esempio n. 15
0
class RedisStore:
    """Redis-backed object store for DBO instances.

    Persists objects as JSON blobs keyed by ``<key_type>:<dbo_id>``, keeps
    live instances in a WeakValueDictionary identity map, and maintains
    per-type member sets, unique-value indexes (``ix:<type>:<field>``) and
    bidirectional reference bookkeeping (``<key>:refs`` / ``<key>:holders``).
    """

    def __init__(self, db_host, db_port, db_num, db_pw):
        # decode_responses=True: redis replies come back as str, not bytes.
        self.pool = ConnectionPool(max_connections=2,
                                   db=db_num,
                                   host=db_host,
                                   port=db_port,
                                   password=db_pw,
                                   decode_responses=True)
        self.redis = StrictRedis(connection_pool=self.pool)
        # Fail fast if the server is unreachable.
        self.redis.ping()
        # Identity map; entries vanish once no one else holds the object.
        self._object_map = WeakValueDictionary()

    def create_object(self, dbo_class, dbo_dict, update_timestamp=True):
        """Create, hydrate and persist a new object.

        ``dbo_dict`` may be a full attribute dict or just a bare id.

        Raises:
            ObjectExistsError: if an object with that id already exists.
        """
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type',
                                          dbo_class))
        if not dbo_class:
            return
        try:
            dbo_id = dbo_dict['dbo_id']
        except KeyError:
            # A non-dict argument is treated as the id itself.
            dbo_id, dbo_dict = dbo_dict, {}
        if dbo_id is None or dbo_id == '':
            log.warn("create_object called with empty dbo_id")
            return
        # Ids are normalized to lowercase strings.
        dbo_id = str(dbo_id).lower()
        if self.object_exists(dbo_class.dbo_key_type, dbo_id):
            raise ObjectExistsError(dbo_id)
        dbo = dbo_class()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        dbo.db_created()
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, dbo.dbo_id)
        self.save_object(dbo, update_timestamp)
        return dbo

    def load_object(self, dbo_key, key_type=None, silent=False):
        """Load an object by full key, or by bare id plus ``key_type``.

        Returns the cached instance when available; None when missing.
        """
        if key_type:
            try:
                key_type = key_type.dbo_key_type
            except AttributeError:
                pass
            try:
                # ':'.join raises TypeError when dbo_key is not a string.
                dbo_key, dbo_id = ':'.join((key_type, dbo_key)), dbo_key
            except TypeError:
                if not silent:
                    log.exception("Invalid dbo_key passed to load_object",
                                  stack_info=True)
                return
        else:
            key_type, _, dbo_id = dbo_key.partition(':')
        cached_dbo = self._object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            if not silent:
                log.warn("Failed to find {} in database", dbo_key)
            return
        return self._json_to_obj(json_str, key_type, dbo_id)

    def save_object(self, dbo, update_timestamp=False, autosave=False):
        """Serialize dbo to redis, refreshing indexes and reference sets.

        :param update_timestamp: stamp ``dbo_ts`` with the current time.
        :param autosave: only affects the log message wording.
        """
        if update_timestamp:
            dbo.dbo_ts = int(time.time())
        if dbo.dbo_indexes:
            self._update_indexes(dbo)
        # Old refs are cleared unconditionally; new ones are re-added below.
        self._clear_old_refs(dbo)
        save_root, new_refs = dbo.to_db_value()
        self.redis.set(dbo.dbo_key, json_encode(save_root))
        if new_refs:
            self._set_new_refs(dbo, new_refs)
        log.debug("db object {} {}saved", dbo.dbo_key,
                  "auto" if autosave else "")
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def update_object(self, dbo, dbo_dict):
        """Re-hydrate dbo from dbo_dict and save with a fresh timestamp."""
        dbo.hydrate(dbo_dict)
        return self.save_object(dbo, True)

    def delete_object(self, dbo):
        """Remove dbo and all its bookkeeping: key, refs, member set,
        child object sets, and index entries; finally evict it."""
        key = dbo.dbo_key
        dbo.db_deleted()
        self.delete_key(key)
        self._clear_old_refs(dbo)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, dbo.dbo_id)
        for children_type in dbo.dbo_children_types:
            self.delete_object_set(
                get_dbo_class(children_type),
                "{}_{}s:{}".format(dbo.dbo_key_type, children_type,
                                   dbo.dbo_id))
        for ix_name in dbo.dbo_indexes:
            ix_value = getattr(dbo, ix_name, None)
            if ix_value is not None and ix_value != '':
                self.delete_index('ix:{}:{}'.format(dbo.dbo_key_type, ix_name),
                                  ix_value)
        log.debug("object deleted: {}", key)
        self.evict_object(dbo)

    def load_cached(self, dbo_key):
        """Return the cached instance for dbo_key, or None."""
        return self._object_map.get(dbo_key)

    def object_exists(self, obj_type, obj_id):
        """Whether the key ``<obj_type>:<obj_id>`` exists in redis."""
        return self.redis.exists('{}:{}'.format(obj_type, obj_id))

    def load_object_set(self, dbo_class, set_key=None):
        """Load every member of an object set, batching misses in a pipeline.

        Stale ids whose objects are gone are pruned from the set.
        """
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type',
                                          dbo_class))
        key_type = dbo_class.dbo_key_type
        if not set_key:
            set_key = dbo_class.dbo_set_key
        results = set()
        keys = deque()
        pipeline = self.redis.pipeline()
        for key in self.fetch_set_keys(set_key):
            dbo_key = ':'.join([key_type, key])
            try:
                results.add(self._object_map[dbo_key])
            except KeyError:
                keys.append(key)
                pipeline.get(dbo_key)
        # Pipeline results come back in the same order the GETs were queued.
        for dbo_id, json_str in zip(keys, pipeline.execute()):
            if json_str:
                obj = self._json_to_obj(json_str, key_type, dbo_id)
                if obj:
                    results.add(obj)
                continue
            log.warn("Removing missing object from set {}", set_key)
            self.delete_set_key(set_key, dbo_id)
        return results

    def delete_object_set(self, dbo_class, set_key=None):
        """Delete every object in the set, then the set key itself."""
        if not set_key:
            set_key = dbo_class.dbo_set_key
        for dbo in self.load_object_set(dbo_class, set_key):
            self.delete_object(dbo)
        self.delete_key(set_key)

    def reload_object(self, dbo_key):
        """Refresh a cached object from redis in place; plain load if uncached."""
        dbo = self._object_map.get(dbo_key)
        if dbo:
            json_str = self.redis.get(dbo_key)
            if not json_str:
                log.warn("Failed to find {} in database for reload", dbo_key)
                return None
            return self.update_object(dbo, json_decode(json_str))
        return self.load_object(dbo_key)

    def evict_object(self, dbo):
        """Drop dbo from the identity map (no-op when absent)."""
        self._object_map.pop(dbo.dbo_key, None)

    def load_value(self, key, default=None):
        """JSON-decode the value at key, or return default when missing."""
        json = self.redis.get(key)
        if json:
            return json_decode(json)
        return default

    def save_value(self, key, value):
        """JSON-encode value and store it at key."""
        self.redis.set(key, json_encode(value))

    def fetch_set_keys(self, set_key):
        """Members of a redis set."""
        return self.redis.smembers(set_key)

    def add_set_key(self, set_key, *values):
        """Add one or more members to a redis set."""
        self.redis.sadd(set_key, *values)

    def delete_set_key(self, set_key, value):
        """Remove a member from a redis set."""
        self.redis.srem(set_key, value)

    def set_key_exists(self, set_key, value):
        """Membership test on a redis set."""
        return self.redis.sismember(set_key, value)

    def db_counter(self, counter_id, inc=1):
        """Atomically increment a named counter and return the new value."""
        return self.redis.incr("counter:{}".format(counter_id), inc)

    def delete_key(self, key):
        """Delete a redis key."""
        self.redis.delete(key)

    def set_index(self, index_name, key, value):
        """Set one entry of a hash-based index."""
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        """Read one entry of a hash-based index."""
        return self.redis.hget(index_name, key)

    def get_full_index(self, index_name):
        """Return the entire index hash as a dict."""
        return self.redis.hgetall(index_name)

    def delete_index(self, index_name, key):
        """Remove one entry from a hash-based index."""
        return self.redis.hdel(index_name, key)

    def get_all_hash(self, index_name):
        """Return the whole hash with JSON-decoded values."""
        return {
            key: json_decode(value)
            for key, value in self.redis.hgetall(index_name).items()
        }

    def get_hash_keys(self, hash_id):
        """Field names of a redis hash."""
        return self.redis.hkeys(hash_id)

    def set_db_hash(self, hash_id, hash_key, value):
        """JSON-encode value and store it as one hash field."""
        return self.redis.hset(hash_id, hash_key, json_encode(value))

    def get_db_hash(self, hash_id, hash_key):
        """Fetch one hash field and JSON-decode it."""
        return json_decode(self.redis.hget(hash_id, hash_key))

    def remove_db_hash(self, hash_id, hash_key):
        """Delete one field from a redis hash."""
        self.redis.hdel(hash_id, hash_key)

    def get_all_db_hash(self, hash_id):
        """All values of a redis hash, JSON-decoded, as a list."""
        return [
            json_decode(value)
            for value in self.redis.hgetall(hash_id).values()
        ]

    def get_db_list(self, list_id, start=0, end=-1):
        """A JSON-decoded slice of a redis list (whole list by default)."""
        return [
            json_decode(value)
            for value in self.redis.lrange(list_id, start, end)
        ]

    def add_db_list(self, list_id, value):
        """Push a JSON-encoded value onto the head of a redis list."""
        self.redis.lpush(list_id, json_encode(value))

    def trim_db_list(self, list_id, start, end):
        """Trim a redis list to the inclusive [start, end] range."""
        return self.redis.ltrim(list_id, start, end)

    def dbo_holders(self, dbo_key, degrees=0):
        """Collect keys of objects referencing dbo_key, following holder
        links transitively up to ``degrees`` extra hops."""
        all_keys = set()

        def find(find_key, degree):
            holder_keys = self.fetch_set_keys('{}:holders'.format(find_key))
            for new_key in holder_keys:
                if new_key != dbo_key and new_key not in all_keys:
                    all_keys.add(new_key)
                    if degree < degrees:
                        find(new_key, degree + 1)

        find(dbo_key, 0)
        return all_keys

    def _json_to_obj(self, json_str, key_type, dbo_id):
        """Instantiate, hydrate and cache an object from its JSON blob."""
        dbo_dict = json_decode(json_str)
        dbo = get_mixed_type(key_type, dbo_dict.get('mixins'))()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def _update_indexes(self, dbo):
        """Synchronize unique-value indexes for dbo's indexed fields.

        Raises:
            NonUniqueError: if another object already owns a new value.
        """
        try:
            # json_decode(None) raises TypeError -> "no previous version".
            old_dbo = json_decode(self.redis.get(dbo.dbo_key))
        except TypeError:
            old_dbo = None

        for ix_name in dbo.dbo_indexes:
            new_val = getattr(dbo, ix_name, None)
            old_val = old_dbo.get(ix_name) if old_dbo else None
            if old_val == new_val:
                continue
            ix_key = 'ix:{}:{}'.format(dbo.dbo_key_type, ix_name)
            if old_val is not None:
                self.delete_index(ix_key, old_val)
            # Empty strings are treated as "unindexed", same as None.
            if new_val is not None and new_val != '':
                if self.get_index(ix_key, new_val):
                    raise NonUniqueError(ix_key, new_val)
                self.set_index(ix_key, new_val, dbo.dbo_id)

    def _clear_old_refs(self, dbo):
        """Drop dbo's outgoing refs set and de-register it from the holders
        set of everything it referenced."""
        dbo_key = dbo.dbo_key
        ref_key = '{}:refs'.format(dbo_key)
        for ref_id in self.fetch_set_keys(ref_key):
            holder_key = '{}:holders'.format(ref_id)
            self.delete_set_key(holder_key, dbo_key)
        self.delete_key(ref_key)

    def _set_new_refs(self, dbo, new_refs):
        """Record dbo's outgoing refs and register it as a holder of each."""
        dbo_key = dbo.dbo_key
        self.add_set_key("{}:refs".format(dbo_key), *new_refs)
        for ref_id in new_refs:
            holder_key = '{}:holders'.format(ref_id)
            self.add_set_key(holder_key, dbo_key)
Esempio n. 16
0
# -*- coding: gbk -*- 
'''
Created on 2012-5-26

@author: Sky
'''
# Smoke-test script exercising basic redis commands against a local server.
from redis.client import StrictRedis
Sr = StrictRedis(host='localhost', port=6379, db=0)
# String round trip (value comes back as bytes unless decode_responses is set).
Sr.set('foo', 'bar')
#print(str(Sr.get('foo'), encoding = "utf-8") == 'bar')
print(Sr.get('foo'))
# Hash field write; the value below is gbk-encoded text.
Sr.hset("MyHash", "field1", "ль¤У")
# NOTE(review): reads "field11" although "field1" was written, so this prints
# None. Looks like a typo -- confirm it is not a deliberate missing-field demo.
print(Sr.hget("MyHash", "field11"))
# List operations.
Sr.rpush("list", "one")
Sr.rpush("list", "two")
print(Sr.llen("list"))
# LTRIM with start > end removes every element, so the next llen prints 0.
Sr.ltrim("list", 1, 0)
print(Sr.llen("list"))
Sr.hset("MyHash", "Key1", "Value1")
Sr.hset("MyHash", "Key2", "Value2")
for i in Sr.hkeys("MyHash"):
    print(i)
# Reads of keys that may not exist: hlen -> 0, get -> None.
print(Sr.hlen("PlayerHash"))
print(Sr.get("XXX"))
# smembers returns a set object.
print(type(Sr.smembers("EnemyTemplate:16:LOOT")))
for i in Sr.smembers("EnemyTemplate:16:LOOT"):
    print(i)

Esempio n. 17
0
class RedisStore():
    """Redis-backed object store (Python 2 vintage: uses ``unicode``).

    JSON blobs live under ``<key_type>:<key>``; ``object_map`` is an
    identity map and ``class_map`` caches dynamically imported classes.

    NOTE(review): ``self.json_encode``, ``self.json_decode``,
    ``self.dispatch`` and ``self.cls_registry`` are not defined in this
    class -- presumably supplied by a mix-in or subclass; confirm.
    """

    def __init__(self, db_host, db_port, db_num, db_pw):
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.class_map = {}   # class path -> resolved class (import cache)
        self.object_map = {}  # dbo_key -> live instance (identity map)

    def create_object(self, dbo, update_rev=False):
        """Persist a new object (optionally bumping its revision) and fire
        its on_loaded hook."""
        self.save_object(dbo, update_rev)
        dbo.on_loaded()

    def save_object(self, dbo, update_rev=False, autosave=False):
        """Serialize dbo to redis, maintain its member set, and cache it.

        ``update_rev`` bumps ``dbo_rev`` (starting at 1) before saving.
        """
        if update_rev:
            rev = getattr(dbo, "dbo_rev", None)
            dbo.dbo_rev = 1 if not rev else rev + 1
        dbo.before_save()
        key = dbo.dbo_key
        self.redis.set(key, self.json_encode(dbo.save_json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatch("db_log{0}".format("_auto" if autosave else ""), "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo

    def load_cached(self, key):
        """Return the cached object for key, or None."""
        return self.object_map.get(key)

    def evict_object(self, dbo):
        """Drop dbo from the cache; a miss is logged, never raised."""
        try:
            del self.object_map[dbo.dbo_key]
        except:
            debug("Failed to evict " + dbo.dbo_key + " from db cache", self)

    @logged
    def load_by_key(self, key_type, key, base_class=None):
        """Load ``<key_type>:<key>``, preferring the cache; None if absent."""
        dbo_key = unicode('{0}:{1}'.format(key_type, key))
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.json_decode(json_str)
        dbo = self._load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            # Cached before hydration -- presumably so cyclic references
            # resolve to this instance; confirm.
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo

    def object_exists(self, obj_type, obj_id):
        """True if the object's key exists in redis.

        NOTE(review): implemented via KEYS pattern matching; redis EXISTS
        would avoid scanning the keyspace -- candidate for cleanup.
        """
        key = unicode('{0}:{1}'.format(obj_type, obj_id))
        return key in self.redis.keys(key)

    def load_object(self, dbo_class, key):
        """Load an object of dbo_class by bare id."""
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)

    def update_object(self, dbo, json_obj):
        """Re-hydrate dbo from json_obj and save it with a revision bump."""
        self.load_json(dbo, json_obj)
        self.save_object(dbo, True)

    def delete_object(self, dbo):
        """Delete dbo (and its keyed child collections) from redis and cache."""
        key = dbo.dbo_key
        self.redis.delete(key)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, key)
        for dbo_col in dbo.dbo_collections:
            if dbo_col.key_type:
                # Keyed children live under their own keys; remove recursively.
                coll = getattr(dbo, dbo_col.field_name, set())
                for child_dbo in coll:
                    self.delete_object(child_dbo)
        debug("object deleted: " + key, self)
        if self.object_map.get(dbo.dbo_key):
            del self.object_map[dbo.dbo_key]
        return True

    def fetch_set_keys(self, set_key):
        """Members of a redis set."""
        return self.redis.smembers(set_key)

    def set_index(self, index_name, key, value):
        """Set one entry of a hash-based index."""
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        """Read one entry of a hash-based index."""
        return self.redis.hget(index_name, key)

    def delete_index(self, index_name, key):
        """Remove one entry from a hash-based index."""
        return self.redis.hdel(index_name, key)

    def _load_class(self, json_obj, base_class):
        """Resolve the class for json_obj: the recorded "class_name" path if
        present (imported and cached), else base_class -- both routed
        through cls_registry."""
        class_path = json_obj.get("class_name")
        if not class_path:
            return self.cls_registry(base_class)
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        clazz = self.cls_registry(clazz)
        self.class_map[class_path] = clazz
        return clazz

    def load_json(self, dbo, json_obj):
        """Hydrate dbo in place from json_obj (fields, collections, refs).

        Missing or malformed pieces are logged (warn/trace), never raised;
        fires dbo.on_loaded() and returns True.
        """
        for field_name in dbo.dbo_fields:
            try:
                setattr(dbo, field_name, json_obj[field_name])
            except KeyError:
                pass
        for dbo_col in dbo.dbo_collections:
            coll = getattr(dbo, dbo_col.field_name, [])
            try:
                for child_json in json_obj[dbo_col.field_name]:
                    try:
                        if dbo_col.key_type:
                            child_dbo = self.load_by_key(dbo_col.key_type, child_json, dbo_col.base_class)
                        else:
                            child_dbo = self._load_class(child_json, dbo_col.base_class)()
                            self.load_json(child_dbo, child_json)
                        coll.append(child_dbo)
                    except AttributeError:
                        warn("{0} json failed to load for coll {1} in {2}".format(child_json, dbo_col.field_name, unicode(dbo.dbo_id)), self)
            except KeyError:
                if dbo.dbo_key_type:
                    trace("db: Object " + unicode(dbo.dbo_debug_key) + " json missing collection " + dbo_col.field_name, self)

        for dbo_ref in dbo.dbo_refs:
            try:
                ref_key = json_obj[dbo_ref.field_name]
                ref_obj = self.load_by_key(dbo_ref.key_type, ref_key, dbo_ref.base_class)
                setattr(dbo, dbo_ref.field_name, ref_obj)
            except:
                if dbo.dbo_key_type:
                    trace("db: Object " + unicode(dbo.dbo_debug_key) + " json missing ref " + dbo_ref.field_name, self)
        dbo.on_loaded()
        return True
Esempio n. 18
0
class RedisDataSource(AbstractDataSource):
    """Data source backed by a redis server.

    Items are stored as plain key/value pairs: ``item.getHash()`` is the key,
    ``item.getValue()`` the value.
    """

    # StrictRedis handle; created after the configuration is validated.
    _r = None

    def __init__(self, config):
        """Validate *config* and open the redis connection.

        Raises:
            ValueError: if the configuration is missing required keys.
        """
        # _validateConfig raises on bad config and returns True otherwise,
        # so the else branch below is purely defensive.
        if self._validateConfig(config):
            redis_cfg = config[REDIS_DATASOURCE_CONFIG]
            self._r = StrictRedis(host=redis_cfg[REDIS_DATASOURCE_CONFIG_HOST],
                                  port=redis_cfg[REDIS_DATASOURCE_CONFIG_PORT],
                                  db=redis_cfg[REDIS_DATASOURCE_CONFIG_DB])
            logger.debug("Obtained internal redis handler" + str(self._r))
        else:
            # Fix: was BaseException -- reserved for interpreter-level exits.
            # ValueError is still caught by any existing `except BaseException`.
            raise ValueError("Error validating config ")

    def update(self, item):
        """Overwrite the stored value for *item* (same as store)."""
        self.store(item)

    def store(self, item):
        """Persist *item* under its hash key."""
        self._r.set(item.getHash(), item.getValue())

    def get(self, item):
        """Return the stored value for *item*, or None when absent."""
        return self._r.get(item.getHash())

    def exists(self, item):
        """True if a value is stored for *item*."""
        return self.get(item) is not None

    def all(self):
        """Return every value in the database wrapped as BaseItem objects.

        Fix: previously issued one GET per key; a single MGET fetches all
        values in one round trip.
        """
        keys = self._r.keys()
        if not keys:
            return []
        return [BaseItem({"origin": "redis"}, value)
                for value in self._r.mget(keys)]

    def _validateConfig(self, config):
        """Check that *config* carries a complete redis section.

        Returns True when valid.

        Raises:
            ValueError: when the redis section or one of its keys is missing.
        """
        validator = MultipleConfigValidator({
            VALIDATORS_LIST:
            [ContainsKeyConfigValidator({KEY_VALUE: REDIS_DATASOURCE_CONFIG})]
        })
        if not validator.validate(config):
            # Fix: narrowed from BaseException (see __init__).
            raise ValueError("Config validation error : does not contain " +
                             REDIS_DATASOURCE_CONFIG)

        # Validate the redis datasource section itself.
        validator = MultipleConfigValidator({
            VALIDATORS_LIST: [
                ContainsKeysConfigValidator({
                    KEYS_LIST: [
                        REDIS_DATASOURCE_CONFIG_DB,
                        REDIS_DATASOURCE_CONFIG_HOST,
                        REDIS_DATASOURCE_CONFIG_PORT
                    ]
                })
            ]
        })

        if not validator.validate(config[REDIS_DATASOURCE_CONFIG]):
            raise ValueError(
                "Config validation error : config not complete ")

        return True

    def delete(self, item):
        """Remove the stored value for *item*."""
        self._r.delete(item.getHash())
Esempio n. 19
0
    class RedisSessionObject():
        implements(ISession)

        def __init__(self, request):
            """Bind the session to *request*.

            Reuses the session id from the request cookie when present,
            otherwise creates a fresh id, and registers a response callback
            that saves the session and (re)sets the cookie.

            ``_options`` and ``_redis_servers`` are free variables supplied
            by the enclosing factory scope (not visible in this class).
            """
            self._options = _options
            self.rd = None              # redis client, created lazily
            self._master_rd = False     # True when rd points at the master
            self.request = request
            self._data = None           # session dict, loaded lazily
            self.id = None
            self._new_session = True
            self._changed = False

            cookie = self.request.headers.get('Cookie')
            if cookie is None:
                self.__create_id()
            else:
                c = SimpleCookie()
                c.load(cookie)
                session_cookie = c.get(self._options['_cookie_name'])
                if session_cookie is None:
                    #new session!
                    self.__create_id()
                else:
                    # Existing session: adopt the id from the cookie.
                    self.id = session_cookie.value
                    self._new_session = False

            def session_callback(request, response):
                # Runs at response time: persist if changed (or on a random
                # 1-in-increase_expire_mod request, to refresh the TTL) and
                # emit the session cookie.
                exception = getattr(request, 'exception', None)
                commit = self._changed
                increase_expire_mod = _options['_increase_expire_mod']
                if increase_expire_mod > 0:
                    rnd = round(random.random() * 1000000)
                    mod = rnd % increase_expire_mod
                    if not mod:
                    #                        print 'Saving due to increase_expire_mod'
                        commit = True

                if exception is None and commit:
                    self.__save()
                    cookie = SimpleCookie()
                    _cname = self._options['_cookie_name']
                    cookie[_cname] = self.id
                    domain = self._options.get('cookie_domain')
                    cookie[_cname]['path'] = _options['_path']
                    if domain is not None:
                        cookie[_cname]['domain'] = domain
                    if self._options['_secure']:
                        cookie[_cname]['secure'] = True
                    header = cookie[_cname].output(header='')
                    #                    print 'Writing cookie header:',header
                    response.headerlist.append(('Set-Cookie', header))

            request.add_response_callback(session_callback)

        # private methods
        def __init_rd(self, master=False):
            """Lazily create the redis client.

            Reads go to a randomly chosen configured server; when ``master``
            is True the first configured server is (re)used.
            """
            if self.rd is None:
                if master:
                    self.rd = StrictRedis(host=_redis_servers[0][0], port=_redis_servers[0][1], db=_redis_servers[0][2])
                    self._master_rd = True
                else:
                    server = random.choice(_redis_servers)
                    self.rd = StrictRedis(host=server[0], port=server[1], db=server[2])
                    self._master_rd = False
            elif master and not self._master_rd:
                # Upgrade an existing non-master connection to the master.
                self.rd = StrictRedis(host=_redis_servers[0][0], port=_redis_servers[0][1], db=_redis_servers[0][2])
                self._master_rd = True

        def __key(self):
            """Redis key under which this session's packed data is stored."""
            return 'rd:ses:%s' % self.id

        def __load(self):
            """Fetch and unpack the session data on first access ({} if absent)."""
            if self._data is not None:
                return
            self.__init_rd()
            packed = self.rd.get(self.__key())
            if packed is None:
                self._data = {}
            else:
                self._data = msgpack.unpackb(packed, use_list=True, encoding='utf-8')

        def __save(self):
            """Pack and write the session with the configured TTL.

            Unloaded or empty sessions are never written.
            """
            if not self._data:
                return
            self.__init_rd(master=True)
            packed = msgpack.packb(self._data, encoding='utf-8')
            self.rd.setex(self.__key(), self._options['_expire'], packed)

        def __create_id(self):
            """Generate a session id: double SHA-1 over time, a transient
            object id, randomness and the pid (Python 2 str-based hashing)."""
            self.id = hashlib.sha1(hashlib.sha1("%f%s%f%s" % (time.time(), id({}), random.random(), getpid())).hexdigest(), ).hexdigest()

        def init_with_id(self, session_id):
            """Bind this session object to *session_id*.

            The session data is not fetched here; it is loaded lazily on
            first access.

            :param session_id: externally supplied session id
            :return: self
            """
            self._data = None
            self.id = session_id
            return self

        def set_expire(self, expire):
            """Override the session TTL (seconds) used when saving."""
            self._options['_expire'] = expire

        # ISession API
        def save(self):
            """Mark the session dirty so the response callback persists it."""
            self._changed = True

        def invalidate(self):
            """Delete the stored session data.

            TODO: the session cookie is not removed here.
            """
            self.__init_rd(master=True)
            self.rd.delete(self.__key())

        def changed(self):
            """Flag the session as modified."""
            self._changed = True

        def flash(self, msg, queue='', allow_duplicate=True):
            """Append *msg* to the flash queue *queue*.

            With allow_duplicate=False the message is only added if absent.
            """
            self.__load()
            key = '_flsh:%s_' % queue
            queued = self.get(key, [])
            if allow_duplicate or msg not in queued:
                queued.append(msg)
            self[key] = queued

        def pop_flash(self, queue=''):
            """Remove and return the newest message in *queue*, or None."""
            self.__load()
            key = '_flsh:%s_' % queue
            queued = self.get(key, [])
            if not queued:
                return None
            msg = queued.pop()
            self[key] = queued
            return msg

        def peek_flash(self, queue=''):
            """Return the oldest message in *queue* without removing it, or None.

            NOTE(review): this peeks at q[0] while pop_flash() removes from
            the END of the list, so peek and pop look at different messages.
            Confirm whether that asymmetry is intended.
            """
            self.__load()
            key = '_flsh:%s_' % queue
            queued = self.get(key, [])
            if queued:
                return queued[0]
            return None

        def new_csrf_token(self):
            """Create, store and return a fresh CSRF token
            (Python 2 ``str.encode('hex')``)."""
            token = os.urandom(20).encode('hex')
            self['_csrft_'] = token
            return token

        def get_csrf_token(self):
            """Return the current CSRF token, minting one on first use."""
            token = self.get('_csrft_')
            if token is None:
                token = self.new_csrf_token()
            return token

        # mapping methods
        def __getitem__(self, key):
            """dict-style access; loads the session data on demand."""
            self.__load()
            return self._data[key]

        def get(self, key, default=None):
            """Return the session value for *key*, or *default* when absent."""
            self.__load()
            return self._data.get(key, default)

        def __delitem__(self, key):
            """Delete *key* from the session and mark it dirty."""
            self.__load()
            del self._data[key]
            self._changed = True

        def __setitem__(self, key, value):
            """Store *value* under *key* and mark the session dirty."""
            self.__load()
            self._data[key] = value
            self._changed = True

        def keys(self):
            """All session keys."""
            self.__load()
            return self._data.keys()

        def values(self):
            """All session values."""
            self.__load()
            return self._data.values()

        def items(self):
            """All (key, value) session pairs."""
            self.__load()
            return self._data.items()

        def iterkeys(self):
            """Iterator over session keys."""
            self.__load()
            return iter(self._data.keys())

        def itervalues(self):
            """Iterator over session values."""
            self.__load()
            return iter(self._data.values())

        def iteritems(self):
            """Iterator over (key, value) session pairs."""
            self.__load()
            return iter(self._data.items())

        def clear(self):
            """Discard all session data and mark the session dirty."""
            self.__load()
            self._changed = True
            self._data = {}

        def update(self, d):
            """Copy every session entry INTO the mapping *d*.

            NOTE(review): this is the reverse of ``dict.update`` semantics --
            it exports session data into *d* instead of merging *d* into the
            session (``multi_set`` does the importing direction). Confirm the
            direction is intentional before changing it.
            """
            self.__load()
            for k in self._data.keys():
                d[k] = self._data[k]

        def multi_set(self, d):
            """Merge the mapping *d* into the session and mark it dirty."""
            self.__load()
            for key in d.keys():
                self._data[key] = d[key]
            self._changed = True

        def setdefault(self, key, default=None):
            """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D

            NOTE(review): unimplemented stub -- always returns None and never
            stores the default.
            """
            pass

        def pop(self, k, *args):
            """remove specified key and return the corresponding value
            ``*args`` may contain a single default value, or may not be supplied.
            If key is not found, default is returned if given, otherwise
            ``KeyError`` is raised

            NOTE(review): unimplemented stub -- always returns None.
            """
            pass

        def popitem(self):
            """remove and return some (key, value) pair as a
            2-tuple; but raise ``KeyError`` if mapping is empty

            NOTE(review): unimplemented stub -- always returns None.
            """
            pass

        def __len__(self):
            """Number of entries in the session."""
            self.__load()
            return len(self._data)


        def __iter__(self):
            """Iterate over session keys."""
            return self.iterkeys()

        def __contains__(self, key):
            """``key in session`` membership test."""
            self.__load()
            return key in self._data

        @property
        def new(self):
            """True when this request created a brand-new session id."""
            return self._new_session