def incr_count(cls, obj, attr):
    """Increment the cached counter for obj.attr, seeding it from the DB on a miss."""
    redis_conn = RedisClient.get_connection()
    count_key = cls.get_count_key(obj, attr)
    if not redis_conn.exists(count_key):
        # Cache miss: reload the row and seed the counter with the DB value.
        # No extra +1 here -- obj.attr is expected to be updated in the DB
        # before this method is called.
        obj.refresh_from_db()
        db_value = getattr(obj, attr)
        redis_conn.set(count_key, db_value)
        redis_conn.expire(count_key, settings.REDIS_KEY_EXPIRE_TIME)
        return db_value
    return redis_conn.incr(count_key)
示例#2
0
    def testRedisClient(self):
        """Smoke-test list push/read and cache clearing against Redis."""
        connection = RedisClient.get_connection()
        # lpush prepends, so values read back in reverse insertion order.
        for value in range(3):
            connection.lpush('testkey', f'{value}')
        self.assertEqual(
            connection.lrange('testkey', 0, -1), [b'2', b'1', b'0'])

        # Clearing the cache should leave the list empty.
        self.clear_cache()
        self.assertEqual(connection.lrange('testkey', 0, -1), [])
 def push_object(cls, key, obj, queryset):
     """Prepend obj to the cached list at key, back-filling on a cache miss."""
     # Only the first REDIS_LIST_LENGTH_LIMIT rows are ever cached.
     queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
     conn = RedisClient.get_connection()
     if conn.exists(key):
         # Cache hit: push the serialized object and trim to the size cap.
         conn.lpush(key, DjangoModelSerializer.serialize(obj))
         conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
         return
     # Cache miss: rebuild the whole list from the database instead of
     # pushing a single object into an empty cache.
     cls._load_objects_to_cache(key, queryset)
示例#4
0
 def push_object(cls, key, obj, queryset):
     """Insert obj at the head of the cached list for key.

     On a cache miss the whole list is reloaded from the database instead
     of pushing a single object into an empty cache.
     """
     redis_conn = RedisClient.get_connection()
     if redis_conn.exists(key):
         redis_conn.lpush(key, DjangoModelSerializer.serialize(obj))
         # Trim so the cached list never exceeds the configured cap.
         redis_conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
     else:
         cls._load_objects_to_cache(key, queryset)
示例#5
0
    def _load_objects_to_cache(cls, key, objects):
        """Serialize objects and write them to the Redis list at key.

        At most settings.REDIS_LIST_LENGTH_LIMIT objects are cached -- the
        slice guards against callers passing an unbounded queryset; reads
        beyond the limit fall back to the database. The key gets an expiry so
        stale lists eventually drop out of the cache.
        """
        conn = RedisClient.get_connection()
        serialized_list = []

        # Build the full payload first so rpush is a single round trip
        # instead of one Redis call per object.
        for obj in objects[:settings.REDIS_LIST_LENGTH_LIMIT]:
            serialized_data = DjangoModelSerializer.serialize(obj)
            serialized_list.append(serialized_data)
        if serialized_list:
            conn.rpush(key, *serialized_list)
            conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
示例#6
0
 def push_object(cls, key, obj, queryset):
     """Push a single serialized obj onto the cached list, trimming to size.

     When the key is absent, the list is rebuilt from the database via
     _load_objects_to_cache rather than pushing into an empty cache.
     """
     connection = RedisClient.get_connection()
     if not connection.exists(key):
         cls._load_objects_to_cache(key, queryset)
         return
     connection.lpush(key, DjangoModelSerializer.serialize(obj))
     # Enforce the configured length cap after the push.
     connection.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
示例#7
0
 def get(cls, gk_name):
     """Return the gatekeeper config as {'percent': int, 'description': str}.

     Falls back to a disabled gatekeeper (0 percent, empty description)
     when the hash does not exist in Redis.
     """
     conn = RedisClient.get_connection()
     name = f'gatekeeper:{gk_name}'
     if not conn.exists(name):
         return {'percent': 0, 'description': ''}
     redis_hash = conn.hgetall(name)
     return {
         'percent': int(redis_hash.get(b'percent', 0)),
         # hgetall returns bytes values; decode instead of str(), which
         # would yield the repr "b'...'" rather than the stored text.
         'description': redis_hash.get(b'description', b'').decode(),
     }
示例#8
0
 def get_count(cls, obj, attr):
     """Return the cached counter for obj.attr, back-filling from the DB on a miss."""
     conn = RedisClient.get_connection()
     key = cls.get_count_key(obj, attr)
     count = conn.get(key)
     if count is not None:
         return int(count)
     # Cache miss: reload the row and seed the counter from the DB value.
     obj.refresh_from_db()
     count = getattr(obj, attr)
     conn.set(key, count)
     # Expire the key so the counter does not live in Redis forever; this
     # matches the back-fill paths of incr_count / decr_count.
     conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
     return count
示例#9
0
    def push_object(cls, key, obj, queryset):
        """Push obj onto the cached list at key, rebuilding the cache on a miss."""
        conn = RedisClient.get_connection()

        if conn.exists(key):
            # cache hit: prepend the new object and enforce the length cap
            conn.lpush(key, DjangoModelSerializer.serialize(obj))
            conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
        else:
            # cache miss: reload the entire list from the database
            cls._load_objects_to_cache(key, queryset)
示例#10
0
    def setUp(self):
        """Create two authenticated users with tweets plus a third user/client pair."""
        self.clear_cache()
        RedisClient.clear()

        # user1: authenticated client with 3 tweets
        self.user1_client = APIClient()
        self.user1 = self.create_user(username='******')
        self.user1_client.force_authenticate(user=self.user1)
        self.user1_tweets = []
        for i in range(3):
            self.user1_tweets.append(self.create_tweet(self.user1, i))

        # user2: authenticated client with 2 tweets
        self.user2_client = APIClient()
        self.user2 = self.create_user(username='******')
        self.user2_client.force_authenticate(user=self.user2)
        self.user2_tweets = []
        for i in range(2):
            self.user2_tweets.append(self.create_tweet(self.user2, i))

        # user3: created through the combined helper, no tweets
        self.user3, self.user3_client = self.create_user_and_client(
            username='******')
示例#11
0
 def decr_count(cls, obj, attr):
     """Decrement the cached counter for obj.attr, back-filling from DB on a miss."""
     conn = RedisClient.get_connection()
     key = cls.get_count_key(obj, attr)
     if conn.exists(key):
         return conn.decr(key)
     # Cache miss: back-fill from the database. No extra -1 is applied here
     # because the DB column must already have been updated before this
     # method is called.
     obj.refresh_from_db()
     current = getattr(obj, attr)
     conn.set(key, current)
     conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
     return current
示例#12
0
    def test_cache_tweet_in_redis(self):
        """Round-trip a tweet through Redis and verify a bogus key misses."""
        tweet = self.tweets[0]
        conn = RedisClient.get_connection()
        conn.set(f'tweet:{tweet.id}', DjangoModelSerializer.serialize(tweet))

        # A key that was never written should read back as None.
        self.assertEqual(conn.get('tweet:bogus'), None)

        # The cached tweet deserializes back to an equal object.
        cached_tweet = DjangoModelSerializer.deserialize(
            conn.get(f'tweet:{tweet.id}'))
        self.assertEqual(cached_tweet, tweet)
示例#13
0
    def test_cache_tweet_in_redis(self):
        """Serialized tweets stored in Redis deserialize back to equal objects."""
        tweet = self.create_tweet(self.linghu)
        conn = RedisClient.get_connection()
        serialized = DjangoModelSerializer.serialize(tweet)
        conn.set(f'tweet:{tweet.id}', serialized)

        # Missing keys read back as None.
        missing = conn.get(f'tweet:not_exists')
        self.assertEqual(missing, None)

        raw = conn.get(f'tweet:{tweet.id}')
        cached = DjangoModelSerializer.deserialize(raw)
        self.assertEqual(tweet, cached)
示例#14
0
 def push_object(cls, key, obj, queryset):
     """Prepend obj to the cached list, rebuilding from the DB on a miss."""
     # Only the first REDIS_LIST_LENGTH_LIMIT rows are ever cached.
     queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
     conn = RedisClient.get_connection()
     if not conn.exists(key):
         # Key absent: back-fill the whole list from the database rather
         # than pushing a single object into an empty cache.
         cls._load_objects_to_cache(key, queryset)
         return
     payload = DjangoModelSerializer.serialize(obj)
     conn.lpush(key, payload)
     # Re-trim so the list stays within the length cap after the push.
     conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
示例#15
0
    def incr_count(cls, obj, attr):
        """Atomically bump the cached counter for obj.attr.

        On a cache miss the counter is seeded from the database value with an
        expiry; no extra +1 is applied, because obj.attr must already have
        been increased in the DB before incr_count is called.
        """
        conn = RedisClient.get_connection()
        key = cls.get_count_key(obj, attr)
        if conn.exists(key):
            return conn.incr(key)

        obj.refresh_from_db()
        fresh_count = getattr(obj, attr)
        conn.set(key, fresh_count)
        conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
        return fresh_count
示例#16
0
    def test_create_new_newsfeed_before_get_cached_newsfeeds(self):
        """Creating a feed on a cold cache warms the cached newsfeed list."""
        feed1 = self.create_newsfeed(self.linghu, self.create_tweet(self.linghu))

        self.clear_cache()
        conn = RedisClient.get_connection()
        key = USER_NEWSFEEDS_PATTERN.format(user_id=self.linghu.id)

        # Cache is cold after clearing; creating feed2 should repopulate it.
        self.assertEqual(conn.exists(key), False)
        feed2 = self.create_newsfeed(self.linghu, self.create_tweet(self.linghu))
        self.assertEqual(conn.exists(key), True)

        # Feeds come back newest first.
        feeds = NewsFeedService.get_cached_newsfeeds(self.linghu.id)
        self.assertEqual([f.id for f in feeds], [feed2.id, feed1.id])
示例#17
0
    def load_objects(cls, key, queryset):
        """Return cached objects for key, falling back to the queryset.

        On a cache hit the serialized list is read from Redis and
        deserialized; on a miss the cache is back-filled and the queryset is
        returned as a list so both paths yield the same type.
        """
        conn = RedisClient.get_connection()

        if conn.exists(key):
            serialized_list = conn.lrange(key, 0, -1)
            objects = []
            for data in serialized_list:
                obj = DjangoModelSerializer.deserialize(data)
                objects.append(obj)
            return objects

        # The back-fill helper is _load_objects_to_cache (plural); the
        # previous singular name raised AttributeError on every cache miss.
        cls._load_objects_to_cache(key, queryset)
        return list(queryset)
示例#18
0
    def _load_objects_to_cache(cls, key, objects, serializer):
        """Serialize objects with the given serializer and rpush them to key."""
        conn = RedisClient.get_connection()

        # NOTE: callers are expected to cap `objects` at
        # REDIS_LIST_LENGTH_LIMIT; reads past the cap go to the DB.
        payload = [serializer.serialize(obj) for obj in objects]

        # Single rpush plus an expiry; skip both when there is nothing to
        # cache so an empty key is never created.
        if payload:
            conn.rpush(key, *payload)
            conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
示例#19
0
    def incr_count(cls, obj, attr):
        """Increment the cached count of obj.attr, seeding the cache on a miss.

        No +1 is applied on the back-fill path because obj.attr is expected
        to have been incremented in the database before incr_count is called.
        """
        conn = RedisClient.get_connection()
        key = cls.get_count_key(obj, attr)
        if conn.exists(key):
            return conn.incr(key)

        # Back-fill the cache from the DB. No second exists() check here: it
        # was redundant after the early return above, and if another process
        # created the key in between it made this method return None.
        obj.refresh_from_db()
        count = getattr(obj, attr)
        conn.set(key, count)
        conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
        return count
示例#20
0
    def _load_objects_to_cache(cls, key, objects, serializer):
        """Bulk-serialize objects and store them as a Redis list under key."""
        conn = RedisClient.get_connection()

        serialized = [serializer.serialize(obj) for obj in objects]

        # Avoid the N+1 round-trip pattern: rather than rpush-ing inside the
        # loop (one Redis call per object), build the whole list first and
        # push it with a single rpush.
        if serialized:
            conn.rpush(key, *serialized)
            conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
示例#21
0
    def test_create_newsfeed_before_get_cached_newsfeeds(self):
        """A newsfeed created on a cold cache repopulates the cached list."""
        tweet1 = self.create_tweet(user=self.user1)
        feed1 = self.create_newsfeed(user=self.user2, tweet=tweet1)

        self.clear_cache()
        conn = RedisClient.get_connection()
        name = USER_NEWSFEED_PATTERN.format(user_id=self.user2.id)
        self.assertFalse(conn.exists(name))

        tweet2 = self.create_tweet(user=self.user3)
        feed2 = self.create_newsfeed(user=self.user2, tweet=tweet2)
        self.assertTrue(conn.exists(name))

        # Feeds are returned newest first.
        feeds = NewsFeedService.load_newsfeeds_through_cache(self.user2.id)
        self.assertEqual([f.id for f in feeds], [feed2.id, feed1.id])
示例#22
0
    def _load_objects_to_cache(cls, key, objects):
        """Cache up to REDIS_LIST_LENGTH_LIMIT serialized objects under key.

        Objects beyond the limit are left to be read from the database. The
        limit is typically large (e.g. 1000), and few users page that deep,
        so the occasional DB read is acceptable.
        """
        conn = RedisClient.get_connection()

        serialized_list = [
            DjangoModelSerializer.serialize(obj)
            for obj in objects[:settings.REDIS_LIST_LENGTH_LIMIT]
        ]

        if serialized_list:
            conn.rpush(key, *serialized_list)
            conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
示例#23
0
    def _load_objects_to_cache(cls, key, objects):
        """Write at most REDIS_LIST_LENGTH_LIMIT serialized objects to key."""
        conn = RedisClient.get_connection()

        # Cap the cache at REDIS_LIST_LENGTH_LIMIT; anything past the limit
        # is fetched from the DB instead. The limit is usually large, so it
        # is an edge case for a user to read that many items directly.
        capped = objects[:settings.REDIS_LIST_LENGTH_LIMIT]
        serialized_list = [DjangoModelSerializer.serialize(obj) for obj in capped]

        if serialized_list:
            conn.rpush(key, *serialized_list)
            conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
示例#24
0
    def test_get_user_tweets(self):
        """Cached tweet list behaves correctly on miss, hit, and update."""
        expected_ids = []
        for i in range(3):
            tweet = self.create_tweet(self.linghu, 'tweet {}'.format(i))
            expected_ids.append(tweet.id)
        expected_ids.reverse()  # newest first

        RedisClient.clear()
        conn = RedisClient.get_connection()

        # cache miss
        tweets = TweetService.get_cached_tweets(self.linghu.id)
        self.assertEqual([t.id for t in tweets], expected_ids)

        # cache hit
        tweets = TweetService.get_cached_tweets(self.linghu.id)
        self.assertEqual([t.id for t in tweets], expected_ids)

        # cache updated
        new_tweet = self.create_tweet(self.linghu, 'new tweet')
        expected_ids.insert(0, new_tweet.id)
        tweets = TweetService.get_cached_tweets(self.linghu.id)
        self.assertEqual([t.id for t in tweets], expected_ids)
示例#25
0
    def load_objects(cls, key, queryset):
        """Fetch objects from the Redis cache, back-filling from queryset on a miss."""
        conn = RedisClient.get_connection()

        # Cache hit: deserialize everything stored under the key.
        if conn.exists(key):
            return [
                DjangoModelSerializer.deserialize(item)
                for item in conn.lrange(key, 0, -1)
            ]

        # Cache miss: back-fill, then return a list so the return type
        # matches the list stored in Redis on the hit path.
        cls._load_objects_to_cache(key, queryset)
        return list(queryset)
示例#26
0
    def _load_objects_to_cache(cls, key, objects):
        """Serialize and cache at most REDIS_LIST_LENGTH_LIMIT objects at key.

        Reads that go past the limit fall back to the database. Because the
        limit is normally large (e.g. 1000) and users rarely page that far,
        those DB reads are uncommon.
        """
        conn = RedisClient.get_connection()

        batch = []
        for obj in objects[:settings.REDIS_LIST_LENGTH_LIMIT]:
            batch.append(DjangoModelSerializer.serialize(obj))

        if batch:
            conn.rpush(key, *batch)
            conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
示例#27
0
def main():
    """Wire up Postgres, Redis, the auth manager, and start the HTTP server."""

    # NOTE(review): psql is unused after this call -- presumably
    # set_parameters is invoked for its side effect on models.Model; confirm.
    psql = models.Model.set_parameters(**s.POSTGRES)
    redis = RedisClient(**s.REDIS)

    # Handler initializer: only `auth` is a real collaborator; the other
    # entries are placeholder objects.
    initializer = dict(relay=object(),
                       push=object(),
                       session=object(),
                       auth=AuthManager(redis))
    # Single route: /session/<device_id>/<number> (3-15 digits, '+' allowed).
    application = tornado.web.Application([
        (r'^/session/(?P<device_id>\d+)/(?P<number>[0-9\+]{3,15})$',
         MainHandler, initializer)
    ])
    server = tornado.httpserver.HTTPServer(application)
    server.listen(s.SERVER_PORT, s.SERVER_HOST)
    server.start()
    # NOTE(review): no IOLoop is started here -- presumably the caller runs
    # tornado.ioloop.IOLoop.current().start(); confirm, otherwise main
    # returns immediately and the server never serves requests.
    Log.d(TAG, "OpenRedPhoneServer is run")
示例#28
0
    def load_objects(cls, key, queryset):
        """Return the objects for key from cache, or from queryset on a miss."""
        conn = RedisClient.get_connection()

        # Cache hit: everything under the key deserializes back to objects.
        if conn.exists(key):
            objects = []
            for raw in conn.lrange(key, 0, -1):
                objects.append(DjangoModelSerializer.deserialize(raw))
            return objects

        cls._load_objects_to_cache(key, queryset)

        # Redis stores the data as a list, so return a list here as well to
        # keep the return type consistent between the two paths.
        return list(queryset)
示例#29
0
    def push_object(cls, key, obj, lazy_load_objects):
        """Prepend obj to the cached list, or lazily rebuild it on a miss."""
        conn = RedisClient.get_connection()
        # HBase-backed models need their own serializer.
        serializer = (
            HBaseModelSerializer if isinstance(obj, HBaseModel)
            else DjangoModelSerializer
        )

        # Cache hit: put obj at the head of the list and trim to the cap.
        if conn.exists(key):
            conn.lpush(key, serializer.serialize(obj))
            conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
            return

        # Cache miss: load up to the cap from the backing store and
        # back-fill the cache in one shot instead of pushing the single
        # object into an empty cache.
        objects = lazy_load_objects(settings.REDIS_LIST_LENGTH_LIMIT)
        cls._load_objects_to_cache(key, objects, serializer)
    def load_objects(cls, key, queryset):
        """Read the cached list for key, back-filling from queryset on a miss."""
        # Never cache more than the configured limit.
        queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
        conn = RedisClient.get_connection()

        # Cache hit: deserialize each stored entry.
        if conn.exists(key):
            return [
                DjangoModelSerializer.deserialize(raw)
                for raw in conn.lrange(key, 0, -1)
            ]

        cls._load_objects_to_cache(key, queryset)

        # Return a list to match the list type stored in Redis.
        return list(queryset)