def test_cache_tweet_in_redis(self):
    """Round-trip a tweet through redis: miss on an unknown key, hit on the real one."""
    tweet = self.tweets[0]
    conn = RedisClient.get_connection()
    # store the serialized model under its per-tweet key
    conn.set(f'tweet:{tweet.id}', DjangoModelSerializer.serialize(tweet))
    # a key that was never set must come back as None
    self.assertEqual(conn.get('tweet:bogus'), None)
    # reading the real key back must deserialize to an equal model instance
    raw = conn.get(f'tweet:{tweet.id}')
    cached_tweet = DjangoModelSerializer.deserialize(raw)
    self.assertEqual(cached_tweet, tweet)
def test_cache_tweet_in_redis(self):
    """Verify a tweet can be cached in redis and read back intact."""
    tweet = self.create_tweet(self.linghu)
    conn = RedisClient.get_connection()
    serialized_data = DjangoModelSerializer.serialize(tweet)
    conn.set(f'tweet:{tweet.id}', serialized_data)
    # fix: was f'tweet:not_exists' — an f-string with no placeholder (ruff F541);
    # a plain literal produces the identical key
    data = conn.get('tweet:not_exists')
    # unknown key must miss
    self.assertEqual(data, None)
    # real key must hit and deserialize back to an equal tweet
    data = conn.get(f'tweet:{tweet.id}')
    cached_tweet = DjangoModelSerializer.deserialize(data)
    self.assertEqual(tweet, cached_tweet)
def push_objects(cls, key, obj, queryset):
    """Prepend obj to the cached list at key; on a cache miss rebuild from queryset.

    The cached list is kept bounded to REDIS_LIST_LENGTH_LIMIT entries,
    consistent with the other push_object variants in this file.
    """
    conn = RedisClient.get_connection()
    if not conn.exists(key):
        # cache miss: rebuild the whole list from the DB queryset rather
        # than pushing a single object into an empty cache.
        # fix: was cls._load_object_to_cache — no such helper is defined;
        # the helper in this file is _load_objects_to_cache
        cls._load_objects_to_cache(key, queryset)
        return
    serialized_data = DjangoModelSerializer.serialize(obj)
    conn.lpush(key, serialized_data)
    # fix: cap the list like the sibling push_object implementations do,
    # so the cache cannot grow without bound
    conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
def push_object(cls, key, obj, queryset):
    """Push one serialized object onto the cached list, keeping it bounded."""
    conn = RedisClient.get_connection()
    if not conn.exists(key):
        # nothing cached yet: populate the full list from the DB instead
        cls._load_objects_to_cache(key, queryset)
        return
    conn.lpush(key, DjangoModelSerializer.serialize(obj))
    # drop anything beyond the configured cap
    conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
def _load_objects_to_cache(cls, key, objects):
    """Serialize objects and bulk-load them into the redis list at key.

    At most REDIS_LIST_LENGTH_LIMIT objects are cached; reads past that
    limit fall back to the database. The key expires after
    REDIS_KEY_EXPIRE_TIME seconds.
    """
    conn = RedisClient.get_connection()
    serialized_list = []
    # fix: cap the number of cached objects like the sibling
    # _load_objects_to_cache variants in this file — without the slice the
    # cached list (and memory use) is unbounded
    for obj in objects[:settings.REDIS_LIST_LENGTH_LIMIT]:
        serialized_data = DjangoModelSerializer.serialize(obj)
        serialized_list.append(serialized_data)
    if serialized_list:
        conn.rpush(key, *serialized_list)
        conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
def push_object(cls, key, obj, queryset):
    """Add one object to the front of the cached list for key."""
    conn = RedisClient.get_connection()
    if conn.exists(key):
        # key already cached: push just this object and re-trim to the cap
        serialized = DjangoModelSerializer.serialize(obj)
        conn.lpush(key, serialized)
        conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
        return
    # cache miss: rebuild the whole list from the database queryset instead
    # of pushing a single object into an empty cache
    cls._load_objects_to_cache(key, queryset)
def push_object(cls, key, obj, queryset):
    """Push obj onto the redis list at key, rebuilding from the DB on a miss."""
    # never cache more than the configured number of rows
    queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
    conn = RedisClient.get_connection()
    if not conn.exists(key):
        # if the key is missing, load straight from the database rather
        # than pushing a single object into an empty cache
        cls._load_objects_to_cache(key, queryset)
        return
    conn.lpush(key, DjangoModelSerializer.serialize(obj))
    conn.ltrim(key, 0, settings.REDIS_LIST_LENGTH_LIMIT - 1)
def load_objects(cls, key, queryset):
    """Return the objects cached under key, or load them from queryset on a miss.

    Always returns a plain list so both branches have the same return type.
    """
    conn = RedisClient.get_connection()
    if conn.exists(key):
        # cache hit: deserialize every stored entry in order
        serialized_list = conn.lrange(key, 0, -1)
        objects = []
        for data in serialized_list:
            obj = DjangoModelSerializer.deserialize(data)
            objects.append(obj)
        return objects
    # cache miss: warm the cache from the queryset.
    # fix: was cls._load_object_to_cache — no such helper exists; the
    # helper defined in this file is _load_objects_to_cache
    cls._load_objects_to_cache(key, queryset)
    # convert to list to match the type of the cache-hit branch
    return list(queryset)
def _load_objects_to_cache(cls, key, objects):
    """Serialize up to REDIS_LIST_LENGTH_LIMIT objects and store them at key."""
    conn = RedisClient.get_connection()
    # Cache at most REDIS_LIST_LENGTH_LIMIT objects; anything beyond that is
    # served from the database. The limit is usually large (e.g. 1000), and
    # few users page that deep, so the extra DB reads are not a problem.
    serialized_list = [
        DjangoModelSerializer.serialize(obj)
        for obj in objects[:settings.REDIS_LIST_LENGTH_LIMIT]
    ]
    if serialized_list:
        conn.rpush(key, *serialized_list)
        conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
def _load_objects_to_cache(cls, key, objects):
    """Bulk-load the first REDIS_LIST_LENGTH_LIMIT objects into the redis list."""
    conn = RedisClient.get_connection()
    # Only the first REDIS_LIST_LENGTH_LIMIT items are cached; requests past
    # the limit fall back to the database. Since the limit is typically
    # large, reading that deep directly is a rare edge case.
    capped = objects[:settings.REDIS_LIST_LENGTH_LIMIT]
    serialized_list = []
    for item in capped:
        serialized_list.append(DjangoModelSerializer.serialize(item))
    if serialized_list:
        conn.rpush(key, *serialized_list)
        conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
def load_objects(cls, key, queryset):
    """Return cached objects for key, falling back to (and warming) the DB."""
    conn = RedisClient.get_connection()
    if conn.exists(key):
        # cache hit: deserialize every entry stored under the key
        return [
            DjangoModelSerializer.deserialize(item)
            for item in conn.lrange(key, 0, -1)
        ]
    # cache miss: warm the cache from the queryset
    cls._load_objects_to_cache(key, queryset)
    # return a list so the type matches the cached (list) branch
    return list(queryset)
def _load_objects_to_cache(cls, key, objects):
    """Write up to REDIS_LIST_LENGTH_LIMIT serialized objects to the cache."""
    conn = RedisClient.get_connection()
    # Only REDIS_LIST_LENGTH_LIMIT objects get cached; deeper reads hit the
    # database. The limit is normally big (e.g. 1000) and users rarely page
    # past it, so the occasional direct DB read is acceptable.
    payload = [
        DjangoModelSerializer.serialize(o)
        for o in objects[:settings.REDIS_LIST_LENGTH_LIMIT]
    ]
    if payload:
        conn.rpush(key, *payload)
        conn.expire(key, settings.REDIS_KEY_EXPIRE_TIME)
def load_objects(cls, key, queryset):
    """Fetch objects from the redis cache, loading them from the DB on a miss."""
    conn = RedisClient.get_connection()
    if not conn.exists(key):
        # nothing cached yet: populate redis from the queryset
        cls._load_objects_to_cache(key, queryset)
        # redis stores a list, so normalize the DB result to a list as well
        return list(queryset)
    # cache hit: deserialize each stored entry in order
    objects = []
    for raw in conn.lrange(key, 0, -1):
        objects.append(DjangoModelSerializer.deserialize(raw))
    return objects
def load_objects(cls, key, queryset):
    """Return the cached list for key, rebuilding it from queryset on a miss."""
    # cap how many rows can ever be cached or returned
    queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
    conn = RedisClient.get_connection()
    if conn.exists(key):
        # cache hit: decode every serialized entry
        return [
            DjangoModelSerializer.deserialize(raw)
            for raw in conn.lrange(key, 0, -1)
        ]
    # cache miss: warm redis from the queryset
    cls._load_objects_to_cache(key, queryset)
    # redis holds a list, so return a list here too for a consistent type
    return list(queryset)
def load_objects(cls, key, queryset):
    """Serve objects from redis when cached; otherwise warm the cache from the DB."""
    queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
    conn = RedisClient.get_connection()
    if not conn.exists(key):
        # key absent: load the queryset into the redis cache
        cls._load_objects_to_cache(key, queryset)
        # convert to list so both branches return the same type,
        # since redis stores the data as a list
        return list(queryset)
    # key present: pull everything back out and deserialize it
    results = []
    for payload in conn.lrange(key, 0, -1):
        results.append(DjangoModelSerializer.deserialize(payload))
    return results
def load_objects(cls, key, queryset):
    """Read the object list from the cache, falling back to the database."""
    queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
    conn = RedisClient.get_connection()
    if conn.exists(key):
        # cache hit: fetch the full list and deserialize each entry
        cached = conn.lrange(key, 0, -1)
        return [DjangoModelSerializer.deserialize(entry) for entry in cached]
    # cache miss: populate redis, then answer from the queryset
    cls._load_objects_to_cache(key, queryset)
    # redis stores the data as a list, so keep the return type uniform
    return list(queryset)
def load_objects(cls, key, queryset):
    """Return up to REDIS_LIST_LENGTH_LIMIT objects, preferring the redis cache."""
    # At most REDIS_LIST_LENGTH_LIMIT objects are cached; anything deeper is
    # read from the database. The limit is usually fairly large (e.g. 200),
    # and few users page that far, so the extra DB reads are acceptable.
    queryset = queryset[:settings.REDIS_LIST_LENGTH_LIMIT]
    conn = RedisClient.get_connection()
    if not conn.exists(key):
        # cache miss: warm the cache, then answer straight from the queryset
        cls._load_objects_to_cache(key, queryset)
        # convert to list so the type matches what the cache-hit branch returns
        return list(queryset)
    # cache hit: deserialize every stored entry
    loaded = []
    for blob in conn.lrange(key, 0, -1):
        loaded.append(DjangoModelSerializer.deserialize(blob))
    return loaded