@classmethod
def from_cache_multi(cls, ids):
    """
    Check if a record is in cache. If it is, load from there; if not,
    load the record and then cache it, but in bulk.
    """
    results = []
    misses = []
    if not cls.cache_backend:
        misses = ids
    else:
        for id in ids:
            key = cls.get_cache_key(id)
            cached_value = cls.cache_backend.get(key)
            if cached_value:
                # `loads` deserializes the cached payload (whatever
                # serializer the cache layer uses for `store_in_cache`).
                results.append(cls(id=id, values=loads(cached_value)))
            else:
                misses.append(id)
    if misses:
        # Fetch all the misses in one bulk read
        rows = cls.rpc.read(misses, tuple(cls._fields))
        for row in rows:
            record = cls(id=row['id'], values=row)
            record.store_in_cache()
            results.append(record)
    # Preserve the order of the ids passed in
    return sorted(results, key=lambda r: ids.index(r.id))

@classmethod
def from_cache(cls, id):
    """
    Check if a record is in cache. If it is, load from there; if not,
    load the record and then cache it.
    """
    key = cls.get_cache_key(id)
    if cls.cache_backend and cls.cache_backend.exists(key):
        return cls(id=id, values=loads(cls.cache_backend.get(key)))
    # Cache miss: fetch from the server, then cache for next time
    record = cls(id=id)
    record.refresh()
    record.store_in_cache()
    return record

@classmethod
def from_cache(cls, id):
    """
    Check if a record is in cache. If it is, load from there; if not,
    load the record and then cache it.
    """
    if isinstance(id, (list, tuple)):
        # A list comprehension, not map(): on Python 3, map() returns a
        # lazy iterator and the lookups would silently be deferred.
        return [cls.from_cache(i) for i in id]
    key = cls.get_cache_key(id)
    cached_value = cls.cache_backend and cls.cache_backend.get(key)
    if cached_value:
        cache_logger.debug("HIT::%s" % key)
        return cls(id=id, values=loads(cached_value))
    cache_logger.warning("MISS::%s" % key)
    record = cls(id=id)
    record.refresh()
    record.store_in_cache()
    return record

@classmethod
def from_cache_multi(cls, ids, ignore_misses=False):
    """
    Check if a record is in cache. If it is, load from there; if not,
    load the record and then cache it, but in bulk.

    For performance, you can opt to skip fetching records that are not
    already in the cache. While this can give you a performance boost,
    it may leave some records missing from the result. Use with care,
    you've been warned!

    :param ignore_misses: If True, the returned set will not include
                          records that are not already in cache.
    """
    if not ids:
        return []
    results = []
    misses = []
    if not cls.cache_backend:
        misses = ids
    else:
        # One round trip for all keys instead of a get() per id
        keys = [cls.get_cache_key(id) for id in ids]
        cached_values = cls.cache_backend.mget(keys)
        for id, cached_value in zip(ids, cached_values):
            if cached_value:
                results.append(cls(id=id, values=loads(cached_value)))
            else:
                misses.append(id)
    if misses:
        cache_logger.warning(
            "MISS::MULTI::%s::%s" % (cls.__model_name__, misses)
        )
    if misses and not ignore_misses:
        # Fetch all the misses in one bulk read
        rows = cls.rpc.read(misses, tuple(cls._fields))
        for row in rows:
            record = cls(id=row['id'], values=row)
            record.store_in_cache()
            results.append(record)
    # Preserve the order of the ids passed in
    return sorted(results, key=lambda r: ids.index(r.id))

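# A minimal usage sketch of from_cache_multi, assuming a hypothetical
# `Product` model class and a Redis client wired in as its cache_backend;
# the `Product` name and the Redis setup below are illustrative
# assumptions, not part of the original code.
import redis

Product.cache_backend = redis.StrictRedis(host='localhost', port=6379)

# Cache hits are served by a single mget(); misses trigger one bulk
# rpc.read() and are cached for next time.
products = Product.from_cache_multi([1, 2, 3])

# Fast path: skip the RPC entirely and accept that ids not already in
# cache will simply be absent from the result.
cached_only = Product.from_cache_multi([1, 2, 3], ignore_misses=True)
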
@classmethod
def from_cache(cls, id):
    """
    Check if a record is in cache. If it is, load from there; if not,
    load the record and then cache it.
    """
    if isinstance(id, (list, tuple)):
        message = "For lists of ids use from_cache_multi"
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        return cls.from_cache_multi(id)
    key = cls.get_cache_key(id)
    cached_value = cls.cache_backend and cls.cache_backend.get(key)
    if cached_value:
        cache_logger.debug("HIT::%s" % key)
        return cls(id=id, values=loads(cached_value))
    cache_logger.warning("MISS::%s" % key)
    record = cls(id=id)
    record.refresh()
    record.store_in_cache()
    return record

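# A minimal sketch of the single-record path, under the same assumed
# `Product` setup as above. A plain id is looked up (and cached) one key
# at a time; passing a list still works, but now emits a
# DeprecationWarning and delegates to from_cache_multi.
import warnings

product = Product.from_cache(42)  # logs HIT::<key> or MISS::<key>

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    products = Product.from_cache([1, 2])  # deprecated list form
    assert any(w.category is DeprecationWarning for w in caught)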