def find_urls(urls=None, purge=False):
    """Yield ``(url, cache_key, stats)`` for every remembered cached URL.

    :param urls: optional list of URL patterns; when given, only
        remembered URLs matching one of them are yielded.
    :param purge: when True, delete the cached page (and its hit/miss
        counters) for every yielded URL.
    :returns: generator of ``(url, cache_key, stats)`` where ``stats`` is
        either ``None`` or a dict with ``'hits'`` and ``'misses'`` counts.
    """
    remembered_urls = cache.get(REMEMBERED_URLS_KEY, {})
    _del_keys = []
    if urls:
        regexes = _urls_to_regexes(urls)
    for url in remembered_urls:
        if not urls or _match(url, regexes):
            cache_key = remembered_urls[url]
            if not cache.get(cache_key):
                # the cached page itself has expired; nothing to report
                continue
            if purge:
                cache.delete(cache_key)
                _del_keys.append(url)
            misses_cache_key = md5('%s__misses' % url)
            hits_cache_key = md5('%s__hits' % url)
            misses = cache.get(misses_cache_key)
            hits = cache.get(hits_cache_key)
            if misses is None and hits is None:
                stats = None
            else:
                stats = {
                    'hits': hits or 0,
                    'misses': misses or 0
                }
            yield (url, cache_key, stats)

    if _del_keys:
        # means something was changed
        for url in _del_keys:
            remembered_urls.pop(url)
            # BUGFIX: the counter keys are stored under their md5 digest
            # (see the read path above), so they must be hashed before
            # deleting too — deleting the raw key left stale counters
            # behind in the cache.
            cache.delete(md5('%s__misses' % url))
            cache.delete(md5('%s__hits' % url))
        cache.set(
            REMEMBERED_URLS_KEY,
            remembered_urls,
            LONG_TIME
        )
def find_urls(
    urls: typing.Optional[typing.List[str]] = None,
    purge: bool = False,
):
    """Yield ``(url, cache_key, stats)`` for every remembered cached URL.

    :param urls: optional list of URL patterns; when given, only
        remembered URLs matching one of them are yielded.
        (Annotation fixed: ``None`` default requires ``Optional`` per
        PEP 484 — the previous ``typing.List[str] = None`` was an
        implicit Optional.)
    :param purge: when True, delete the cached page for every yielded
        URL and drop it from the remembered-URLs registry afterwards.
    :returns: generator of ``(url, cache_key, stats)`` where ``stats`` is
        either ``None`` or a dict with ``"hits"`` and ``"misses"`` counts.
    """
    remembered_urls = cache.get(REMEMBERED_URLS_KEY, {})
    keys_to_delete = []
    if urls:
        regexes = _urls_to_regexes(urls)
    for url in remembered_urls:
        if not urls or _match(url, regexes):
            cache_key = remembered_urls[url]
            if not cache.get(cache_key):
                # the cached page itself has expired; nothing to report
                continue
            if purge:
                cache.delete(cache_key)
                keys_to_delete.append(url)
            # counter keys are stored under their md5 digest
            misses_cache_key = md5("%s__misses" % url)
            hits_cache_key = md5("%s__hits" % url)
            misses = cache.get(misses_cache_key)
            hits = cache.get(hits_cache_key)
            if misses is None and hits is None:
                stats = None
            else:
                stats = {"hits": hits or 0, "misses": misses or 0}
            yield (url, cache_key, stats)

    if keys_to_delete:
        # means something was changed
        if USE_MEMCACHED_CAS is True:
            # compare-and-swap path: helper handles the registry update
            # atomically; on success there is nothing more to do here
            deleted = delete_keys_cas(keys_to_delete)
            if deleted is True:
                return
        # re-fetch to narrow the window for clobbering concurrent writers
        remembered_urls = cache.get(REMEMBERED_URLS_KEY, {})
        remembered_urls = delete_keys(keys_to_delete, remembered_urls)
        cache.set(REMEMBERED_URLS_KEY, remembered_urls, LONG_TIME)
def find_urls(urls=None, purge=False):
    """Yield ``(url, cache_key, stats)`` for every remembered cached URL.

    :param urls: optional list of URL patterns; when given, only
        remembered URLs matching one of them are yielded.
    :param purge: when True, delete the cached page (and its hit/miss
        counters) for every yielded URL.
    :returns: generator of ``(url, cache_key, stats)`` where ``stats`` is
        either ``None`` or a dict with ``'hits'`` and ``'misses'`` counts.
    """
    remembered_urls = cache.get(REMEMBERED_URLS_KEY, {})
    _del_keys = []
    if urls:
        regexes = _urls_to_regexes(urls)
    for url in remembered_urls:
        if not urls or _match(url, regexes):
            cache_key = remembered_urls[url]
            if not cache.get(cache_key):
                # the cached page itself has expired; nothing to report
                continue
            if purge:
                cache.delete(cache_key)
                _del_keys.append(url)
            misses_cache_key = md5('%s__misses' % url)
            hits_cache_key = md5('%s__hits' % url)
            misses = cache.get(misses_cache_key)
            hits = cache.get(hits_cache_key)
            if misses is None and hits is None:
                stats = None
            else:
                stats = {'hits': hits or 0, 'misses': misses or 0}
            yield (url, cache_key, stats)

    if _del_keys:
        # means something was changed
        for url in _del_keys:
            remembered_urls.pop(url)
            # BUGFIX: the counter keys are stored under their md5 digest
            # (see the read path above), so they must be hashed before
            # deleting too — deleting the raw key left stale counters
            # behind in the cache.
            cache.delete(md5('%s__misses' % url))
            cache.delete(md5('%s__hits' % url))
        cache.set(REMEMBERED_URLS_KEY, remembered_urls, LONG_TIME)
def process_request(self, request):
    """
    Checks whether the page is already cached and returns the cached
    version if available.

    When ``remember_stats_all_urls`` is set, additionally bump a
    per-URL ``__hits`` / ``__misses`` counter in the cache (``response``
    being ``None`` means the page was not cached, i.e. a miss).
    """
    response = self._process_request(request)
    if self.remember_stats_all_urls:
        # then we're nosy
        cache_key = request.get_full_path()
        if response is None:
            cache_key += '__misses'
        else:
            cache_key += '__hits'
        cache_key = md5(cache_key)
        # add() is a no-op when the key already exists, so the counter
        # is initialized atomically.  The previous get()-then-set()
        # sequence was racy: two concurrent requests could both observe
        # None and reset each other's count to 0 before incrementing.
        self.cache.add(cache_key, 0, LONG_TIME)
        self.cache.incr(cache_key)
    return response