def new_fn(*a, **kw):
    """Memoized wrapper: return the cached result of fn(*a, **kw),
    computing and storing it under a lock on a miss.

    Pass _update=True to force recomputation and overwrite the cache.
    """
    #if the keyword param _update == True, the cache will be
    #overwritten no matter what
    # (kw.pop replaces the Python2-only kw.has_key()/del dance)
    update = kw.pop('_update', False)

    key = make_key(iden, *a, **kw)

    res = None if update else cache.get(key)

    if res is None:
        # not cached, we should calculate it.
        with make_lock('memoize_lock(%s)' % key):
            # it may have been calculated while we waited for the lock
            stored = None if update else cache.get(key)
            if stored is None:
                # okay now go and actually calculate it
                res = fn(*a, **kw)
                if res is None:
                    # store a sentinel so a computed None isn't
                    # mistaken for a cache miss next time
                    res = NoneResult
                cache.set(key, res, time = time)
            else:
                # it was calculated while we were waiting on
                # the lock
                res = stored

    if res == NoneResult:
        res = None

    return res
def get_hot(sr):
    """Return the (up to 150) hottest links in subreddit sr.

    Uses a write-through cached query.  The cached copy is re-read
    unless the expire flag is set and the recorded access time is older
    than expire_delta, in which case the query is actually re-run and a
    fresh access time is recorded.  The top link's fullname is also
    cached under top_key(sr) for TOP_CACHE seconds.
    """
    q = Link._query(Link.c.sr_id == sr._id,
                    sort = desc('_hot'),
                    write_cache = True,
                    limit = 150)
    iden = q._iden()

    read_cache = True
    #if query is in the cache, the expire flag is true, and the access
    #time is old, set read_cache = False
    if cache.get(iden) is not None:
        if cache.get(expire_key(sr)):
            access_time = cache.get(access_key(sr))
            if not access_time or datetime.now() > access_time + expire_delta:
                cache.delete(expire_key(sr))
                read_cache = False
    #if the query isn't in the cache, set read_cache to false so we
    #record the access time
    else:
        read_cache = False

    #set access time to the last time the query was actually run (now)
    if not read_cache:
        cache.set(access_key(sr), datetime.now())

    q._read_cache = read_cache
    res = list(q)

    #set the #1 link so we can ignore it later. expire after TOP_CACHE
    #just in case something happens and that sr doesn't update
    if res:
        cache.set(top_key(sr), res[0]._fullname, TOP_CACHE)

    return res
def cached_query(query, sr):
    """Returns the results from running query. The results are cached and
    only recomputed after 'expire_delta'"""
    query._limit = 150
    query._write_cache = True
    iden = query._iden()

    use_cache = True
    if cache.get(iden) is None:
        # query isn't cached at all: run it and record the access time
        use_cache = False
    elif cache.get(expire_key(sr)):
        # expire flag is raised; honor it only once the access time is old
        access_time = cache.get(access_key(sr))
        too_old = not access_time or datetime.now() > access_time + expire_delta
        if too_old:
            cache.delete(expire_key(sr))
            use_cache = False

    if not use_cache:
        # remember when the query was last actually run (now)
        cache.set(access_key(sr), datetime.now())

    query._read_cache = use_cache
    return list(query)
def new_fn(*a, **kw):
    """Memoized wrapper: return the cached result of fn(*a, **kw),
    computing and storing it under a lock on a miss.

    Pass _update=True to force recomputation and overwrite the cache.
    """
    #if the keyword param _update == True, the cache will be
    #overwritten no matter what
    # (kw.pop replaces the Python2-only kw.has_key()/del dance)
    update = kw.pop('_update', False)

    key = make_key(iden, *a, **kw)

    res = None if update else cache.get(key)

    if res is None:
        # not cached, we should calculate it.
        with make_lock('memoize_lock(%s)' % key):
            # it may have been calculated while we waited for the lock
            stored = None if update else cache.get(key)
            if stored is None:
                # okay now go and actually calculate it
                res = fn(*a, **kw)
                if res is None:
                    # store a sentinel so a computed None isn't
                    # mistaken for a cache miss next time
                    res = NoneResult
                cache.set(key, res, time=time)
            else:
                # it was calculated while we were waiting on
                # the lock
                res = stored

    if res == NoneResult:
        res = None

    return res
def new_fn(*a, **kw):
    """Cached wrapper around fn; pass _update=True to force a recompute
    and overwrite whatever is cached."""
    force = kw.pop('_update', False)
    key = make_key(iden, *a, **kw)

    hit = None if force else cache.get(key, stale=stale)
    if hit is not None:
        return None if hit == NoneResult else hit

    # not cached -- compute under a lock so concurrent callers don't
    # all duplicate the work
    with make_lock('memoize_lock(%s)' % key):
        # another worker may have finished while we waited for the lock
        racer = None if force else cache.get(key)
        if racer is not None:
            result = racer
        else:
            result = fn(*a, **kw)
            if result is None:
                # store a sentinel so a cached None isn't read as a miss
                result = NoneResult
            cache.set(key, result, time=time)

    return None if result == NoneResult else result
def new_fn(*a, **kw):
    """Cached wrapper around fn; pass _update=True to force a recompute
    and overwrite whatever is cached."""
    force = kw.pop('_update', False)
    key = make_key(iden, *a, **kw)

    hit = None if force else cache.get(key, stale=stale)
    if hit is None:
        # not cached -- compute under a lock so concurrent callers
        # don't all duplicate the work
        with make_lock('memoize_lock(%s)' % key):
            # another worker may have finished while we waited
            racer = None if force else cache.get(key)
            if racer is not None:
                hit = racer
            else:
                hit = fn(*a, **kw)
                if hit is None:
                    # sentinel: a cached None must not look like a miss
                    hit = NoneResult
                cache.set(key, hit, time = time)

    if hit == NoneResult:
        hit = None
    return hit
def __iter__(self):
    """Yield the Things matching this query.

    Reads the cached list of fullnames when _read_cache allows it;
    otherwise hits the db, refreshing the cache (under a lock, to stop
    concurrent identical queries) when _write_cache allows it.
    """
    def _retrieve():
        return self._cursor().fetchall()

    # (previously `names = lst = []` aliased the two locals and kept an
    # unused `used_cache` flag; lst alone needs the default)
    lst = []

    names = cache.get(self._iden()) if self._read_cache else None
    if names is None and not self._write_cache:
        # it wasn't in the cache, and we're not going to
        # replace it, so just hit the db
        lst = _retrieve()
    elif names is None and self._write_cache:
        # it's not in the cache, and we have the power to
        # update it, which we should do in a lock to prevent
        # concurrent requests for the same data
        with g.make_lock("thing_query", "lock_%s" % self._iden()):
            # see if it was set while we were waiting for our
            # lock
            names = cache.get(self._iden(), allow_local = False) \
                    if self._read_cache else None
            if names is None:
                lst = _retrieve()
                cache.set(self._iden(),
                          [ x._fullname for x in lst ],
                          self._cache_time)

    if names and not lst:
        # we got our list of names from the cache, so we need to
        # turn them back into Things
        lst = Thing._by_fullname(names, data = self._data,
                                 return_dict = False)

    for item in lst:
        yield item
def __iter__(self):
    """Yield the Things matching this query, preferring the cached
    fullname tuple and writing it back after a db hit.

    Fix: the cached value was tested with `if names:`, so an *empty*
    cached result read as a miss and empty queries hit the db on every
    call; test `is not None` so empty results are cached too.
    """
    used_cache = False

    if self._stats_collector:
        self._stats_collector.add(self)

    lst = None
    if self._read_cache:
        names = cache.get(self._iden())
        if names is not None:
            lst = Thing._by_fullname(names, data = self._data,
                                     return_dict = False)

    if lst is None:
        #hit the db
        try:
            lst = self._cursor().fetchall()
        except NotFound:
            lst = ()
    else:
        used_cache = True

    if self._write_cache and not used_cache:
        names = tuple(i._fullname for i in lst)
        cache.set(self._iden(), names, self._cache_time)

    for i in lst:
        yield i
def _incr(self, prop, amt = 1):
    """Atomically-ish increment the integer property `prop` by `amt`,
    keeping the db, the thing cache, and the per-prop counter cache in
    sync.  Raises ValueError on a thing with uncommitted changes.

    (Python3-compatible syntax: `raise ValueError(...)` instead of the
    old comma form, and `in` instead of dict.has_key.)
    """
    if self._dirty:
        raise ValueError("cannot incr dirty thing")

    prefix = thing_prefix(self.__class__.__name__)
    key = prefix + prop + '_' + str(self._id)

    cache_val = old_val = cache.get(key)
    if old_val is None:
        old_val = getattr(self, prop)

    if prop in self._defaults and self._defaults[prop] == old_val:
        #potential race condition if the same property gets incr'd
        #from default at the same time
        setattr(self, prop, old_val + amt)
        self._commit(prop)
    else:
        # bypass the dirty-tracking setattr (third arg False)
        self.__setattr__(prop, old_val + amt, False)
        #db
        if prop.startswith('_'):
            tdb.incr_thing_prop(self._type_id, self._id, prop[1:], amt)
        else:
            self._incr_data(self._type_id, self._id, prop, amt)
        cache.set(prefix + str(self._id), self) #cache
        if cache_val:
            cache.incr(key, amt)
        else:
            cache.set(key, getattr(self, prop))
def set_amount(cls, r, amount):
    """Set the report amount stored in relation r's _name to `amount`
    and patch every cached id-list that mentions r so the caches stay
    consistent with the new amount.

    NOTE(review): assumes the values cached under _cache_prefix keys
    are mutable lists of relation ids -- confirm against the writers.
    """
    old_amount = int(r._name)
    if old_amount != amount:
        r._name = str(amount)
        r._commit()

    #update the cache for the amount = 0 and amount = None cases
    rel = cls.rels[(r._thing1.__class__, r._thing2.__class__)]
    for a in set((old_amount, amount, None)):
        # clear memoizing around this thing's author
        if not r._thing2._loaded:
            r._thing2._load()
        if hasattr(r._thing2, "author_id"):
            clear_memo('report._by_author', cls, r._thing2.author_id,
                       amount=a)

        for t in (r._thing1, r._thing2):
            thing_key = cls._cache_prefix(rel, t.__class__,
                                          amount=a) + str(t._id)
            v = cache.get(thing_key)
            if v is not None:
                #drop r from the list keyed by its old amount; make
                #sure it appears in every other relevant list
                if a == old_amount and old_amount != amount and r._id in v:
                    v.remove(r._id)
                elif r._id not in v:
                    v.append(r._id)
                cache.set(thing_key, v)
def set_amount(cls, r, amount):
    """Set the report amount stored in relation r's _name to `amount`
    and patch every cached id-list that mentions r so the caches stay
    consistent with the new amount.

    NOTE(review): assumes the values cached under _cache_prefix keys
    are mutable lists of relation ids -- confirm against the writers.
    """
    old_amount = int(r._name)
    if old_amount != amount:
        r._name = str(amount)
        r._commit()

    #update the cache for the amount = 0 and amount = None cases
    rel = cls.rels[(r._thing1.__class__, r._thing2.__class__)]
    for a in set((old_amount, amount, None)):
        # clear memoizing around this thing's author
        if not r._thing2._loaded:
            r._thing2._load()
        if hasattr(r._thing2, "author_id"):
            clear_memo('report._by_author', cls, r._thing2.author_id,
                       amount = a)

        for t in (r._thing1, r._thing2):
            thing_key = cls._cache_prefix(rel, t.__class__,
                                          amount = a) + str(t._id)
            v = cache.get(thing_key)
            if v is not None:
                #drop r from the list keyed by its old amount; make
                #sure it appears in every other relevant list
                if a == old_amount and old_amount != amount and r._id in v:
                    v.remove(r._id)
                elif r._id not in v:
                    v.append(r._id)
                cache.set(thing_key, v)
def get_image(iden):
    """Render the captcha image for iden, minting (and caching) a fresh
    solution when none exists yet."""
    key = str(iden)
    solution = cache.get(key)
    if not solution:
        # no stored answer for this iden -- create one, valid 5 minutes
        solution = make_solution()
        cache.set(key, solution, time=300)
    return RandCaptcha(solution=solution).render()
def __iter__(self):
    """Yield the things for this query, using the cached fullname list
    when reading is permitted and refreshing the cache after a db hit
    when writing is permitted."""
    if self._stats_collector:
        self._stats_collector.add(self)

    things = None
    if self._read_cache:
        cached_names = cache.get(self._iden())
        if cached_names is not None:
            # rebuild the Thing objects from their cached fullnames
            things = Thing._by_fullname(cached_names, data=self._data,
                                        return_dict=False)

    from_cache = things is not None
    if not from_cache:
        #hit the db
        things = self._cursor().fetchall()

    if self._write_cache and not from_cache:
        fullnames = tuple(t._fullname for t in things)
        cache.set(self._iden(), fullnames, self._cache_time)

    for thing in things:
        yield thing
def get_image(iden):
    """Render the captcha image for iden, minting (and caching) a fresh
    solution when none exists yet."""
    key = str(iden)
    solution = cache.get(key)
    if not solution:
        # no stored answer for this iden -- create one, valid 5 minutes
        solution = make_solution()
        cache.set(key, solution, time = 300)
    return RandCaptcha(solution=solution).render()
def _other_self(self):
    """Load from the cached version of myself. Skip the local cache."""
    cached = cache.get(self._cache_key(), allow_local=False)
    if not cached or cached._id == self._id:
        return cached
    # the cached object has a different _id: it's a doppleganger.
    # log it and purge the poisoned entry.
    g.log.error("thing.py: Doppleganger on read: got %s for %s",
                (cached, self))
    cache.delete(self._cache_key())
    return None
def _other_self(self):
    """Load from the cached version of myself. Skip the local cache."""
    cached = cache.get(self._cache_key(), allow_local = False)
    if not cached or cached._id == self._id:
        return cached
    # the cached object has a different _id: it's a doppleganger.
    # log it and purge the poisoned entry.
    g.log.error("thing.py: Doppleganger on read: got %s for %s",
                (cached, self))
    cache.delete(self._cache_key())
    return None
def valid_solution(iden, solution):
    """Check a captcha answer against the cached solution for iden.

    A correct answer consumes the stored solution (one-shot); any wrong
    or malformed answer re-seeds the captcha so the client can't keep
    guessing against the same solution."""
    ok = (bool(iden) and bool(solution)
          and len(iden) == IDEN_LENGTH
          and len(solution) == SOL_LENGTH
          and solution.upper() == cache.get(str(iden)))
    if ok:
        cache.delete(str(iden))
        return True
    cache.set(str(iden), make_solution(), time=300)
    return False
def new_fn(*a, **kw):
    """Return fn(*a, **kw), caching the result under a key derived from
    the arguments; a computed None is stored as the NoneResult sentinel
    so it isn't mistaken for a cache miss."""
    cache_key = iden + str(a) + str(kw)
    cached = cache.get(cache_key)
    if cached is None:
        # miss: compute and store
        cached = fn(*a, **kw)
        if cached is None:
            cached = NoneResult
        cache.set(cache_key, cached, time = time)
    return None if cached == NoneResult else cached
def new_fn(*a, **kw):
    """Return fn(*a, **kw), caching the result under a key derived from
    the arguments; a computed None is stored as the NoneResult sentinel
    so it isn't mistaken for a cache miss."""
    cache_key = iden + str(a) + str(kw)
    cached = cache.get(cache_key)
    if cached is None:
        # miss: compute and store
        cached = fn(*a, **kw)
        if cached is None:
            cached = NoneResult
        cache.set(cache_key, cached, time=time)
    return None if cached == NoneResult else cached
def run(self, key, name):
    """Resolve key to an Account via the cache.

    Adds BAD_USERNAME when the account's name doesn't match `name`,
    EXPIRED when the key is missing or unresolvable; returns None when
    the cached uid no longer maps to an account."""
    if key:
        uid = cache.get(str(self.cache_prefix + "_" + key))
        try:
            account = Account._byID(uid, data = True)
        except NotFound:
            return None
        # key resolved, but it belongs to a different user
        if name and account.name.lower() != name.lower():
            c.errors.add(errors.BAD_USERNAME)
        if account:
            return account
    c.errors.add(errors.EXPIRED)
def valid_solution(iden, solution):
    """Check a captcha answer against the cached solution for iden.

    A correct answer consumes the stored solution (one-shot); any wrong
    or malformed answer re-seeds the captcha so the client can't keep
    guessing against the same solution."""
    ok = (bool(iden) and bool(solution)
          and len(iden) == IDEN_LENGTH
          and len(solution) == SOL_LENGTH
          and solution.upper() == cache.get(str(iden)))
    if ok:
        cache.delete(str(iden))
        return True
    cache.set(str(iden), make_solution(), time = 300)
    return False
def run(self, key, name):
    """Resolve key to an Account via the cache.

    Adds BAD_USERNAME when the account's name doesn't match `name`,
    EXPIRED when the key is missing or unresolvable; returns None when
    the cached uid no longer maps to an account."""
    if key:
        uid = cache.get(str(self.cache_prefix + "_" + key))
        try:
            account = Account._byID(uid, data=True)
        except NotFound:
            return None
        # key resolved, but it belongs to a different user
        if name and account.name.lower() != name.lower():
            c.errors.add(errors.BAD_USERNAME)
        if account:
            return account
    c.errors.add(errors.EXPIRED)
def new_fn(*a, **kw):
    """Return fn(*a, **kw), caching the result; a computed None is
    stored as the NoneResult sentinel so it isn't read as a miss."""
    from r2.config import cache
    # NOTE(review): the key was built as `iden + hash(*a, **kw)`, but
    # the builtin hash() takes exactly one positional argument and no
    # keywords, and returns an int that can't concatenate onto the iden
    # string -- that expression raises TypeError.  Build a stable
    # string key from the arguments instead.
    key = iden + str(a) + str(kw)
    res = cache.get(key)
    if res is None:
        res = fn(*a, **kw)
        if res is None:
            res = NoneResult
        cache.set(key, res, time=time)
    if res == NoneResult:
        res = None
    return res
def new_fn(*a, **kw):
    """Return fn(*a, **kw), caching the result; a computed None is
    stored as the NoneResult sentinel so it isn't read as a miss."""
    from r2.config import cache
    # NOTE(review): the key was built as `iden + hash(*a, **kw)`, but
    # the builtin hash() takes exactly one positional argument and no
    # keywords, and returns an int that can't concatenate onto the iden
    # string -- that expression raises TypeError.  Build a stable
    # string key from the arguments instead.
    key = iden + str(a) + str(kw)
    res = cache.get(key)
    if res is None:
        res = fn(*a, **kw)
        if res is None:
            res = NoneResult
        cache.set(key, res, time = time)
    if res == NoneResult:
        res = None
    return res
def __init__(self):
    """Populate top_users/top_day/top_week from the cached 'stats'
    entry, resolving user ids to Account objects in one batch."""
    Wrapped.__init__(self)
    stats = cache.get('stats')
    if not stats:
        # nothing cached yet -- expose empty sequences
        self.top_users = self.top_day = self.top_week = ()
        return

    top_users, top_day, top_week = stats

    #lookup user objs
    uids = list(top_users)
    uids.extend(pair[0] for pair in top_day)
    uids.extend(pair[0] for pair in top_week)
    users = Account._byID(uids, data = True)

    self.top_users = (users[u] for u in top_users)
    self.top_day = ((users[pair[0]], pair[1]) for pair in top_day)
    self.top_week = ((users[pair[0]], pair[1]) for pair in top_week)
def new_fn(*a, **kw):
    """Memoizing wrapper around fn; pass _update=True to force a
    recompute and overwrite the cached value."""
    #if the keyword param _update == True, the cache will be
    #overwritten no matter what
    # (kw.pop replaces the Python2-only kw.has_key()/del dance)
    update = kw.pop('_update', False)

    key = _make_key(iden, a, kw)
    res = None if update else cache.get(key)
    if res is None:
        res = fn(*a, **kw)
        if res is None:
            # sentinel so a cached None isn't mistaken for a miss
            res = NoneResult
        cache.set(key, res, time = time)
    if res == NoneResult:
        res = None
    return res
def run(self, key):
    """Look key up in the cache; add an EXPIRED error when the key is
    absent or its entry has fallen out of the cache."""
    if key:
        stored = cache.get(str(self.cache_prefix + "_" + key))
        if stored:
            return stored
    c.errors.add(errors.EXPIRED)
def is_top_link(sr, link):
    """True when link is the cached #1 link for subreddit sr."""
    top_fullname = cache.get(top_key(sr))
    return top_fullname == link._fullname
def _other_self(self):
    """Load from the cached version of myself. Skip the local cache."""
    key = self._cache_key()
    return cache.get(key, local = False)