def set_amount(cls, r, amount):
    """Update a report-style relation's amount (stored in r._name) and
    keep the per-amount cached id-lists in sync.

    Commits the relation only when the amount actually changed, then for
    every affected amount bucket (old, new, and None) clears the memoized
    by-author lookup and patches the cached id-list for both endpoints.
    """
    old_amount = int(r._name)
    if old_amount != amount:
        # the amount is denormalized into the relation's _name
        r._name = str(amount)
        r._commit()

    #update the cache for the amount = 0 and amount = None cases
    rel = cls.rels[(r._thing1.__class__, r._thing2.__class__)]
    # a set collapses duplicates when old_amount == amount
    for a in set((old_amount, amount, None)):
        # clear memoizing around this thing's author
        if not r._thing2._loaded:
            r._thing2._load()
        if hasattr(r._thing2, "author_id"):
            clear_memo('report._by_author', cls, r._thing2.author_id,
                       amount = a)
        for t in (r._thing1, r._thing2):
            thing_key = cls._cache_prefix(rel, t.__class__,
                                          amount = a) + str(t._id)
            v = cache.get(thing_key)
            # only patch lists that are already cached; a miss will be
            # rebuilt from the db on the next query
            if v is not None:
                if a == old_amount and old_amount != amount and r._id in v:
                    # moving out of the old bucket
                    v.remove(r._id)
                elif r._id not in v:
                    v.append(r._id)
                cache.set(thing_key, v)
def password_email(user):
    """Email *user* a password-reset link that stays valid for 30 minutes."""
    # Use the OS CSPRNG instead of random.randint(0, 1000): the default
    # Mersenne Twister is predictable and a 1001-value salt space is far
    # too small for a security token.  passhash still receives an int —
    # NOTE(review): confirm passhash accepts arbitrary-size ints.
    key = passhash(random.SystemRandom().getrandbits(64), user.email)
    passlink = 'http://' + g.domain + '/resetpassword/' + key
    # map the emailed key back to the user id; expires after 30 minutes
    cache.set("reset_%s" % key, user._id, time=1800)
    simple_email(user.email, '*****@*****.**',
                 '%s password reset' % g.front_page_title,
                 PasswordReset(user=user,
                               passlink=passlink).render(style='email'))
def password_email(user):
    """Email *user* a password-reset link that stays valid for 30 minutes."""
    # Use the OS CSPRNG instead of random.randint(0, 1000): the default
    # Mersenne Twister is predictable and a 1001-value salt space is far
    # too small for a security token.  passhash still receives an int —
    # NOTE(review): confirm passhash accepts arbitrary-size ints.
    key = passhash(random.SystemRandom().getrandbits(64), user.email)
    passlink = 'http://' + c.domain + '/resetpassword/' + key
    # map the emailed key back to the user id; expires after 30 minutes
    cache.set("reset_%s" % key, user._id, time=1800)
    simple_email(user.email, '*****@*****.**',
                 'reddit.com password reset',
                 Password_Reset(user=user, passlink=passlink).render())
def new_fn(*a, **kw):
    """Memoizing wrapper: return the cached result of fn(*a, **kw),
    computing it under a lock on a miss so concurrent callers don't
    duplicate the work.  A None result is cached as the NoneResult
    sentinel so it still counts as a hit.
    """
    #if the keyword param _update == True, the cache will be
    #overwritten no matter what
    update = kw.pop('_update', False)
    key = make_key(iden, *a, **kw)

    # `stale` comes from the enclosing memoize scope
    res = None if update else cache.get(key, stale=stale)

    if res is None:
        # not cached, we should calculate it.
        with make_lock('memoize_lock(%s)' % key):
            # see if it was completed while we were waiting
            # for the lock
            stored = None if update else cache.get(key)
            if stored is not None:
                # it was calculated while we were waiting
                res = stored
            else:
                # okay now go and actually calculate it
                res = fn(*a, **kw)
                if res is None:
                    # sentinel: distinguish "cached None" from a miss
                    res = NoneResult
                cache.set(key, res, time = time)

    if res == NoneResult:
        res = None
    return res
def _commit(self, keys=None):
    """Flush dirty properties to the db and refresh the whole-thing cache.

    keys -- optional property name (or sequence of names) to commit;
            when omitted, every dirty property is written.
    Properties whose names start with '_' go to the thing table via
    _set_props; the rest are data properties written via _set_data.
    """
    if not self._created:
        self._create()

    if self._dirty:
        if keys:
            keys = tup(keys)
            # `in` instead of the deprecated dict.has_key
            to_set = dict((k, self._dirties[k])
                          for k in keys if k in self._dirties)
        else:
            to_set = self._dirties

        data_props = {}
        thing_props = {}
        for k, v in to_set.iteritems():
            if k.startswith('_'):
                # leading underscore marks a thing-table column
                thing_props[k[1:]] = v
            else:
                data_props[k] = v

        if data_props:
            self._set_data(self._type_id, self._id, **data_props)

        if thing_props:
            self._set_props(self._type_id, self._id, **thing_props)

        # only clear the dirties we actually wrote
        if keys:
            for k in keys:
                if k in self._dirties:
                    del self._dirties[k]
        else:
            self._dirties.clear()

    # always set the cache
    cache.set(thing_prefix(self.__class__.__name__, self._id), self)
def password_email(user):
    """Email *user* a password-reset link that stays valid for 30 minutes."""
    # Use the OS CSPRNG instead of random.randint(0, 1000): the default
    # Mersenne Twister is predictable and a 1001-value salt space is far
    # too small for a security token.  passhash still receives an int —
    # NOTE(review): confirm passhash accepts arbitrary-size ints.
    key = passhash(random.SystemRandom().getrandbits(64), user.email)
    passlink = 'http://' + g.domain + '/resetpassword/' + key
    # map the emailed key back to the user id; expires after 30 minutes
    cache.set("reset_%s" % key, user._id, time=1800)
    simple_email(user.email, '*****@*****.**',
                 'lesswrong.com password reset',
                 PasswordReset(user=user,
                               passlink=passlink).render(style='email'))
def _incr(self, prop, amt = 1):
    """Atomically-ish increment a numeric property in db and cache.

    Refuses to run on a dirty thing (uncommitted writes would be lost).
    If the property still holds its class default, fall back to a plain
    set-and-commit; otherwise increment in the db directly and patch
    both the whole-thing cache entry and the per-property counter key.
    """
    if self._dirty:
        raise ValueError, "cannot incr dirty thing"
    prefix = thing_prefix(self.__class__.__name__)
    # per-property counter key, distinct from the whole-thing key below
    key = prefix + prop + '_' + str(self._id)
    cache_val = old_val = cache.get(key)
    if old_val is None:
        old_val = getattr(self, prop)
    if self._defaults.has_key(prop) and self._defaults[prop] == old_val:
        #potential race condition if the same property gets incr'd
        #from default at the same time
        setattr(self, prop, old_val + amt)
        self._commit(prop)
    else:
        # third arg False: skip dirtying (project-specific __setattr__)
        self.__setattr__(prop, old_val + amt, False)
        #db
        if prop.startswith('_'):
            tdb.incr_thing_prop(self._type_id, self._id, prop[1:], amt)
        else:
            self._incr_data(self._type_id, self._id, prop, amt)
        # refresh the whole-thing cache entry
        cache.set(prefix + str(self._id), self)
        #cache
        if cache_val:
            cache.incr(key, amt)
        else:
            cache.set(key, getattr(self, prop))
def cached_query(query, sr):
    """Returns the results from running query. The results are cached
    and only recomputed after 'expire_delta'"""
    query._limit = 150
    query._write_cache = True
    iden = query._iden()

    read_cache = True
    #if query is in the cache, the expire flag is true, and the access
    #time is old, set read_cache = False
    if cache.get(iden) is not None:
        if cache.get(expire_key(sr)):
            access_time = cache.get(access_key(sr))
            if not access_time or datetime.now() > access_time + expire_delta:
                # consume the expire flag so only one recompute happens
                cache.delete(expire_key(sr))
                read_cache = False
    #if the query isn't in the cache, set read_cache to false so we
    #record the access time
    else:
        read_cache = False

    #set access time to the last time the query was actually run (now)
    if not read_cache:
        cache.set(access_key(sr), datetime.now())

    query._read_cache = read_cache
    # iterating the query runs it (and writes the cache, per _write_cache)
    res = list(query)
    return res
def __iter__(self):
    """Yield the query's result Things, reading/writing the cached
    fullname list when the corresponding flags are set."""
    used_cache = False

    if self._stats_collector:
        self._stats_collector.add(self)

    lst = None
    if self._read_cache:
        names = cache.get(self._iden())
        # `is not None` rather than a bare truth test: a cached *empty*
        # result set is stored as () and must still count as a hit,
        # otherwise every call for an empty query hits the db
        if names is not None:
            lst = Thing._by_fullname(names, data = self._data,
                                     return_dict = False)

    if lst is None:
        #hit the db
        try:
            lst = self._cursor().fetchall()
        except NotFound:
            lst = ()
    else:
        used_cache = True

    if self._write_cache and not used_cache:
        names = tuple(i._fullname for i in lst)
        cache.set(self._iden(), names, self._cache_time)

    for i in lst:
        yield i
def new_fn(*a, **kw):
    """Memoizing wrapper: return the cached result of fn(*a, **kw),
    computing it under a lock on a miss.  A None result is cached as
    the NoneResult sentinel so it still counts as a hit.
    """
    #if the keyword param _update == True, the cache will be
    #overwritten no matter what
    # pop() replaces the deprecated has_key/del dance in one step
    update = kw.pop('_update', False)

    key = make_key(iden, *a, **kw)

    res = None if update else cache.get(key)

    if res is None:
        # not cached, we should calculate it.
        with make_lock('memoize_lock(%s)' % key):
            stored = None if update else cache.get(key)
            if stored is None:
                # okay now go and actually calculate it
                res = fn(*a, **kw)
                if res is None:
                    res = NoneResult
                cache.set(key, res, time = time)
            else:
                # it was calculated while we were waiting on
                # the lock
                res = stored

    if res == NoneResult:
        res = None
    return res
def __iter__(self):
    """Yield the query's result Things, using the cached fullname list
    when possible and repopulating it under a lock on a miss."""
    # NOTE(review): used_cache is assigned but never read in this version
    used_cache = False

    def _retrieve():
        return self._cursor().fetchall()

    names = lst = []

    names = cache.get(self._iden()) if self._read_cache else None
    if names is None and not self._write_cache:
        # it wasn't in the cache, and we're not going to
        # replace it, so just hit the db
        lst = _retrieve()
    elif names is None and self._write_cache:
        # it's not in the cache, and we have the power to
        # update it, which we should do in a lock to prevent
        # concurrent requests for the same data
        with g.make_lock("thing_query", "lock_%s" % self._iden()):
            # see if it was set while we were waiting for our
            # lock
            names = cache.get(self._iden(), allow_local = False) \
                    if self._read_cache else None
            if names is None:
                lst = _retrieve()
                cache.set(self._iden(),
                          [ x._fullname for x in lst ],
                          self._cache_time)

    if names and not lst:
        # we got our list of names from the cache, so we need to
        # turn them back into Things
        lst = Thing._by_fullname(names, data = self._data,
                                 return_dict = False)

    for item in lst:
        yield item
def get_hot(sr):
    """Return the top 150 hot links for subreddit *sr*, using the query
    cache with an explicit expire/access-time protocol, and remember the
    #1 link's fullname under top_key(sr)."""
    q = Link._query(Link.c.sr_id == sr._id,
                    sort = desc('_hot'),
                    write_cache = True,
                    limit = 150)
    iden = q._iden()

    read_cache = True
    #if query is in the cache, the expire flag is true, and the access
    #time is old, set read_cache = False
    if cache.get(iden) is not None:
        if cache.get(expire_key(sr)):
            access_time = cache.get(access_key(sr))
            if not access_time or datetime.now() > access_time + expire_delta:
                # consume the expire flag so only one recompute happens
                cache.delete(expire_key(sr))
                read_cache = False
    #if the query isn't in the cache, set read_cache to false so we
    #record the access time
    else:
        read_cache = False

    if not read_cache:
        # record when the query was actually (re)run
        cache.set(access_key(sr), datetime.now())

    q._read_cache = read_cache
    res = list(q)

    #set the #1 link so we can ignore it later. expire after TOP_CACHE
    #just in case something happens and that sr doesn't update
    if res:
        cache.set(top_key(sr), res[0]._fullname, TOP_CACHE)

    return res
def get_image(iden):
    """Render the captcha image for *iden*, creating and caching a fresh
    solution (5-minute TTL) when none is stored yet."""
    cache_key = str(iden)
    solution = cache.get(cache_key)
    if not solution:
        solution = make_solution()
        cache.set(cache_key, solution, time = 300)
    return RandCaptcha(solution=solution).render()
def password_email(user):
    """Email *user* a password-reset link that stays valid for 30 minutes."""
    # Use the OS CSPRNG instead of random.randint(0, 1000): the default
    # Mersenne Twister is predictable and a 1001-value salt space is far
    # too small for a security token.  passhash still receives an int —
    # NOTE(review): confirm passhash accepts arbitrary-size ints.
    key = passhash(random.SystemRandom().getrandbits(64), user.email)
    passlink = 'http://' + g.domain + '/resetpassword/' + key
    # (removed a debug `print` that leaked the secret reset link to stdout)
    # map the emailed key back to the user id; expires after 30 minutes
    cache.set("reset_%s" % key, user._id, time=1800)
    simple_email(user.email, '*****@*****.**',
                 'reddit.com password reset',
                 PasswordReset(user=user,
                               passlink=passlink).render(style='email'))
def _commit(self):
    """Persist this relation, its denormalized copies, and the fast-query
    cache entry mapping (thing1_id, thing2_id, name) -> rel id."""
    DataThing._commit(self)

    #if i denormalized i need to check here
    if denorm1:
        self._thing1._commit(denorm1[0])
    if denorm2:
        self._thing2._commit(denorm2[0])

    #set fast query cache
    fast_key = thing_prefix(self.__class__.__name__) + str(
        (self._thing1_id, self._thing2_id, self._name))
    cache.set(fast_key, self._id)
def password_email(user):
    """Email *user* a password-reset link that stays valid for 30 minutes."""
    # Use the OS CSPRNG instead of random.randint(0, 1000): the default
    # Mersenne Twister is predictable and a 1001-value salt space is far
    # too small for a security token.  passhash still receives an int —
    # NOTE(review): confirm passhash accepts arbitrary-size ints.
    key = passhash(random.SystemRandom().getrandbits(64), user.email)
    passlink = "http://" + g.domain + "/resetpassword/" + key
    # map the emailed key back to the user id; expires after 30 minutes
    cache.set("reset_%s" % key, user._id, time=1800)
    simple_email(
        user.email,
        "*****@*****.**",
        "%s password reset" % g.front_page_title,
        PasswordReset(user=user, passlink=passlink).render(style="email"),
    )
def _delete(self):
    """Remove this relation from the db and invalidate its cache entries."""
    tdb.del_rel(self._type_id, self._id)

    prefix = thing_prefix(self.__class__.__name__)

    #clear cache
    #TODO - there should be just one cache key for a rel?
    id_key = prefix + str(self._id)
    cache.delete(id_key)

    #update fast query cache: None marks the triple as deleted
    fast_key = prefix + str((self._thing1_id, self._thing2_id, self._name))
    cache.set(fast_key, None)
def valid_solution(iden, solution):
    """Validate a captcha answer.

    Returns True and consumes the stored solution on a correct,
    well-formed answer; otherwise rotates the stored solution (so the
    old answer cannot be retried) and returns False.
    """
    well_formed = (iden and solution
                   and len(iden) == IDEN_LENGTH
                   and len(solution) == SOL_LENGTH)
    if well_formed and solution.upper() == cache.get(str(iden)):
        # correct: a solution is single-use
        cache.delete(str(iden))
        return True

    # wrong or malformed: store a fresh solution under the same iden
    cache.set(str(iden), make_solution(), time = 300)
    return False
def _delete(self):
    """Remove this relation from the db, invalidate its cache entries,
    and flip _name to its "un"-prefixed form for the rest of the request."""
    tdb.del_rel(self._type_id, self._id)

    prefix = thing_prefix(self.__class__.__name__)

    # clear cache
    # TODO - there should be just one cache key for a rel?
    id_key = prefix + str(self._id)
    cache.delete(id_key)

    # update fast query cache: None marks the triple as deleted
    # (must use the pre-rename _name, so this comes before the flip below)
    fast_key = prefix + str((self._thing1_id, self._thing2_id, self._name))
    cache.set(fast_key, None)

    # temporarily set this property so the rest of this request
    # know it's deleted. save -> unsave, hide -> unhide
    self._name = "un" + self._name
def new_fn(*a, **kw):
    """Memoizing wrapper: return the cached result of fn(*a, **kw).
    A None result is cached as the NoneResult sentinel so it still
    counts as a hit."""
    # sort the kwargs so the key doesn't depend on dict iteration order —
    # bare str(kw) can differ between processes for the same call,
    # causing spurious cache misses.  (Changes the key format, so old
    # cache entries are invalidated once.)
    key = iden + str(a) + str(sorted(kw.iteritems()))
    res = cache.get(key)
    if res is None:
        res = fn(*a, **kw)
        if res is None:
            res = NoneResult
        cache.set(key, res, time = time)
    if res == NoneResult:
        res = None
    return res
def new_fn(*a, **kw):
    """Memoizing wrapper: return the cached result of fn(*a, **kw).
    A None result is cached as the NoneResult sentinel so it still
    counts as a hit."""
    from r2.config import cache
    # BUG FIX: the original built the key with hash(*a, **kw), but the
    # builtin hash() takes exactly one argument — any call with more
    # than one positional arg or any keyword arg raised TypeError.
    # Build a deterministic string key instead (kwargs sorted so the
    # key doesn't depend on dict iteration order).
    key = iden + str(a) + str(sorted(kw.iteritems()))
    res = cache.get(key)
    if res is None:
        res = fn(*a, **kw)
        if res is None:
            res = NoneResult
        cache.set(key, res, time=time)
    if res == NoneResult:
        res = None
    return res
def incr_descendant_karma(self, comments, amount):
    """Walk up the comment tree accumulating ids into *comments*, then
    bulk-increment descendant_karma for the whole chain at the root.

    comments -- mutable list of comment ids collected so far (this
                comment's id is appended before recursing)
    amount   -- karma delta applied to every collected comment
    """
    old_val = getattr(self, '_descendant_karma')
    comments.append(self._id)
    if hasattr(self, 'parent_id') and self.parent_id:
        # not at the root yet: recurse into the parent
        Comment._byID(self.parent_id).incr_descendant_karma(comments, amount)
    else:
        # root reached: one bulk db update for the whole chain
        from r2.lib.db import tdb_sql as tdb
        tdb.incr_things_prop(self._type_id, comments,
                             'descendant_karma', amount)
    # third arg False: skip dirtying (project-specific __setattr__)
    self.__setattr__('_descendant_karma', old_val + amount, False)
    prefix = self.__class__.__name__ + '_'
    # refresh this comment's whole-thing cache entry
    cache.set(prefix + str(self._id), self)
def new_fn(*a, **kw):
    """Memoizing wrapper: return the cached result of fn(*a, **kw).
    A None result is cached as the NoneResult sentinel so it still
    counts as a hit."""
    #if the keyword param _update == True, the cache will be
    #overwritten no matter what
    # pop() replaces the deprecated has_key/del dance in one step
    update = kw.pop('_update', False)

    key = _make_key(iden, a, kw)

    res = None if update else cache.get(key)
    if res is None:
        res = fn(*a, **kw)
        if res is None:
            res = NoneResult
        cache.set(key, res, time = time)
    if res == NoneResult:
        res = None
    return res
def __iter__(self):
    """Yield the query's result Things, using the cached fullname list
    when possible and repopulating it under a lock on a miss."""
    # NOTE(review): used_cache is assigned but never read in this version
    used_cache = False

    if self._stats_collector:
        self._stats_collector.add(self)

    def _retrieve():
        return self._cursor().fetchall()

    names = lst = []

    names = cache.get(self._iden()) if self._read_cache else None
    if names is None and not self._write_cache:
        # it wasn't in the cache, and we're not going to
        # replace it, so just hit the db
        lst = _retrieve()
    elif names is None and self._write_cache:
        # it's not in the cache, and we have the power to
        # update it, which we should do in a lock to prevent
        # concurrent requests for the same data
        with g.make_lock("lock_%s" % self._iden()):
            # see if it was set while we were waiting for our
            # lock
            names = cache.get(self._iden(), allow_local = False) \
                    if self._read_cache else None
            if names is None:
                lst = _retrieve()
                cache.set(self._iden(),
                          [ x._fullname for x in lst ],
                          self._cache_time)

    if names and not lst:
        # we got our list of names from the cache, so we need to
        # turn them back into Things
        lst = Thing._by_fullname(names, data = self._data,
                                 return_dict = False)

    for item in lst:
        yield item
def __iter__(self):
    """Yield the query's result Things, reading the cached fullname list
    when _read_cache is set and writing it back after a db hit when
    _write_cache is set."""
    if self._stats_collector:
        self._stats_collector.add(self)

    from_cache = False
    things = None

    if self._read_cache:
        cached_names = cache.get(self._iden())
        # `is not None`: a cached empty result set still counts as a hit
        if cached_names is not None:
            things = Thing._by_fullname(cached_names, data = self._data,
                                        return_dict = False)

    if things is None:
        # cache miss (or reads disabled): hit the db
        things = self._cursor().fetchall()
    else:
        from_cache = True

    if self._write_cache and not from_cache:
        fullnames = tuple(t._fullname for t in things)
        cache.set(self._iden(), fullnames, self._cache_time)

    for t in things:
        yield t
def _cache_myself(self):
    """Store this thing in the cache under its canonical cache key."""
    key = self._cache_key()
    cache.set(key, self)
def _cache_myself(self):
    """Store this thing in the cache under its canonical cache key."""
    cache.set(self._cache_key(), self)
def _cache_myself(self):
    """Store this thing in the cache under its canonical cache key,
    logging a warning when a Link/Comment/Subreddit is about to be
    cached with an empty data dict (_t)."""
    key = self._cache_key()
    suspicious = (self.__class__.__name__ in ("Link", "Comment", "Subreddit")
                  and not self._t)
    if suspicious:
        log_text("{} cache", "About to cache {} for %r" % key, "warning")
    cache.set(key, self)
def _cache_myself(self):
    """Store this thing in the cache under its canonical cache key,
    refusing to cache a non-deleted Link/Comment/Subreddit whose data
    dict (_t) is empty."""
    key = self._cache_key()
    bad_to_cache = (self.__class__.__name__ in ("Link", "Comment", "Subreddit")
                    and not self._t and not self._deleted)
    if bad_to_cache:
        raise ValueError("Refusing to cache {} for %r" % key)
    cache.set(key, self)
def expire_hot(sr):
    """Called when a subreddit should be recomputed: after a vote
    (hence, submit) or deletion."""
    # set the stale flag; readers of the hot query check and consume it
    cache.set(expire_key(sr), True)
def expire_hot(sr):
    """Mark subreddit *sr*'s hot query as stale so it gets recomputed."""
    # set the stale flag; readers of the hot query check and consume it
    cache.set(expire_key(sr), True)