def add_message(message, update_recipient=True, update_modmail=True,
                add_to_user=None):
    with g.make_lock("message_tree", messages_lock_key(message.author_id)):
        add_message_nolock(message.author_id, message)

    if update_recipient and message.to_id and message.to_id != message.author_id:
        with g.make_lock("message_tree", messages_lock_key(message.to_id)):
            add_message_nolock(message.to_id, message)

    if update_modmail and message.sr_id:
        with g.make_lock("modmail_tree", sr_messages_lock_key(message.sr_id)):
            add_sr_message_nolock(message.sr_id, message)

    if add_to_user and add_to_user._id != message.to_id:
        with g.make_lock("message_tree", messages_lock_key(add_to_user._id)):
            add_message_nolock(add_to_user._id, message)

def process_message(msg):
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()

def _incr(self, prop, amt=1):
    if self._dirty:
        raise ValueError, "cannot incr dirty thing"

    #make sure we're incr'ing an _int_prop or _data_int_prop.
    if prop not in self._int_props:
        if (prop in self._data_int_props or
                self._int_prop_suffix and prop.endswith(self._int_prop_suffix)):
            #if we're incr'ing a data_prop, make sure we're loaded
            if not self._loaded:
                self._load()
        else:
            msg = (
                "cannot incr non int prop %r on %r -- it's not in %r or %r"
                % (prop, self, self._int_props, self._data_int_props))
            raise ValueError, msg

    with g.make_lock("thing_commit", 'commit_' + self._fullname):
        self._sync_latest()
        old_val = getattr(self, prop)
        if self._defaults.has_key(prop) and self._defaults[prop] == old_val:
            #potential race condition if the same property gets incr'd
            #from default at the same time
            setattr(self, prop, old_val + amt)
            self._commit(prop)
        else:
            self.__setattr__(prop, old_val + amt, False)
            #db
            if prop.startswith('_'):
                tdb.incr_thing_prop(self._type_id, self._id, prop[1:], amt)
            else:
                self._incr_data(self._type_id, self._id, prop, amt)
        self._cache_myself()

def register(name, password, registration_ip):
    # get a lock for registering an Account with this name to prevent
    # simultaneous operations from creating multiple Accounts with the same name
    with g.make_lock("account_register", "register_%s" % name.lower()):
        try:
            account = Account._by_name(name)
            raise AccountExists
        except NotFound:
            account = Account(
                name=name,
                password=bcrypt_password(password),
                # new accounts keep the profanity filter settings until opting out
                pref_no_profanity=True,
                registration_ip=registration_ip,
            )
            account._commit()

            if can_auto_optin_email(request, c):
                if feature.is_enabled("orangereds_as_emails", user=account):
                    account.pref_email_messages = True
                    account._commit()

            # update Account._by_name to pick up this new name->Account
            Account._by_name(name, _update=True)
            Account._by_name(name, allow_deleted=True, _update=True)

            return account

def register(name, password, registration_ip):
    # get a lock for registering an Account with this name to prevent
    # simultaneous operations from creating multiple Accounts with the same name
    with g.make_lock("account_register", "register_%s" % name.lower()):
        try:
            account = Account._by_name(name)
            raise AccountExists
        except NotFound:
            account = Account(
                name=name,
                password=bcrypt_password(password),
                # new accounts keep the profanity filter settings until opting out
                pref_no_profanity=True,
                registration_ip=registration_ip,
                # CUSTOM - Set chat defaults
                pref_chat_user=name,
                pref_chat_client_user=''.join(random.choice(string.ascii_letters+string.digits) for i in range(20)),
                pref_chat_client_password=''.join(random.choice(string.ascii_letters+string.digits) for i in range(20)),
            )
            account._commit()

            # update Account._by_name to pick up this new name->Account
            Account._by_name(name, _update=True)
            Account._by_name(name, allow_deleted=True, _update=True)

            return account

def process_message(msg):
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.error(e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()

def _deactivate_overdelivered(link, campaign):
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s deactivating adzerk flight for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))

        az_flight = update_flight(link, campaign)
        PromotionLog.add(link, 'deactivated %s' % az_flight)

def register(name, password, registration_ip):
    # get a lock for registering an Account with this name to prevent
    # simultaneous operations from creating multiple Accounts with the same name
    with g.make_lock("account_register", "register_%s" % name.lower()):
        try:
            account = Account._by_name(name)
            raise AccountExists
        except NotFound:
            account = Account(
                name=name,
                password=bcrypt_password(password),
                # new accounts keep the profanity filter settings until opting out
                pref_no_profanity=True,
                registration_ip=registration_ip,
            )
            account._commit()

            if can_auto_optin_email(request, c):
                if feature.is_enabled('orangereds_as_emails', user=account):
                    account.pref_email_messages = True
                    account._commit()

            # update Account._by_name to pick up this new name->Account
            Account._by_name(name, _update=True)
            Account._by_name(name, allow_deleted=True, _update=True)

            return account

def __iter__(self):
    if self._read_cache:
        things = self.get_from_cache()
    else:
        things = None

    if things is None and not self._write_cache:
        things = self._get_results()
    elif things is None:
        # it's not in the cache, and we have the power to
        # update it, which we should do in a lock to prevent
        # concurrent requests for the same data
        with g.make_lock("thing_query", "lock_%s" % self._iden()):
            # see if it was set while we were waiting for our
            # lock
            if self._read_cache:
                things = self.get_from_cache(allow_local=False)
            else:
                things = None

            if things is None:
                things = self._get_results()
                self.set_to_cache(things)

    for thing in things:
        yield thing

def new_fn(*a, **kw):
    #if the keyword param _update == True, the cache will be
    #overwritten no matter what
    update = kw.pop('_update', False)

    key = "memo:%s:%s" % (iden, make_key_id(*a, **kw))

    res = None if update else g.memoizecache.get(key, stale=stale)

    if res is None:
        # not cached, we should calculate it.
        with g.make_lock("memoize", 'memoize_lock(%s)' % key,
                         time=timeout, timeout=timeout):
            # see if it was completed while we were waiting
            # for the lock
            stored = None if update else g.memoizecache.get(key)

            if stored is not None:
                # it was calculated while we were waiting
                res = stored
            else:
                # okay now go and actually calculate it
                res = fn(*a, **kw)
                if res is None:
                    res = NoneResult
                g.memoizecache.set(key, res, time=time)

    if res == NoneResult:
        res = None

    return res

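# Hypothetical usage of the memoize wrapper above; the decorator name, its
# arguments, the decorated function, and count_subscribers are illustrative
# only and are not taken from the source. Concurrent callers that miss the
# cache serialize on the "memoize" lock, so the expensive body runs once and
# later waiters reuse the value stored while they were blocked.
@memoize("subscriber_count", time=300)
def subscriber_count(sr_id):
    # stand-in for an expensive computation we only want to run once
    return count_subscribers(sr_id)
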
def _incr(self, prop, amt = 1):
    if self._dirty:
        raise ValueError, "cannot incr dirty thing"

    #make sure we're incr'ing an _int_prop or _data_int_prop.
    if prop not in self._int_props:
        if (prop in self._data_int_props or
                self._int_prop_suffix and prop.endswith(self._int_prop_suffix)):
            #if we're incr'ing a data_prop, make sure we're loaded
            if not self._loaded:
                self._load()
        else:
            msg = ("cannot incr non int prop %r on %r -- it's not in %r or %r"
                   % (prop, self, self._int_props, self._data_int_props))
            raise ValueError, msg

    with g.make_lock("thing_commit", 'commit_' + self._fullname):
        self._sync_latest()
        old_val = getattr(self, prop)
        if self._defaults.has_key(prop) and self._defaults[prop] == old_val:
            #potential race condition if the same property gets incr'd
            #from default at the same time
            setattr(self, prop, old_val + amt)
            self._commit(prop)
        else:
            self.__setattr__(prop, old_val + amt, False)
            #db
            if prop.startswith('_'):
                tdb.incr_thing_prop(self._type_id, self._id, prop[1:], amt)
            else:
                self._incr_data(self._type_id, self._id, prop, amt)
        self._cache_myself()

def add_message(message, update_recipient=True, update_modmail=True,
                add_to_user=None):
    with g.make_lock("message_tree", messages_lock_key(message.author_id)):
        add_message_nolock(message.author_id, message)

    if (update_recipient and message.to_id and
            message.to_id != message.author_id):
        with g.make_lock("message_tree", messages_lock_key(message.to_id)):
            add_message_nolock(message.to_id, message)

    if update_modmail and message.sr_id:
        with g.make_lock("modmail_tree", sr_messages_lock_key(message.sr_id)):
            add_sr_message_nolock(message.sr_id, message)

    if add_to_user and add_to_user._id != message.to_id:
        with g.make_lock("message_tree", messages_lock_key(add_to_user._id)):
            add_message_nolock(add_to_user._id, message)

def _deactivate_overdelivered(link, campaign):
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s deactivating adzerk flight for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))

        az_campaign = update_campaign(link)
        az_flight = update_flight(link, campaign, az_campaign)
        PromotionLog.add(link, 'deactivated %s' % az_flight)

def get_read_modify_write_lock(self):
    """Return the lock to be used when doing a read-modify-write.

    When modifying a Thing we must read its current version from cache and
    update that to avoid clobbering modifications made by other processes
    after we first read the Thing.

    """
    return g.make_lock("thing_commit", 'commit_' + self._fullname)

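# A minimal sketch, not from the source, of how a caller might use the lock
# returned above: the re-read and the write both happen inside the lock so a
# concurrent writer's changes are not clobbered. `_sync_latest` and `_commit`
# follow the surrounding snippets; `flair_text` is a made-up property.
def set_flair_text(thing, text):
    with thing.get_read_modify_write_lock():
        # re-read the latest cached version under the lock before writing
        thing._sync_latest()
        thing.flair_text = text
        thing._commit()
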
def _deactivate_orphaned_flight(flight_id):
    with g.make_lock('adzerk_update', 'adzerk-%d' % flight_id):
        g.log.info('deactivating orphaned flight %d' % flight_id)

        az_flight = adzerk_api.Flight.get(flight_id)

        if not az_flight:
            return

        az_flight.IsActive = False
        az_flight._send()

def process_message(msg):
    vote_data = json.loads(msg.body)
    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    timer = g.stats.get_timer("link_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    link = Link._by_fullname(vote_data.pop("thing_fullname"))

    # create the vote and update the voter's liked/disliked under lock so
    # that the vote state and cached query are consistent
    lock_key = "vote-%s-%s" % (user._id36, link._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, link, vote_data)

        try:
            vote = Vote(
                user,
                link,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
                # CUSTOM: voting model
                vote_direction=vote_data["vote_direction"],
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        vote.commit()
        timer.intermediate("create_vote_object")

        update_user_liked(vote)
        timer.intermediate("voter_likes")

    vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
    link_valid = not (link._spam or link._deleted)
    if vote_valid and link_valid:
        add_to_author_query_q(link)
        add_to_subreddit_query_q(link)
        add_to_domain_query_q(link)

    timer.stop()
    timer.flush()

def process_message(msg):
    vote_data = json.loads(msg.body)
    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    timer = g.stats.get_timer("link_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    link = Link._by_fullname(vote_data.pop("thing_fullname"))

    # create the vote and update the voter's liked/disliked under lock so
    # that the vote state and cached query are consistent
    lock_key = "vote-%s-%s" % (user._id36, link._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, link, vote_data)

        try:
            vote = Vote(
                user,
                link,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        vote.commit()
        timer.intermediate("create_vote_object")

        update_user_liked(vote)
        timer.intermediate("voter_likes")

    vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
    link_valid = not (link._spam or link._deleted)
    if vote_valid and link_valid:
        add_to_author_query_q(link)
        add_to_subreddit_query_q(link)
        add_to_domain_query_q(link)

    timer.stop()
    timer.flush()

def process_message(msg):
    # msg is *PROBABLY* json
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    # the JSON body is loaded into a python object; it has the fields
    # "user_id" and "thing_fullname"
    # a thing is a database object: a link, comment, post, whatever --
    # everything can be upvoted/downvoted
    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    # fetch the user from the database/cache (memcached or postgres)
    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    # this takes a server-wide lock: many consumers might be processing items
    # that touch the same "thing" (the same database object), so a global lock
    # keeps them from clobbering each other's updates
    # memcached stores the lock, atomically
    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()

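# A rough sketch of the idea described in the comments above -- a cross-server
# lock built on memcached's atomic add(). This is an illustration of the
# concept only, not the actual implementation behind g.make_lock.
import time
import uuid
from contextlib import contextmanager


@contextmanager
def memcached_lock(cache, key, timeout=30):
    token = uuid.uuid4().hex
    start = time.time()
    # add() only succeeds when the key does not already exist, which is what
    # makes it usable as a mutex shared by every consumer process
    while not cache.add(key, token, time=timeout):
        if time.time() - start > timeout:
            raise Exception("timed out waiting for lock %r" % key)
        time.sleep(0.01)
    try:
        yield
    finally:
        cache.delete(key)
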
def claim(cls, user, uid, award, description, url):
    with g.make_lock("claim_award", str("%s_%s" % (user.name, uid))):
        existing_trophy_id = user.get_trophy_id(uid)
        if existing_trophy_id:
            trophy = cls._byID(existing_trophy_id)
            preexisting = True
        else:
            preexisting = False
            trophy = cls._new(user, award, description=description, url=url)
            user.set_trophy_id(uid, trophy._id)
            user._commit()

        return trophy, preexisting

def process_message(msg):
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    # if it's an old-style vote, convert to the new format
    if "uid" in vote_data:
        vote_data = convert_old_vote_data(vote_data, msg.timestamp)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()

def create(cls, sr, name):
    if not name or not sr:
        raise ValueError

    name = name.lower()
    _id = wiki_id(sr._id36, name)

    lock_key = "wiki_create_%s:%s" % (sr._id36, name)
    with g.make_lock("wiki", lock_key):
        try:
            cls._byID(_id)
        except tdb_cassandra.NotFound:
            pass
        else:
            raise WikiPageExists

        page = cls(_id=_id, sr=sr._id36, name=name, permlevel=0, content='')
        page._commit()
        return page

def _update_adzerk(link, campaign, triggered_by):
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s updating/creating adzerk objects for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))

        existing_promo = hasattr(link, "external_campaign_id")
        if not existing_promo or campaign is None:
            author = Account._byID(link.author_id, data=True)
            az_advertiser = update_advertiser(author, triggered_by)

            update_creative(link, az_advertiser, triggered_by)

            if not promote.is_external(link):
                update_campaign(link, az_advertiser, triggered_by)

        if campaign:
            update_flight(link, campaign, triggered_by)
            update_cfmap(link, campaign, triggered_by)

def _update_adzerk(link, campaign):
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s updating/creating adzerk objects for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))

        author = Account._byID(link.author_id, data=True)
        az_advertiser = update_advertiser(author)
        az_campaign = update_campaign(link, az_advertiser)
        az_creative = update_creative(link, az_advertiser)

        if campaign:
            az_flight = update_flight(link, campaign, az_campaign)

            if getattr(campaign, 'external_cfmap_id', None) is not None:
                az_cfmap = adzerk_api.CreativeFlightMap.get(az_flight.Id,
                                                            campaign.external_cfmap_id)
            else:
                az_cfmap = create_cfmap(link, campaign, az_campaign,
                                        az_creative, az_flight)

            PromotionLog.add(link, 'updated %s' % az_flight)
        else:
            PromotionLog.add(link, 'updated %s' % az_campaign)

def _update_adzerk(link, campaign):
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s updating/creating adzerk objects for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))

        author = Account._byID(link.author_id, data=True)
        az_advertiser = update_advertiser(author)
        az_campaign = update_campaign(link, az_advertiser)
        az_creative = update_creative(link, az_advertiser)

        if campaign:
            az_flight = update_flight(link, campaign, az_campaign)

            if getattr(campaign, 'external_cfmap_id', None) is not None:
                az_cfmap = adzerk_api.CreativeFlightMap.get(
                    az_flight.Id, campaign.external_cfmap_id)
            else:
                az_cfmap = create_cfmap(link, campaign, az_campaign,
                                        az_creative, az_flight)

            PromotionLog.add(link, 'updated %s' % az_flight)
        else:
            PromotionLog.add(link, 'updated %s' % az_campaign)

def __iter__(self):
    used_cache = False

    def _retrieve():
        return self._cursor().fetchall()

    names = lst = []

    names = self._cache.get(self._iden()) if self._read_cache else None

    if names is None and not self._write_cache:
        # it wasn't in the cache, and we're not going to
        # replace it, so just hit the db
        lst = _retrieve()
    elif names is None and self._write_cache:
        # it's not in the cache, and we have the power to
        # update it, which we should do in a lock to prevent
        # concurrent requests for the same data
        with g.make_lock("thing_query", "lock_%s" % self._iden()):
            # see if it was set while we were waiting for our
            # lock
            if self._read_cache:
                names = self._cache.get(self._iden(), allow_local=False)
            else:
                names = None

            if names is None:
                lst = _retrieve()
                _names = [x._fullname for x in lst]
                self._cache.set(self._iden(), _names, self._cache_time)

    if names and not lst:
        # we got our list of names from the cache, so we need to
        # turn them back into Things
        lst = Thing._by_fullname(names, data=self._data, return_dict=False,
                                 stale=self._stale)

    for item in lst:
        yield item

def register(name, password, registration_ip):
    # get a lock for registering an Account with this name to prevent
    # simultaneous operations from creating multiple Accounts with the same name
    with g.make_lock("account_register", "register_%s" % name.lower()):
        try:
            account = Account._by_name(name)
            raise AccountExists
        except NotFound:
            account = Account(
                name=name,
                password=bcrypt_password(password),
                # new accounts keep the profanity filter settings until opting out
                pref_no_profanity=True,
                registration_ip=registration_ip,
            )
            account._commit()

            # update Account._by_name to pick up this new name->Account
            Account._by_name(name, _update=True)
            Account._by_name(name, allow_deleted=True, _update=True)

            return account

def process_waitinglist(msg):
    user_id36 = msg.body
    user = Account._byID36(user_id36, data=True, stale=True)

    if RobinRoom.get_room_for_user(user):
        print "%s already in room" % user.name
        return

    with g.make_lock("robin_room", "global"):
        current_room_id = g.gencache.get("current_robin_room")
        if not current_room_id:
            current_room = make_new_room()
        else:
            try:
                current_room = RobinRoom._byID(current_room_id)
            except tdb_cassandra.NotFoundException:
                current_room_id = None
                current_room = make_new_room()

            if not current_room.is_alive or current_room.is_continued:
                current_room_id = None
                current_room = make_new_room()

        current_room.add_participants([user])
        print "added %s to %s" % (user.name, current_room.id)

        if current_room_id:
            g.gencache.delete("current_robin_room")
            current_room.persist_computed_name()
            websockets.send_broadcast(
                namespace="/robin/" + current_room.id,
                type="updated_name",
                payload={
                    "room_name": current_room.name,
                },
            )
        else:
            g.gencache.set("current_robin_room", current_room.id)

def process_waitinglist(msg):
    user_id36 = msg.body
    user = Account._byID36(user_id36, data=True, stale=True)

    if RobinRoom.get_room_for_user(user):
        print "%s already in room" % user.name
        return

    with g.make_lock("robin_room", "global"):
        current_room_id = g.cache.get("current_robin_room")
        if not current_room_id:
            current_room = make_new_room()
        else:
            try:
                current_room = RobinRoom._byID(current_room_id)
            except tdb_cassandra.NotFoundException:
                current_room_id = None
                current_room = make_new_room()

            if not current_room.is_alive or current_room.is_continued:
                current_room_id = None
                current_room = make_new_room()

        current_room.add_participants([user])
        print "added %s to %s" % (user.name, current_room.id)

        if current_room_id:
            g.cache.delete("current_robin_room")
            current_room.persist_computed_name()
            websockets.send_broadcast(
                namespace="/robin/" + current_room.id,
                type="updated_name",
                payload={
                    "room_name": current_room.name,
                },
            )
        else:
            g.cache.set("current_robin_room", current_room.id)

def cverbifys_lock(user):
    return g.make_lock("sodium_cverbifys", "cverbifys_%s" % user._id)

def _commit(self, keys=None):
    lock = None

    try:
        if not self._created:
            begin()
            self._create()
            just_created = True
            self.record_cache_write(event="create")
        else:
            just_created = False

        lock = g.make_lock("thing_commit", 'commit_' + self._fullname)
        lock.acquire()

        if not just_created and not self._sync_latest():
            #sync'd and we have nothing to do now, but we still cache anyway
            self._cache_myself()
            return

        if not just_created:
            self.record_cache_write(event="modify")

        # begin is a no-op if already done, but in the not-just-created
        # case we need to do this here because the else block is not
        # executed when the try block is exited prematurely in any way
        # (including the return in the above branch)
        begin()

        to_set = self._dirties.copy()
        if keys:
            keys = tup(keys)
            for key in to_set.keys():
                if key not in keys:
                    del to_set[key]

        data_props = {}
        thing_props = {}
        for k, (old_value, new_value) in to_set.iteritems():
            if k.startswith('_'):
                thing_props[k[1:]] = new_value
            else:
                data_props[k] = new_value

        if data_props:
            self._set_data(self._type_id, self._id,
                           just_created, **data_props)

        if thing_props:
            self._set_props(self._type_id, self._id, **thing_props)

        if keys:
            for k in keys:
                if self._dirties.has_key(k):
                    del self._dirties[k]
        else:
            self._dirties.clear()
    except:
        rollback()
        raise
    else:
        commit()
        self._cache_myself()
    finally:
        if lock:
            lock.release()

    hooks.get_hook("thing.commit").call(thing=self, changes=to_set)

def _commit(self, keys=None):
    lock = None

    try:
        if not self._created:
            begin()
            self._create()
            just_created = True
        else:
            just_created = False

        lock = g.make_lock("thing_commit", 'commit_' + self._fullname)
        lock.acquire()

        if not just_created and not self._sync_latest():
            #sync'd and we have nothing to do now, but we still cache anyway
            self._cache_myself()
            return

        # begin is a no-op if already done, but in the not-just-created
        # case we need to do this here because the else block is not
        # executed when the try block is exited prematurely in any way
        # (including the return in the above branch)
        begin()

        to_set = self._dirties.copy()
        if keys:
            keys = tup(keys)
            for key in to_set.keys():
                if key not in keys:
                    del to_set[key]

        data_props = {}
        thing_props = {}
        for k, (old_value, new_value) in to_set.iteritems():
            if k.startswith('_'):
                thing_props[k[1:]] = new_value
            else:
                data_props[k] = new_value

        if data_props:
            self._set_data(self._type_id, self._id,
                           just_created, **data_props)

        if thing_props:
            self._set_props(self._type_id, self._id, **thing_props)

        if keys:
            for k in keys:
                if self._dirties.has_key(k):
                    del self._dirties[k]
        else:
            self._dirties.clear()
    except:
        rollback()
        raise
    else:
        commit()
        self._cache_myself()
    finally:
        if lock:
            lock.release()

    hooks.get_hook("thing.commit").call(thing=self, changes=to_set)

def _mutation_context(cls, link):
    """Return a lock for use during read-modify-write operations"""
    key = 'comment_lock_' + str(link._id)
    return g.make_lock("comment_tree", key)

def creddits_lock(user):
    return g.make_lock("gold_creddits", "creddits_%s" % user._id)

def _commit(self):
    lock = None

    try:
        if not self._created:
            begin()

            # write the props to the thing table and get back the id
            base_props = (getattr(self, prop) for prop in self._base_props)
            self._id = self._make_fn(self._type_id, *base_props)
            self._created = True
            just_created = True
            self.record_cache_write(event="create")
        else:
            just_created = False

        lock = g.make_lock("thing_commit", 'commit_' + self._fullname)
        lock.acquire()

        if not just_created and not self._sync_latest():
            #sync'd and we have nothing to do now, but we still cache anyway
            self._cache_myself()
            return

        if not just_created:
            self.record_cache_write(event="modify")

        # begin is a no-op if already done, but in the not-just-created
        # case we need to do this here because the else block is not
        # executed when the try block is exited prematurely in any way
        # (including the return in the above branch)
        begin()

        to_set = self._dirties.copy()
        data_props = {}
        thing_props = {}
        for k, (old_value, new_value) in to_set.iteritems():
            if k.startswith('_'):
                thing_props[k[1:]] = new_value
            else:
                data_props[k] = new_value

        if data_props:
            self._set_data(self._type_id, self._id,
                           just_created, **data_props)

        if thing_props and not just_created:
            self._set_props(self._type_id, self._id, **thing_props)

        self._dirties.clear()
    except:
        rollback()
        raise
    else:
        commit()
        self._cache_myself()
    finally:
        if lock:
            lock.release()

    hooks.get_hook("thing.commit").call(thing=self, changes=to_set)

def mutation_context(cls, link, timeout=None):
    return g.make_lock("comment_tree", cls._lock_key(link._id),
                       timeout=timeout)