def add_message(message):
    """Insert a new message into the cached message trees.

    The author's tree, the recipient's tree (if any), and the
    subreddit's modmail tree (if any) are each updated under their own
    per-owner lock.
    """
    # add the message to the author's list and the recipient
    with g.make_lock(messages_lock_key(message.author_id)):
        add_message_nolock(message.author_id, message)
    if message.to_id:
        with g.make_lock(messages_lock_key(message.to_id)):
            add_message_nolock(message.to_id, message)
    if message.sr_id:
        with g.make_lock(sr_messages_lock_key(message.sr_id)):
            add_sr_message_nolock(message.sr_id, message)
def add_message(message, update_recipient=True, update_modmail=True):
    """Insert a new message into the cached message trees.

    update_recipient: when False, skip the recipient's tree.
    update_modmail: when False, skip the subreddit modmail tree.
    Each tree is updated under its own named lock.
    """
    # add the message to the author's list and the recipient
    with g.make_lock("message_tree", messages_lock_key(message.author_id)):
        add_message_nolock(message.author_id, message)
    if update_recipient and message.to_id:
        with g.make_lock("message_tree", messages_lock_key(message.to_id)):
            add_message_nolock(message.to_id, message)
    if update_modmail and message.sr_id:
        with g.make_lock("modmail_tree", sr_messages_lock_key(message.sr_id)):
            add_sr_message_nolock(message.sr_id, message)
def add_message(message):
    """Insert a new message into the cached message trees.

    Messages sent FROM a subreddit (message.from_sr set) are excluded
    from that subreddit's modmail tree.
    """
    # add the message to the author's list and the recipient
    with g.make_lock("message_tree", messages_lock_key(message.author_id)):
        add_message_nolock(message.author_id, message)
    if message.to_id:
        with g.make_lock("message_tree", messages_lock_key(message.to_id)):
            add_message_nolock(message.to_id, message)
    # Messages to a subreddit should end in its inbox. Messages
    # FROM a subreddit (currently, just ban messages) should NOT
    if message.sr_id and not message.from_sr:
        with g.make_lock("modmail_tree", sr_messages_lock_key(message.sr_id)):
            add_sr_message_nolock(message.sr_id, message)
def add_message(message, update_recipient=True, update_modmail=True,
                add_to_user=None):
    """Insert a new message into the cached message trees.

    update_recipient / update_modmail: toggles for the recipient and
    modmail trees. add_to_user: an extra account whose tree should also
    receive the message (skipped if it is already the recipient, to
    avoid a double insert).
    """
    with g.make_lock("message_tree", messages_lock_key(message.author_id)):
        add_message_nolock(message.author_id, message)
    if update_recipient and message.to_id:
        with g.make_lock("message_tree", messages_lock_key(message.to_id)):
            add_message_nolock(message.to_id, message)
    if update_modmail and message.sr_id:
        with g.make_lock("modmail_tree", sr_messages_lock_key(message.sr_id)):
            add_sr_message_nolock(message.sr_id, message)
    if add_to_user and add_to_user._id != message.to_id:
        with g.make_lock("message_tree", messages_lock_key(add_to_user._id)):
            add_message_nolock(add_to_user._id, message)
def link_comments(link_id, _update=False):
    """Return the comment tree tuple for a link, rebuilding it on a
    cache miss (or when _update=True forces a rebuild).

    Also regenerates the cached parent dict and the per-sort sorter
    dicts while the per-link lock is held.
    """
    key = comments_key(link_id)
    r = g.permacache.get(key)
    if r and not _update:
        return r
    else:
        # This operation can take longer than most (note the inner
        # locks) better to time out request temporarily than to deal
        # with an inconsistent tree
        with g.make_lock(lock_key(link_id), timeout=180):
            r = _load_link_comments(link_id)
            # rebuild parent dict
            cids, cid_tree, depth, num_children = r
            g.permacache.set(parent_comments_key(link_id),
                             _parent_dict_from_tree(cid_tree))
            # rebuild the sorts
            for sort in ("_controversy","_date","_hot","_confidence","_score"):
                g.permacache.set(sort_comments_key(link_id, sort),
                                 _comment_sorter_from_cids(cids, sort))
            g.permacache.set(key, r)
        return r
    def __iter__(self):
        """Iterate over the query's results, consulting the cache first.

        On a cache miss with write-caching enabled, the fullname list is
        computed and stored under a lock (with a re-check inside the
        lock) so concurrent requests don't duplicate the db work.
        """
        used_cache = False  # NOTE(review): appears unused in this body

        def _retrieve():
            return self._cursor().fetchall()

        names = lst = []

        names = cache.get(self._iden()) if self._read_cache else None
        if names is None and not self._write_cache:
            # it wasn't in the cache, and we're not going to
            # replace it, so just hit the db
            lst = _retrieve()
        elif names is None and self._write_cache:
            # it's not in the cache, and we have the power to
            # update it, which we should do in a lock to prevent
            # concurrent requests for the same data
            with g.make_lock("thing_query", "lock_%s" % self._iden()):
                # see if it was set while we were waiting for our
                # lock
                names = cache.get(self._iden(), allow_local = False) \
                        if self._read_cache else None
                if names is None:
                    lst = _retrieve()
                    cache.set(self._iden(),
                              [ x._fullname for x in lst ],
                              self._cache_time)

        if names and not lst:
            # we got our list of names from the cache, so we need to
            # turn them back into Things
            lst = Thing._by_fullname(names, data = self._data,
                                     return_dict = False)

        for item in lst:
            yield item
def _incr(self, prop, amt = 1): if self._dirty: raise ValueError, "cannot incr dirty thing" #make sure we're incr'ing an _int_prop or _data_int_prop. if prop not in self._int_props: if (prop in self._data_int_props or self._int_prop_suffix and prop.endswith(self._int_prop_suffix)): #if we're incr'ing a data_prop, make sure we're loaded if not self._loaded: self._load() else: msg = ("cannot incr non int prop %r on %r -- it's not in %r or %r" % (prop, self, self._int_props, self._data_int_props)) raise ValueError, msg with g.make_lock("thing_commit", 'commit_' + self._fullname): self._sync_latest() old_val = getattr(self, prop) if self._defaults.has_key(prop) and self._defaults[prop] == old_val: #potential race condition if the same property gets incr'd #from default at the same time setattr(self, prop, old_val + amt) self._commit(prop) else: self.__setattr__(prop, old_val + amt, False) #db if prop.startswith('_'): tdb.incr_thing_prop(self._type_id, self._id, prop[1:], amt) else: self._incr_data(self._type_id, self._id, prop, amt) self._cache_myself()
    def record_violation(self, category, seconds = None, growfast=False):
        """Record a ratelimit violation for the requesting IP.

        Each violation extends the penalty window; with growfast=True
        the duration grows 3x per prior violation, capped at 8 hours.
        """
        if seconds is None:
            seconds = g.RATELIMIT*60

        key = "VDelay-%s-%s" % (category, request.ip)
        prev_violations = g.memcache.get(key)
        if prev_violations is None:
            prev_violations = dict(count=0)

        num_violations = prev_violations["count"]

        if growfast:
            multiplier = 3 ** num_violations
        else:
            multiplier = 1

        max_duration = 8 * 3600
        duration = min(seconds * multiplier, max_duration)

        expire_time = (datetime.now(g.tz) +
                       timedelta(seconds = duration))

        prev_violations["expire_time"] = expire_time
        prev_violations["duration"] = duration
        prev_violations["count"] += 1

        with g.make_lock("lock-" + key, timeout=5, verbose=False):
            # re-read inside the lock; only write if we aren't stomping
            # on a concurrent update with a higher count
            existing = g.memcache.get(key)
            if existing and existing["count"] > prev_violations["count"]:
                g.log.warning("Tried to set %s to count=%d, but found existing=%d"
                              % (key, prev_violations["count"], existing["count"]))
            else:
                # NOTE(review): reads go through g.memcache but this write
                # goes through g.cache -- confirm the two are backed by the
                # same store, otherwise the read-back above never matches.
                g.cache.set(key, prev_violations, max_duration)
    def _new(cls, name, title, author_id, ip, lang=g.lang, type='public',
             over_18=False, **kw):
        """Create a new subreddit, raising SubredditExists if the name
        is already taken.

        The existence check and creation happen under a per-name lock
        so two concurrent requests can't both create the same name.
        """
        with g.make_lock("create_sr", 'create_sr_' + name.lower()):
            try:
                sr = Subreddit._by_name(name)
                raise SubredditExists
            except NotFound:
                if "allow_top" not in kw:
                    kw['allow_top'] = True
                sr = Subreddit(name=name, title=title, lang=lang, type=type,
                               over_18=over_18, author_id=author_id, ip=ip,
                               **kw)
                sr._commit()

                #clear cache
                Subreddit._by_name(name, _update=True)
                return sr
def new_campaign(link, dates, bid, sr):
    """Create a new promo campaign on a link and return its index.

    Dual-writes: a PromoCampaign Thing is created, and the campaign
    tuple is also stored in the link's `campaigns` dict (keyed by the
    Thing id) under the campaign lock.
    """
    # empty string for sr_name means target to all
    sr_name = sr.name if sr else ""

    # dual-write campaigns as data Things
    campaign = PromoCampaign._new(link, sr_name, bid, dates[0], dates[1])

    # note indx in link.campaigns is the Thing id now
    indx = campaign._id

    with g.make_lock(campaign_lock(link)):
        # get a copy of the attr so that it'll be
        # marked as dirty on the next write.
        campaigns = getattr(link, "campaigns", {}).copy()

        # add the campaign
        campaigns[indx] = list(dates) + [bid, sr_name, 0]

        PromotionWeights.add(link, indx, sr_name, dates[0], dates[1], bid)

        # reassigning (rather than mutating in place) marks the attr dirty
        link.campaigns = {}
        link.campaigns = campaigns
        promotion_log(link, "campaign %s created" % campaign._id)
        link._commit()

    author = Account._byID(link.author_id, True)
    if getattr(author, "complimentary_promos", False):
        free_campaign(link, indx, c.user)

    return indx
def link_comments(link_id, _update=False):
    """Return the cached comment tree tuple for a link, rebuilding it on
    a miss or when _update=True.

    A rebuild also refreshes the cached parent dict and updates the
    link's num_comments (scheduling it for search reindexing).
    """
    key = comments_key(link_id)

    r = g.permacache.get(key)

    if r and not _update:
        return r
    else:
        # This operation can take longer than most (note the inner
        # locks) better to time out request temporarily than to deal
        # with an inconsistent tree
        with g.make_lock(lock_key(link_id), timeout=180):
            r = _load_link_comments(link_id)
            # rebuild parent dict
            cids, cid_tree, depth, num_children, num_comments = r
            r = r[: -1]  # Remove num_comments from r; we don't need to cache it.
            g.permacache.set(parent_comments_key(link_id),
                             _parent_dict_from_tree(cid_tree))
            g.permacache.set(key, r)

        # update the link's comment count and schedule it for search
        # reindexing
        link = Link._byID(link_id, data=True)
        link.num_comments = num_comments
        link._commit()
        from r2.lib.db.queries import changed
        changed(link)

        return r
def link_comments(link_id, _update=False):
    """Return the cached comment tree tuple for a link, rebuilding it on
    a miss or when _update=True.

    On rebuild, the parent dict is re-cached, num_comments is stripped
    from the cached tuple, and the link's comment count is committed
    (triggering search reindexing via `changed`).
    """
    key = comments_key(link_id)

    r = g.permacache.get(key)

    if r and not _update:
        return r
    else:
        # This operation can take longer than most (note the inner
        # locks) better to time out request temporarily than to deal
        # with an inconsistent tree
        with g.make_lock(lock_key(link_id), timeout=180):
            r = _load_link_comments(link_id)
            # rebuild parent dict
            cids, cid_tree, depth, num_children, num_comments = r
            r = r[:-1]  # Remove num_comments from r; we don't need to cache it.
            g.permacache.set(parent_comments_key(link_id),
                             _parent_dict_from_tree(cid_tree))
            g.permacache.set(key, r)

        # update the link's comment count and schedule it for search
        # reindexing
        link = Link._byID(link_id, data = True)
        link.num_comments = num_comments
        link._commit()
        from r2.lib.db.queries import changed
        changed(link)

        return r
def edit_campaign(link, index, dates, bid, sr):
    """Update an existing campaign's dates, bid and target sr.

    Writes the link-attribute copy under the campaign lock, then
    dual-writes to the PromoCampaign Thing if one exists for `index`.
    """
    sr_name = sr.name if sr else ""
    with g.make_lock(campaign_lock(link)):
        campaigns = getattr(link, "campaigns", {}).copy()
        if index in campaigns:
            trans_id = campaigns[index][CAMPAIGN.trans_id]
            prev_bid = campaigns[index][CAMPAIGN.bid]
            # store the name not the reddit
            campaigns[index] = list(dates) + [bid, sr_name, trans_id]
            PromotionWeights.reschedule(link, index, sr_name,
                                        dates[0], dates[1], bid)
            # reassignment marks the attr dirty for the commit
            link.campaigns = {}
            link.campaigns = campaigns
            promotion_log(link, "updated campaign %s. (bid: %0.2f)"
                          % (index, bid))
            link._commit()
            #TODO cancel any existing charges if the bid has changed
            if prev_bid != bid:
                void_campaign(link, index, c.user)

    # dual-write update to campaign Thing if it exists
    try:
        campaign = PromoCampaign._byID(index)
        campaign.set_bid(sr_name, bid, dates[0], dates[1])
        campaign._commit()
    except NotFound:
        g.log.debug(
            "Skipping update of non-existent PromoCampaign [link:%d, index:%d]"
            % (link._id, index))

    author = Account._byID(link.author_id, True)
    if getattr(author, "complimentary_promos", False):
        free_campaign(link, index, c.user)
def _deactivate_campaign(link, campaign):
    """Deactivate a campaign's adzerk flight, under the per-link
    adzerk update lock."""
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        g.log.debug('running deactivate_campaign %s' % link)
        az_flight = update_flight(link, campaign)
        az_flight.IsActive = False
        az_flight._send()
        PromotionLog.add(link, 'deactivated %s' % az_flight)
def edit_campaign(link, index, dates, bid, sr):
    """Update an existing campaign's dates, bid and target sr, then
    dual-write the change to the PromoCampaign Thing if it exists."""
    sr_name = sr.name if sr else ""
    with g.make_lock(campaign_lock(link)):
        campaigns = getattr(link, "campaigns", {}).copy()
        if index in campaigns:
            trans_id = campaigns[index][CAMPAIGN.trans_id]
            prev_bid = campaigns[index][CAMPAIGN.bid]
            # store the name not the reddit
            campaigns[index] = list(dates) + [bid, sr_name, trans_id]
            PromotionWeights.reschedule(link, index, sr_name,
                                        dates[0], dates[1], bid)
            # reassignment marks the attr dirty for the commit
            link.campaigns = {}
            link.campaigns = campaigns
            promotion_log(link, "updated campaign %s. (bid: %0.2f)"
                          % (index, bid))
            link._commit()
            #TODO cancel any existing charges if the bid has changed
            if prev_bid != bid:
                void_campaign(link, index, c.user)

    # dual-write update to campaign Thing if it exists
    try:
        campaign = PromoCampaign._byID(index)
        campaign.set_bid(sr_name, bid, dates[0], dates[1])
        campaign._commit()
    except NotFound:
        g.log.debug("Skipping update of non-existent PromoCampaign "
                    "[link:%d, index:%d]" % (link._id, index))

    author = Account._byID(link.author_id, True)
    if getattr(author, "complimentary_promos", False):
        free_campaign(link, index, c.user)
    def process_job(self, job):
        """Run a single pending job if it is due and unclaimed.

        Dispatches to a module-level `job_<action>` function.  A
        non-blocking lock acquisition serves as the claim; the lock is
        always released in the finally block.
        """
        if job.run_at is not None and job.run_at > datetime.now(g.tz):
            return

        runner = globals().get('job_' + job.action)
        if not runner:
            print >>stderr, 'Unknown job action {0!r}'.format(job.action)
            return

        # If we can't acquire the lock, the job has already been claimed,
        # so we skip it.
        lock = g.make_lock('pending_job_{0}'.format(job._id))
        if not lock.try_acquire():
            return

        try:
            data = job.data or {}
            runner(**data)
        except Exception as ex:
            print >>stderr, 'Exception while running job id {0} ({1}): {2}'.format(
                job._id, job.action, ex)
        else:
            # only mark completed when the runner didn't raise
            self.mark_as_completed(job)
        finally:
            lock.release()
def link_comments(link_id, _update=False):
    """Return the comment tree tuple for a link, rebuilding it (plus the
    parent dict and all sorter dicts) on a cache miss or forced update."""
    key = comments_key(link_id)

    r = g.permacache.get(key)

    if r and not _update:
        return r
    else:
        # This operation can take longer than most (note the inner
        # locks) better to time out request temporarily than to deal
        # with an inconsistent tree
        with g.make_lock(lock_key(link_id), timeout=180):
            r = _load_link_comments(link_id)
            # rebuild parent dict
            cids, cid_tree, depth, num_children = r
            g.permacache.set(parent_comments_key(link_id),
                             _parent_dict_from_tree(cid_tree))
            # rebuild the sorts
            for sort in ("_controversy", "_date", "_hot", "_confidence",
                         "_score"):
                g.permacache.set(sort_comments_key(link_id, sort),
                                 _comment_sorter_from_cids(cids, sort))
            g.permacache.set(key, r)
        return r
def _update_adzerk(link, campaign):
    """Create or refresh the adzerk objects (campaign, creative, flight,
    creative-flight map) for a promoted link, under the per-link lock."""
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s updating/creating adzerk objects for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))
        az_campaign = update_campaign(link)
        az_creative = update_creative(link, campaign)
        az_flight = update_flight(link, campaign)
        az_cfmap = update_cfmap(link, campaign)
def _deactivate_overdelivered(link, campaign):
    """Deactivate the adzerk flight for a campaign that has delivered
    past its budget, under the per-link adzerk update lock."""
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s deactivating adzerk flight for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))
        az_campaign = update_campaign(link)
        az_flight = update_flight(link, campaign, az_campaign)
        PromotionLog.add(link, 'deactivated %s' % az_flight)
def get_promoted_slow():
    """Recompute the promoted-links weights from the database and store
    them; returns the {fullname: weight} dict.

    Queries every promoted link directly, so it is slow by design.
    """
    # to be used only by a human at a terminal
    with g.make_lock(promoted_lock_key):
        links = Link._query(Link.c.promote_status == STATUS.promoted,
                            Link.c.promoted == True,
                            data=True)
        link_names = dict((x._fullname, auction_weight(x)) for x in links)
        set_promoted(link_names)
    return link_names
def auth_campaign(link, index, user, pay_id):
    """
    for setting up a campaign as a real bid with authorize.net

    Voids any prior transaction, authorizes a new one, records the
    resulting trans_id on the campaign (0 on failure, negative for a
    freebie), and dual-writes the outcome to the PromoCampaign Thing.
    Returns (success, reason).
    """
    with g.make_lock(campaign_lock(link)):
        campaigns = getattr(link, "campaigns", {}).copy()
        if index in campaigns:
            # void any existing campaign
            void_campaign(link, index, user)

            sd, ed, bid, sr, trans_id = campaigns[index]
            # create a new transaction and update the bid
            test = 1 if g.debug else None
            trans_id, reason = authorize.auth_transaction(bid, user, pay_id,
                                                          link, index,
                                                          test = test)
            if not reason and trans_id is not None and int(trans_id) != 0:
                promotion_log(link, "updated payment and/or bid for campaign %s: "
                              "SUCCESS (trans_id: %d, amt: %0.2f)"
                              % (index, trans_id, bid))
                # a negative trans_id marks a freebie
                if trans_id < 0:
                    promotion_log(link, "FREEBIE (campaign: %s)" % index)

                set_status(link,
                           max(STATUS.unseen if trans_id else STATUS.unpaid,
                               link.promote_status))
                # notify of campaign creation
                # update the query queue
                if user._id == link.author_id and trans_id > 0:
                    emailer.promo_bid(link, bid, sd)

            else:
                # something bad happend.
                promotion_log(link, "updated payment and/or bid for campaign %s: FAILED ('%s')"
                              % (index, reason))
                trans_id = 0

            campaigns[index] = sd, ed, bid, sr, trans_id
            link.campaigns = {}
            link.campaigns = campaigns
            link._commit()

            # dual-write update to campaign Thing
            campaign = PromoCampaign._byID(index)
            if campaign:
                if trans_id > 0:
                    campaign.mark_paid(trans_id)
                elif trans_id < 0:
                    campaign.mark_freebie(trans_id)
                else:
                    campaign.mark_payment_error(reason)
                campaign._commit()

            return bool(trans_id), reason
        return False, ""
def link_comments(link_id):
    """Return the comment tree for a link, computing and caching it on a
    cache miss."""
    cache_key = comments_key(link_id)
    cached = g.permacache.get(cache_key)
    if cached:
        return cached
    # miss: rebuild under the per-link lock so writers don't race
    with g.make_lock(lock_key(link_id)):
        tree = load_link_comments(link_id)
        g.permacache.set(cache_key, tree)
    return tree
def get_blob(code):
    """Fetch a payment blob by code and flip its status from
    "initialized" to "locked", under the payment-blob lock.

    Raises NotFound when the blob is missing and ValueError when it is
    not in the "initialized" state.  Returns (key, blob).
    """
    key = "payment_blob-" + code
    with g.make_lock("payment_blob", "payment_blob_lock-" + code):
        blob = g.hardcache.get(key)
        if not blob:
            raise NotFound("No payment_blob-" + code)
        if blob.get("status", None) != "initialized":
            raise ValueError("payment_blob %s has status = %s" %
                             (code, blob.get("status", None)))
        blob["status"] = "locked"
        # 30-day TTL on the locked blob
        g.hardcache.set(key, blob, 86400 * 30)
        return key, blob
def delete_campaign(link, index):
    """Remove a campaign from a link's campaigns dict (and its unfinished
    promotion weights), then void any existing payment transaction."""
    with g.make_lock(campaign_lock(link)):
        campaigns = getattr(link, "campaigns", {}).copy()
        if index in campaigns:
            PromotionWeights.delete_unfinished(link, index)
            del campaigns[index]
            # reassignment marks the attr dirty for the commit
            link.campaigns = {}
            link.campaigns = campaigns
            link._commit()
            #TODO cancel any existing charges
            void_campaign(link, index, c.user)
def link_comments(link_id, _update=False):
    """Return the comment tree for a link, rebuilding on a cache miss.

    Passing _update=True forces a rebuild even when a cached tree
    exists.
    """
    cache_key = comments_key(link_id)
    tree = g.permacache.get(cache_key)
    if tree and not _update:
        return tree
    # miss or forced refresh: rebuild under the per-link lock
    with g.make_lock(lock_key(link_id)):
        tree = load_link_comments(link_id)
        g.permacache.set(cache_key, tree)
    return tree
def update_comment_vote(comment): link_id = comment.link_id # update the list of sorts with g.make_lock(lock_key(link_id)): for sort in ("_controversy", "_hot", "_confidence", "_score"): key = sort_comments_key(link_id, sort) r = g.permacache.get(key) # don't bother recomputing a non-existant sort dict, as # we'll catch it next time we have to render something if r: r[comment._id] = _get_sort_value(comment, sort) g.permacache.set(key, r)
def get_blob(code):
    """Fetch a payment blob by code and mark it "locked" under the
    payment-blob lock; returns (key, blob).

    Raises NotFound for a missing blob, ValueError when the blob is not
    in the "initialized" state.
    """
    key = "payment_blob-" + code
    with g.make_lock("payment_blob", "payment_blob_lock-" + code):
        blob = g.hardcache.get(key)
        if not blob:
            raise NotFound("No payment_blob-" + code)
        if blob.get('status', None) != 'initialized':
            raise ValueError("payment_blob %s has status = %s" %
                             (code, blob.get('status', None)))
        blob['status'] = "locked"
        # 30-day TTL on the locked blob
        g.hardcache.set(key, blob, 86400 * 30)
        return key, blob
def delete_comment(comment):
    """Remove a childless comment from its link's cached comment tree.

    Comments that still appear as parents in the tree are left in place
    so their children keep a valid ancestor chain.
    """
    with g.make_lock(lock_key(comment.link_id)):
        cids, comment_tree, depth, num_children = link_comments(comment.link_id)

        # only completely remove comments with no children
        if comment._id not in comment_tree:
            if comment._id in cids:
                cids.remove(comment._id)
            if comment._id in depth:
                del depth[comment._id]
            if comment._id in num_children:
                del num_children[comment._id]
            g.permacache.set(comments_key(comment.link_id),
                             (cids, comment_tree, depth, num_children))
    def claim(cls, user, uid, award, description, url):
        """Claim a trophy for `user`, idempotently per (user, uid).

        Returns (trophy, preexisting): if the user already holds a
        trophy for this uid, the existing one is returned with
        preexisting=True instead of creating a duplicate.
        """
        with g.make_lock("claim_award", str("%s_%s" % (user.name, uid))):
            existing_trophy_id = user.get_trophy_id(uid)
            if existing_trophy_id:
                trophy = cls._byID(existing_trophy_id)
                preexisting = True
            else:
                preexisting = False
                trophy = cls._new(user, award, description=description,
                                  url=url)
                user.set_trophy_id(uid, trophy._id)
                user._commit()
            return trophy, preexisting
def edit_campaign(link, campaign_id, dates, bid, sr): sr_name = sr.name if sr else '' # empty string means target to all try: campaign = PromoCampaign._byID(campaign_id) # if the bid amount changed, cancel any pending transactions if campaign.bid != bid: void_campaign(link, campaign_id) # update the schedule PromotionWeights.reschedule(link, campaign_id, sr_name, dates[0], dates[1], bid) # update values in the db campaign.update(dates[0], dates[1], bid, sr_name, campaign.trans_id, commit=True) # dual-write to link attribute in case we need to roll back with g.make_lock("promo_campaign", campaign_lock(link)): campaigns = getattr(link, 'campaigns', {}).copy() campaigns[campaign_id] = (dates[0], dates[1], bid, sr_name, campaign.trans_id) link.campaigns = campaigns link._commit() # record the transaction promotion_log(link, "updated campaign %s. (bid: %0.2f)" % (campaign_id, bid), commit=True) # make it a freebie, if applicable author = Account._byID(link.author_id, True) if getattr(author, "complimentary_promos", False): free_campaign(link, campaign._id, c.user) except Exception, e: # record error and rethrow g.log.error( "Failed to update PromoCampaign %s on link %d. Error was: %r" % (campaign_id, link._id, e)) try: # wrapped in try/except so orig error won't be lost if commit fails promotion_log(link, "update FAILED. (campaign: %s, bid: %.2f)" % (campaign_id, bid), commit=True) except: pass raise e
def auth_campaign(link, index, user, pay_id):
    """
    for setting up a campaign as a real bid with authorize.net

    Voids any prior transaction, authorizes a new one, stores the
    resulting trans_id back on the campaign tuple (0 on failure,
    negative for a freebie) and returns (success, reason).
    """
    with g.make_lock(campaign_lock(link)):
        campaigns = getattr(link, "campaigns", {}).copy()
        if index in campaigns:
            # void any existing campaign
            void_campaign(link, index, user)

            sd, ed, bid, sr, trans_id = campaigns[index]
            # create a new transaction and update the bid
            test = 1 if g.debug else None
            trans_id, reason = authorize.auth_transaction(bid, user, pay_id,
                                                          link, index,
                                                          test=test)
            if not reason and trans_id is not None and int(trans_id) != 0:
                promotion_log(
                    link,
                    "updated payment and/or bid: "
                    "SUCCESS (id: %s)" % trans_id)
                # a negative trans_id marks a freebie
                if trans_id < 0:
                    promotion_log(link, "FREEBIE")

                set_status(
                    link,
                    max(STATUS.unseen if trans_id else STATUS.unpaid,
                        link.promote_status))
                # notify of campaign creation
                # update the query queue
                if user._id == link.author_id and trans_id > 0:
                    emailer.promo_bid(link, bid, sd)

            else:
                # something bad happend.
                promotion_log(
                    link,
                    "updated payment and/or bid: FAILED ('%s')" % reason)
                trans_id = 0

            campaigns[index] = sd, ed, bid, sr, trans_id
            link.campaigns = {}
            link.campaigns = campaigns
            link._commit()

            return bool(trans_id), reason
        return False, ""
def new_campaign(link, dates, bid, sr):
    """Append a new campaign tuple to a link's campaigns dict and add
    its promotion weights; returns the new campaign index."""
    with g.make_lock(campaign_lock(link)):
        # get a copy of the attr so that it'll be
        # marked as dirty on the next write.
        campaigns = getattr(link, "campaigns", {}).copy()
        # create a new index
        indx = max(campaigns.keys() or [-1]) + 1
        # add the campaign
        # store the name not the reddit
        sr = sr.name if sr else ""
        campaigns[indx] = list(dates) + [bid, sr, 0]
        PromotionWeights.add(link, indx, sr, dates[0], dates[1], bid)
        # reassignment marks the attr dirty for the commit
        link.campaigns = {}
        link.campaigns = campaigns
        link._commit()
        return indx
    def submitballot(cls, user, comment, pollobj, response, anonymous,
                     ip, spam):
        """Record a user's vote on a poll, under a per-poll lock.

        Raises PollError if the user already voted.  Returns the new
        Ballot.  NOTE(review): `comment` and `spam` are accepted but not
        used in this body — confirm whether callers rely on them.
        """
        with g.make_lock('voting_on_%s' % pollobj._id):
            pollid = pollobj._id
            # one ballot per (user, poll)
            oldballot = list(cls._query(cls.c._thing1_id == user._id,
                                        cls.c._thing2_id == pollid))
            if len(oldballot):
                raise PollError('You already voted on this poll')

            ballot = Ballot(user, pollobj, response)
            ballot.ip = ip
            ballot.anonymous = anonymous
            ballot.date = datetime.datetime.now().isoformat()
            ballot.response = response
            ballot._commit()
            pollobj.add_response(response)
        return ballot
def update_comment_votes(comments):
    """Refresh the cached sorter dicts for a batch of comments.

    Comments are grouped by link so each link's sorters are locked and
    written once; uncached sorters are skipped.
    """
    comments = tup(comments)

    link_map = {}
    for com in comments:
        link_map.setdefault(com.link_id, []).append(com)

    for link_id, coms in link_map.iteritems():
        with g.make_lock(lock_key(link_id)):
            for sort in ("_controversy", "_hot", "_confidence", "_score"):
                key = sort_comments_key(link_id, sort)
                r = g.permacache.get(key)
                # don't bother recomputing a non-existant sort dict, as
                # we'll catch it next time we have to render something
                if r:
                    for comment in coms:
                        r[comment._id] = _get_sort_value(comment, sort)
                    g.permacache.set(key, r)
    def _new(cls, name, title, author_id, ip, lang=g.lang, type="public",
             over_18=False, **kw):
        """Create a new subreddit, raising SubredditExists if the name
        is taken; the check-and-create runs under a per-name lock."""
        with g.make_lock("create_sr", "create_sr_" + name.lower()):
            try:
                sr = Subreddit._by_name(name)
                raise SubredditExists
            except NotFound:
                if "allow_top" not in kw:
                    kw["allow_top"] = True
                sr = Subreddit(
                    name=name,
                    title=title,
                    lang=lang,
                    type=type,
                    over_18=over_18,
                    author_id=author_id,
                    ip=ip,
                    **kw
                )
                sr._commit()

                # clear cache
                Subreddit._by_name(name, _update=True)
                return sr
def mutate_key(key, type_=dict):
    """Context manager to atomically mutate an object stored in memcached.

    The context manager returns an object which can be mutated and will
    be stored back in memcached when the context ends. A lock is held
    while mutation is going on, so be quick!

    If there is currently no object in memcached, `type_` is called to
    make a new one.

    NOTE(review): the body yields, so this presumably carries a
    @contextmanager decorator outside this view -- confirm.
    """
    with g.make_lock("f2p", "f2p_%s" % key):
        raw_json = g.f2pcache.get(key, allow_local=False)
        data = json.loads(raw_json) if raw_json else type_()
        yield data
        g.f2pcache.set(key, json.dumps(data))
def link_comments_and_sort(link_id, sort): cids, cid_tree, depth, num_children = link_comments(link_id) # load the sorter key = sort_comments_key(link_id, sort) sorter = g.permacache.get(key) if sorter is None: g.log.error("comment_tree.py: sorter (%s) cache miss for Link %s" % (sort, link_id)) sorter = {} elif cids and not all(x in sorter for x in cids): g.log.error( "Error in comment_tree: sorter (%s) inconsistent for Link %s" % (sort, link_id)) sorter = {} # load the parents key = parent_comments_key(link_id) parents = g.permacache.get(key) if parents is None: g.log.error("comment_tree.py: parents cache miss for Link %s" % link_id) parents = {} elif cids and not all(x in parents for x in cids): g.log.error("Error in comment_tree: parents inconsistent for Link %s" % link_id) parents = {} if not sorter or not parents: with g.make_lock(lock_key(link_id)): # reload from the cache so the sorter and parents are # maximally consistent r = g.permacache.get(comments_key(link_id)) cids, cid_tree, depth, num_children = r key = sort_comments_key(link_id, sort) if not sorter: sorter = _comment_sorter_from_cids(cids, sort) g.permacache.set(key, sorter) key = parent_comments_key(link_id) if not parents: parents = _parent_dict_from_tree(cid_tree) g.permacache.set(key, parents) return cids, cid_tree, depth, num_children, parents, sorter
def add_comments(comments):
    """Add a batch of comments to their links' cached comment trees.

    Comments are grouped by link so each link's tree is locked and
    updated once.  If the in-place update fails, the tree is rebuilt
    from scratch.  Sorter dicts are refreshed afterwards either way.
    """
    comments = tup(comments)

    link_map = {}
    for com in comments:
        link_map.setdefault(com.link_id, []).append(com)

    for link_id, coms in link_map.iteritems():
        try:
            with g.make_lock(lock_key(link_id)):
                add_comments_nolock(link_id, coms)
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; any other failure
            # (corrupt tree, lock timeout) falls back to a full rebuild
            # calculate it from scratch
            link_comments(link_id, _update=True)
        update_comment_votes(coms)
def add_comments(comments):
    """Add a batch of comments to their links' cached comment trees.

    Comments are grouped by link so each tree is locked once; any
    failure during the in-place update triggers a full rebuild.
    """
    comments = tup(comments)

    link_map = {}
    for com in comments:
        link_map.setdefault(com.link_id, []).append(com)

    for link_id, coms in link_map.iteritems():
        try:
            with g.make_lock(lock_key(link_id)):
                add_comments_nolock(link_id, coms)
        except:  # TODO: bare except?
            # calculate it from scratch
            link_comments(link_id, _update = True)
        update_comment_votes(coms)
    def submitballot(cls, user, comment, pollobj, response, anonymous,
                     ip, spam):
        """Record a user's vote on a poll, under a per-poll lock.

        Raises PollError if the user already voted; returns the Ballot.
        NOTE(review): `comment` and `spam` are unused in this body.
        """
        with g.make_lock('voting_on_%s' % pollobj._id):
            pollid = pollobj._id
            # enforce one ballot per (user, poll)
            oldballot = list(
                cls._query(cls.c._thing1_id == user._id,
                           cls.c._thing2_id == pollid))
            if len(oldballot):
                raise PollError('You already voted on this poll')

            ballot = Ballot(user, pollobj, response)
            ballot.ip = ip
            ballot.anonymous = anonymous
            ballot.date = datetime.datetime.now().isoformat()
            ballot.response = response
            ballot._commit()
            pollobj.add_response(response)
        return ballot
def _update_adzerk(link, campaign):
    """Create or refresh adzerk objects for a promoted link.

    With a campaign: updates the creative and flight, then fetches the
    existing creative-flight map (if the campaign already has one) or
    creates it.  Without a campaign: only the adzerk campaign object is
    updated.  All work happens under the per-link adzerk lock.
    """
    with g.make_lock('adzerk_update', 'adzerk-' + link._fullname):
        msg = '%s updating/creating adzerk objects for %s - %s'
        g.log.info(msg % (datetime.datetime.now(g.tz), link, campaign))
        az_campaign = update_campaign(link)

        if campaign:
            az_creative = update_creative(link, campaign)
            az_flight = update_flight(link, campaign, az_campaign)

            if getattr(campaign, 'adzerk_cfmap_id', None) is not None:
                az_cfmap = adzerk_api.CreativeFlightMap.get(
                    az_flight.Id, campaign.adzerk_cfmap_id)
            else:
                az_cfmap = create_cfmap(link, campaign, az_campaign,
                                        az_creative, az_flight)
            PromotionLog.add(link, 'updated %s' % az_flight)
        else:
            PromotionLog.add(link, 'updated %s' % az_campaign)
def edit_campaign(link, index, dates, bid, sr):
    """Update an existing campaign's dates, bid and target sr in the
    link's campaigns dict, voiding the payment if the bid changed."""
    with g.make_lock(campaign_lock(link)):
        campaigns = getattr(link, "campaigns", {}).copy()
        if index in campaigns:
            trans_id = campaigns[index][CAMPAIGN.trans_id]
            prev_bid = campaigns[index][CAMPAIGN.bid]
            # store the name not the reddit
            sr = sr.name if sr else ""
            campaigns[index] = list(dates) + [bid, sr, trans_id]
            PromotionWeights.reschedule(link, index, sr,
                                        dates[0], dates[1], bid)
            # reassignment marks the attr dirty for the commit
            link.campaigns = {}
            link.campaigns = campaigns
            link._commit()
            #TODO cancel any existing charges if the bid has changed
            if prev_bid != bid:
                void_campaign(link, index, c.user)